From 305855a8a44537db9dc82dcc7ed5330a0bc8b9c3 Mon Sep 17 00:00:00 2001 From: Charlie Marsh Date: Fri, 19 Jan 2024 19:32:29 -0500 Subject: [PATCH] Re-enable zlib-ng --- .github/workflows/build-binaries.yml | 438 +-- .github/workflows/ci.yml | 78 - Cargo.lock | 352 +- crates/puffin/Cargo.toml | 64 - crates/puffin/src/commands/add.rs | 74 - crates/puffin/src/commands/clean.rs | 52 - crates/puffin/src/commands/freeze.rs | 53 - crates/puffin/src/commands/mod.rs | 76 - crates/puffin/src/commands/pip_compile.rs | 379 -- crates/puffin/src/commands/pip_install.rs | 659 ---- crates/puffin/src/commands/pip_sync.rs | 469 --- crates/puffin/src/commands/pip_uninstall.rs | 151 - crates/puffin/src/commands/remove.rs | 74 - crates/puffin/src/commands/reporters.rs | 323 -- crates/puffin/src/commands/venv.rs | 194 - crates/puffin/src/logging.rs | 100 - crates/puffin/src/main.rs | 799 +--- crates/puffin/src/printer.rs | 41 - crates/puffin/src/requirements.rs | 210 -- crates/puffin/tests/add.rs | 169 - crates/puffin/tests/common/mod.rs | 39 - crates/puffin/tests/pip_compile.rs | 3501 ------------------ crates/puffin/tests/pip_install.rs | 942 ----- crates/puffin/tests/pip_install_scenarios.rs | 3119 ---------------- crates/puffin/tests/pip_sync.rs | 3022 --------------- crates/puffin/tests/pip_uninstall.rs | 546 --- crates/puffin/tests/remove.rs | 282 -- crates/puffin/tests/venv.rs | 111 - 28 files changed, 4 insertions(+), 16313 deletions(-) delete mode 100644 .github/workflows/ci.yml delete mode 100644 crates/puffin/src/commands/add.rs delete mode 100644 crates/puffin/src/commands/clean.rs delete mode 100644 crates/puffin/src/commands/freeze.rs delete mode 100644 crates/puffin/src/commands/mod.rs delete mode 100644 crates/puffin/src/commands/pip_compile.rs delete mode 100644 crates/puffin/src/commands/pip_install.rs delete mode 100644 crates/puffin/src/commands/pip_sync.rs delete mode 100644 crates/puffin/src/commands/pip_uninstall.rs delete mode 100644 crates/puffin/src/commands/remove.rs delete mode 100644 crates/puffin/src/commands/reporters.rs delete mode 100644 crates/puffin/src/commands/venv.rs delete mode 100644 crates/puffin/src/logging.rs delete mode 100644 crates/puffin/src/printer.rs delete mode 100644 crates/puffin/src/requirements.rs delete mode 100644 crates/puffin/tests/add.rs delete mode 100644 crates/puffin/tests/common/mod.rs delete mode 100644 crates/puffin/tests/pip_compile.rs delete mode 100644 crates/puffin/tests/pip_install.rs delete mode 100644 crates/puffin/tests/pip_install_scenarios.rs delete mode 100644 crates/puffin/tests/pip_sync.rs delete mode 100644 crates/puffin/tests/pip_uninstall.rs delete mode 100644 crates/puffin/tests/remove.rs delete mode 100644 crates/puffin/tests/venv.rs diff --git a/.github/workflows/build-binaries.yml b/.github/workflows/build-binaries.yml index 6c3c2e1fbda9..d08ea29f4192 100644 --- a/.github/workflows/build-binaries.yml +++ b/.github/workflows/build-binaries.yml @@ -33,315 +33,6 @@ env: RUSTUP_MAX_RETRIES: 10 jobs: - sdist: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build sdist" - uses: PyO3/maturin-action@v1 - with: - command: sdist - args: --out dist - - name: "Test sdist" - run: | - pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall - ${{ env.MODULE_NAME }} --help - python -m ${{ env.MODULE_NAME }} --help - - name: 
"Upload sdist" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - macos-x86_64: - runs-on: macos-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - architecture: x64 - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels - x86_64" - uses: PyO3/maturin-action@v1 - with: - target: x86_64 - args: --release --locked --out dist - - name: "Test wheel - x86_64" - run: | - pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall - ${{ env.MODULE_NAME }} --help - python -m ${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - run: | - TARGET=x86_64-apple-darwin - ARCHIVE_NAME=puffin-$TARGET - ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz - - mkdir -p $ARCHIVE_NAME - cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin - tar czvf $ARCHIVE_FILE $ARCHIVE_NAME - shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.tar.gz - *.sha256 - - macos-universal: - runs-on: macos-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - architecture: x64 - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels - universal2" - uses: PyO3/maturin-action@v1 - with: - args: --release --locked --target universal2-apple-darwin --out dist - - name: "Test wheel - universal2" - run: | - pip install dist/${{ env.PACKAGE_NAME }}-*universal2.whl --force-reinstall - ${{ env.MODULE_NAME }} --help - python -m ${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - run: | - TARGET=aarch64-apple-darwin - ARCHIVE_NAME=puffin-$TARGET - ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz - - mkdir -p $ARCHIVE_NAME - cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin - tar czvf $ARCHIVE_FILE $ARCHIVE_NAME - shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.tar.gz - *.sha256 - - windows: - # TODO(charlie): Enable Windows builds. 
- if: false - runs-on: windows-latest - strategy: - matrix: - platform: - - target: x86_64-pc-windows-msvc - arch: x64 - - target: i686-pc-windows-msvc - arch: x86 - - target: aarch64-pc-windows-msvc - arch: x64 - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - architecture: ${{ matrix.platform.arch }} - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels" - uses: PyO3/maturin-action@v1 - with: - target: ${{ matrix.platform.target }} - args: --release --locked --out dist - - name: "Test wheel" - if: ${{ !startsWith(matrix.platform.target, 'aarch64') }} - shell: bash - run: | - python -m pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall - ${{ env.MODULE_NAME }} --help - python -m ${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - shell: bash - run: | - ARCHIVE_FILE=puffin-${{ matrix.platform.target }}.zip - 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/puffin.exe - sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.zip - *.sha256 - - linux: - runs-on: ubuntu-latest - strategy: - matrix: - target: - - x86_64-unknown-linux-gnu - - i686-unknown-linux-gnu - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - architecture: x64 - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels" - uses: PyO3/maturin-action@v1 - with: - target: ${{ matrix.target }} - manylinux: auto - args: --release --locked --out dist - # See: https://github.com/sfackler/rust-openssl/issues/2036#issuecomment-1724324145 - before-script-linux: | - # If we're running on rhel centos, install needed packages. - if command -v yum &> /dev/null; then - yum update -y && yum install -y perl-core openssl openssl-devel pkgconfig libatomic - - # If we're running on i686 we need to symlink libatomic - # in order to build openssl with -latomic flag. - if [[ ! -d "/usr/lib64" ]]; then - ln -s /usr/lib/libatomic.so.1 /usr/lib/libatomic.so - fi - else - # If we're running on debian-based system. 
- apt update -y && apt-get install -y libssl-dev openssl pkg-config - fi - - name: "Test wheel" - if: ${{ startsWith(matrix.target, 'x86_64') }} - run: | - pip install dist/${{ env.PACKAGE_NAME }}-*.whl --force-reinstall - ${{ env.MODULE_NAME }} --help - python -m ${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - shell: bash - run: | - set -euo pipefail - - TARGET=${{ matrix.target }} - ARCHIVE_NAME=puffin-$TARGET - ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz - - mkdir -p $ARCHIVE_NAME - cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin - tar czvf $ARCHIVE_FILE $ARCHIVE_NAME - shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.tar.gz - *.sha256 - - linux-arm: - runs-on: ubuntu-latest - strategy: - matrix: - platform: - - target: aarch64-unknown-linux-gnu - arch: aarch64 - # see https://github.com/astral-sh/ruff/issues/3791 - # and https://github.com/gnzlbg/jemallocator/issues/170#issuecomment-1503228963 - maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16 - - target: armv7-unknown-linux-gnueabihf - arch: armv7 - - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels" - uses: PyO3/maturin-action@v1 - with: - target: ${{ matrix.platform.target }} - manylinux: 2_28 - docker-options: ${{ matrix.platform.maturin_docker_options }} - args: --release --locked --out dist - - uses: uraimo/run-on-arch-action@v2 - if: matrix.platform.arch != 'ppc64' - name: Test wheel - with: - arch: ${{ matrix.platform.arch }} - distro: ubuntu20.04 - githubToken: ${{ github.token }} - install: | - apt-get update - apt-get install -y --no-install-recommends python3 python3-pip - pip3 install -U pip - run: | - pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall - ${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - shell: bash - run: | - set -euo pipefail - - TARGET=${{ matrix.platform.target }} - ARCHIVE_NAME=puffin-$TARGET - ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz - - mkdir -p $ARCHIVE_NAME - cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin - tar czvf $ARCHIVE_FILE $ARCHIVE_NAME - shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.tar.gz - *.sha256 - # Like `linux-arm`, but use `--no-default-features --features flate2-rust_backend` when # building Puffin. 
linux-s390x: @@ -367,7 +58,7 @@ jobs: target: ${{ matrix.platform.target }} manylinux: auto docker-options: ${{ matrix.platform.maturin_docker_options }} - args: --release --locked --out dist --no-default-features --features flate2-rust_backend + args: --release --locked --out dist - uses: uraimo/run-on-arch-action@v2 if: matrix.platform.arch != 'ppc64' name: Test wheel @@ -436,7 +127,7 @@ jobs: target: ${{ matrix.platform.target }} manylinux: auto docker-options: ${{ matrix.platform.maturin_docker_options }} - args: --release --locked --out dist --no-default-features --features flate2-rust_backend + args: --release --locked --out dist before-script-linux: | if command -v yum &> /dev/null; then yum update -y @@ -483,128 +174,3 @@ jobs: path: | *.tar.gz *.sha256 - - musllinux: - runs-on: ubuntu-latest - strategy: - matrix: - target: - - x86_64-unknown-linux-musl - - i686-unknown-linux-musl - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - architecture: x64 - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels" - uses: PyO3/maturin-action@v1 - with: - target: ${{ matrix.target }} - manylinux: musllinux_1_2 - args: --release --locked --out dist - - name: "Test wheel" - if: matrix.target == 'x86_64-unknown-linux-musl' - uses: addnab/docker-run-action@v3 - with: - image: alpine:latest - options: -v ${{ github.workspace }}:/io -w /io - run: | - apk add python3 - python -m venv .venv - .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall - .venv/bin/${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - shell: bash - run: | - set -euo pipefail - - TARGET=${{ matrix.target }} - ARCHIVE_NAME=puffin-$TARGET - ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz - - mkdir -p $ARCHIVE_NAME - cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin - tar czvf $ARCHIVE_FILE $ARCHIVE_NAME - shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.tar.gz - *.sha256 - - musllinux-cross: - runs-on: ubuntu-latest - strategy: - matrix: - platform: - - target: aarch64-unknown-linux-musl - arch: aarch64 - maturin_docker_options: -e JEMALLOC_SYS_WITH_LG_PAGE=16 - - target: armv7-unknown-linux-musleabihf - arch: armv7 - - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: "Prep README.md" - run: echo "# Puffin" > README.md - - name: "Build wheels" - uses: PyO3/maturin-action@v1 - with: - target: ${{ matrix.platform.target }} - manylinux: musllinux_1_2 - args: --release --locked --out dist - docker-options: ${{ matrix.platform.maturin_docker_options }} - - uses: uraimo/run-on-arch-action@v2 - name: Test wheel - with: - arch: ${{ matrix.platform.arch }} - distro: alpine_latest - githubToken: ${{ github.token }} - install: | - apk add python3 - run: | - python -m venv .venv - .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall - .venv/bin/${{ env.MODULE_NAME }} --help - - name: "Upload wheels" - uses: actions/upload-artifact@v3 - with: - name: wheels - path: dist - - name: "Archive binary" - shell: bash - run: | - set -euo pipefail - - TARGET=${{ matrix.platform.target }} - ARCHIVE_NAME=puffin-$TARGET - 
ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz - - mkdir -p $ARCHIVE_NAME - cp target/$TARGET/release/puffin $ARCHIVE_NAME/puffin - tar czvf $ARCHIVE_FILE $ARCHIVE_NAME - shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - - name: "Upload binary" - uses: actions/upload-artifact@v3 - with: - name: artifacts - path: | - *.tar.gz - *.sha256 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 52c2d631cdaa..000000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,78 +0,0 @@ -name: CI - -on: - push: - branches: [main] - pull_request: - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }} - cancel-in-progress: true - -env: - CARGO_INCREMENTAL: 0 - CARGO_NET_RETRY: 10 - CARGO_TERM_COLOR: always - RUSTUP_MAX_RETRIES: 10 - PYTHON_VERSION: "3.12" - -jobs: - cargo-fmt: - name: "cargo fmt" - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: "Install Rust toolchain" - run: rustup component add rustfmt - - name: "rustfmt" - run: cargo fmt --all --check - - cargo-clippy: - name: "cargo clippy" - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: "Install Rust toolchain" - run: | - rustup component add clippy - - uses: Swatinem/rust-cache@v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - - name: "Clippy" - run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings - - cargo-test: - strategy: - matrix: - os: [ubuntu-latest] - runs-on: - # We use the large GitHub actions runners for faster testing - # For Ubuntu and Windows, this requires Organization-level configuration - # See: https://docs.github.com/en/actions/using-github-hosted-runners/about-larger-runners/about-larger-runners#about-ubuntu-and-windows-larger-runners - labels: ${{ matrix.os }}-large - name: "cargo test | ${{ matrix.os }}" - steps: - - uses: actions/checkout@v4 - - name: "Install Python" - uses: actions/setup-python@v4 - with: - python-version: | - 3.7 - 3.8 - 3.9 - 3.10 - 3.11 - 3.12 - - name: "Install Rust toolchain" - run: rustup show - - uses: rui314/setup-mold@v1 - - name: "Install cargo nextest" - uses: taiki-e/install-action@v2 - with: - tool: cargo-nextest - - uses: Swatinem/rust-cache@v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - - name: "Tests" - run: cargo nextest run --all --all-features --status-level skip --failure-output immediate-final --no-fail-fast -j 12 diff --git a/Cargo.lock b/Cargo.lock index 516f679bac01..cdfcbe0bf0d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -137,36 +137,6 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -[[package]] -name = "assert_cmd" -version = "2.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00ad3f3a942eee60335ab4342358c161ee296829e0d16ff42fc1d6cb07815467" -dependencies = [ - "anstyle", - "bstr", - "doc-comment", - "predicates", - "predicates-core", - "predicates-tree", - "wait-timeout", -] - -[[package]] -name = "assert_fs" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cd762e110c8ed629b11b6cde59458cc1c71de78ebbcc30099fc8e0403a2a2ec" -dependencies = [ - "anstyle", - "doc-comment", - "globwalk", - "predicates", - "predicates-core", - "predicates-tree", - "tempfile", -] - [[package]] name = "async-compression" version = "0.4.6" @@ -247,15 +217,6 @@ dependencies = [ 
"rustc-demangle", ] -[[package]] -name = "backtrace-ext" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" -dependencies = [ - "backtrace", -] - [[package]] name = "base64" version = "0.13.1" @@ -325,17 +286,6 @@ dependencies = [ "alloc-stdlib", ] -[[package]] -name = "bstr" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc" -dependencies = [ - "memchr", - "regex-automata 0.4.3", - "serde", -] - [[package]] name = "bumpalo" version = "3.14.0" @@ -763,12 +713,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "difflib" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" - [[package]] name = "digest" version = "0.10.7" @@ -839,12 +783,6 @@ dependencies = [ "url", ] -[[package]] -name = "doc-comment" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" - [[package]] name = "either" version = "1.9.0" @@ -917,15 +855,6 @@ dependencies = [ "miniz_oxide", ] -[[package]] -name = "float-cmp" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" -dependencies = [ - "num-traits", -] - [[package]] name = "fnv" version = "1.0.7" @@ -1113,30 +1042,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "globset" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" -dependencies = [ - "aho-corasick", - "bstr", - "log", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", -] - -[[package]] -name = "globwalk" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" -dependencies = [ - "bitflags 2.4.2", - "ignore", - "walkdir", -] - [[package]] name = "goblin" version = "0.8.0" @@ -1375,22 +1280,6 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "ignore" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" -dependencies = [ - "crossbeam-deque", - "globset", - "log", - "memchr", - "regex-automata 0.4.3", - "same-file", - "walkdir", - "winapi-util", -] - [[package]] name = "indexmap" version = "1.9.3" @@ -1441,23 +1330,10 @@ dependencies = [ "console", "lazy_static", "linked-hash-map", - "regex", - "serde", "similar", "yaml-rust", ] -[[package]] -name = "insta-cmd" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809d3023d1d6e8d5c2206f199251f75cb26180e41f18cb0f22dd119161cb5127" -dependencies = [ - "insta", - "serde", - "serde_json", -] - [[package]] name = "install-wheel-rs" version = "0.0.1" @@ -1526,12 +1402,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "is_ci" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb" - [[package]] name = "itertools" version = "0.10.5" @@ -1747,36 +1617,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "miette" -version = "5.10.0" -source = "git+https://github.com/zkat/miette.git?rev=b0744462adbbfbb6d845f382db36be883c7f3c45#b0744462adbbfbb6d845f382db36be883c7f3c45" -dependencies = [ - "backtrace", - "backtrace-ext", - "is-terminal", - "miette-derive", - "once_cell", - "owo-colors", - "supports-color", - "supports-hyperlinks", - "supports-unicode", - "terminal_size", - "textwrap", - "thiserror", - "unicode-width", -] - -[[package]] -name = "miette-derive" -version = "5.10.0" -source = "git+https://github.com/zkat/miette.git?rev=b0744462adbbfbb6d845f382db36be883c7f3c45#b0744462adbbfbb6d845f382db36be883c7f3c45" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - [[package]] name = "mimalloc" version = "0.1.39" @@ -1831,12 +1671,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "normalize-line-endings" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" - [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -1847,15 +1681,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "nu-ansi-term" -version = "0.49.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68" -dependencies = [ - "windows-sys 0.48.0", -] - [[package]] name = "num-traits" version = "0.2.17" @@ -2192,36 +2017,6 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" -[[package]] -name = "predicates" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b87bfd4605926cdfefc1c3b5f8fe560e3feca9d5552cf68c466d3d8236c7e8" -dependencies = [ - "anstyle", - "difflib", - "float-cmp", - "normalize-line-endings", - "predicates-core", - "regex", -] - -[[package]] -name = "predicates-core" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" - -[[package]] -name = "predicates-tree" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" -dependencies = [ - "predicates-core", - "termtree", -] - [[package]] name = "priority-queue" version = "1.3.2" @@ -2257,64 +2052,7 @@ dependencies = [ name = "puffin" version = "0.0.3" dependencies = [ - "anstream", - "anyhow", - "assert_cmd", - "assert_fs", - "bitflags 2.4.2", - "chrono", - "clap", - "distribution-filename", - "distribution-types", "flate2", - "fs-err", - "futures", - "gourgeist", - "indicatif", - "indoc", - "insta", - "insta-cmd", - "install-wheel-rs", - "itertools 0.12.0", - "miette", - "mimalloc", - "owo-colors", - "pep440_rs 0.3.12", - "pep508_rs", - "platform-host", - "platform-tags", - "predicates", - "pubgrub", - "puffin-build", - "puffin-cache", - "puffin-client", - "puffin-dispatch", - "puffin-distribution", - "puffin-installer", - "puffin-interpreter", - "puffin-normalize", - "puffin-resolver", - "puffin-traits", - "puffin-warnings", - "puffin-workspace", - "pypi-types", - "pyproject-toml", - "requirements-txt", - "reqwest", - "rustc-hash", - "tempfile", - "textwrap", - "thiserror", - 
"tikv-jemallocator", - "tokio", - "toml", - "tracing", - "tracing-durations-export", - "tracing-subscriber", - "tracing-tree", - "url", - "waitmap", - "which", ] [[package]] @@ -3363,12 +3101,6 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2593d31f82ead8df961d8bd23a64c2ccf2eb5dd34b0a34bfb4dd54011c72009e" -[[package]] -name = "smawk" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" - [[package]] name = "socket2" version = "0.5.5" @@ -3391,34 +3123,6 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" -[[package]] -name = "supports-color" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89" -dependencies = [ - "is-terminal", - "is_ci", -] - -[[package]] -name = "supports-hyperlinks" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84231692eb0d4d41e4cdd0cabfdd2e6cd9e255e65f80c9aa7c98dd502b4233d" -dependencies = [ - "is-terminal", -] - -[[package]] -name = "supports-unicode" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6c2cb240ab5dd21ed4906895ee23fe5a48acdbd15a3ce388e7b62a9b66baf7" -dependencies = [ - "is-terminal", -] - [[package]] name = "svg" version = "0.14.0" @@ -3507,22 +3211,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "terminal_size" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "termtree" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" - [[package]] name = "test-case" version = "3.3.1" @@ -3565,17 +3253,6 @@ dependencies = [ "log", ] -[[package]] -name = "textwrap" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7b3e525a49ec206798b40326a44121291b530c963cfb01018f63e135bac543d" -dependencies = [ - "smawk", - "unicode-linebreak", - "unicode-width", -] - [[package]] name = "thiserror" version = "1.0.56" @@ -3882,7 +3559,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", - "nu-ansi-term 0.46.0", + "nu-ansi-term", "once_cell", "regex", "sharded-slab", @@ -3893,18 +3570,6 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "tracing-tree" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675" -dependencies = [ - "nu-ansi-term 0.49.0", - "tracing-core", - "tracing-log", - "tracing-subscriber", -] - [[package]] name = "try-lock" version = "0.2.5" @@ -3938,12 +3603,6 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" -[[package]] -name = "unicode-linebreak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" - [[package]] name = "unicode-normalization" version = "0.1.22" @@ -4052,15 +3711,6 @@ dependencies = [ "quote", ] -[[package]] -name = "wait-timeout" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" -dependencies = [ - "libc", -] - [[package]] name = "waitmap" version = "1.1.0" diff --git a/crates/puffin/Cargo.toml b/crates/puffin/Cargo.toml index 1e6af229cc8d..9fc3caee8c53 100644 --- a/crates/puffin/Cargo.toml +++ b/crates/puffin/Cargo.toml @@ -14,76 +14,12 @@ default-run = "puffin" workspace = true [dependencies] -distribution-filename = { path = "../distribution-filename" } -distribution-types = { path = "../distribution-types" } -gourgeist = { path = "../gourgeist" } -install-wheel-rs = { path = "../install-wheel-rs", default-features = false } -pep440_rs = { path = "../pep440-rs" } -pep508_rs = { path = "../pep508-rs" } -platform-host = { path = "../platform-host" } -platform-tags = { path = "../platform-tags" } -puffin-build = { path = "../puffin-build" } -puffin-cache = { path = "../puffin-cache", features = ["clap"] } -puffin-client = { path = "../puffin-client" } -puffin-dispatch = { path = "../puffin-dispatch" } -puffin-distribution = { path = "../puffin-distribution" } -puffin-installer = { path = "../puffin-installer" } -puffin-interpreter = { path = "../puffin-interpreter" } -puffin-normalize = { path = "../puffin-normalize" } -puffin-resolver = { path = "../puffin-resolver", features = ["clap"] } -puffin-traits = { path = "../puffin-traits" } -puffin-warnings = { path = "../puffin-warnings" } -puffin-workspace = { path = "../puffin-workspace" } -pypi-types = { path = "../pypi-types" } -requirements-txt = { path = "../requirements-txt" } # This tells flate2 (and all libraries that depend on it, including async_compression # and async_zip) to use zlib-ng, which about 2x faster than the default flate2 backend # at decompression. 
See https://github.com/rust-lang/flate2-rs#backends flate2 = { workspace = true, default-features = false } -anstream = { workspace = true } -anyhow = { workspace = true } -bitflags = { workspace = true } -chrono = { workspace = true } -clap = { workspace = true, features = ["derive"] } -fs-err = { workspace = true, features = ["tokio"] } -futures = { workspace = true } -indicatif = { workspace = true } -itertools = { workspace = true } -miette = { workspace = true, features = ["fancy"] } -owo-colors = { workspace = true } -pubgrub = { workspace = true } -pyproject-toml = { workspace = true } -rustc-hash = { workspace = true } -tempfile = { workspace = true } -textwrap = { workspace = true } -thiserror = { workspace = true } -tokio = { workspace = true } -toml = { workspace = true } -tracing = { workspace = true } -tracing-durations-export = { workspace = true, features = ["plot"], optional = true } -tracing-subscriber = { workspace = true } -tracing-tree = { workspace = true } -url = { workspace = true } -waitmap = { workspace = true } -which = { workspace = true } - -[target.'cfg(target_os = "windows")'.dependencies] -mimalloc = "0.1.39" - -[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dependencies] -tikv-jemallocator = "0.5.4" - -[dev-dependencies] -assert_cmd = { version = "2.0.12" } -assert_fs = { version = "1.1.0" } -indoc = { version = "2.0.4" } -insta-cmd = { version = "0.4.0" } -insta = { version = "1.34.0", features = ["filters"] } -predicates = { version = "3.0.4" } -reqwest = { version = "0.11.23", features = ["blocking", "rustls"], default-features = false } - [features] default = ["flate2-zlib-ng"] # Introduces a dependency on a local Python installation. diff --git a/crates/puffin/src/commands/add.rs b/crates/puffin/src/commands/add.rs deleted file mode 100644 index 12ae3b5f0b53..000000000000 --- a/crates/puffin/src/commands/add.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::path::PathBuf; - -use anyhow::Result; -use miette::{Diagnostic, IntoDiagnostic}; -use thiserror::Error; -use tracing::info; - -use puffin_workspace::WorkspaceError; - -use crate::commands::ExitStatus; -use crate::printer::Printer; - -/// Add a dependency to the workspace. -#[allow(clippy::unnecessary_wraps)] -pub(crate) fn add(name: &str, _printer: Printer) -> Result { - match add_impl(name) { - Ok(status) => Ok(status), - Err(err) => { - #[allow(clippy::print_stderr)] - { - eprint!("{err:?}"); - } - Ok(ExitStatus::Failure) - } - } -} - -#[derive(Error, Debug, Diagnostic)] -enum AddError { - #[error( - "Could not find a `pyproject.toml` file in the current directory or any of its parents" - )] - #[diagnostic(code(puffin::add::workspace_not_found))] - WorkspaceNotFound, - - #[error("Failed to parse requirement: `{0}`")] - #[diagnostic(code(puffin::add::invalid_requirement))] - InvalidRequirement(String, #[source] pep508_rs::Pep508Error), - - #[error("Failed to parse `pyproject.toml` at: `{0}`")] - #[diagnostic(code(puffin::add::parse))] - ParseError(PathBuf, #[source] WorkspaceError), - - #[error("Failed to write `pyproject.toml` to: `{0}`")] - #[diagnostic(code(puffin::add::write))] - WriteError(PathBuf, #[source] WorkspaceError), -} - -fn add_impl(name: &str) -> miette::Result { - let requirement = puffin_workspace::VerbatimRequirement::try_from(name) - .map_err(|err| AddError::InvalidRequirement(name.to_string(), err))?; - - // Locate the workspace. 
- let cwd = std::env::current_dir().into_diagnostic()?; - let Some(workspace_root) = puffin_workspace::find_pyproject_toml(cwd) else { - return Err(AddError::WorkspaceNotFound.into()); - }; - - info!("Found workspace at: {}", workspace_root.display()); - - // Parse the manifest. - let mut manifest = puffin_workspace::Workspace::try_from(workspace_root.as_path()) - .map_err(|err| AddError::ParseError(workspace_root.clone(), err))?; - - // Add the dependency. - manifest.add_dependency(&requirement); - - // Write the manifest back to disk. - manifest - .save(&workspace_root) - .map_err(|err| AddError::WriteError(workspace_root.clone(), err))?; - - Ok(ExitStatus::Success) -} diff --git a/crates/puffin/src/commands/clean.rs b/crates/puffin/src/commands/clean.rs deleted file mode 100644 index 173d1090da57..000000000000 --- a/crates/puffin/src/commands/clean.rs +++ /dev/null @@ -1,52 +0,0 @@ -use std::fmt::Write; - -use anyhow::{Context, Result}; -use fs_err as fs; -use owo_colors::OwoColorize; - -use puffin_cache::Cache; -use puffin_normalize::PackageName; - -use crate::commands::ExitStatus; -use crate::printer::Printer; - -/// Clear the cache. -pub(crate) fn clean( - cache: &Cache, - packages: &[PackageName], - mut printer: Printer, -) -> Result { - if !cache.root().exists() { - writeln!( - printer, - "No cache found at: {}", - cache.root().display().cyan() - )?; - return Ok(ExitStatus::Success); - } - - if packages.is_empty() { - writeln!( - printer, - "Clearing cache at: {}", - cache.root().display().cyan() - )?; - fs::remove_dir_all(cache.root()) - .with_context(|| format!("Failed to clear cache at: {}", cache.root().display()))?; - } else { - for package in packages { - let count = cache.purge(package)?; - match count { - 0 => writeln!(printer, "No entries found for package: {}", package.cyan())?, - 1 => writeln!(printer, "Cleared 1 entry for package: {}", package.cyan())?, - count => writeln!( - printer, - "Cleared {count} entries for package: {}", - package.cyan() - )?, - } - } - } - - Ok(ExitStatus::Success) -} diff --git a/crates/puffin/src/commands/freeze.rs b/crates/puffin/src/commands/freeze.rs deleted file mode 100644 index 7a9d0896fb63..000000000000 --- a/crates/puffin/src/commands/freeze.rs +++ /dev/null @@ -1,53 +0,0 @@ -use std::fmt::Write; - -use anyhow::Result; -use itertools::Itertools; -use owo_colors::OwoColorize; -use tracing::debug; - -use distribution_types::Name; -use platform_host::Platform; -use puffin_cache::Cache; -use puffin_installer::SitePackages; -use puffin_interpreter::Virtualenv; - -use crate::commands::ExitStatus; -use crate::printer::Printer; - -/// Enumerate the installed packages in the current environment. -pub(crate) fn freeze(cache: &Cache, strict: bool, mut printer: Printer) -> Result { - // Detect the current Python interpreter. - let platform = Platform::current()?; - let python = Virtualenv::from_env(platform, cache)?; - debug!( - "Using Python interpreter: {}", - python.python_executable().display() - ); - - // Build the installed index. - let site_packages = SitePackages::from_executable(&python)?; - for dist in site_packages - .iter() - .sorted_unstable_by(|a, b| a.name().cmp(b.name())) - { - #[allow(clippy::print_stdout)] - { - println!("{dist}"); - } - } - - // Validate that the environment is consistent. - if strict { - for diagnostic in site_packages.diagnostics()? 
{ - writeln!( - printer, - "{}{} {}", - "warning".yellow().bold(), - ":".bold(), - diagnostic.message().bold() - )?; - } - } - - Ok(ExitStatus::Success) -} diff --git a/crates/puffin/src/commands/mod.rs b/crates/puffin/src/commands/mod.rs deleted file mode 100644 index 67e65c345ea4..000000000000 --- a/crates/puffin/src/commands/mod.rs +++ /dev/null @@ -1,76 +0,0 @@ -use std::process::ExitCode; -use std::time::Duration; - -pub(crate) use add::add; -pub(crate) use clean::clean; -use distribution_types::InstalledMetadata; -pub(crate) use freeze::freeze; -pub(crate) use pip_compile::{extra_name_with_clap_error, pip_compile, Upgrade}; -pub(crate) use pip_install::pip_install; -pub(crate) use pip_sync::pip_sync; -pub(crate) use pip_uninstall::pip_uninstall; -pub(crate) use remove::remove; -pub(crate) use venv::venv; - -mod add; -mod clean; -mod freeze; -mod pip_compile; -mod pip_install; -mod pip_sync; -mod pip_uninstall; -mod remove; -mod reporters; -mod venv; - -#[derive(Copy, Clone)] -pub(crate) enum ExitStatus { - /// The command succeeded. - #[allow(unused)] - Success, - - /// The command failed due to an error in the user input. - #[allow(unused)] - Failure, - - /// The command failed with an unexpected error. - #[allow(unused)] - Error, -} - -impl From for ExitCode { - fn from(status: ExitStatus) -> Self { - match status { - ExitStatus::Success => ExitCode::from(0), - ExitStatus::Failure => ExitCode::from(1), - ExitStatus::Error => ExitCode::from(2), - } - } -} - -/// Format a duration as a human-readable string, Cargo-style. -pub(super) fn elapsed(duration: Duration) -> String { - let secs = duration.as_secs(); - - if secs >= 60 { - format!("{}m {:02}s", secs / 60, secs % 60) - } else if secs > 0 { - format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000) - } else { - format!("{}ms", duration.subsec_millis()) - } -} - -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] -pub(super) enum ChangeEventKind { - /// The package was removed from the environment. - Removed, - /// The package was added to the environment. 
- Added, -} - -#[derive(Debug)] -pub(super) struct ChangeEvent { - dist: T, - kind: ChangeEventKind, -} diff --git a/crates/puffin/src/commands/pip_compile.rs b/crates/puffin/src/commands/pip_compile.rs deleted file mode 100644 index 24fd6f3c5579..000000000000 --- a/crates/puffin/src/commands/pip_compile.rs +++ /dev/null @@ -1,379 +0,0 @@ -use std::borrow::Cow; -use std::env; -use std::fmt::Write; -use std::io::stdout; -use std::ops::Deref; -use std::path::Path; -use std::str::FromStr; - -use anstream::AutoStream; -use anyhow::{anyhow, Context, Result}; -use chrono::{DateTime, Utc}; -use itertools::Itertools; -use owo_colors::OwoColorize; -use rustc_hash::FxHashSet; -use tempfile::tempdir_in; -use tracing::debug; - -use distribution_types::{IndexLocations, LocalEditable}; -use pep508_rs::Requirement; -use platform_host::Platform; -use platform_tags::Tags; -use puffin_cache::Cache; -use puffin_client::{FlatIndex, FlatIndexClient, RegistryClientBuilder}; -use puffin_dispatch::BuildDispatch; -use puffin_installer::{Downloader, NoBinary}; -use puffin_interpreter::{Interpreter, PythonVersion}; -use puffin_normalize::{ExtraName, PackageName}; -use puffin_resolver::{ - DisplayResolutionGraph, InMemoryIndex, Manifest, PreReleaseMode, ResolutionMode, - ResolutionOptions, Resolver, -}; -use puffin_traits::{InFlight, SetupPyStrategy}; -use requirements_txt::EditableRequirement; - -use crate::commands::reporters::{DownloadReporter, ResolverReporter}; -use crate::commands::{elapsed, ExitStatus}; -use crate::printer::Printer; -use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification}; - -const VERSION: &str = env!("CARGO_PKG_VERSION"); - -/// Resolve a set of requirements into a set of pinned versions. -#[allow(clippy::too_many_arguments)] -pub(crate) async fn pip_compile( - requirements: &[RequirementsSource], - constraints: &[RequirementsSource], - overrides: &[RequirementsSource], - extras: ExtrasSpecification<'_>, - output_file: Option<&Path>, - resolution_mode: ResolutionMode, - prerelease_mode: PreReleaseMode, - upgrade: Upgrade, - generate_hashes: bool, - index_locations: IndexLocations, - setup_py: SetupPyStrategy, - no_build: bool, - python_version: Option, - exclude_newer: Option>, - cache: Cache, - mut printer: Printer, -) -> Result { - let start = std::time::Instant::now(); - - // If the user requests `extras` but does not provide a pyproject toml source - if !matches!(extras, ExtrasSpecification::None) - && !requirements - .iter() - .any(|source| matches!(source, RequirementsSource::PyprojectToml(_))) - { - return Err(anyhow!( - "Requesting extras requires a pyproject.toml input file." - )); - } - - // Read all requirements from the provided sources. - let RequirementsSpecification { - project, - requirements, - constraints, - overrides, - editables, - extras: used_extras, - } = RequirementsSpecification::from_sources(requirements, constraints, overrides, &extras)?; - - // Check that all provided extras are used - if let ExtrasSpecification::Some(extras) = extras { - let mut unused_extras = extras - .iter() - .filter(|extra| !used_extras.contains(extra)) - .collect::>(); - if !unused_extras.is_empty() { - unused_extras.sort_unstable(); - unused_extras.dedup(); - let s = if unused_extras.len() == 1 { "" } else { "s" }; - return Err(anyhow!( - "Requested extra{s} not found: {}", - unused_extras.iter().join(", ") - )); - } - } - - let preferences: Vec = output_file - // As an optimization, skip reading the lockfile is we're upgrading all packages anyway. 
- .filter(|_| !upgrade.is_all()) - .filter(|output_file| output_file.exists()) - .map(Path::to_path_buf) - .map(RequirementsSource::from) - .as_ref() - .map(|source| RequirementsSpecification::from_source(source, &extras)) - .transpose()? - .map(|spec| spec.requirements) - .map(|requirements| match upgrade { - // Respect all pinned versions from the existing lockfile. - Upgrade::None => requirements, - // Ignore all pinned versions from the existing lockfile. - Upgrade::All => vec![], - // Ignore pinned versions for the specified packages. - Upgrade::Packages(packages) => requirements - .into_iter() - .filter(|requirement| !packages.contains(&requirement.name)) - .collect(), - }) - .unwrap_or_default(); - - // Detect the current Python interpreter. - let platform = Platform::current()?; - let interpreter = Interpreter::find(python_version.as_ref(), platform, &cache)?; - - debug!( - "Using Python {} at {}", - interpreter.markers().python_version, - interpreter.sys_executable().display() - ); - - // Create a shared in-memory index. - let source_index = InMemoryIndex::default(); - - // If we're resolving against a different Python version, use a separate index. Source - // distributions will be built against the installed version, and so the index may contain - // different package priorities than in the top-level resolution. - let top_level_index = if python_version.is_some() { - InMemoryIndexRef::Owned(InMemoryIndex::default()) - } else { - InMemoryIndexRef::Borrowed(&source_index) - }; - - // Determine the tags, markers, and interpreter to use for resolution. - let tags = if let Some(python_version) = python_version.as_ref() { - Cow::Owned(Tags::from_env( - interpreter.platform(), - python_version.simple_version(), - )?) - } else { - Cow::Borrowed(interpreter.tags()?) - }; - let markers = python_version.map_or_else( - || Cow::Borrowed(interpreter.markers()), - |python_version| Cow::Owned(python_version.markers(interpreter.markers())), - ); - - // Instantiate a client. - let client = RegistryClientBuilder::new(cache.clone()) - .index_urls(index_locations.index_urls()) - .build(); - - // Resolve the flat indexes from `--find-links`. - let flat_index = { - let client = FlatIndexClient::new(&client, &cache); - let entries = client.fetch(index_locations.flat_indexes()).await?; - FlatIndex::from_entries(entries, &tags) - }; - - // Track in-flight downloads, builds, etc., across resolutions. - let in_flight = InFlight::default(); - - let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer); - let build_dispatch = BuildDispatch::new( - &client, - &cache, - &interpreter, - &index_locations, - &flat_index, - &source_index, - &in_flight, - interpreter.sys_executable().to_path_buf(), - setup_py, - no_build, - &NoBinary::None, - ) - .with_options(options); - - // Build the editables and add their requirements - let editable_metadata = if editables.is_empty() { - Vec::new() - } else { - let start = std::time::Instant::now(); - - let editables: Vec = editables - .into_iter() - .map(|editable| { - let EditableRequirement { path, url } = editable; - Ok(LocalEditable { url, path }) - }) - .collect::>()?; - - let downloader = Downloader::new(&cache, &tags, &client, &build_dispatch) - .with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64)); - - let editable_wheel_dir = tempdir_in(cache.root())?; - let editable_metadata: Vec<_> = downloader - .build_editables(editables, editable_wheel_dir.path()) - .await - .context("Failed to build editables")? 
- .into_iter() - .map(|built_editable| (built_editable.editable, built_editable.metadata)) - .collect(); - - let s = if editable_metadata.len() == 1 { - "" - } else { - "s" - }; - writeln!( - printer, - "{}", - format!( - "Built {} in {}", - format!("{} editable{}", editable_metadata.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - editable_metadata - }; - - // Create a manifest of the requirements. - let manifest = Manifest::new( - requirements, - constraints, - overrides, - preferences, - project, - editable_metadata, - ); - - // Resolve the dependencies. - let resolver = Resolver::new( - manifest, - options, - &markers, - &interpreter, - &tags, - &client, - &flat_index, - &top_level_index, - &build_dispatch, - ) - .with_reporter(ResolverReporter::from(printer)); - let resolution = match resolver.resolve().await { - Err(puffin_resolver::ResolveError::NoSolution(err)) => { - #[allow(clippy::print_stderr)] - { - let report = miette::Report::msg(format!("{err}")) - .context("No solution found when resolving dependencies:"); - eprint!("{report:?}"); - } - return Ok(ExitStatus::Failure); - } - result => result, - }?; - - let s = if resolution.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Resolved {} in {}", - format!("{} package{}", resolution.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - // Notify the user of any diagnostics. - for diagnostic in resolution.diagnostics() { - writeln!( - printer, - "{}{} {}", - "warning".yellow().bold(), - ":".bold(), - diagnostic.message().bold() - )?; - } - - // Write the resolved dependencies to the output channel. - let mut writer: Box = if let Some(output_file) = output_file { - Box::new(AutoStream::::auto( - fs_err::File::create(output_file)?.into(), - )) - } else { - Box::new(AutoStream::auto(stdout())) - }; - - writeln!( - writer, - "{}", - format!("# This file was autogenerated by Puffin v{VERSION} via the following command:") - .green() - )?; - writeln!( - writer, - "{}", - format!("# puffin {}", env::args().skip(1).join(" ")).green() - )?; - write!( - writer, - "{}", - DisplayResolutionGraph::new(&resolution, generate_hashes) - )?; - - Ok(ExitStatus::Success) -} - -/// Whether to allow package upgrades. -#[derive(Debug)] -pub(crate) enum Upgrade { - /// Prefer pinned versions from the existing lockfile, if possible. - None, - - /// Allow package upgrades for all packages, ignoring the existing lockfile. - All, - - /// Allow package upgrades, but only for the specified packages. - Packages(FxHashSet), -} - -impl Upgrade { - /// Determine the upgrade strategy from the command-line arguments. - pub(crate) fn from_args(upgrade: bool, upgrade_package: Vec) -> Self { - if upgrade { - Self::All - } else if !upgrade_package.is_empty() { - Self::Packages(upgrade_package.into_iter().collect()) - } else { - Self::None - } - } - - /// Returns `true` if all packages should be upgraded. - pub(crate) fn is_all(&self) -> bool { - matches!(self, Self::All) - } -} - -pub(crate) fn extra_name_with_clap_error(arg: &str) -> Result { - ExtraName::from_str(arg).map_err(|_err| { - anyhow!( - "Extra names must start and end with a letter or digit and may only \ - contain -, _, ., and alphanumeric characters" - ) - }) -} - -/// An owned or unowned [`InMemoryIndex`]. 
-enum InMemoryIndexRef<'a> { - Owned(InMemoryIndex), - Borrowed(&'a InMemoryIndex), -} - -impl Deref for InMemoryIndexRef<'_> { - type Target = InMemoryIndex; - - fn deref(&self) -> &Self::Target { - match self { - Self::Owned(index) => index, - Self::Borrowed(index) => index, - } - } -} diff --git a/crates/puffin/src/commands/pip_install.rs b/crates/puffin/src/commands/pip_install.rs deleted file mode 100644 index 015d74b0657d..000000000000 --- a/crates/puffin/src/commands/pip_install.rs +++ /dev/null @@ -1,659 +0,0 @@ -use std::fmt::Write; -use std::path::Path; - -use anstream::eprint; -use anyhow::{anyhow, Context, Result}; -use chrono::{DateTime, Utc}; -use itertools::Itertools; -use owo_colors::OwoColorize; -use tempfile::tempdir_in; -use tracing::debug; - -use distribution_types::{ - IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name, Resolution, -}; -use install_wheel_rs::linker::LinkMode; -use pep508_rs::{MarkerEnvironment, Requirement}; -use platform_host::Platform; -use platform_tags::Tags; -use puffin_cache::Cache; -use puffin_client::{FlatIndex, FlatIndexClient, RegistryClient, RegistryClientBuilder}; -use puffin_dispatch::BuildDispatch; -use puffin_installer::{ - BuiltEditable, Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages, -}; -use puffin_interpreter::{Interpreter, Virtualenv}; -use puffin_normalize::PackageName; -use puffin_resolver::{ - InMemoryIndex, Manifest, PreReleaseMode, ResolutionGraph, ResolutionMode, ResolutionOptions, - Resolver, -}; -use puffin_traits::{InFlight, SetupPyStrategy}; -use requirements_txt::EditableRequirement; - -use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter}; -use crate::commands::{elapsed, ChangeEvent, ChangeEventKind, ExitStatus}; -use crate::printer::Printer; -use crate::requirements::{ExtrasSpecification, RequirementsSource, RequirementsSpecification}; - -/// Install packages into the current environment. -#[allow(clippy::too_many_arguments)] -pub(crate) async fn pip_install( - requirements: &[RequirementsSource], - constraints: &[RequirementsSource], - overrides: &[RequirementsSource], - extras: &ExtrasSpecification<'_>, - resolution_mode: ResolutionMode, - prerelease_mode: PreReleaseMode, - index_locations: IndexLocations, - reinstall: &Reinstall, - link_mode: LinkMode, - setup_py: SetupPyStrategy, - no_build: bool, - no_binary: &NoBinary, - strict: bool, - exclude_newer: Option>, - cache: Cache, - mut printer: Printer, -) -> Result { - let start = std::time::Instant::now(); - - // Read all requirements from the provided sources. - let RequirementsSpecification { - project, - requirements, - constraints, - overrides, - editables, - extras: used_extras, - } = specification(requirements, constraints, overrides, extras)?; - - // Check that all provided extras are used - if let ExtrasSpecification::Some(extras) = extras { - let mut unused_extras = extras - .iter() - .filter(|extra| !used_extras.contains(extra)) - .collect::>(); - if !unused_extras.is_empty() { - unused_extras.sort_unstable(); - unused_extras.dedup(); - let s = if unused_extras.len() == 1 { "" } else { "s" }; - return Err(anyhow!( - "Requested extra{s} not found: {}", - unused_extras.iter().join(", ") - )); - } - } - - // Detect the current Python interpreter. 
- let platform = Platform::current()?; - let venv = Virtualenv::from_env(platform, &cache)?; - debug!( - "Using Python interpreter: {}", - venv.python_executable().display() - ); - let _lock = venv.lock()?; - - // Determine the set of installed packages. - let site_packages = - SitePackages::from_executable(&venv).context("Failed to list installed packages")?; - - // If the requirements are already satisfied, we're done. Ideally, the resolver would be fast - // enough to let us remove this check. But right now, for large environments, it's an order of - // magnitude faster to validate the environment than to resolve the requirements. - if reinstall.is_none() && site_packages.satisfies(&requirements, &editables, &constraints)? { - let num_requirements = requirements.len() + editables.len(); - let s = if num_requirements == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Audited {} in {}", - format!("{num_requirements} package{s}").bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - return Ok(ExitStatus::Success); - } - - // Determine the tags, markers, and interpreter to use for resolution. - let interpreter = venv.interpreter().clone(); - let tags = venv.interpreter().tags()?; - let markers = venv.interpreter().markers(); - - // Instantiate a client. - let client = RegistryClientBuilder::new(cache.clone()) - .index_urls(index_locations.index_urls()) - .build(); - - // Resolve the flat indexes from `--find-links`. - let flat_index = { - let client = FlatIndexClient::new(&client, &cache); - let entries = client.fetch(index_locations.flat_indexes()).await?; - FlatIndex::from_entries(entries, tags) - }; - - // Create a shared in-memory index. - let index = InMemoryIndex::default(); - - // Track in-flight downloads, builds, etc., across resolutions. - let in_flight = InFlight::default(); - - let options = ResolutionOptions::new(resolution_mode, prerelease_mode, exclude_newer); - - let resolve_dispatch = BuildDispatch::new( - &client, - &cache, - &interpreter, - &index_locations, - &flat_index, - &index, - &in_flight, - venv.python_executable(), - setup_py, - no_build, - no_binary, - ) - .with_options(options); - - // Build all editable distributions. The editables are shared between resolution and - // installation, and should live for the duration of the command. If an editable is already - // installed in the environment, we'll still re-build it here. - let editable_wheel_dir; - let editables = if editables.is_empty() { - vec![] - } else { - editable_wheel_dir = tempdir_in(venv.root())?; - build_editables( - &editables, - editable_wheel_dir.path(), - &cache, - tags, - &client, - &resolve_dispatch, - printer, - ) - .await? - }; - - // Resolve the requirements. - let resolution = match resolve( - requirements, - constraints, - overrides, - project, - &editables, - &site_packages, - reinstall, - &interpreter, - tags, - markers, - &client, - &flat_index, - &index, - &resolve_dispatch, - options, - printer, - ) - .await - { - Ok(resolution) => Resolution::from(resolution), - Err(Error::Resolve(puffin_resolver::ResolveError::NoSolution(err))) => { - #[allow(clippy::print_stderr)] - { - let report = miette::Report::msg(format!("{err}")) - .context("No solution found when resolving dependencies:"); - eprint!("{report:?}"); - } - return Ok(ExitStatus::Failure); - } - Err(err) => return Err(err.into()), - }; - - // Re-initialize the in-flight map. 
- let in_flight = InFlight::default(); - - // If we're running with `--reinstall`, initialize a separate `BuildDispatch`, since we may - // end up removing some distributions from the environment. - let install_dispatch = if reinstall.is_none() { - resolve_dispatch - } else { - BuildDispatch::new( - &client, - &cache, - &interpreter, - &index_locations, - &flat_index, - &index, - &in_flight, - venv.python_executable(), - setup_py, - no_build, - no_binary, - ) - }; - - // Sync the environment. - install( - &resolution, - editables, - site_packages, - reinstall, - no_binary, - link_mode, - &index_locations, - tags, - &client, - &in_flight, - &install_dispatch, - &cache, - &venv, - printer, - ) - .await?; - - // Validate the environment. - if strict { - validate(&resolution, &venv, printer)?; - } - - Ok(ExitStatus::Success) -} - -/// Consolidate the requirements for an installation. -fn specification( - requirements: &[RequirementsSource], - constraints: &[RequirementsSource], - overrides: &[RequirementsSource], - extras: &ExtrasSpecification<'_>, -) -> Result { - // If the user requests `extras` but does not provide a pyproject toml source - if !matches!(extras, ExtrasSpecification::None) - && !requirements - .iter() - .any(|source| matches!(source, RequirementsSource::PyprojectToml(_))) - { - return Err(anyhow!("Requesting extras requires a pyproject.toml input file.").into()); - } - - // Read all requirements from the provided sources. - let spec = - RequirementsSpecification::from_sources(requirements, constraints, overrides, extras)?; - - // Check that all provided extras are used - if let ExtrasSpecification::Some(extras) = extras { - let mut unused_extras = extras - .iter() - .filter(|extra| !spec.extras.contains(extra)) - .collect::>(); - if !unused_extras.is_empty() { - unused_extras.sort_unstable(); - unused_extras.dedup(); - let s = if unused_extras.len() == 1 { "" } else { "s" }; - return Err(anyhow!( - "Requested extra{s} not found: {}", - unused_extras.iter().join(", ") - ) - .into()); - } - } - - Ok(spec) -} - -/// Build a set of editable distributions. -async fn build_editables( - editables: &[EditableRequirement], - editable_wheel_dir: &Path, - cache: &Cache, - tags: &Tags, - client: &RegistryClient, - build_dispatch: &BuildDispatch<'_>, - mut printer: Printer, -) -> Result, Error> { - let start = std::time::Instant::now(); - - let downloader = Downloader::new(cache, tags, client, build_dispatch) - .with_reporter(DownloadReporter::from(printer).with_length(editables.len() as u64)); - - let editables: Vec = editables - .iter() - .map(|editable| { - let EditableRequirement { path, url } = editable; - Ok(LocalEditable { - path: path.clone(), - url: url.clone(), - }) - }) - .collect::>()?; - - let editables: Vec<_> = downloader - .build_editables(editables, editable_wheel_dir) - .await - .context("Failed to build editables")? - .into_iter() - .collect(); - - let s = if editables.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Built {} in {}", - format!("{} editable{}", editables.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - Ok(editables) -} - -/// Resolve a set of requirements, similar to running `pip compile`. 
-#[allow(clippy::too_many_arguments)] -async fn resolve( - requirements: Vec, - constraints: Vec, - overrides: Vec, - project: Option, - editables: &[BuiltEditable], - site_packages: &SitePackages<'_>, - reinstall: &Reinstall, - interpreter: &Interpreter, - tags: &Tags, - markers: &MarkerEnvironment, - client: &RegistryClient, - flat_index: &FlatIndex, - index: &InMemoryIndex, - build_dispatch: &BuildDispatch<'_>, - options: ResolutionOptions, - mut printer: Printer, -) -> Result { - let start = std::time::Instant::now(); - - // Respect preferences from the existing environments. - let preferences: Vec = match reinstall { - Reinstall::All => vec![], - Reinstall::None => site_packages.requirements().collect(), - Reinstall::Packages(packages) => site_packages - .requirements() - .filter(|requirement| !packages.contains(&requirement.name)) - .collect(), - }; - - // Map the editables to their metadata. - let editables = editables - .iter() - .map(|built_editable| { - ( - built_editable.editable.clone(), - built_editable.metadata.clone(), - ) - }) - .collect(); - - // Create a manifest of the requirements. - let manifest = Manifest::new( - requirements, - constraints, - overrides, - preferences, - project, - editables, - ); - - // Resolve the dependencies. - let resolver = Resolver::new( - manifest, - options, - markers, - interpreter, - tags, - client, - flat_index, - index, - build_dispatch, - ) - .with_reporter(ResolverReporter::from(printer)); - let resolution = resolver.resolve().await?; - - let s = if resolution.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Resolved {} in {}", - format!("{} package{}", resolution.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - Ok(resolution) -} - -/// Install a set of requirements into the current environment. -#[allow(clippy::too_many_arguments)] -async fn install( - resolution: &Resolution, - built_editables: Vec, - site_packages: SitePackages<'_>, - reinstall: &Reinstall, - no_binary: &NoBinary, - link_mode: LinkMode, - index_urls: &IndexLocations, - tags: &Tags, - client: &RegistryClient, - in_flight: &InFlight, - build_dispatch: &BuildDispatch<'_>, - cache: &Cache, - venv: &Virtualenv, - mut printer: Printer, -) -> Result<(), Error> { - let start = std::time::Instant::now(); - - // Partition into those that should be linked from the cache (`local`), those that need to be - // downloaded (`remote`), and those that should be removed (`extraneous`). - let requirements = resolution.requirements(); - let editables = built_editables - .into_iter() - .map(ResolvedEditable::Built) - .collect::>(); - - let Plan { - local, - remote, - reinstalls, - extraneous: _, - } = Planner::with_requirements(&requirements) - .with_editable_requirements(editables) - .build( - site_packages, - reinstall, - no_binary, - index_urls, - cache, - venv, - tags, - ) - .context("Failed to determine installation plan")?; - - // Nothing to do. - if remote.is_empty() && local.is_empty() && reinstalls.is_empty() { - let s = if resolution.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Audited {} in {}", - format!("{} package{}", resolution.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - return Ok(()); - } - - // Map any registry-based requirements back to those returned by the resolver. 
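// A self-contained sketch of the `--reinstall` preference filtering above,
// using a bare `Requirement { name }` stand-in for the real pep508 type
// (an assumption made only for illustration).
#[derive(Clone)]
struct Requirement {
    name: String,
}

enum Reinstall {
    /// Reinstall nothing: every installed package becomes a resolver preference.
    None,
    /// Reinstall everything: offer the resolver no preferences.
    All,
    /// Reinstall only the named packages; keep the rest as preferences.
    Packages(Vec<String>),
}

fn preferences(installed: &[Requirement], reinstall: &Reinstall) -> Vec<Requirement> {
    match reinstall {
        Reinstall::All => vec![],
        Reinstall::None => installed.to_vec(),
        Reinstall::Packages(packages) => installed
            .iter()
            .filter(|requirement| !packages.contains(&requirement.name))
            .cloned()
            .collect(),
    }
}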
- let remote = remote - .iter() - .map(|dist| { - resolution - .get(&dist.name) - .cloned() - .expect("Resolution should contain all packages") - }) - .collect::>(); - - // Download, build, and unzip any missing distributions. - let wheels = if remote.is_empty() { - vec![] - } else { - let start = std::time::Instant::now(); - - let downloader = Downloader::new(cache, tags, client, build_dispatch) - .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64)); - - let wheels = downloader - .download(remote, in_flight) - .await - .context("Failed to download distributions")?; - - let s = if wheels.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Downloaded {} in {}", - format!("{} package{}", wheels.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - wheels - }; - - // Remove any existing installations. - if !reinstalls.is_empty() { - for dist_info in &reinstalls { - let summary = puffin_installer::uninstall(dist_info).await?; - debug!( - "Uninstalled {} ({} file{}, {} director{})", - dist_info.name(), - summary.file_count, - if summary.file_count == 1 { "" } else { "s" }, - summary.dir_count, - if summary.dir_count == 1 { "y" } else { "ies" }, - ); - } - } - - // Install the resolved distributions. - let wheels = wheels.into_iter().chain(local).collect::>(); - if !wheels.is_empty() { - let start = std::time::Instant::now(); - puffin_installer::Installer::new(venv) - .with_link_mode(link_mode) - .with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64)) - .install(&wheels)?; - - let s = if wheels.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Installed {} in {}", - format!("{} package{}", wheels.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - } - - for event in reinstalls - .into_iter() - .map(|distribution| ChangeEvent { - dist: LocalDist::from(distribution), - kind: ChangeEventKind::Removed, - }) - .chain(wheels.into_iter().map(|distribution| ChangeEvent { - dist: LocalDist::from(distribution), - kind: ChangeEventKind::Added, - })) - .sorted_unstable_by(|a, b| { - a.dist - .name() - .cmp(b.dist.name()) - .then_with(|| a.kind.cmp(&b.kind)) - }) - { - match event.kind { - ChangeEventKind::Added => { - writeln!( - printer, - " {} {}{}", - "+".green(), - event.dist.name().as_ref().white().bold(), - event.dist.installed_version().to_string().dimmed() - )?; - } - ChangeEventKind::Removed => { - writeln!( - printer, - " {} {}{}", - "-".red(), - event.dist.name().as_ref().white().bold(), - event.dist.installed_version().to_string().dimmed() - )?; - } - } - } - - Ok(()) -} - -/// Validate the installed packages in the virtual environment. -fn validate(resolution: &Resolution, venv: &Virtualenv, mut printer: Printer) -> Result<(), Error> { - let site_packages = SitePackages::from_executable(venv)?; - let diagnostics = site_packages.diagnostics()?; - for diagnostic in diagnostics { - // Only surface diagnostics that are "relevant" to the current resolution. 
- if resolution - .packages() - .any(|package| diagnostic.includes(package)) - { - writeln!( - printer, - "{}{} {}", - "warning".yellow().bold(), - ":".bold(), - diagnostic.message().bold() - )?; - } - } - Ok(()) -} - -#[derive(thiserror::Error, Debug)] -enum Error { - #[error(transparent)] - Resolve(#[from] puffin_resolver::ResolveError), - - #[error(transparent)] - Client(#[from] puffin_client::Error), - - #[error(transparent)] - Platform(#[from] platform_host::PlatformError), - - #[error(transparent)] - Io(#[from] std::io::Error), - - #[error(transparent)] - Fmt(#[from] std::fmt::Error), - - #[error(transparent)] - Anyhow(#[from] anyhow::Error), -} diff --git a/crates/puffin/src/commands/pip_sync.rs b/crates/puffin/src/commands/pip_sync.rs deleted file mode 100644 index e45c97643e54..000000000000 --- a/crates/puffin/src/commands/pip_sync.rs +++ /dev/null @@ -1,469 +0,0 @@ -use std::fmt::Write; - -use anyhow::{Context, Result}; -use itertools::Itertools; -use owo_colors::OwoColorize; -use tracing::debug; - -use distribution_types::{IndexLocations, InstalledMetadata, LocalDist, LocalEditable, Name}; -use install_wheel_rs::linker::LinkMode; -use platform_host::Platform; -use platform_tags::Tags; -use puffin_cache::Cache; -use puffin_client::{FlatIndex, FlatIndexClient, RegistryClient, RegistryClientBuilder}; -use puffin_dispatch::BuildDispatch; -use puffin_installer::{ - Downloader, NoBinary, Plan, Planner, Reinstall, ResolvedEditable, SitePackages, -}; -use puffin_interpreter::Virtualenv; -use puffin_resolver::InMemoryIndex; -use puffin_traits::{InFlight, SetupPyStrategy}; -use pypi_types::Yanked; -use requirements_txt::EditableRequirement; - -use crate::commands::reporters::{DownloadReporter, FinderReporter, InstallReporter}; -use crate::commands::{elapsed, ChangeEvent, ChangeEventKind, ExitStatus}; -use crate::printer::Printer; -use crate::requirements::{RequirementsSource, RequirementsSpecification}; - -/// Install a set of locked requirements into the current Python environment. -#[allow(clippy::too_many_arguments)] -pub(crate) async fn pip_sync( - sources: &[RequirementsSource], - reinstall: &Reinstall, - link_mode: LinkMode, - index_locations: IndexLocations, - setup_py: SetupPyStrategy, - no_build: bool, - no_binary: &NoBinary, - strict: bool, - cache: Cache, - mut printer: Printer, -) -> Result { - let start = std::time::Instant::now(); - - // Read all requirements from the provided sources. - let (requirements, editables) = RequirementsSpecification::requirements_and_editables(sources)?; - let num_requirements = requirements.len() + editables.len(); - if num_requirements == 0 { - writeln!(printer, "No requirements found")?; - return Ok(ExitStatus::Success); - } - - // Detect the current Python interpreter. - let platform = Platform::current()?; - let venv = Virtualenv::from_env(platform, &cache)?; - debug!( - "Using Python interpreter: {}", - venv.python_executable().display() - ); - let _lock = venv.lock()?; - - // Determine the current environment markers. - let tags = venv.interpreter().tags()?; - - // Prep the registry client. - let client = RegistryClientBuilder::new(cache.clone()) - .index_urls(index_locations.index_urls()) - .build(); - - // Resolve the flat indexes from `--find-links`. - let flat_index = { - let client = FlatIndexClient::new(&client, &cache); - let entries = client.fetch(index_locations.flat_indexes()).await?; - FlatIndex::from_entries(entries, tags) - }; - - // Create a shared in-memory index. 
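// The resolver index is shared across resolutions; elsewhere in this patch
// (see the deleted `InMemoryIndexRef`) it is held as either an owned value or
// a borrow. The same Cow-like pattern, written generically as a sketch:
use std::ops::Deref;

enum OwnedOrBorrowed<'a, T> {
    Owned(T),
    Borrowed(&'a T),
}

impl<T> Deref for OwnedOrBorrowed<'_, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        match self {
            // Deref coercion turns both `&T` and `&&T` into `&T` here.
            Self::Owned(value) => value,
            Self::Borrowed(value) => value,
        }
    }
}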
- let index = InMemoryIndex::default(); - - // Track in-flight downloads, builds, etc., across resolutions. - let in_flight = InFlight::default(); - - // Prep the build context. - let build_dispatch = BuildDispatch::new( - &client, - &cache, - venv.interpreter(), - &index_locations, - &flat_index, - &index, - &in_flight, - venv.python_executable(), - setup_py, - no_build, - no_binary, - ); - - // Determine the set of installed packages. - let site_packages = - SitePackages::from_executable(&venv).context("Failed to list installed packages")?; - - // Resolve any editables. - let resolved_editables = resolve_editables( - editables, - &site_packages, - reinstall, - &venv, - tags, - &cache, - &client, - &build_dispatch, - printer, - ) - .await?; - - // Partition into those that should be linked from the cache (`local`), those that need to be - // downloaded (`remote`), and those that should be removed (`extraneous`). - let Plan { - local, - remote, - reinstalls, - extraneous, - } = Planner::with_requirements(&requirements) - .with_editable_requirements(resolved_editables.editables) - .build( - site_packages, - reinstall, - no_binary, - &index_locations, - &cache, - &venv, - tags, - ) - .context("Failed to determine installation plan")?; - - // Nothing to do. - if remote.is_empty() && local.is_empty() && reinstalls.is_empty() && extraneous.is_empty() { - let s = if num_requirements == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Audited {} in {}", - format!("{num_requirements} package{s}").bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - return Ok(ExitStatus::Success); - } - - // Instantiate a client. - let client = RegistryClientBuilder::new(cache.clone()) - .index_urls(index_locations.index_urls()) - .build(); - - // Resolve any registry-based requirements. - let remote = if remote.is_empty() { - Vec::new() - } else { - let start = std::time::Instant::now(); - - // Resolve the flat indexes from `--find-links`. - let flat_index = { - let client = FlatIndexClient::new(&client, &cache); - let entries = client.fetch(index_locations.flat_indexes()).await?; - FlatIndex::from_entries(entries, tags) - }; - - let wheel_finder = puffin_resolver::DistFinder::new( - tags, - &client, - venv.interpreter(), - &flat_index, - no_binary, - ) - .with_reporter(FinderReporter::from(printer).with_length(remote.len() as u64)); - let resolution = wheel_finder.resolve(&remote).await?; - - let s = if resolution.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Resolved {} in {}", - format!("{} package{}", resolution.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - resolution.into_distributions().collect::>() - }; - - // TODO(konstin): Also check the cache whether any cached or installed dist is already known to - // have been yanked, we currently don't show this message on the second run anymore - for dist in &remote { - let Some(file) = dist.file() else { - continue; - }; - match &file.yanked { - None | Some(Yanked::Bool(false)) => {} - Some(Yanked::Bool(true)) => { - writeln!( - printer, - "{}{} {dist} is yanked. Refresh your lockfile to pin an un-yanked version.", - "warning".yellow().bold(), - ":".bold(), - )?; - } - Some(Yanked::Reason(reason)) => { - writeln!( - printer, - "{}{} {dist} is yanked (reason: \"{reason}\"). Refresh your lockfile to pin an un-yanked version.", - "warning".yellow().bold(), - ":".bold(), - )?; - } - } - } - - // Download, build, and unzip any missing distributions. 
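// `pypi_types::Yanked` is not shown in this patch; the sketch below assumes a
// shape consistent with how it is matched above (a bare boolean or a reason
// string) and reduces the warning to plain stderr output.
enum Yanked {
    Bool(bool),
    Reason(String),
}

fn warn_if_yanked(dist: &str, yanked: Option<&Yanked>) {
    match yanked {
        // Not yanked (or explicitly marked as not yanked): nothing to report.
        None | Some(Yanked::Bool(false)) => {}
        Some(Yanked::Bool(true)) => {
            eprintln!("warning: {dist} is yanked. Refresh your lockfile to pin an un-yanked version.");
        }
        Some(Yanked::Reason(reason)) => {
            eprintln!(
                "warning: {dist} is yanked (reason: {reason:?}). Refresh your lockfile to pin an un-yanked version."
            );
        }
    }
}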
- let wheels = if remote.is_empty() { - Vec::new() - } else { - let start = std::time::Instant::now(); - - let downloader = Downloader::new(&cache, tags, &client, &build_dispatch) - .with_reporter(DownloadReporter::from(printer).with_length(remote.len() as u64)); - - let wheels = downloader - .download(remote, &in_flight) - .await - .context("Failed to download distributions")?; - - let s = if wheels.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Downloaded {} in {}", - format!("{} package{}", wheels.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - wheels - }; - - // Remove any unnecessary packages. - if !extraneous.is_empty() || !reinstalls.is_empty() { - let start = std::time::Instant::now(); - - for dist_info in extraneous.iter().chain(reinstalls.iter()) { - let summary = puffin_installer::uninstall(dist_info).await?; - debug!( - "Uninstalled {} ({} file{}, {} director{})", - dist_info.name(), - summary.file_count, - if summary.file_count == 1 { "" } else { "s" }, - summary.dir_count, - if summary.dir_count == 1 { "y" } else { "ies" }, - ); - } - - let s = if extraneous.len() + reinstalls.len() == 1 { - "" - } else { - "s" - }; - writeln!( - printer, - "{}", - format!( - "Uninstalled {} in {}", - format!("{} package{}", extraneous.len() + reinstalls.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - } - - // Install the resolved distributions. - let wheels = wheels.into_iter().chain(local).collect::>(); - if !wheels.is_empty() { - let start = std::time::Instant::now(); - puffin_installer::Installer::new(&venv) - .with_link_mode(link_mode) - .with_reporter(InstallReporter::from(printer).with_length(wheels.len() as u64)) - .install(&wheels)?; - - let s = if wheels.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Installed {} in {}", - format!("{} package{}", wheels.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - } - - // Report on any changes in the environment. - for event in extraneous - .into_iter() - .chain(reinstalls.into_iter()) - .map(|distribution| ChangeEvent { - dist: LocalDist::from(distribution), - kind: ChangeEventKind::Removed, - }) - .chain(wheels.into_iter().map(|distribution| ChangeEvent { - dist: LocalDist::from(distribution), - kind: ChangeEventKind::Added, - })) - .sorted_unstable_by(|a, b| { - a.dist - .name() - .cmp(b.dist.name()) - .then_with(|| a.kind.cmp(&b.kind)) - }) - { - match event.kind { - ChangeEventKind::Added => { - writeln!( - printer, - " {} {}{}", - "+".green(), - event.dist.name().as_ref().white().bold(), - event.dist.installed_version().to_string().dimmed() - )?; - } - ChangeEventKind::Removed => { - writeln!( - printer, - " {} {}{}", - "-".red(), - event.dist.name().as_ref().white().bold(), - event.dist.installed_version().to_string().dimmed() - )?; - } - } - } - - // Validate that the environment is consistent. - if strict { - let site_packages = SitePackages::from_executable(&venv)?; - for diagnostic in site_packages.diagnostics()? { - writeln!( - printer, - "{}{} {}", - "warning".yellow().bold(), - ":".bold(), - diagnostic.message().bold() - )?; - } - } - - Ok(ExitStatus::Success) -} - -#[derive(Debug)] -struct ResolvedEditables { - /// The set of resolved editables, including both those that were already installed and those - /// that were built. - editables: Vec, - /// The temporary directory in which the built editables were stored. 
- #[allow(dead_code)] - temp_dir: Option, -} - -/// Resolve the set of editables that need to be installed. -#[allow(clippy::too_many_arguments)] -async fn resolve_editables( - editables: Vec, - site_packages: &SitePackages<'_>, - reinstall: &Reinstall, - venv: &Virtualenv, - tags: &Tags, - cache: &Cache, - client: &RegistryClient, - build_dispatch: &BuildDispatch<'_>, - mut printer: Printer, -) -> Result { - // Partition the editables into those that are already installed, and those that must be built. - let mut installed = Vec::with_capacity(editables.len()); - let mut uninstalled = Vec::with_capacity(editables.len()); - for editable in editables { - match reinstall { - Reinstall::None => { - if let Some(dist) = site_packages.get_editable(editable.raw()) { - installed.push(dist.clone()); - } else { - uninstalled.push(editable); - } - } - Reinstall::All => { - uninstalled.push(editable); - } - Reinstall::Packages(packages) => { - if let Some(dist) = site_packages.get_editable(editable.raw()) { - if packages.contains(dist.name()) { - uninstalled.push(editable); - } else { - installed.push(dist.clone()); - } - } else { - uninstalled.push(editable); - } - } - } - } - - // Build any editable installs. - let (built_editables, temp_dir) = if uninstalled.is_empty() { - (Vec::new(), None) - } else { - let start = std::time::Instant::now(); - - let temp_dir = tempfile::tempdir_in(venv.root())?; - - let downloader = Downloader::new(cache, tags, client, build_dispatch) - .with_reporter(DownloadReporter::from(printer).with_length(uninstalled.len() as u64)); - - let local_editables: Vec = uninstalled - .iter() - .map(|editable| { - let EditableRequirement { path, url } = editable; - Ok(LocalEditable { - path: path.clone(), - url: url.clone(), - }) - }) - .collect::>()?; - - let built_editables: Vec<_> = downloader - .build_editables(local_editables, temp_dir.path()) - .await - .context("Failed to build editables")? - .into_iter() - .collect(); - - let s = if built_editables.len() == 1 { "" } else { "s" }; - writeln!( - printer, - "{}", - format!( - "Built {} in {}", - format!("{} editable{}", built_editables.len(), s).bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - (built_editables, Some(temp_dir)) - }; - - Ok(ResolvedEditables { - editables: installed - .into_iter() - .map(ResolvedEditable::Installed) - .chain(built_editables.into_iter().map(ResolvedEditable::Built)) - .collect::>(), - temp_dir, - }) -} diff --git a/crates/puffin/src/commands/pip_uninstall.rs b/crates/puffin/src/commands/pip_uninstall.rs deleted file mode 100644 index c1856055fd24..000000000000 --- a/crates/puffin/src/commands/pip_uninstall.rs +++ /dev/null @@ -1,151 +0,0 @@ -use std::fmt::Write; - -use anyhow::Result; -use owo_colors::OwoColorize; -use tracing::debug; - -use distribution_types::{InstalledMetadata, Name}; -use platform_host::Platform; -use puffin_cache::Cache; -use puffin_interpreter::Virtualenv; - -use crate::commands::{elapsed, ExitStatus}; -use crate::printer::Printer; -use crate::requirements::{RequirementsSource, RequirementsSpecification}; - -/// Uninstall packages from the current environment. -pub(crate) async fn pip_uninstall( - sources: &[RequirementsSource], - cache: Cache, - mut printer: Printer, -) -> Result { - let start = std::time::Instant::now(); - - // Read all requirements from the provided sources. - let (requirements, editables) = RequirementsSpecification::requirements_and_editables(sources)?; - - // Detect the current Python interpreter. 
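// The `#[allow(dead_code)] temp_dir` field above exists only to keep the
// temporary build directory alive until the built editables are installed.
// A minimal sketch of that ownership trick with the `tempfile` crate
// (the wheel filename below is purely a placeholder):
use std::path::{Path, PathBuf};

struct BuiltArtifacts {
    wheels: Vec<PathBuf>,
    // Held for its `Drop` impl alone: dropping this removes the directory
    // and everything built into it.
    _temp_dir: Option<tempfile::TempDir>,
}

fn build_into_temp_dir(venv_root: &Path) -> std::io::Result<BuiltArtifacts> {
    let temp_dir = tempfile::tempdir_in(venv_root)?;
    // ... build wheels into `temp_dir.path()` here ...
    let wheels = vec![temp_dir.path().join("example-0.1.0-py3-none-any.whl")];
    Ok(BuiltArtifacts {
        wheels,
        _temp_dir: Some(temp_dir),
    })
}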
- let platform = Platform::current()?; - let venv = Virtualenv::from_env(platform, &cache)?; - debug!( - "Using Python interpreter: {}", - venv.python_executable().display() - ); - let _lock = venv.lock()?; - - // Index the current `site-packages` directory. - let site_packages = puffin_installer::SitePackages::from_executable(&venv)?; - - // Sort and deduplicate the packages, which are keyed by name. - let packages = { - let mut packages = requirements - .into_iter() - .map(|requirement| requirement.name) - .collect::>(); - packages.sort_unstable(); - packages.dedup(); - packages - }; - - // Sort and deduplicate the editable packages, which are keyed by URL rather than package name. - let editables = { - let mut editables = editables - .iter() - .map(requirements_txt::EditableRequirement::raw) - .collect::>(); - editables.sort_unstable(); - editables.dedup(); - editables - }; - - // Map to the local distributions. - let distributions = { - let mut distributions = Vec::with_capacity(packages.len() + editables.len()); - - // Identify all packages that are installed. - for package in &packages { - if let Some(distribution) = site_packages.get(package) { - distributions.push(distribution); - } else { - writeln!( - printer, - "{}{} Skipping {} as it is not installed.", - "warning".yellow().bold(), - ":".bold(), - package.as_ref().bold() - )?; - }; - } - - // Identify all editables that are installed. - for editable in &editables { - if let Some(distribution) = site_packages.get_editable(editable) { - distributions.push(distribution); - } else { - writeln!( - printer, - "{}{} Skipping {} as it is not installed.", - "warning".yellow().bold(), - ":".bold(), - editable.as_ref().bold() - )?; - }; - } - - // Deduplicate, since a package could be listed both by name and editable URL. - distributions.sort_unstable_by_key(|dist| dist.path()); - distributions.dedup_by_key(|dist| dist.path()); - distributions - }; - - if distributions.is_empty() { - writeln!( - printer, - "{}{} No packages to uninstall.", - "warning".yellow().bold(), - ":".bold(), - )?; - return Ok(ExitStatus::Success); - } - - // Uninstall each package. - for distribution in &distributions { - let summary = puffin_installer::uninstall(distribution).await?; - debug!( - "Uninstalled {} ({} file{}, {} director{})", - distribution.name(), - summary.file_count, - if summary.file_count == 1 { "" } else { "s" }, - summary.dir_count, - if summary.dir_count == 1 { "y" } else { "ies" }, - ); - } - - writeln!( - printer, - "{}", - format!( - "Uninstalled {} in {}", - format!( - "{} package{}", - distributions.len(), - if distributions.len() == 1 { "" } else { "s" } - ) - .bold(), - elapsed(start.elapsed()) - ) - .dimmed() - )?; - - for distribution in distributions { - writeln!( - printer, - " {} {}{}", - "-".red(), - distribution.name().as_ref().white().bold(), - distribution.installed_version().to_string().dimmed() - )?; - } - - Ok(ExitStatus::Success) -} diff --git a/crates/puffin/src/commands/remove.rs b/crates/puffin/src/commands/remove.rs deleted file mode 100644 index 3a227fe72e25..000000000000 --- a/crates/puffin/src/commands/remove.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::path::PathBuf; - -use anyhow::Result; -use miette::{Diagnostic, IntoDiagnostic}; -use thiserror::Error; -use tracing::info; - -use puffin_normalize::PackageName; -use puffin_workspace::WorkspaceError; - -use crate::commands::ExitStatus; -use crate::printer::Printer; - -/// Remove a dependency from the workspace. 
-#[allow(clippy::unnecessary_wraps)] -pub(crate) fn remove(name: &PackageName, _printer: Printer) -> Result { - match remove_impl(name) { - Ok(status) => Ok(status), - Err(err) => { - #[allow(clippy::print_stderr)] - { - eprint!("{err:?}"); - } - Ok(ExitStatus::Failure) - } - } -} - -#[derive(Error, Debug, Diagnostic)] -enum RemoveError { - #[error( - "Could not find a `pyproject.toml` file in the current directory or any of its parents" - )] - #[diagnostic(code(puffin::remove::workspace_not_found))] - WorkspaceNotFound, - - #[error("Failed to parse `pyproject.toml` at: `{0}`")] - #[diagnostic(code(puffin::remove::parse))] - ParseError(PathBuf, #[source] WorkspaceError), - - #[error("Failed to write `pyproject.toml` to: `{0}`")] - #[diagnostic(code(puffin::remove::write))] - WriteError(PathBuf, #[source] WorkspaceError), - - #[error("Failed to remove `{0}` from `pyproject.toml`")] - #[diagnostic(code(puffin::remove::parse))] - RemovalError(String, #[source] WorkspaceError), -} - -fn remove_impl(name: &PackageName) -> miette::Result { - // Locate the workspace. - let cwd = std::env::current_dir().into_diagnostic()?; - let Some(workspace_root) = puffin_workspace::find_pyproject_toml(cwd) else { - return Err(RemoveError::WorkspaceNotFound.into()); - }; - - info!("Found workspace at: {}", workspace_root.display()); - - // Parse the manifest. - let mut manifest = puffin_workspace::Workspace::try_from(workspace_root.as_path()) - .map_err(|err| RemoveError::ParseError(workspace_root.clone(), err))?; - - // Remove the dependency. - manifest - .remove_dependency(name) - .map_err(|err| RemoveError::RemovalError(name.to_string(), err))?; - - // Write the manifest back to disk. - manifest - .save(&workspace_root) - .map_err(|err| RemoveError::WriteError(workspace_root.clone(), err))?; - - Ok(ExitStatus::Success) -} diff --git a/crates/puffin/src/commands/reporters.rs b/crates/puffin/src/commands/reporters.rs deleted file mode 100644 index 69ad97fda0d5..000000000000 --- a/crates/puffin/src/commands/reporters.rs +++ /dev/null @@ -1,323 +0,0 @@ -use std::sync::{Arc, Mutex}; -use std::time::Duration; - -use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; -use owo_colors::OwoColorize; -use url::Url; - -use distribution_types::{ - CachedDist, Dist, DistributionMetadata, LocalEditable, Name, SourceDist, VersionOrUrl, -}; -use puffin_normalize::PackageName; - -use crate::printer::Printer; - -#[derive(Debug)] -pub(crate) struct FinderReporter { - progress: ProgressBar, -} - -impl From for FinderReporter { - fn from(printer: Printer) -> Self { - let progress = ProgressBar::with_draw_target(None, printer.target()); - progress.set_style( - ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(), - ); - progress.set_message("Resolving dependencies..."); - Self { progress } - } -} - -impl FinderReporter { - #[must_use] - pub(crate) fn with_length(self, length: u64) -> Self { - self.progress.set_length(length); - self - } -} - -impl puffin_resolver::FinderReporter for FinderReporter { - fn on_progress(&self, dist: &Dist) { - self.progress.set_message(format!("{dist}")); - self.progress.inc(1); - } - - fn on_complete(&self) { - self.progress.finish_and_clear(); - } -} - -#[derive(Debug)] -pub(crate) struct DownloadReporter { - printer: Printer, - multi_progress: MultiProgress, - progress: ProgressBar, - bars: Arc>>, -} - -impl From for DownloadReporter { - fn from(printer: Printer) -> Self { - let multi_progress = MultiProgress::with_draw_target(printer.target()); - - let progress = 
multi_progress.add(ProgressBar::with_draw_target(None, printer.target())); - progress.set_style( - ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(), - ); - progress.set_message("Fetching packages..."); - - Self { - printer, - multi_progress, - progress, - bars: Arc::new(Mutex::new(Vec::new())), - } - } -} - -impl DownloadReporter { - #[must_use] - pub(crate) fn with_length(self, length: u64) -> Self { - self.progress.set_length(length); - self - } -} - -impl DownloadReporter { - fn on_any_build_start(&self, color_string: &str) -> usize { - let progress = self.multi_progress.insert_before( - &self.progress, - ProgressBar::with_draw_target(None, self.printer.target()), - ); - - progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap()); - progress.set_message(format!("{} {}", "Building".bold().cyan(), color_string)); - - let mut bars = self.bars.lock().unwrap(); - bars.push(progress); - bars.len() - 1 - } - - fn on_any_build_complete(&self, color_string: &str, id: usize) { - let bars = self.bars.lock().unwrap(); - let progress = &bars[id]; - progress.finish_with_message(format!(" {} {}", "Built".bold().green(), color_string)); - } -} - -impl puffin_installer::DownloadReporter for DownloadReporter { - fn on_progress(&self, dist: &CachedDist) { - self.progress.set_message(format!("{dist}")); - self.progress.inc(1); - } - - fn on_complete(&self) { - self.progress.finish_and_clear(); - } - - fn on_build_start(&self, dist: &SourceDist) -> usize { - self.on_any_build_start(&dist.to_color_string()) - } - - fn on_build_complete(&self, dist: &SourceDist, index: usize) { - self.on_any_build_complete(&dist.to_color_string(), index); - } - - fn on_editable_build_start(&self, dist: &LocalEditable) -> usize { - self.on_any_build_start(&dist.to_color_string()) - } - - fn on_editable_build_complete(&self, dist: &LocalEditable, id: usize) { - self.on_any_build_complete(&dist.to_color_string(), id); - } - - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { - let progress = self.multi_progress.insert_before( - &self.progress, - ProgressBar::with_draw_target(None, self.printer.target()), - ); - - progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap()); - progress.set_message(format!( - "{} {} ({})", - "Updating".bold().cyan(), - url, - rev.dimmed() - )); - progress.finish(); - - let mut bars = self.bars.lock().unwrap(); - bars.push(progress); - bars.len() - 1 - } - - fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize) { - let bars = self.bars.lock().unwrap(); - let progress = &bars[index]; - progress.finish_with_message(format!( - " {} {} ({})", - "Updated".bold().green(), - url, - rev.dimmed() - )); - } -} - -#[derive(Debug)] -pub(crate) struct InstallReporter { - progress: ProgressBar, -} - -impl From for InstallReporter { - fn from(printer: Printer) -> Self { - let progress = ProgressBar::with_draw_target(None, printer.target()); - progress.set_style( - ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(), - ); - progress.set_message("Installing wheels..."); - Self { progress } - } -} - -impl InstallReporter { - #[must_use] - pub(crate) fn with_length(self, length: u64) -> Self { - self.progress.set_length(length); - self - } -} - -impl puffin_installer::InstallReporter for InstallReporter { - fn on_install_progress(&self, wheel: &CachedDist) { - self.progress.set_message(format!("{wheel}")); - self.progress.inc(1); - } - - fn on_install_complete(&self) { - self.progress.finish_and_clear(); - } -} 
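// Every reporter in this file follows the same `indicatif` pattern; this
// standalone sketch shows the core of it (a bar with a known length, one tick
// per item, cleared on completion) outside of puffin's reporter traits.
use indicatif::{ProgressBar, ProgressStyle};

struct SimpleReporter {
    progress: ProgressBar,
}

impl SimpleReporter {
    fn new(length: u64, message: &'static str) -> Self {
        let progress = ProgressBar::new(length);
        progress.set_style(
            ProgressStyle::with_template("{bar:20} [{pos}/{len}] {wide_msg:.dim}").unwrap(),
        );
        progress.set_message(message);
        Self { progress }
    }

    // Advance one step and show the item currently being processed.
    fn on_progress(&self, item: &str) {
        self.progress.set_message(item.to_string());
        self.progress.inc(1);
    }

    // Remove the bar once all items are done, e.g.
    // `let reporter = SimpleReporter::new(dists.len() as u64, "Fetching packages...");`
    fn on_complete(&self) {
        self.progress.finish_and_clear();
    }
}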
- -#[derive(Debug)] -pub(crate) struct ResolverReporter { - printer: Printer, - multi_progress: MultiProgress, - progress: ProgressBar, - bars: Arc>>, -} - -impl From for ResolverReporter { - fn from(printer: Printer) -> Self { - let multi_progress = MultiProgress::with_draw_target(printer.target()); - - let progress = multi_progress.add(ProgressBar::with_draw_target(None, printer.target())); - progress.enable_steady_tick(Duration::from_millis(200)); - progress.set_style( - ProgressStyle::with_template("{spinner:.white} {wide_msg:.dim}") - .unwrap() - .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]), - ); - progress.set_message("Resolving dependencies..."); - - Self { - printer, - multi_progress, - progress, - bars: Arc::new(Mutex::new(Vec::new())), - } - } -} - -impl puffin_resolver::ResolverReporter for ResolverReporter { - fn on_progress(&self, name: &PackageName, version_or_url: VersionOrUrl) { - match version_or_url { - VersionOrUrl::Version(version) => { - self.progress.set_message(format!("{name}=={version}")); - } - VersionOrUrl::Url(url) => { - self.progress.set_message(format!("{name} @ {url}")); - } - } - } - - fn on_complete(&self) { - self.progress.finish_and_clear(); - } - - fn on_build_start(&self, dist: &SourceDist) -> usize { - let progress = self.multi_progress.insert_before( - &self.progress, - ProgressBar::with_draw_target(None, self.printer.target()), - ); - - progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap()); - progress.set_message(format!( - "{} {}", - "Building".bold().cyan(), - dist.to_color_string(), - )); - - let mut bars = self.bars.lock().unwrap(); - bars.push(progress); - bars.len() - 1 - } - - fn on_build_complete(&self, dist: &SourceDist, index: usize) { - let bars = self.bars.lock().unwrap(); - let progress = &bars[index]; - progress.finish_with_message(format!( - " {} {}", - "Built".bold().green(), - dist.to_color_string(), - )); - } - - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { - let progress = self.multi_progress.insert_before( - &self.progress, - ProgressBar::with_draw_target(None, self.printer.target()), - ); - - progress.set_style(ProgressStyle::with_template("{wide_msg}").unwrap()); - progress.set_message(format!( - "{} {} ({})", - "Updating".bold().cyan(), - url, - rev.dimmed() - )); - progress.finish(); - - let mut bars = self.bars.lock().unwrap(); - bars.push(progress); - bars.len() - 1 - } - - fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize) { - let bars = self.bars.lock().unwrap(); - let progress = &bars[index]; - progress.finish_with_message(format!( - " {} {} ({})", - "Updated".bold().green(), - url, - rev.dimmed() - )); - } -} - -/// Like [`std::fmt::Display`], but with colors. 
-trait ColorDisplay { - fn to_color_string(&self) -> String; -} - -impl ColorDisplay for SourceDist { - fn to_color_string(&self) -> String { - let name = self.name(); - let version_or_url = self.version_or_url(); - format!("{}{}", name, version_or_url.to_string().dimmed()) - } -} - -impl ColorDisplay for LocalEditable { - fn to_color_string(&self) -> String { - format!("{}", self.to_string().dimmed()) - } -} diff --git a/crates/puffin/src/commands/venv.rs b/crates/puffin/src/commands/venv.rs deleted file mode 100644 index 7ba6e4139e51..000000000000 --- a/crates/puffin/src/commands/venv.rs +++ /dev/null @@ -1,194 +0,0 @@ -use std::fmt::Write; -use std::path::{Path, PathBuf}; -use std::str::FromStr; - -use anyhow::Result; -use fs_err as fs; -use miette::{Diagnostic, IntoDiagnostic}; -use owo_colors::OwoColorize; -use puffin_installer::NoBinary; -use thiserror::Error; - -use distribution_types::{DistributionMetadata, IndexLocations, Name}; -use pep508_rs::Requirement; -use platform_host::Platform; -use puffin_cache::Cache; -use puffin_client::{FlatIndex, FlatIndexClient, RegistryClientBuilder}; -use puffin_dispatch::BuildDispatch; -use puffin_interpreter::Interpreter; -use puffin_resolver::InMemoryIndex; -use puffin_traits::{BuildContext, InFlight, SetupPyStrategy}; - -use crate::commands::ExitStatus; -use crate::printer::Printer; - -/// Create a virtual environment. -#[allow(clippy::unnecessary_wraps)] -pub(crate) async fn venv( - path: &Path, - base_python: Option<&Path>, - index_locations: &IndexLocations, - seed: bool, - cache: &Cache, - printer: Printer, -) -> Result { - match venv_impl(path, base_python, index_locations, seed, cache, printer).await { - Ok(status) => Ok(status), - Err(err) => { - #[allow(clippy::print_stderr)] - { - eprint!("{err:?}"); - } - Ok(ExitStatus::Failure) - } - } -} - -#[derive(Error, Debug, Diagnostic)] -enum VenvError { - #[error("Unable to find a Python interpreter")] - #[diagnostic(code(puffin::venv::python_not_found))] - PythonNotFound, - - #[error("Unable to find a Python interpreter {0}")] - #[diagnostic(code(puffin::venv::python_not_found))] - UserPythonNotFound(PathBuf), - - #[error("Failed to extract Python interpreter info")] - #[diagnostic(code(puffin::venv::interpreter))] - InterpreterError(#[source] puffin_interpreter::Error), - - #[error("Failed to create virtual environment")] - #[diagnostic(code(puffin::venv::creation))] - CreationError(#[source] gourgeist::Error), - - #[error("Failed to install seed packages")] - #[diagnostic(code(puffin::venv::seed))] - SeedError(#[source] anyhow::Error), - - #[error("Failed to extract interpreter tags")] - #[diagnostic(code(puffin::venv::tags))] - TagsError(#[source] platform_host::PlatformError), - - #[error("Failed to resolve `--find-links` entry")] - #[diagnostic(code(puffin::venv::flat_index))] - FlatIndexError(#[source] puffin_client::FlatIndexError), -} - -/// Create a virtual environment. -async fn venv_impl( - path: &Path, - base_python: Option<&Path>, - index_locations: &IndexLocations, - seed: bool, - cache: &Cache, - mut printer: Printer, -) -> miette::Result { - // Locate the Python interpreter. - let base_python = if let Some(base_python) = base_python { - fs::canonicalize( - which::which_global(base_python) - .map_err(|_| VenvError::UserPythonNotFound(base_python.to_path_buf()))?, - ) - .into_diagnostic()? - } else { - fs::canonicalize( - which::which_global("python3") - .or_else(|_| which::which_global("python")) - .map_err(|_| VenvError::PythonNotFound)?, - ) - .into_diagnostic()? 
- }; - - let platform = Platform::current().into_diagnostic()?; - let interpreter = - Interpreter::query(&base_python, platform, cache).map_err(VenvError::InterpreterError)?; - - writeln!( - printer, - "Using Python {} at {}", - interpreter.version(), - interpreter.sys_executable().display().cyan() - ) - .into_diagnostic()?; - - writeln!( - printer, - "Creating virtual environment at: {}", - path.display().cyan() - ) - .into_diagnostic()?; - - // Create the virtual environment. - let venv = gourgeist::create_venv(path, interpreter).map_err(VenvError::CreationError)?; - - // Install seed packages. - if seed { - // Extract the interpreter. - let interpreter = venv.interpreter(); - - // Instantiate a client. - let client = RegistryClientBuilder::new(cache.clone()).build(); - - // Resolve the flat indexes from `--find-links`. - let flat_index = { - let tags = interpreter.tags().map_err(VenvError::TagsError)?; - let client = FlatIndexClient::new(&client, cache); - let entries = client - .fetch(index_locations.flat_indexes()) - .await - .map_err(VenvError::FlatIndexError)?; - FlatIndex::from_entries(entries, tags) - }; - - // Create a shared in-memory index. - let index = InMemoryIndex::default(); - - // Track in-flight downloads, builds, etc., across resolutions. - let in_flight = InFlight::default(); - - // Prep the build context. - let build_dispatch = BuildDispatch::new( - &client, - cache, - interpreter, - index_locations, - &flat_index, - &index, - &in_flight, - venv.python_executable(), - SetupPyStrategy::default(), - true, - &NoBinary::None, - ); - - // Resolve the seed packages. - let resolution = build_dispatch - .resolve(&[ - Requirement::from_str("wheel").unwrap(), - Requirement::from_str("pip").unwrap(), - Requirement::from_str("setuptools").unwrap(), - ]) - .await - .map_err(VenvError::SeedError)?; - - // Install into the environment. - build_dispatch - .install(&resolution, &venv) - .await - .map_err(VenvError::SeedError)?; - - for distribution in resolution.distributions() { - writeln!( - printer, - " {} {}{}", - "+".green(), - distribution.name().as_ref().white().bold(), - distribution.version_or_url().dimmed() - ) - .into_diagnostic()?; - } - } - - Ok(ExitStatus::Success) -} diff --git a/crates/puffin/src/logging.rs b/crates/puffin/src/logging.rs deleted file mode 100644 index cab4680c6d52..000000000000 --- a/crates/puffin/src/logging.rs +++ /dev/null @@ -1,100 +0,0 @@ -use tracing::level_filters::LevelFilter; -#[cfg(feature = "tracing-durations-export")] -use tracing_durations_export::{ - plot::PlotConfig, DurationsLayer, DurationsLayerBuilder, DurationsLayerDropGuard, -}; -use tracing_subscriber::layer::SubscriberExt; -use tracing_subscriber::util::SubscriberInitExt; -use tracing_subscriber::{EnvFilter, Layer, Registry}; -use tracing_tree::time::Uptime; -use tracing_tree::HierarchicalLayer; - -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] -pub(crate) enum Level { - /// Suppress all tracing output by default (overrideable by `RUST_LOG`). - #[default] - Default, - /// Show debug messages by default (overrideable by `RUST_LOG`). - Verbose, -} - -/// Configure `tracing` based on the given [`Level`], taking into account the `RUST_LOG` environment -/// variable. -/// -/// The [`Level`] is used to dictate the default filters (which can be overridden by the `RUST_LOG` -/// environment variable) along with the formatting of the output. For example, [`Level::Verbose`] -/// includes targets and timestamps, along with all `puffin=debug` messages by default. 
-pub(crate) fn setup_logging(level: Level, duration: impl Layer + Send + Sync) { - match level { - Level::Default => { - // Show nothing, but allow `RUST_LOG` to override. - let filter = EnvFilter::builder() - .with_default_directive(LevelFilter::OFF.into()) - .from_env_lossy(); - - // Regardless of the tracing level, show messages without any adornment. - tracing_subscriber::registry() - .with(duration) - .with(filter) - .with( - tracing_subscriber::fmt::layer() - .without_time() - .with_target(false) - .with_writer(std::io::sink), - ) - .init(); - } - Level::Verbose => { - // Show `DEBUG` messages from the CLI crate, but allow `RUST_LOG` to override. - let filter = EnvFilter::try_from_default_env() - .or_else(|_| EnvFilter::try_new("puffin=debug")) - .unwrap(); - - // Regardless of the tracing level, include the uptime and target for each message. - tracing_subscriber::registry() - .with(duration) - .with(filter) - .with( - HierarchicalLayer::default() - .with_targets(true) - .with_timer(Uptime::default()) - .with_writer(std::io::stderr), - ) - .init(); - } - } -} - -/// Setup the `TRACING_DURATIONS_FILE` environment variable to enable tracing durations. -#[cfg(feature = "tracing-durations-export")] -pub(crate) fn setup_duration() -> ( - Option>, - Option, -) { - if let Ok(location) = std::env::var("TRACING_DURATIONS_FILE") { - let location = std::path::PathBuf::from(location); - if let Some(parent) = location.parent() { - fs_err::create_dir_all(parent) - .expect("Failed to create parent of TRACING_DURATIONS_FILE"); - } - let plot_config = PlotConfig { - multi_lane: true, - min_length: Some(std::time::Duration::from_secs_f32(0.002)), - remove: Some( - ["get_cached_with_callback".to_string()] - .into_iter() - .collect(), - ), - ..PlotConfig::default() - }; - let (layer, guard) = DurationsLayerBuilder::default() - .durations_file(&location) - .plot_file(location.with_extension("svg")) - .plot_config(plot_config) - .build() - .expect("Couldn't create TRACING_DURATIONS_FILE files"); - (Some(layer), Some(guard)) - } else { - (None, None) - } -} diff --git a/crates/puffin/src/main.rs b/crates/puffin/src/main.rs index 06b87ea4434b..f79c691f0853 100644 --- a/crates/puffin/src/main.rs +++ b/crates/puffin/src/main.rs @@ -1,799 +1,2 @@ -use std::env; -use std::path::PathBuf; -use std::process::ExitCode; -use std::str::FromStr; - -use anstream::eprintln; -use anyhow::Result; -use chrono::{DateTime, Days, NaiveDate, NaiveTime, Utc}; -use clap::{Args, Parser, Subcommand}; -use owo_colors::OwoColorize; - -use distribution_types::{FlatIndexLocation, IndexLocations, IndexUrl}; -use puffin_cache::{Cache, CacheArgs}; -use puffin_installer::{NoBinary, Reinstall}; -use puffin_interpreter::PythonVersion; -use puffin_normalize::{ExtraName, PackageName}; -use puffin_resolver::{PreReleaseMode, ResolutionMode}; -use puffin_traits::SetupPyStrategy; -use requirements::ExtrasSpecification; - -use crate::commands::{extra_name_with_clap_error, ExitStatus, Upgrade}; -use crate::requirements::RequirementsSource; - -#[cfg(target_os = "windows")] -#[global_allocator] -static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; - -#[cfg(all( - not(target_os = "windows"), - not(target_os = "openbsd"), - any( - target_arch = "x86_64", - target_arch = "aarch64", - target_arch = "powerpc64" - ) -))] -#[global_allocator] -static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; - -mod commands; -mod logging; -mod printer; -mod requirements; - -#[derive(Parser)] -#[command(author, version, about)] 
-#[command(propagate_version = true)] -struct Cli { - #[command(subcommand)] - command: Commands, - - /// Do not print any output. - #[arg(global = true, long, short, conflicts_with = "verbose")] - quiet: bool, - - /// Use verbose output. - #[arg(global = true, long, short, conflicts_with = "quiet")] - verbose: bool, - - #[command(flatten)] - cache_args: CacheArgs, -} - -#[derive(Subcommand)] -#[allow(clippy::large_enum_variant)] -enum Commands { - /// Resolve and install Python packages. - Pip(PipArgs), - /// Create a virtual environment. - #[clap(alias = "virtualenv", alias = "v")] - Venv(VenvArgs), - /// Clear the cache. - Clean(CleanArgs), - /// Add a dependency to the workspace. - #[clap(hide = true)] - Add(AddArgs), - /// Remove a dependency from the workspace. - #[clap(hide = true)] - Remove(RemoveArgs), -} - -#[derive(Args)] -struct PipArgs { - #[clap(subcommand)] - command: PipCommand, -} - -#[derive(Subcommand)] -enum PipCommand { - /// Compile a `requirements.in` file to a `requirements.txt` file. - Compile(PipCompileArgs), - /// Sync dependencies from a `requirements.txt` file. - Sync(PipSyncArgs), - /// Install packages into the current environment. - Install(PipInstallArgs), - /// Uninstall packages from the current environment. - Uninstall(PipUninstallArgs), - /// Enumerate the installed packages in the current environment. - Freeze(PipFreezeArgs), -} - -/// Clap parser for the union of date and datetime -fn date_or_datetime(input: &str) -> Result, String> { - let date_err = match NaiveDate::from_str(input) { - Ok(date) => { - // Midnight that day is 00:00:00 the next day - return Ok((date + Days::new(1)).and_time(NaiveTime::MIN).and_utc()); - } - Err(err) => err, - }; - let datetime_err = match DateTime::parse_from_rfc3339(input) { - Ok(datetime) => return Ok(datetime.with_timezone(&Utc)), - Err(err) => err, - }; - Err(format!( - "Neither a valid date ({date_err}) not a valid datetime ({datetime_err})" - )) -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct PipCompileArgs { - /// Include all packages listed in the given `requirements.in` files. - #[clap(required(true))] - src_file: Vec, - - /// Constrain versions using the given requirements files. - /// - /// Constraints files are `requirements.txt`-like files that only control the _version_ of a - /// requirement that's installed. However, including a package in a constraints file will _not_ - /// trigger the installation of that package. - /// - /// This is equivalent to pip's `--constraint` option. - #[clap(short, long)] - constraint: Vec, - - /// Override versions using the given requirements files. - /// - /// Overrides files are `requirements.txt`-like files that force a specific version of a - /// requirement to be installed, regardless of the requirements declared by any constituent - /// package, and regardless of whether this would be considered an invalid resolution. - /// - /// While constraints are _additive_, in that they're combined with the requirements of the - /// constituent packages, overrides are _absolute_, in that they completely replace the - /// requirements of the constituent packages. - #[clap(long)] - r#override: Vec, - - /// Include optional dependencies in the given extra group name; may be provided more than once. - #[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)] - extra: Vec, - - /// Include all optional dependencies. 
- #[clap(long, conflicts_with = "extra")] - all_extras: bool, - - #[clap(long, value_enum, default_value_t = ResolutionMode::default())] - resolution: ResolutionMode, - - #[clap(long, value_enum, default_value_t = PreReleaseMode::default())] - prerelease: PreReleaseMode, - - /// Write the compiled requirements to the given `requirements.txt` file. - #[clap(short, long)] - output_file: Option, - - /// The URL of the Python Package Index. - #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")] - index_url: IndexUrl, - - /// Extra URLs of package indexes to use, in addition to `--index-url`. - #[clap(long)] - extra_index_url: Vec, - - /// Locations to search for candidate distributions, beyond those found in the indexes. - /// - /// If a path, the target must be a directory that contains package as wheel files (`.whl`) or - /// source distributions (`.tar.gz` or `.zip`) at the top level. - /// - /// If a URL, the page must contain a flat list of links to package files. - #[clap(long)] - find_links: Vec, - - /// Ignore the package index, instead relying on local archives and caches. - #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")] - no_index: bool, - - /// Allow package upgrades, ignoring pinned versions in the existing output file. - #[clap(long)] - upgrade: bool, - - /// Allow upgrades for a specific package, ignoring pinned versions in the existing output - /// file. - #[clap(long)] - upgrade_package: Vec, - - /// Include distribution hashes in the output file. - #[clap(long)] - generate_hashes: bool, - - /// Use legacy `setuptools` behavior when building source distributions without a - /// `pyproject.toml`. - #[clap(long)] - legacy_setup_py: bool, - - /// Don't build source distributions. - /// - /// When enabled, resolving will not run arbitrary code. The cached wheels of already-built - /// source distributions will be reused, but operations that require building distributions will - /// exit with an error. - #[clap(long)] - no_build: bool, - - /// The minimum Python version that should be supported by the compiled requirements (e.g., - /// `3.7` or `3.7.9`). - /// - /// If a patch version is omitted, the most recent known patch version for that minor version - /// is assumed. For example, `3.7` is mapped to `3.7.17`. - #[arg(long, short)] - python_version: Option, - - /// Try to resolve at a past time. - /// - /// This works by filtering out files with a more recent upload time, so if the index you use - /// does not provide upload times, the results might be inaccurate. pypi provides upload times - /// for all files. - /// - /// Timestamps are given either as RFC 3339 timestamps such as `2006-12-02T02:07:43Z` or as - /// UTC dates in the same format such as `2006-12-02`. Dates are interpreted as including this - /// day, i.e. until midnight UTC that day. - #[arg(long, value_parser = date_or_datetime)] - exclude_newer: Option>, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct PipSyncArgs { - /// Include all packages listed in the given `requirements.txt` files. - #[clap(required(true))] - src_file: Vec, - - /// Reinstall all packages, overwriting any entries in the cache and replacing any existing - /// packages in the environment. - #[clap(long)] - reinstall: bool, - - /// Reinstall a specific package, overwriting any entries in the cache and replacing any - /// existing versions in the environment. 
- #[clap(long)] - reinstall_package: Vec, - - /// The method to use when installing packages from the global cache. - #[clap(long, value_enum, default_value_t = install_wheel_rs::linker::LinkMode::default())] - link_mode: install_wheel_rs::linker::LinkMode, - - /// The URL of the Python Package Index. - #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")] - index_url: IndexUrl, - - /// Extra URLs of package indexes to use, in addition to `--index-url`. - #[clap(long)] - extra_index_url: Vec, - - /// Locations to search for candidate distributions, beyond those found in the indexes. - /// - /// If a path, the target must be a directory that contains package as wheel files (`.whl`) or - /// source distributions (`.tar.gz` or `.zip`) at the top level. - /// - /// If a URL, the page must contain a flat list of links to package files. - #[clap(long)] - find_links: Vec, - - /// Ignore the registry index (e.g., PyPI), instead relying on local caches and `--find-links` - /// directories and URLs. - #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")] - no_index: bool, - - /// Use legacy `setuptools` behavior when building source distributions without a - /// `pyproject.toml`. - #[clap(long)] - legacy_setup_py: bool, - - /// Don't build source distributions. - /// - /// When enabled, resolving will not run arbitrary code. The cached wheels of already-built - /// source distributions will be reused, but operations that require building distributions will - /// exit with an error. - #[clap(long)] - no_build: bool, - - /// Don't install pre-built wheels. - /// - /// When enabled, all installed packages will be installed from a source distribution. The resolver - /// will still use pre-built wheels for metadata. - #[clap(long)] - no_binary: bool, - - /// Don't install pre-built wheels for a specific package. - /// - /// When enabled, the specified packages will be installed from a source distribution. The resolver - /// will still use pre-built wheels for metadata. - #[clap(long)] - no_binary_package: Vec, - - /// Validate the virtual environment after completing the installation, to detect packages with - /// missing dependencies or other issues. - #[clap(long)] - strict: bool, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -struct PipInstallArgs { - /// Install all listed packages. - #[clap(group = "sources")] - package: Vec, - - /// Install all packages listed in the given requirements files. - #[clap(short, long, group = "sources")] - requirement: Vec, - - /// Install the editable package based on the provided local file path. - #[clap(short, long, group = "sources")] - editable: Vec, - - /// Constrain versions using the given requirements files. - /// - /// Constraints files are `requirements.txt`-like files that only control the _version_ of a - /// requirement that's installed. However, including a package in a constraints file will _not_ - /// trigger the installation of that package. - /// - /// This is equivalent to pip's `--constraint` option. - #[clap(short, long)] - constraint: Vec, - - /// Override versions using the given requirements files. - /// - /// Overrides files are `requirements.txt`-like files that force a specific version of a - /// requirement to be installed, regardless of the requirements declared by any constituent - /// package, and regardless of whether this would be considered an invalid resolution. 
- /// - /// While constraints are _additive_, in that they're combined with the requirements of the - /// constituent packages, overrides are _absolute_, in that they completely replace the - /// requirements of the constituent packages. - #[clap(long)] - r#override: Vec, - - /// Include optional dependencies in the given extra group name; may be provided more than once. - #[clap(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)] - extra: Vec, - - /// Include all optional dependencies. - #[clap(long, conflicts_with = "extra")] - all_extras: bool, - - /// Reinstall all packages, overwriting any entries in the cache and replacing any existing - /// packages in the environment. - #[clap(long)] - reinstall: bool, - - /// Reinstall a specific package, overwriting any entries in the cache and replacing any - /// existing versions in the environment. - #[clap(long)] - reinstall_package: Vec, - - /// The method to use when installing packages from the global cache. - #[clap(long, value_enum, default_value_t = install_wheel_rs::linker::LinkMode::default())] - link_mode: install_wheel_rs::linker::LinkMode, - - #[clap(long, value_enum, default_value_t = ResolutionMode::default())] - resolution: ResolutionMode, - - #[clap(long, value_enum, default_value_t = PreReleaseMode::default())] - prerelease: PreReleaseMode, - - /// Write the compiled requirements to the given `requirements.txt` file. - #[clap(short, long)] - output_file: Option, - - /// The URL of the Python Package Index. - #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")] - index_url: IndexUrl, - - /// Extra URLs of package indexes to use, in addition to `--index-url`. - #[clap(long)] - extra_index_url: Vec, - - /// Locations to search for candidate distributions, beyond those found in the indexes. - /// - /// If a path, the target must be a directory that contains package as wheel files (`.whl`) or - /// source distributions (`.tar.gz` or `.zip`) at the top level. - /// - /// If a URL, the page must contain a flat list of links to package files. - #[clap(long)] - find_links: Vec, - - /// Ignore the package index, instead relying on local archives and caches. - #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")] - no_index: bool, - - /// Use legacy `setuptools` behavior when building source distributions without a - /// `pyproject.toml`. - #[clap(long)] - legacy_setup_py: bool, - - /// Don't build source distributions. - /// - /// When enabled, resolving will not run arbitrary code. The cached wheels of already-built - /// source distributions will be reused, but operations that require building distributions will - /// exit with an error. - #[clap(long)] - no_build: bool, - - /// Don't install pre-built wheels. - /// - /// When enabled, all installed packages will be installed from a source distribution. The resolver - /// will still use pre-built wheels for metadata. - #[clap(long)] - no_binary: bool, - - /// Don't install pre-built wheels for a specific package. - /// - /// When enabled, the specified packages will be installed from a source distribution. The resolver - /// will still use pre-built wheels for metadata. - #[clap(long)] - no_binary_package: Vec, - - /// Validate the virtual environment after completing the installation, to detect packages with - /// missing dependencies or other issues. - #[clap(long)] - strict: bool, - - /// Try to resolve at a past time. 
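// Editorial note on the `--constraint` / `--override` semantics described above, as a small
// worked example (package names and versions are illustrative only):
//
//   requirements.in:   flask            (the resolver picks flask and its dependencies)
//   constraints.txt:   werkzeug<3       (narrows the allowed werkzeug versions, but would
//                                        never cause werkzeug to be installed on its own)
//   overrides.txt:     werkzeug==2.3.7  (forces exactly this version, even if flask or any
//                                        other package declares an incompatible requirement)
//
// Constraints are combined with the packages' own requirements; overrides replace them.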
- /// - /// This works by filtering out files with a more recent upload time, so if the index you use - /// does not provide upload times, the results might be inaccurate. pypi provides upload times - /// for all files. - /// - /// Timestamps are given either as RFC 3339 timestamps such as `2006-12-02T02:07:43Z` or as - /// UTC dates in the same format such as `2006-12-02`. Dates are interpreted as including this - /// day, i.e. until midnight UTC that day. - #[arg(long, value_parser = date_or_datetime)] - exclude_newer: Option>, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -struct PipUninstallArgs { - /// Uninstall all listed packages. - #[clap(group = "sources")] - package: Vec, - - /// Uninstall all packages listed in the given requirements files. - #[clap(short, long, group = "sources")] - requirement: Vec, - - /// Uninstall the editable package based on the provided local file path. - #[clap(short, long, group = "sources")] - editable: Vec, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct PipFreezeArgs { - /// Validate the virtual environment, to detect packages with missing dependencies or other - /// issues. - #[clap(long)] - strict: bool, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct CleanArgs { - /// The packages to remove from the cache. - package: Vec, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct VenvArgs { - /// The Python interpreter to use for the virtual environment. - // Short `-p` to match `virtualenv` - // TODO(konstin): Support e.g. `-p 3.10` - #[clap(short, long)] - python: Option, - - /// Install seed packages (`pip`, `setuptools`, and `wheel`) into the virtual environment. - #[clap(long)] - seed: bool, - - /// The path to the virtual environment to create. - #[clap(default_value = ".venv")] - name: PathBuf, - - /// The URL of the Python Package Index. - #[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")] - index_url: IndexUrl, - - /// Extra URLs of package indexes to use, in addition to `--index-url`. - #[clap(long)] - extra_index_url: Vec, - - /// Ignore the package index, instead relying on local archives and caches. - #[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")] - no_index: bool, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct AddArgs { - /// The name of the package to add (e.g., `Django==4.2.6`). - name: String, -} - -#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] -struct RemoveArgs { - /// The name of the package to remove (e.g., `Django`). - name: PackageName, -} - -async fn inner() -> Result { - let cli = Cli::parse(); - - // Configure the `tracing` crate, which controls internal logging. - #[cfg(feature = "tracing-durations-export")] - let (duration_layer, _duration_guard) = logging::setup_duration(); - #[cfg(not(feature = "tracing-durations-export"))] - let duration_layer = None::; - logging::setup_logging( - if cli.verbose { - logging::Level::Verbose - } else { - logging::Level::Default - }, - duration_layer, - ); - - // Configure the `Printer`, which controls user-facing output in the CLI. - let printer = if cli.quiet { - printer::Printer::Quiet - } else if cli.verbose { - printer::Printer::Verbose - } else { - printer::Printer::Default - }; - - // Configure the `warn!` macros, which control user-facing warnings in the CLI. 
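// Editorial note: `PipInstallArgs` and `PipUninstallArgs` above use a clap argument group to
// require at least one of the positional packages, `-r`, or `-e`, while still allowing them
// to be combined. A self-contained sketch of that pattern (the `Example*` names are
// illustrative, not Puffin's):
use clap::{ArgGroup, Args, Parser};

#[derive(Parser)]
struct ExampleCli {
    #[command(flatten)]
    sources: ExampleSourceArgs,
}

#[derive(Args)]
#[command(group = ArgGroup::new("sources").required(true).multiple(true))]
struct ExampleSourceArgs {
    /// Packages given directly on the command line.
    #[clap(group = "sources")]
    package: Vec<String>,

    /// `requirements.txt`-style files.
    #[clap(short, long, group = "sources")]
    requirement: Vec<std::path::PathBuf>,

    /// Editable installs given as local paths.
    #[clap(short, long, group = "sources")]
    editable: Vec<String>,
}

fn example() {
    // Parsing fails unless at least one of `PACKAGE`, `-r`, or `-e` is provided.
    let _cli = ExampleCli::parse_from(["example", "-r", "requirements.txt"]);
}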
- if !cli.quiet { - puffin_warnings::enable(); - } - - miette::set_hook(Box::new(|_| { - Box::new( - miette::MietteHandlerOpts::new() - .break_words(false) - .word_separator(textwrap::WordSeparator::AsciiSpace) - .word_splitter(textwrap::WordSplitter::NoHyphenation) - .wrap_lines(env::var("PUFFIN_NO_WRAP").map(|_| false).unwrap_or(true)) - .build(), - ) - }))?; - - let cache = Cache::try_from(cli.cache_args)?; - - match cli.command { - Commands::Pip(PipArgs { - command: PipCommand::Compile(args), - }) => { - let requirements = args - .src_file - .into_iter() - .map(RequirementsSource::from) - .collect::>(); - let constraints = args - .constraint - .into_iter() - .map(RequirementsSource::from) - .collect::>(); - let overrides = args - .r#override - .into_iter() - .map(RequirementsSource::from) - .collect::>(); - let index_urls = IndexLocations::from_args( - args.index_url, - args.extra_index_url, - args.find_links, - args.no_index, - ); - let extras = if args.all_extras { - ExtrasSpecification::All - } else if args.extra.is_empty() { - ExtrasSpecification::None - } else { - ExtrasSpecification::Some(&args.extra) - }; - let upgrade = Upgrade::from_args(args.upgrade, args.upgrade_package); - commands::pip_compile( - &requirements, - &constraints, - &overrides, - extras, - args.output_file.as_deref(), - args.resolution, - args.prerelease, - upgrade, - args.generate_hashes, - index_urls, - if args.legacy_setup_py { - SetupPyStrategy::Setuptools - } else { - SetupPyStrategy::Pep517 - }, - args.no_build, - args.python_version, - args.exclude_newer, - cache, - printer, - ) - .await - } - Commands::Pip(PipArgs { - command: PipCommand::Sync(args), - }) => { - let index_urls = IndexLocations::from_args( - args.index_url, - args.extra_index_url, - args.find_links, - args.no_index, - ); - let sources = args - .src_file - .into_iter() - .map(RequirementsSource::from) - .collect::>(); - let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package); - let no_binary = NoBinary::from_args(args.no_binary, args.no_binary_package); - commands::pip_sync( - &sources, - &reinstall, - args.link_mode, - index_urls, - if args.legacy_setup_py { - SetupPyStrategy::Setuptools - } else { - SetupPyStrategy::Pep517 - }, - args.no_build, - &no_binary, - args.strict, - cache, - printer, - ) - .await - } - Commands::Pip(PipArgs { - command: PipCommand::Install(args), - }) => { - let requirements = args - .package - .into_iter() - .map(RequirementsSource::Package) - .chain(args.editable.into_iter().map(RequirementsSource::Editable)) - .chain(args.requirement.into_iter().map(RequirementsSource::from)) - .collect::>(); - let constraints = args - .constraint - .into_iter() - .map(RequirementsSource::from) - .collect::>(); - let overrides = args - .r#override - .into_iter() - .map(RequirementsSource::from) - .collect::>(); - let index_urls = IndexLocations::from_args( - args.index_url, - args.extra_index_url, - args.find_links, - args.no_index, - ); - let extras = if args.all_extras { - ExtrasSpecification::All - } else if args.extra.is_empty() { - ExtrasSpecification::None - } else { - ExtrasSpecification::Some(&args.extra) - }; - let reinstall = Reinstall::from_args(args.reinstall, args.reinstall_package); - let no_binary = NoBinary::from_args(args.no_binary, args.no_binary_package); - commands::pip_install( - &requirements, - &constraints, - &overrides, - &extras, - args.resolution, - args.prerelease, - index_urls, - &reinstall, - args.link_mode, - if args.legacy_setup_py { - SetupPyStrategy::Setuptools - } else 
{ - SetupPyStrategy::Pep517 - }, - args.no_build, - &no_binary, - args.strict, - args.exclude_newer, - cache, - printer, - ) - .await - } - Commands::Pip(PipArgs { - command: PipCommand::Uninstall(args), - }) => { - let sources = args - .package - .into_iter() - .map(RequirementsSource::Package) - .chain(args.editable.into_iter().map(RequirementsSource::Editable)) - .chain(args.requirement.into_iter().map(RequirementsSource::from)) - .collect::>(); - commands::pip_uninstall(&sources, cache, printer).await - } - Commands::Pip(PipArgs { - command: PipCommand::Freeze(args), - }) => commands::freeze(&cache, args.strict, printer), - Commands::Clean(args) => commands::clean(&cache, &args.package, printer), - Commands::Venv(args) => { - let index_locations = IndexLocations::from_args( - args.index_url, - args.extra_index_url, - // No find links for the venv subcommand, to keep things simple - Vec::new(), - args.no_index, - ); - commands::venv( - &args.name, - args.python.as_deref(), - &index_locations, - args.seed, - &cache, - printer, - ) - .await - } - Commands::Add(args) => commands::add(&args.name, printer), - Commands::Remove(args) => commands::remove(&args.name, printer), - } -} - -fn main() -> ExitCode { - let result = if let Ok(stack_size) = env::var("PUFFIN_STACK_SIZE") { - // Artificially limit the stack size to test for stack overflows. Windows has a default stack size of 1MB, - // which is lower than the linux and mac default. - // https://learn.microsoft.com/en-us/cpp/build/reference/stack-stack-allocations?view=msvc-170 - let stack_size = stack_size.parse().expect("Invalid stack size"); - let tokio_main = move || { - tokio::runtime::Builder::new_multi_thread() - .enable_all() - .thread_stack_size(stack_size) - .build() - .expect("Failed building the Runtime") - .block_on(inner()) - }; - std::thread::Builder::new() - .stack_size(stack_size) - .spawn(tokio_main) - .expect("Tokio executor failed, was there a panic?") - .join() - .expect("Tokio executor failed, was there a panic?") - } else { - tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build() - .expect("Failed building the Runtime") - .block_on(inner()) - }; - - match result { - Ok(code) => code.into(), - Err(err) => { - #[allow(clippy::print_stderr)] - { - let mut causes = err.chain(); - eprintln!("{}: {}", "error".red().bold(), causes.next().unwrap()); - for err in causes { - eprintln!(" {}: {}", "Caused by".red().bold(), err); - } - } - ExitStatus::Error.into() - } - } +fn main() { } diff --git a/crates/puffin/src/printer.rs b/crates/puffin/src/printer.rs deleted file mode 100644 index 358287241b34..000000000000 --- a/crates/puffin/src/printer.rs +++ /dev/null @@ -1,41 +0,0 @@ -use anstream::eprint; -use indicatif::ProgressDrawTarget; - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) enum Printer { - /// A printer that prints to standard streams (e.g., stdout). - Default, - /// A printer that suppresses all output. - Quiet, - /// A printer that prints all output, including debug messages. - Verbose, -} - -impl Printer { - /// Return the [`ProgressDrawTarget`] for this printer. - pub(crate) fn target(self) -> ProgressDrawTarget { - match self { - Self::Default => ProgressDrawTarget::stderr(), - Self::Quiet => ProgressDrawTarget::hidden(), - // Confusingly, hide the progress bar when in verbose mode. - // Otherwise, it gets interleaved with debug messages. 
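// Editorial note: in `main` above, `PUFFIN_STACK_SIZE` runs the async entry point on a
// dedicated thread so that both that thread and Tokio's workers get the same (artificially
// small) stack, which makes stack-overflow testing reproducible across platforms. A
// self-contained sketch of the same pattern, assuming the `anyhow` and `tokio` crates already
// used elsewhere in this crate (illustrative, not the exact Puffin code):
fn run_with_stack_size(stack_size: usize) -> anyhow::Result<()> {
    let handle = std::thread::Builder::new()
        .stack_size(stack_size)
        .spawn(move || {
            tokio::runtime::Builder::new_multi_thread()
                .enable_all()
                .thread_stack_size(stack_size)
                .build()
                .expect("failed to build the Tokio runtime")
                .block_on(async { anyhow::Ok(()) })
        })?;
    handle.join().expect("the runner thread panicked")
}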
- Self::Verbose => ProgressDrawTarget::hidden(),
- }
- }
-}
-
-impl std::fmt::Write for Printer {
- fn write_str(&mut self, s: &str) -> std::fmt::Result {
- match self {
- Self::Default | Self::Verbose => {
- #[allow(clippy::print_stderr, clippy::ignored_unit_patterns)]
- {
- eprint!("{s}");
- }
- }
- Self::Quiet => {}
- }
-
- Ok(())
- }
-}
diff --git a/crates/puffin/src/requirements.rs b/crates/puffin/src/requirements.rs
deleted file mode 100644
index 59482ddc95d4..000000000000
--- a/crates/puffin/src/requirements.rs
+++ /dev/null
@@ -1,210 +0,0 @@
-//! A standard interface for working with heterogeneous sources of requirements.
-
-use std::path::PathBuf;
-use std::str::FromStr;
-
-use anyhow::{Context, Result};
-use fs_err as fs;
-use rustc_hash::FxHashSet;
-
-use pep508_rs::Requirement;
-use puffin_normalize::{ExtraName, PackageName};
-use requirements_txt::{EditableRequirement, RequirementsTxt};
-
-#[derive(Debug)]
-pub(crate) enum RequirementsSource {
- /// A package was provided on the command line (e.g., `pip install flask`).
- Package(String),
- /// An editable path was provided on the command line (e.g., `pip install -e ../flask`).
- Editable(String),
- /// Dependencies were provided via a `requirements.txt` file (e.g., `pip install -r requirements.txt`).
- RequirementsTxt(PathBuf),
- /// Dependencies were provided via a `pyproject.toml` file (e.g., `pip-compile pyproject.toml`).
- PyprojectToml(PathBuf),
-}
-
-impl From<PathBuf> for RequirementsSource {
- fn from(path: PathBuf) -> Self {
- if path.ends_with("pyproject.toml") {
- Self::PyprojectToml(path)
- } else {
- Self::RequirementsTxt(path)
- }
- }
-}
-
-#[derive(Debug, Default, Clone)]
-pub(crate) enum ExtrasSpecification<'a> {
- #[default]
- None,
- All,
- Some(&'a [ExtraName]),
-}
-
-impl ExtrasSpecification<'_> {
- /// Returns true if a name is included in the extra specification.
- fn contains(&self, name: &ExtraName) -> bool {
- match self {
- ExtrasSpecification::All => true,
- ExtrasSpecification::None => false,
- ExtrasSpecification::Some(extras) => extras.contains(name),
- }
- }
-}
-
-#[derive(Debug, Default)]
-pub(crate) struct RequirementsSpecification {
- /// The name of the project specifying requirements.
- pub(crate) project: Option<PackageName>,
- /// The requirements for the project.
- pub(crate) requirements: Vec<Requirement>,
- /// The constraints for the project.
- pub(crate) constraints: Vec<Requirement>,
- /// The overrides for the project.
- pub(crate) overrides: Vec<Requirement>,
- /// Packages to install as editable installs.
- pub(crate) editables: Vec<EditableRequirement>,
- /// The extras used to collect requirements.
- pub(crate) extras: FxHashSet<ExtraName>,
-}
-
-impl RequirementsSpecification {
- /// Read the requirements and constraints from a source.
- pub(crate) fn from_source( - source: &RequirementsSource, - extras: &ExtrasSpecification, - ) -> Result { - Ok(match source { - RequirementsSource::Package(name) => { - let requirement = Requirement::from_str(name) - .with_context(|| format!("Failed to parse `{name}`"))?; - Self { - project: None, - requirements: vec![requirement], - constraints: vec![], - overrides: vec![], - editables: vec![], - extras: FxHashSet::default(), - } - } - RequirementsSource::Editable(name) => { - let requirement = EditableRequirement::from_str(name) - .with_context(|| format!("Failed to parse `{name}`"))?; - Self { - project: None, - requirements: vec![], - constraints: vec![], - overrides: vec![], - editables: vec![requirement], - extras: FxHashSet::default(), - } - } - RequirementsSource::RequirementsTxt(path) => { - let requirements_txt = RequirementsTxt::parse(path, std::env::current_dir()?)?; - Self { - project: None, - requirements: requirements_txt - .requirements - .into_iter() - .map(|entry| entry.requirement) - .collect(), - constraints: requirements_txt.constraints, - editables: requirements_txt.editables, - overrides: vec![], - extras: FxHashSet::default(), - } - } - RequirementsSource::PyprojectToml(path) => { - let contents = fs::read_to_string(path)?; - let pyproject_toml = toml::from_str::(&contents) - .with_context(|| format!("Failed to parse `{}`", path.display()))?; - let mut used_extras = FxHashSet::default(); - let mut requirements = Vec::new(); - let mut project_name = None; - if let Some(project) = pyproject_toml.project { - requirements.extend(project.dependencies.unwrap_or_default()); - // Include any optional dependencies specified in `extras` - if !matches!(extras, ExtrasSpecification::None) { - for (name, optional_requirements) in - project.optional_dependencies.unwrap_or_default() - { - // TODO(konstin): It's not ideal that pyproject-toml doesn't use - // `ExtraName` - let normalized_name = ExtraName::new(name)?; - if extras.contains(&normalized_name) { - used_extras.insert(normalized_name); - requirements.extend(optional_requirements); - } - } - } - // Parse the project name - project_name = Some(PackageName::new(project.name).with_context(|| { - format!("Invalid `project.name` in {}", path.display()) - })?); - } - - Self { - project: project_name, - requirements, - constraints: vec![], - overrides: vec![], - editables: vec![], - extras: used_extras, - } - } - }) - } - - /// Read the combined requirements and constraints from a set of sources. - pub(crate) fn from_sources( - requirements: &[RequirementsSource], - constraints: &[RequirementsSource], - overrides: &[RequirementsSource], - extras: &ExtrasSpecification, - ) -> Result { - let mut spec = Self::default(); - - // Read all requirements, and keep track of all requirements _and_ constraints. - // A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading - // a requirements file can also add constraints. - for source in requirements { - let source = Self::from_source(source, extras)?; - spec.requirements.extend(source.requirements); - spec.constraints.extend(source.constraints); - spec.overrides.extend(source.overrides); - spec.extras.extend(source.extras); - spec.editables.extend(source.editables); - - // Use the first project name discovered - if spec.project.is_none() { - spec.project = source.project; - } - } - - // Read all constraints, treating _everything_ as a constraint. 
- for source in constraints { - let source = Self::from_source(source, extras)?; - spec.constraints.extend(source.requirements); - spec.constraints.extend(source.constraints); - spec.constraints.extend(source.overrides); - } - - // Read all overrides, treating both requirements _and_ constraints as overrides. - for source in overrides { - let source = Self::from_source(source, extras)?; - spec.overrides.extend(source.requirements); - spec.overrides.extend(source.constraints); - spec.overrides.extend(source.overrides); - } - - Ok(spec) - } - - /// Read the requirements from a set of sources. - pub(crate) fn requirements_and_editables( - requirements: &[RequirementsSource], - ) -> Result<(Vec, Vec)> { - let specification = Self::from_sources(requirements, &[], &[], &ExtrasSpecification::None)?; - Ok((specification.requirements, specification.editables)) - } -} diff --git a/crates/puffin/tests/add.rs b/crates/puffin/tests/add.rs deleted file mode 100644 index fadb64c878ca..000000000000 --- a/crates/puffin/tests/add.rs +++ /dev/null @@ -1,169 +0,0 @@ -use std::process::Command; - -use anyhow::Result; -use assert_fs::prelude::*; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; - -use common::BIN_NAME; - -mod common; - -#[test] -fn missing_pyproject_toml() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("add") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - puffin::add::workspace_not_found - - × Could not find a `pyproject.toml` file in the current directory or any of - │ its parents - "###); - - pyproject_toml.assert(predicates::path::missing()); - - Ok(()) -} - -#[test] -fn missing_project_table() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("add") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -dependencies = [ - "flask", -] -"#, - ); - - Ok(()) -} - -#[test] -fn missing_dependencies_array() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("add") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "flask", -] -"#, - ); - - Ok(()) -} - -#[test] -fn replace_dependency() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", -] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("add") - .arg("flask==2.0.0") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "flask==2.0.0", -] -"#, - ); - - Ok(()) -} - -#[test] -fn reformat_array() 
-> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = ["flask==1.0.0"] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("add") - .arg("requests") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", - "requests", -] -"#, - ); - - Ok(()) -} diff --git a/crates/puffin/tests/common/mod.rs b/crates/puffin/tests/common/mod.rs deleted file mode 100644 index a31ba455569e..000000000000 --- a/crates/puffin/tests/common/mod.rs +++ /dev/null @@ -1,39 +0,0 @@ -#![allow(dead_code)] - -use assert_cmd::Command; -use assert_fs::assert::PathAssert; -use assert_fs::fixture::PathChild; -use assert_fs::TempDir; -use insta_cmd::get_cargo_bin; -use std::path::PathBuf; - -pub(crate) const BIN_NAME: &str = "puffin"; - -pub(crate) const INSTA_FILTERS: &[(&str, &str)] = &[ - (r"--cache-dir .*", "--cache-dir [CACHE_DIR]"), - (r"(\d+\.)?\d+(ms|s)", "[TIME]"), - (r"v\d+\.\d+\.\d+", "v[VERSION]"), -]; - -/// Create a virtual environment named `.venv` in a temporary directory. -pub(crate) fn create_venv_py312(temp_dir: &TempDir, cache_dir: &TempDir) -> PathBuf { - create_venv(temp_dir, cache_dir, "python3.12") -} - -/// Create a virtual environment named `.venv` in a temporary directory with the given -/// Python version. Expected format for `python` is "python". -pub(crate) fn create_venv(temp_dir: &TempDir, cache_dir: &TempDir, python: &str) -> PathBuf { - let venv = temp_dir.child(".venv"); - Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg(venv.as_os_str()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--python") - .arg(python) - .current_dir(temp_dir) - .assert() - .success(); - venv.assert(predicates::path::is_dir()); - venv.to_path_buf() -} diff --git a/crates/puffin/tests/pip_compile.rs b/crates/puffin/tests/pip_compile.rs deleted file mode 100644 index 5ee0acbc137c..000000000000 --- a/crates/puffin/tests/pip_compile.rs +++ /dev/null @@ -1,3501 +0,0 @@ -#![cfg(all(feature = "python", feature = "pypi"))] - -use std::path::PathBuf; -use std::process::Command; -use std::{fs, iter}; - -use anyhow::{bail, Context, Result}; -use assert_cmd::prelude::*; -use assert_fs::prelude::*; -use assert_fs::TempDir; -use indoc::indoc; -use insta::assert_snapshot; -use insta_cmd::_macro_support::insta; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; -use itertools::Itertools; - -use common::{create_venv_py312, BIN_NAME, INSTA_FILTERS}; - -mod common; - -// Exclude any packages uploaded after this date. -static EXCLUDE_NEWER: &str = "2023-11-18T12:00:00Z"; - -/// Resolve a specific version of Django from a `requirements.in` file. 
-#[test] -fn compile_requirements_in() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("django==5.0b1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - asgiref==3.7.2 - # via django - django==5.0b1 - sqlparse==0.4.4 - # via django - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - }); - - Ok(()) -} - -#[test] -fn missing_requirements_in() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let requirements_in = temp_dir.child("requirements.in"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: failed to open file `requirements.in` - Caused by: No such file or directory (os error 2) - "###); - - requirements_in.assert(predicates::path::missing()); - - Ok(()) -} - -#[test] -fn missing_venv() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = temp_dir.child(".venv"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: failed to open file `requirements.in` - Caused by: No such file or directory (os error 2) - "###); - - venv.assert(predicates::path::missing()); - - Ok(()) -} - -/// Resolve a specific version of Django from a `pyproject.toml` file. 
-#[test] -fn compile_pyproject_toml() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "project" -dependencies = [ - "django==5.0b1", -] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile pyproject.toml --cache-dir [CACHE_DIR] - asgiref==3.7.2 - # via django - django==5.0b1 - sqlparse==0.4.4 - # via django - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file. -#[test] -fn compile_constraints_txt() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("django==5.0b1")?; - - let constraints_txt = temp_dir.child("constraints.txt"); - constraints_txt.write_str("sqlparse<0.4.4")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--constraint") - .arg("constraints.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --constraint constraints.txt --cache-dir [CACHE_DIR] - asgiref==3.7.2 - # via django - django==5.0b1 - sqlparse==0.4.3 - # via django - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a package from a `requirements.in` file, with an inline constraint. 
-#[test] -fn compile_constraints_inline() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("django==5.0b1")?; - requirements_in.write_str("-c constraints.txt")?; - - let constraints_txt = temp_dir.child("constraints.txt"); - constraints_txt.write_str("sqlparse<0.4.4")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - - ----- stderr ----- - Resolved 0 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that -/// uses markers. -#[test] -fn compile_constraints_markers() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("anyio")?; - - // Constrain a transitive dependency based on the Python version - let constraints_txt = temp_dir.child("constraints.txt"); - // If constraints are ignored, these will conflict - constraints_txt.write_str("sniffio==1.2.0;python_version<='3.7'")?; - constraints_txt.write_str("sniffio==1.3.0;python_version>'3.7'")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--constraint") - .arg("constraints.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --constraint constraints.txt --cache-dir [CACHE_DIR] - anyio==4.0.0 - idna==3.4 - # via anyio - sniffio==1.3.0 - # via anyio - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a package from an optional dependency group in a `pyproject.toml` file. 
-#[test] -fn compile_pyproject_toml_extra() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "project" -dependencies = [] -optional-dependencies.foo = [ - "django==5.0b1", -] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--extra") - .arg("foo") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile pyproject.toml --extra foo --cache-dir [CACHE_DIR] - asgiref==3.7.2 - # via django - django==5.0b1 - sqlparse==0.4.4 - # via django - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a package from an extra with unnormalized names in a `pyproject.toml` file. -#[test] -fn compile_pyproject_toml_extra_name_normalization() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "project" -dependencies = [] -optional-dependencies."FrIeNdLy-._.-bArD" = [ - "django==5.0b1", -] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--extra") - .arg("FRiENDlY-...-_-BARd") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile pyproject.toml --extra FRiENDlY-...-_-BARd --cache-dir [CACHE_DIR] - asgiref==3.7.2 - # via django - django==5.0b1 - sqlparse==0.4.4 - # via django - - ----- stderr ----- - Resolved 3 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Request an extra that does not exist as a dependency group in a `pyproject.toml` file. 
-#[test] -fn compile_pyproject_toml_extra_missing() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "project" -dependencies = [] -optional-dependencies.foo = [ - "django==5.0b1", -] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--extra") - .arg("bar") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Requested extra not found: bar - "###); - }); - - Ok(()) -} - -/// Request multiple extras that do not exist as a dependency group in a `pyproject.toml` file. -#[test] -fn compile_pyproject_toml_extras_missing() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "project" -dependencies = [] -optional-dependencies.foo = [ - "django==5.0b1", -] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--extra") - .arg("foo") - .arg("--extra") - .arg("bar") - .arg("--extra") - .arg("foobar") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Requested extras not found: bar, foobar - "###); - }); - - Ok(()) -} - -/// Request extras when using a `requirements.in` file which does not support extras. -#[test] -fn compile_requirements_file_extra() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("django==5.0b1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .arg("--all-extras") - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), - @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Requesting extras requires a pyproject.toml input file. - "###); - }); - - Ok(()) -} - -/// Request an extra with a name that does not conform to the specification. 
-#[test] -fn invalid_extra_name() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "project" -dependencies = [] -optional-dependencies.foo = [ - "django==5.0b1", -] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--extra") - .arg("invalid name!") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: invalid value 'invalid name!' for '--extra ': Extra names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters - - For more information, try '--help'. - "###); - }); - - Ok(()) -} - -/// Resolve a specific version of Black at Python 3.12. -#[test] -fn compile_python_312() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--python-version") - .arg("3.12") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR] - black==23.10.1 - click==8.1.7 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.2 - # via black - platformdirs==4.0.0 - # via black - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific version of Black at Python 3.7. -#[test] -fn compile_python_37() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--python-version") - .arg("3.7") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only Python<3.8 is available and black==23.10.1 depends on - Python>=3.8, we can conclude that black==23.10.1 cannot be used. - And because you require black==23.10.1 we can conclude that the - requirements are unsatisfiable. 
- "###); - }); - - Ok(()) -} - -/// Resolve a specific version of Black against an invalid Python version. -#[test] -fn compile_python_invalid_version() -> Result<()> { - let temp_dir = TempDir::new()?; - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--python-version") - .arg("3.7.x") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: invalid value '3.7.x' for '--python-version ': after parsing 3.7, found ".x" after it, which is not part of a valid version - - For more information, try '--help'. - "###); - }); - - Ok(()) -} - -/// Resolve a specific version of Black against an invalid Python version. -#[test] -fn compile_python_dev_version() -> Result<()> { - let temp_dir = TempDir::new()?; - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--python-version") - .arg("3.7-dev") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: invalid value '3.7-dev' for '--python-version ': Python version 3.7-dev is a development release - - For more information, try '--help'. - "###); - }); - - Ok(()) -} - -/// Test that we select the last 3.8 compatible numpy version instead of trying to compile an -/// incompatible sdist -#[test] -fn compile_numpy_py38() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = temp_dir.child(".venv"); - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg(venv.as_os_str()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--python") - .arg("python3.8") - .current_dir(&temp_dir) - .assert() - .success(); - venv.assert(predicates::path::is_dir()); - let venv = venv.to_path_buf(); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("numpy")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .arg("--no-build") - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - numpy==1.24.4 - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask wheel via a URL dependency. 
-#[test] -fn compile_wheel_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask source distribution via a URL dependency. -/// -/// Exercises the `prepare_metadata_for_build_wheel` hooks. -#[test] -fn compile_sdist_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask source distribution via a Git HTTPS dependency. -#[test] -#[cfg(feature = "git")] -fn compile_git_https_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git")?; - - // In addition to the standard filters, remove the `main` commit, which will change frequently. 
- let filters: Vec<_> = iter::once((r"@(\d|\w){40}", "@[COMMIT]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ git+https://github.com/pallets/flask.git@[COMMIT] - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask branch via a Git HTTPS dependency. -#[test] -#[cfg(feature = "git")] -fn compile_git_branch_https_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git@1.0.x")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - click==8.1.7 - # via flask - flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask tag via a Git HTTPS dependency. 
-#[test] -#[cfg(feature = "git")] -fn compile_git_tag_https_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git@3.0.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ git+https://github.com/pallets/flask.git@735a4701d6d5e848241e7d7535db898efb62d400 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask commit via a Git HTTPS dependency. -#[test] -#[cfg(feature = "git")] -fn compile_git_long_commit_https_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str( - "flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91", - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - click==8.1.7 - # via flask - flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask commit via a Git HTTPS dependency. 
-#[test] -#[cfg(feature = "git")] -fn compile_git_short_commit_https_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ git+https://github.com/pallets/flask.git@d92b64a")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - click==8.1.7 - # via flask - flask @ git+https://github.com/pallets/flask.git@d92b64aa275841b0c9aea3903aba72fbc4275d91 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Flask ref via a Git HTTPS dependency. -#[test] -#[cfg(feature = "git")] -fn compile_git_refs_https_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in - .write_str("flask @ git+https://github.com/pallets/flask.git@refs/pull/5313/head")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ git+https://github.com/pallets/flask.git@7af0271f4703a71beef8e26d1f5f6f8da04100e6 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a specific Git dependency with a subdirectory. 
-#[test] -#[cfg(feature = "git")] -fn compile_git_subdirectory_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve two packages from a `requirements.in` file with the same Git HTTPS dependency. -#[test] -#[cfg(feature = "git")] -fn compile_git_concurrent_access() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in - .write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a - example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b - - ----- stderr ----- - Resolved 2 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a Git dependency with a declared name that differs from the true name of the package. 
-#[test] -#[cfg(feature = "git")] -fn compile_git_mismatched_name() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in - .write_str("flask @ git+https://github.com/pallets/flask.git@2.0.0\ndask @ git+https://github.com/pallets/flask.git@3.0.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Failed to download and build: dask @ git+https://github.com/pallets/flask.git@3.0.0 - Caused by: Package metadata name `flask` does not match given name `dask` - "###); - }); - - Ok(()) -} - -/// Request Flask, but include a URL dependency for Werkzeug, which should avoid adding a -/// duplicate dependency from `PyPI`. -#[test] -fn mixed_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask==3.0.0 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Request Werkzeug via both a version and a URL dependency at a _different_ version, which -/// should result in a conflict. 
-#[test] -fn conflicting_direct_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("werkzeug==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there is no version of werkzeug==3.0.0 and you require - werkzeug==3.0.0, we can conclude that the requirements are - unsatisfiable. - "###); - }); - - Ok(()) -} - -/// Request Werkzeug via both a version and a URL dependency at _the same_ version, which -/// should prefer the direct URL dependency. -#[test] -fn compatible_direct_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("werkzeug==2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - Ok(()) -} - -/// Request Werkzeug via two different URLs at different versions, which should result in a conflict. 
-#[test] -fn conflicting_repeated_url_dependency_version_mismatch() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ root dependencies are unusable: Conflicting URLs for package `werkzeug`: - - https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl - - https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl - "###); - }); - - Ok(()) -} - -/// Request Werkzeug via two different URLs at the same version. Despite mapping to the same -/// version, it should still result in a conflict. -#[test] -#[cfg(feature = "git")] -fn conflicting_repeated_url_dependency_version_match() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ root dependencies are unusable: Conflicting URLs for package `werkzeug`: - - git+https://github.com/pallets/werkzeug.git@2.0.0 - - https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl - "###); - }); - - Ok(()) -} - -/// Request Flask, but include a URL dependency for a conflicting version of Werkzeug. 
-#[test] -fn conflicting_transitive_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because flask==3.0.0 depends on werkzeug>=3.0.0 and only werkzeug<3.0.0 - is available, we can conclude that flask==3.0.0 cannot be used. - And because you require flask==3.0.0 we can conclude that the - requirements are unsatisfiable. - "###); - }); - - Ok(()) -} - -/// Request `transitive_url_dependency`, which depends on `git+https://github.com/pallets/werkzeug@2.0.0`. -/// Since this URL isn't declared upfront, we should reject it. -#[test] -#[cfg(feature = "git")] -fn disallowed_transitive_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Package `werkzeug` attempted to resolve via URL: git+https://github.com/pallets/werkzeug@2.0.0. URL dependencies must be expressed as direct requirements or constraints. Consider adding `werkzeug @ git+https://github.com/pallets/werkzeug@2.0.0` to your dependencies or constraints file. - "###); - }); - - Ok(()) -} - -/// Request `transitive_url_dependency`, which depends on `git+https://github.com/pallets/werkzeug@2.0.0`. -/// Since this URL is declared as a constraint, we should accept it. 
-#[test] -#[cfg(feature = "git")] -fn allowed_transitive_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip")?; - - let constraints_txt = temp_dir.child("constraints.txt"); - constraints_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug@2.0.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--constraint") - .arg("constraints.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --constraint constraints.txt --cache-dir [CACHE_DIR] - transitive-url-dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip - werkzeug @ git+https://github.com/pallets/werkzeug@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74 - # via transitive-url-dependency - - ----- stderr ----- - Resolved 2 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Request `transitive_url_dependency`, which depends on `git+https://github.com/pallets/werkzeug@2.0.0`. -/// Since this `git+https://github.com/pallets/werkzeug@2.0.0.git` is declared as a constraint, and -/// those map to the same canonical URL, we should accept it. -#[test] -#[cfg(feature = "git")] -fn allowed_transitive_canonical_url_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("transitive_url_dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip")?; - - let constraints_txt = temp_dir.child("constraints.txt"); - constraints_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--constraint") - .arg("constraints.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --constraint constraints.txt --cache-dir [CACHE_DIR] - transitive-url-dependency @ https://github.com/astral-sh/ruff/files/13257454/transitive_url_dependency.zip - werkzeug @ git+https://github.com/pallets/werkzeug@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74 - # via transitive-url-dependency - - ----- stderr ----- - Resolved 2 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve packages from all optional dependency groups in a `pyproject.toml` file. 
-#[test]
-fn compile_pyproject_toml_all_extras() -> Result<()> {
-    let temp_dir = TempDir::new()?;
-    let cache_dir = TempDir::new()?;
-    let venv = create_venv_py312(&temp_dir, &cache_dir);
-
-    let pyproject_toml = temp_dir.child("pyproject.toml");
-    pyproject_toml.write_str(
-        r#"[build-system]
-requires = ["setuptools", "wheel"]
-
-[project]
-name = "project"
-dependencies = ["django==5.0b1"]
-optional-dependencies.foo = [
-    "anyio==4.0.0",
-]
-optional-dependencies.bar = [
-    "httpcore==0.18.0",
-]
-"#,
-    )?;
-
-    insta::with_settings!({
-        filters => INSTA_FILTERS.to_vec()
-    }, {
-        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
-            .arg("pip")
-            .arg("compile")
-            .arg("pyproject.toml")
-            .arg("--all-extras")
-            .arg("--cache-dir")
-            .arg(cache_dir.path())
-            .arg("--exclude-newer")
-            .arg(EXCLUDE_NEWER)
-            .env("VIRTUAL_ENV", venv.as_os_str())
-            .current_dir(&temp_dir), @r###"
-        success: true
-        exit_code: 0
-        ----- stdout -----
-        # This file was autogenerated by Puffin v[VERSION] via the following command:
-        #    puffin pip compile pyproject.toml --all-extras --cache-dir [CACHE_DIR]
-        anyio==4.0.0
-            # via httpcore
-        asgiref==3.7.2
-            # via django
-        certifi==2023.11.17
-            # via httpcore
-        django==5.0b1
-        h11==0.14.0
-            # via httpcore
-        httpcore==0.18.0
-        idna==3.4
-            # via anyio
-        sniffio==1.3.0
-            # via
-            #   anyio
-            #   httpcore
-        sqlparse==0.4.4
-            # via django
-
-        ----- stderr -----
-        Resolved 9 packages in [TIME]
-        "###);
-    });
-
-    Ok(())
-}
-
-/// Expect an error when both `--extra` and `--all-extras` are requested.
-#[test]
-fn compile_does_not_allow_both_extra_and_all_extras() -> Result<()> {
-    let temp_dir = TempDir::new()?;
-    let cache_dir = TempDir::new()?;
-    let venv = create_venv_py312(&temp_dir, &cache_dir);
-
-    let pyproject_toml = temp_dir.child("pyproject.toml");
-    pyproject_toml.write_str(
-        r#"[build-system]
-requires = ["setuptools", "wheel"]
-
-[project]
-name = "project"
-dependencies = ["django==5.0b1"]
-optional-dependencies.foo = [
-    "anyio==4.0.0",
-]
-optional-dependencies.bar = [
-    "httpcore==0.18.0",
-]
-"#,
-    )?;
-
-    insta::with_settings!({
-        filters => INSTA_FILTERS.to_vec()
-    }, {
-        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
-            .arg("pip")
-            .arg("compile")
-            .arg("pyproject.toml")
-            .arg("--all-extras")
-            .arg("--extra")
-            .arg("foo")
-            .arg("--cache-dir")
-            .arg(cache_dir.path())
-            .arg("--exclude-newer")
-            .arg(EXCLUDE_NEWER)
-            .env("VIRTUAL_ENV", venv.as_os_str())
-            .current_dir(&temp_dir),
-            @r###"
-        success: false
-        exit_code: 2
-        ----- stdout -----
-
-        ----- stderr -----
-        error: the argument '--all-extras' cannot be used with '--extra '
-
-        Usage: puffin pip compile --all-extras --cache-dir [CACHE_DIR]
-
-        For more information, try '--help'.
-        "###);
-    });
-
-    Ok(())
-}
-
-/// Compile requirements that cannot be solved due to a conflict in a `pyproject.toml` file.
-#[test] -fn compile_unsolvable_requirements() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "my-project" -dependencies = ["django==5.0b1", "django==5.0a1"] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ my-project dependencies are unusable: Conflicting versions for `django`: - `django==5.0b1` does not intersect with `django==5.0a1` - "###); - }); - - Ok(()) -} - -/// Compile requirements in a `pyproject.toml` file that cannot be resolved due to -/// a requirement with a version that is not available online. -#[test] -fn compile_unsolvable_requirements_version_not_available() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.write_str( - r#"[build-system] -requires = ["setuptools", "wheel"] - -[project] -name = "my-project" -dependencies = ["django==300.1.4"] -"#, - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("pyproject.toml") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there is no version of django==300.1.4 and my-project - depends on django==300.1.4, we can conclude that the requirements are - unsatisfiable. - "###); - }); - - Ok(()) -} - -/// Resolve at a specific time in the past -#[test] -fn compile_exclude_newer() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("tqdm")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--exclude-newer") - // 4.64.0: 2022-04-04T01:48:46.194635Z1 - // 4.64.1: 2022-09-03T11:10:27.148080Z - .arg("2022-04-04T12:00:00Z") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --exclude-newer 2022-04-04T12:00:00Z --cache-dir [CACHE_DIR] - tqdm==4.64.0 - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - // Use a date as input instead. 
- // We interpret a date as including this day - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--exclude-newer") - .arg("2022-04-04") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --exclude-newer 2022-04-04 --cache-dir [CACHE_DIR] - tqdm==4.64.0 - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - // Check the error message for invalid datetime - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--exclude-newer") - .arg("2022-04-04+02:00") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: invalid value '2022-04-04+02:00' for '--exclude-newer ': Neither a valid date (trailing input) not a valid datetime (input contains invalid characters) - - For more information, try '--help'. - "###); - }); - - Ok(()) -} - -/// Resolve a local path dependency on a specific wheel. -#[test] -fn compile_wheel_path_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a wheel. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - let flask_wheel = temp_dir.child("flask-3.0.0-py3-none-any.whl"); - let mut flask_wheel_file = std::fs::File::create(&flask_wheel)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?; - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str(&format!("flask @ file://{}", flask_wheel.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a local path dependency on a specific source distribution. 
-#[test] -fn compile_source_distribution_path_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a source distribution. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?; - let flask_wheel = temp_dir.child("flask-3.0.0.tar.gz"); - let mut flask_wheel_file = std::fs::File::create(&flask_wheel)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?; - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str(&format!("flask @ file://{}", flask_wheel.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ file://[TEMP_DIR]/flask-3.0.0.tar.gz - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a local path dependency to a non-existent file. -#[test] -fn compile_wheel_path_dependency_missing() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ file:///path/to/flask-3.0.0-py3-none-any.whl")?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Unable to locate distribution at: file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl - Caused by: No such file or directory (os error 2) - "###); - }); - - Ok(()) -} - -/// Resolve a yanked version of `attrs` by specifying the version directly. 
-#[test] -fn compile_yanked_version_direct() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("attrs==21.1.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - attrs==21.1.0 - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - Ok(()) -} - -/// Fail to resolve `attrs` due to the indirect use of a yanked version (`21.1.0`). -#[test] -fn compile_yanked_version_indirect() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("attrs>20.3.0,<21.2.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there are no versions of attrs that satisfy attrs>20.3.0,<21.2.0 - and you require attrs>20.3.0,<21.2.0, we can conclude that the - requirements are unsatisfiable. - "###); - }); - - Ok(()) -} - -/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this -/// requirement with an incompatible version. 
-#[test] -fn override_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask==3.0.0")?; - - let overrides_txt = temp_dir.child("overrides.txt"); - overrides_txt.write_str("werkzeug==2.3.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--override") - .arg("overrides.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --override overrides.txt --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask==3.0.0 - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==2.3.0 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Black==23.10.1 depends on tomli>=1.1.0 for Python versions below 3.11. Demonstrate that we can -/// override it with a multi-line override. -#[test] -fn override_multi_dependency() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - let overrides_txt = temp_dir.child("overrides.txt"); - overrides_txt.write_str( - "tomli>=1.1.0; python_version >= '3.11'\ntomli<1.0.0; python_version < '3.11'", - )?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--override") - .arg("overrides.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --override overrides.txt --cache-dir [CACHE_DIR] - black==23.10.1 - click==8.1.7 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.2 - # via black - platformdirs==4.0.0 - # via black - tomli==2.0.1 - # via black - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Request an extra that doesn't exist on the specified package. 
-#[test] -fn missing_registry_extra() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black[tensorboard]==23.10.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - black==23.10.1 - click==8.1.7 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.2 - # via black - platformdirs==4.0.0 - # via black - - ----- stderr ----- - Resolved 6 packages in [TIME] - warning: The package `black==23.10.1` does not have an extra named `tensorboard`. - "###); - }); - - Ok(()) -} - -/// Request an extra that doesn't exist on the specified package. -#[test] -fn missing_url_extra() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask[tensorboard] @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - warning: The package `flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl` does not have an extra named `tensorboard`. - "###); - }); - - Ok(()) -} - -/// Resolve a dependency from a URL, preserving the exact casing of the URL as specified in the -/// requirements file. 
-#[test] -fn preserve_url() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a dependency from a URL, preserving the unexpanded environment variable as specified in -/// the requirements file. -#[test] -fn preserve_env_var() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a wheel. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?; - let flask_wheel = temp_dir.child("flask-3.0.0-py3-none-any.whl"); - let mut flask_wheel_file = std::fs::File::create(flask_wheel)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?; - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - blinker==1.7.0 - # via flask - click==8.1.7 - # via flask - flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl - itsdangerous==2.1.2 - # via flask - jinja2==3.1.2 - # via flask - markupsafe==2.1.3 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -#[test] -#[cfg(feature = "maturin")] -fn compile_editable() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str(indoc! 
{r" - -e ../../scripts/editable-installs/poetry_editable - -e ${PROJECT_ROOT}/../../scripts/editable-installs/maturin_editable - -e file://../../scripts/editable-installs/black_editable - boltons # normal dependency for comparison - " - })?; - - let filter_path = requirements_in.display().to_string(); - let filters: Vec<_> = iter::once((filter_path.as_str(), "requirements.in")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg(requirements_in.path()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - -e file://../../scripts/editable-installs/black_editable - boltons==23.1.1 - -e ${PROJECT_ROOT}/../../scripts/editable-installs/maturin_editable - numpy==1.26.2 - # via poetry-editable - -e ../../scripts/editable-installs/poetry_editable - - ----- stderr ----- - Built 3 editables in [TIME] - Resolved 5 packages in [TIME] - "###); - }); - - Ok(()) -} - -#[test] -#[ignore] -fn cache_errors_are_non_fatal() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - // No git dep, git has its own locking strategy - requirements_in.write_str(indoc! {r" - # pypi wheel - pandas - # url wheel - flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl - # url source dist - werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz - " - })?; - - // Pick a file from each kind of cache - let interpreter_cache = cache_dir - .path() - .join("interpreter-v0") - .read_dir()? - .next() - .context("Expected a python interpreter cache file")?? - .path(); - let cache_files = [ - PathBuf::from("simple-v0/pypi/numpy.msgpack"), - PathBuf::from( - "wheels-v0/pypi/python-dateutil/python_dateutil-2.8.2-py2.py3-none-any.msgpack", - ), - PathBuf::from("wheels-v0/url/4b8be67c801a7ecb/flask/flask-3.0.0-py3-none-any.msgpack"), - PathBuf::from("built-wheels-v0/url/6781bd6440ae72c2/werkzeug/metadata.msgpack"), - interpreter_cache, - ]; - - let check = || { - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg(requirements_in.path()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - // It's sufficient to check that we resolve to a fix number of packages - .stdout(std::process::Stdio::null()) - .env("VIRTUAL_ENV", venv.as_os_str()), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 13 packages in [TIME] - "###); - }); - }; - - insta::allow_duplicates! 
{ - check(); - - // Replace some cache files with invalid contents - for file in &cache_files { - let file = cache_dir.join(file); - if !file.is_file() { - bail!("Missing cache file {}", file.display()); - } - fs_err::write(file, "I borken you cache")?; - } - - check(); - - #[cfg(unix)] - { - use fs_err::os::unix::fs::OpenOptionsExt; - - // Make some files unreadable, so that the read instead of the deserialization will fail - for file in cache_files { - let file = cache_dir.join(file); - if !file.is_file() { - bail!("Missing cache file {}", file.display()); - } - - fs_err::OpenOptions::new() - .create(true) - .write(true) - .mode(0o000) - .open(file)?; - } - } - - check(); - - Ok(()) - } -} - -/// Resolve a distribution from an HTML-only registry. -#[test] -fn compile_html() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("jinja2<=3.1.2")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--index-url") - .arg("https://download.pytorch.org/whl") - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - jinja2==3.1.2 - markupsafe==2.1.3 - # via jinja2 - - ----- stderr ----- - Resolved 2 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a distribution from a registry with and without a trailing slash. 
-#[test] -fn trailing_slash() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("jinja2")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--index-url") - .arg("https://test.pypi.org/simple") - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - jinja2==3.1.2 - markupsafe==2.1.3 - # via jinja2 - - ----- stderr ----- - Resolved 2 packages in [TIME] - "###); - }); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--index-url") - .arg("https://test.pypi.org/simple/") - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - jinja2==3.1.2 - markupsafe==2.1.3 - # via jinja2 - - ----- stderr ----- - Resolved 2 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a project without a `pyproject.toml`, using the PEP 517 build backend (default). -#[test] -fn compile_legacy_sdist_pep_517() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --cache-dir [CACHE_DIR] - flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz - mccabe==0.7.0 - # via flake8 - pycodestyle==2.10.0 - # via flake8 - pyflakes==3.0.1 - # via flake8 - - ----- stderr ----- - Resolved 4 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Resolve a project without a `pyproject.toml`, using `setuptools` directly. 
-#[test] -fn compile_legacy_sdist_setuptools() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--legacy-setup-py") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --legacy-setup-py --cache-dir [CACHE_DIR] - flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz - mccabe==0.7.0 - # via flake8 - pycodestyle==2.10.0 - # via flake8 - pyflakes==3.0.1 - # via flake8 - - ----- stderr ----- - Resolved 4 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Include hashes in the generated output. -#[test] -fn generate_hashes() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flask==3.0.0")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--generate-hashes") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --generate-hashes --cache-dir [CACHE_DIR] - blinker==1.7.0 \ - --hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9 \ - --hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182 - # via flask - click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de - # via flask - flask==3.0.0 \ - --hash=sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638 \ - --hash=sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58 - itsdangerous==2.1.2 \ - --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \ - --hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a - # via flask - jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 - # via flask - markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - 
--hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - 
--hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 - # via - # jinja2 - # werkzeug - werkzeug==3.0.1 \ - --hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \ - --hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10 - # via flask - - ----- stderr ----- - Resolved 7 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Compile using `--find-links` with a local directory. -#[test] -fn find_links_directory() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str(indoc! {r" - tqdm - numpy - werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl - "})?; - - let project_root = fs_err::canonicalize(std::env::current_dir()?.join("../.."))?; - let project_root_string = project_root.display().to_string(); - let filters: Vec<_> = iter::once((project_root_string.as_str(), "[PROJECT_ROOT]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--find-links") - .arg(project_root.join("scripts/wheels/")) - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --find-links [PROJECT_ROOT]/scripts/wheels/ --cache-dir [CACHE_DIR] - markupsafe==2.1.3 - # via werkzeug - numpy==1.26.2 - tqdm==1000.0.0 - werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl - - ----- stderr ----- - Resolved 4 packages in [TIME] - "###); - }); - - Ok(()) -} - -/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index. 
-#[test] -fn find_links_url() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("tqdm")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--no-index") - .arg("--find-links") - .arg("https://download.pytorch.org/whl/torch_stable.html") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - # This file was autogenerated by Puffin v[VERSION] via the following command: - # puffin pip compile requirements.in --no-index --find-links https://download.pytorch.org/whl/torch_stable.html --cache-dir [CACHE_DIR] - tqdm==4.64.1 - - ----- stderr ----- - Resolved 1 package in [TIME] - "###); - }); - - Ok(()) -} - -/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`. -/// Nothing should change. -#[test] -fn upgrade_none() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(indoc! {r" - black==23.10.1 - click==8.1.2 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.0 - # via black - platformdirs==4.0.0 - # via black - "})?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--output-file") - .arg("requirements.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - // Read the output requirements, but skip the header. - let resolution = fs::read_to_string(requirements_txt.path())? - .lines() - .skip_while(|line| line.trim_start().starts_with('#')) - .join("\n"); - assert_snapshot!(resolution, @r###" - black==23.10.1 - click==8.1.2 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.0 - # via black - platformdirs==4.0.0 - # via black - "###); - - Ok(()) -} - -/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`. -/// Both packages should be upgraded. -#[test] -fn upgrade_all() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(indoc! 
{r" - # This file was autogenerated by Puffin v0.0.1 via the following command: - # puffin pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR] - black==23.10.1 - click==8.1.2 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.0 - # via black - platformdirs==4.0.0 - # via black - "})?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--output-file") - .arg("requirements.txt") - .arg("--upgrade") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - // Read the output requirements, but skip the header. - let resolution = fs::read_to_string(requirements_txt.path())? - .lines() - .skip_while(|line| line.trim_start().starts_with('#')) - .join("\n"); - assert_snapshot!(resolution, @r###" - black==23.10.1 - click==8.1.7 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.2 - # via black - platformdirs==4.0.0 - # via black - "###); - - Ok(()) -} - -/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`. -/// Only `click` should be upgraded. -#[test] -fn upgrade_package() -> Result<()> { - let temp_dir = TempDir::new()?; - let cache_dir = TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("black==23.10.1")?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(indoc! {r" - # This file was autogenerated by Puffin v0.0.1 via the following command: - # puffin pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR] - black==23.10.1 - click==8.1.2 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.0 - # via black - platformdirs==4.0.0 - # via black - "})?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("compile") - .arg("requirements.in") - .arg("--output-file") - .arg("requirements.txt") - .arg("--upgrade-package") - .arg("click") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 6 packages in [TIME] - "###); - }); - - // Read the output requirements, but skip the header. - let resolution = fs::read_to_string(requirements_txt.path())? - .lines() - .skip_while(|line| line.trim_start().starts_with('#')) - .join("\n"); - assert_snapshot!(resolution, @r###" - black==23.10.1 - click==8.1.7 - # via black - mypy-extensions==1.0.0 - # via black - packaging==23.2 - # via black - pathspec==0.11.0 - # via black - platformdirs==4.0.0 - # via black - "### - ); - - Ok(()) -} - -/// Attempt to resolve a requirement at a path that doesn't exist. 
-#[test]
-fn missing_path_requirement() -> Result<()> {
- let temp_dir = TempDir::new()?;
- let cache_dir = TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_in = temp_dir.child("requirements.in");
- requirements_in.write_str("django @ file:///tmp/django-3.2.8.tar.gz")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("compile")
- .arg("requirements.in")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .arg("--exclude-newer")
- .arg(EXCLUDE_NEWER)
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Unable to locate distribution at: file:///tmp/django-3.2.8.tar.gz
- Caused by: No such file or directory (os error 2)
- "###);
- });
-
- Ok(())
-}
-
-/// Attempt to resolve an editable requirement at a path that doesn't exist.
-#[test]
-fn missing_editable_requirement() -> Result<()> {
- let temp_dir = TempDir::new()?;
- let cache_dir = TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_in = temp_dir.child("requirements.in");
- requirements_in.write_str("-e ../tmp/django-3.2.8.tar.gz")?;
-
- let filters: Vec<_> = iter::once((r"(file:/)?/.*/", "file://[TEMP_DIR]/"))
- .chain(INSTA_FILTERS.to_vec())
- .collect::<Vec<_>>();
-
- insta::with_settings!({
- filters => filters
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("compile")
- .arg("requirements.in")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .arg("--exclude-newer")
- .arg(EXCLUDE_NEWER)
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Failed to build editables
- Caused by: Failed to build editable: file://[TEMP_DIR]/django-3.2.8.tar.gz
- Caused by: Failed to build: file://[TEMP_DIR]/django-3.2.8.tar.gz
- Caused by: failed to query metadata of file `file://[TEMP_DIR]/django-3.2.8.tar.gz`
- Caused by: No such file or directory (os error 2)
- "###);
- });
-
- Ok(())
-}
diff --git a/crates/puffin/tests/pip_install.rs b/crates/puffin/tests/pip_install.rs
deleted file mode 100644
index 09c49d208700..000000000000
--- a/crates/puffin/tests/pip_install.rs
+++ /dev/null
@@ -1,942 +0,0 @@
-#![cfg(all(feature = "python", feature = "pypi"))]
-
-use std::iter;
-use std::path::Path;
-use std::process::Command;
-
-use anyhow::Result;
-use assert_cmd::assert::Assert;
-use assert_cmd::prelude::*;
-use assert_fs::prelude::*;
-use indoc::indoc;
-use insta_cmd::_macro_support::insta;
-use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
-
-use common::{create_venv_py312, BIN_NAME, INSTA_FILTERS};
-
-mod common;
-
-// Exclude any packages uploaded after this date.
-static EXCLUDE_NEWER: &str = "2023-11-18T12:00:00Z"; - -fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert { - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg(command) - .current_dir(temp_dir) - .assert() -} - -#[test] -fn missing_requirements_txt() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let requirements_txt = temp_dir.child("requirements.txt"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: failed to open file `requirements.txt` - Caused by: No such file or directory (os error 2) - "###); - - requirements_txt.assert(predicates::path::missing()); - - Ok(()) -} - -#[test] -fn no_solution() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("flask>=3.0.0") - .arg("WerkZeug<1.0.0") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only flask<=3.0.0 is available and flask==3.0.0 depends - on werkzeug>=3.0.0, we can conclude that flask>=3.0.0 depends on - werkzeug>=3.0.0. - And because you require flask>=3.0.0 and you require werkzeug<1.0.0, we - can conclude that the requirements are unsatisfiable. - "###); - - Ok(()) -} - -/// Install a package from the command line into a virtual environment. -#[test] -fn install_package() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Install Flask. - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("Flask") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 7 packages in [TIME] - Installed 7 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==3.0.0 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - Ok(()) -} - -/// Install a package from a `requirements.txt` into a virtual environment. -#[test] -fn install_requirements_txt() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Install Flask. 
- let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str("Flask")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 7 packages in [TIME] - Installed 7 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==3.0.0 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - // Install Jinja2 (which should already be installed, but shouldn't remove other packages). - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str("Jinja2")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - Ok(()) -} - -/// Respect installed versions when resolving. -#[test] -fn respect_installed() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Install Flask. - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("Flask==2.3.2")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 7 packages in [TIME] - Installed 7 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==2.3.2 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - // Re-install Flask. We should respect the existing version. 
- let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("Flask")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - // Install a newer version of Flask. We should upgrade it. - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("Flask==2.3.3")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - - flask==2.3.2 - + flask==2.3.3 - "###); - }); - - // Re-install Flask. We should upgrade it. - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("Flask")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-r") - .arg("requirements.txt") - .arg("--reinstall-package") - .arg("Flask") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - - flask==2.3.3 - + flask==3.0.0 - "###); - }); - - Ok(()) -} - -/// Like `pip`, we (unfortunately) allow incompatible environments. -#[test] -fn allow_incompatibilities() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Install Flask, which relies on `Werkzeug>=3.0.0`. 
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("Flask")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("install")
- .arg("-r")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .arg("--exclude-newer")
- .arg(EXCLUDE_NEWER)
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 7 packages in [TIME]
- Downloaded 7 packages in [TIME]
- Installed 7 packages in [TIME]
- + blinker==1.7.0
- + click==8.1.7
- + flask==3.0.0
- + itsdangerous==2.1.2
- + jinja2==3.1.2
- + markupsafe==2.1.3
- + werkzeug==3.0.1
- "###);
- });
-
- assert_command(&venv, "import flask", &temp_dir).success();
-
- // Install an incompatible version of Jinja2.
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("jinja2==2.11.3")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("install")
- .arg("-r")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .arg("--exclude-newer")
- .arg(EXCLUDE_NEWER)
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 2 packages in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- - jinja2==3.1.2
- + jinja2==2.11.3
- warning: The package `flask` requires `jinja2 >=3.1.2`, but `2.11.3` is installed.
- "###);
- });
-
- // This no longer works, since we have an incompatible version of Jinja2.
- assert_command(&venv, "import flask", &temp_dir).failure();
-
- Ok(())
-}
-
-#[test]
-#[cfg(feature = "maturin")]
-fn install_editable() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let current_dir = std::env::current_dir()?;
- let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
-
- let filters = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"))
- .chain(INSTA_FILTERS.to_vec())
- .collect::<Vec<_>>();
-
- // Install the editable package.
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("install")
- .arg("-e")
- .arg("../../scripts/editable-installs/poetry_editable")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .arg("--exclude-newer")
- .arg(EXCLUDE_NEWER)
- .env("VIRTUAL_ENV", venv.as_os_str())
- .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Built 1 editable in [TIME]
- Resolved 2 packages in [TIME]
- Downloaded 1 package in [TIME]
- Installed 2 packages in [TIME]
- + numpy==1.26.2
- + poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable)
- "###);
- });
-
- // Install it again (no-op).
- insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-e") - .arg("../../scripts/editable-installs/poetry_editable") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - // Add another, non-editable dependency. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-e") - .arg("../../scripts/editable-installs/poetry_editable") - .arg("black") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Built 1 editable in [TIME] - Resolved 8 packages in [TIME] - Downloaded 6 packages in [TIME] - Installed 7 packages in [TIME] - + black==23.11.0 - + click==8.1.7 - + mypy-extensions==1.0.0 - + packaging==23.2 - + pathspec==0.11.2 - + platformdirs==4.0.0 - - poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - + poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - "###); - }); - - Ok(()) -} - -#[test] -fn install_editable_and_registry() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let current_dir = std::env::current_dir()?; - let workspace_dir = current_dir.join("..").join("..").canonicalize()?; - - let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - // Install the registry-based version of Black. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("black") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 6 packages in [TIME] - Downloaded 6 packages in [TIME] - Installed 6 packages in [TIME] - + black==23.11.0 - + click==8.1.7 - + mypy-extensions==1.0.0 - + packaging==23.2 - + pathspec==0.11.2 - + platformdirs==4.0.0 - "###); - }); - - // Install the editable version of Black. This should remove the registry-based version. 
- insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("-e") - .arg("../../scripts/editable-installs/black_editable") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Built 1 editable in [TIME] - Resolved 1 package in [TIME] - Installed 1 package in [TIME] - - black==23.11.0 - + black==0.1.0+editable (from file://[WORKSPACE_DIR]/scripts/editable-installs/black_editable) - "###); - }); - - // Re-install the registry-based version of Black. This should be a no-op, since we have a - // version of Black installed (the editable version) that satisfies the requirements. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("black") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - // Re-install Black at a specific version. This should replace the editable version. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("black==23.10.0") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 6 packages in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - - black==0.1.0+editable (from file://[WORKSPACE_DIR]/scripts/editable-installs/black_editable) - + black==23.10.0 - "###); - }); - - Ok(()) -} - -/// Install a source distribution that uses the `flit` build system, along with `flit` -/// at the top-level, along with `--reinstall` to force a re-download after resolution, to ensure -/// that the `flit` install and the source distribution build don't conflict. -#[test] -fn reinstall_build_system() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Install devpi. - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(indoc! 
{r" - flit_core<4.0.0 - flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz - " - })?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("--reinstall") - .arg("-r") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 8 packages in [TIME] - Downloaded 8 packages in [TIME] - Installed 8 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==3.0.0 (from https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz) - + flit-core==3.9.0 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - Ok(()) -} - -/// Install a package without using pre-built wheels. -#[test] -fn install_no_binary() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("Flask") - .arg("--no-binary") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 7 packages in [TIME] - Installed 7 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==3.0.0 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - Ok(()) -} - -/// Install a package without using pre-built wheels for a subset of packages. -#[test] -fn install_no_binary_subset() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("Flask") - .arg("--no-binary-package") - .arg("click") - .arg("--no-binary-package") - .arg("flask") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 7 packages in [TIME] - Installed 7 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==3.0.0 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - Ok(()) -} - -/// Install a package without using pre-built wheels. 
-#[test] -fn reinstall_no_binary() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // The first installation should use a pre-built wheel - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("Flask") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 7 packages in [TIME] - Installed 7 packages in [TIME] - + blinker==1.7.0 - + click==8.1.7 - + flask==3.0.0 - + itsdangerous==2.1.2 - + jinja2==3.1.2 - + markupsafe==2.1.3 - + werkzeug==3.0.1 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - // Running installation again with `--no-binary` should be a no-op - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("Flask") - .arg("--no-binary") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - - // With `--reinstall`, `--no-binary` should have an affect - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("Flask") - .arg("--no-binary") - .arg("--reinstall-package") - .arg("Flask") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--exclude-newer") - .arg(EXCLUDE_NEWER) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 7 packages in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - - flask==3.0.0 - + flask==3.0.0 - "###); - }); - - assert_command(&venv, "import flask", &temp_dir).success(); - Ok(()) -} diff --git a/crates/puffin/tests/pip_install_scenarios.rs b/crates/puffin/tests/pip_install_scenarios.rs deleted file mode 100644 index d87bf051bae5..000000000000 --- a/crates/puffin/tests/pip_install_scenarios.rs +++ /dev/null @@ -1,3119 +0,0 @@ -//! DO NOT EDIT -//! -//! Generated with ./scripts/scenarios/update.py -//! Scenarios from -//! 
-#![cfg(all(feature = "python", feature = "pypi"))] - -use std::path::Path; -use std::process::Command; - -use anyhow::Result; -use assert_cmd::assert::Assert; -use assert_cmd::prelude::*; -use insta_cmd::_macro_support::insta; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; - -use common::{create_venv, BIN_NAME, INSTA_FILTERS}; - -mod common; - -fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert { - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg(command) - .current_dir(temp_dir) - .assert() -} - -fn assert_installed(venv: &Path, package: &'static str, version: &'static str, temp_dir: &Path) { - assert_command( - venv, - format!("import {package} as package; print(package.__version__, end='')").as_str(), - temp_dir, - ) - .success() - .stdout(version); -} - -fn assert_not_installed(venv: &Path, package: &'static str, temp_dir: &Path) { - assert_command(venv, format!("import {package}").as_str(), temp_dir).failure(); -} - -/// requires-package-does-not-exist -/// -/// The user requires any version of package `a` which does not exist. -/// -/// ```text -/// 57cd4136 -/// ├── environment -/// │ └── python3.7 -/// └── root -/// └── requires a -/// └── unsatisfied: no versions for package -/// ``` -#[test] -fn requires_package_does_not_exist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"-57cd4136", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-57cd4136") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Package `a` was not found in the registry. 
- "###); - }); - - assert_not_installed(&venv, "a_57cd4136", &temp_dir); - - Ok(()) -} - -/// requires-exact-version-does-not-exist -/// -/// The user requires an exact version of package `a` but only other versions exist -/// -/// ```text -/// eaa03067 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a==2.0.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn requires_exact_version_does_not_exist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-eaa03067", "albatross")); - filters.push((r"-eaa03067", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-eaa03067==2.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there is no version of albatross==2.0.0 and you require albatross==2.0.0, we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_eaa03067", &temp_dir); - - Ok(()) -} - -/// requires-greater-version-does-not-exist -/// -/// The user requires a version of `a` greater than `1.0.0` but only smaller or -/// equal versions exist -/// -/// ```text -/// 6e8e01df -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>1.0.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// ├── a-0.1.0 -/// └── a-1.0.0 -/// ``` -#[test] -fn requires_greater_version_does_not_exist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-6e8e01df", "albatross")); - filters.push((r"-6e8e01df", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-6e8e01df>1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only albatross<=1.0.0 is available and you require albatross>1.0.0, we can conclude that the requirements are unsatisfiable. 
- "###); - }); - - assert_not_installed(&venv, "a_6e8e01df", &temp_dir); - - Ok(()) -} - -/// requires-less-version-does-not-exist -/// -/// The user requires a version of `a` less than `1.0.0` but only larger versions -/// exist -/// -/// ```text -/// e45cec3c -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a<2.0.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// ├── a-2.0.0 -/// ├── a-3.0.0 -/// └── a-4.0.0 -/// ``` -#[test] -fn requires_less_version_does_not_exist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-e45cec3c", "albatross")); - filters.push((r"-e45cec3c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-e45cec3c<2.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only albatross>=2.0.0 is available and you require albatross<2.0.0, we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_e45cec3c", &temp_dir); - - Ok(()) -} - -/// transitive-requires-package-does-not-exist -/// -/// The user requires package `a` but `a` requires package `b` which does not exist -/// -/// ```text -/// aca2796a -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// └── requires b -/// └── unsatisfied: no versions for package -/// ``` -#[test] -fn transitive_requires_package_does_not_exist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-aca2796a", "albatross")); - filters.push((r"-aca2796a", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-aca2796a") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Package `b` was not found in the registry. - "###); - }); - - assert_not_installed(&venv, "a_aca2796a", &temp_dir); - - Ok(()) -} - -/// excluded-only-version -/// -/// Only one version of the requested package is available, but the user has banned -/// that version. 
-/// -/// ```text -/// 7a9ed79c -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a!=1.0.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn excluded_only_version() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-7a9ed79c", "albatross")); - filters.push((r"-7a9ed79c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-7a9ed79c!=1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only albatross==1.0.0 is available and you require one of: - albatross<1.0.0 - albatross>1.0.0 - we can conclude that the requirements are unsatisfiable. - "###); - }); - - // Only `a==1.0.0` is available but the user excluded it. - assert_not_installed(&venv, "a_7a9ed79c", &temp_dir); - - Ok(()) -} - -/// excluded-only-compatible-version -/// -/// Only one version of the requested package `a` is compatible, but the user has -/// banned that version. -/// -/// ```text -/// b6b89642 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a!=2.0.0 -/// │ │ ├── satisfied by a-1.0.0 -/// │ │ └── satisfied by a-3.0.0 -/// │ └── requires b<3.0.0,>=2.0.0 -/// │ └── satisfied by b-2.0.0 -/// ├── a -/// │ ├── a-1.0.0 -/// │ │ └── requires b==1.0.0 -/// │ │ └── satisfied by b-1.0.0 -/// │ ├── a-2.0.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ └── a-3.0.0 -/// │ └── requires b==3.0.0 -/// │ └── satisfied by b-3.0.0 -/// └── b -/// ├── b-1.0.0 -/// ├── b-2.0.0 -/// └── b-3.0.0 -/// ``` -#[test] -fn excluded_only_compatible_version() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-b6b89642", "albatross")); - filters.push((r"b-b6b89642", "bluebird")); - filters.push((r"-b6b89642", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-b6b89642!=2.0.0") - .arg("b-b6b89642<3.0.0,>=2.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there are no versions of albatross that satisfy any of: - albatross<1.0.0 - albatross>1.0.0,<2.0.0 - albatross>2.0.0,<3.0.0 - albatross>3.0.0 - and albatross==1.0.0 depends on bluebird==1.0.0, we can conclude that albatross<2.0.0 depends on bluebird==1.0.0. 
- And because albatross==3.0.0 depends on bluebird==3.0.0 we can conclude that any of: - albatross<2.0.0 - albatross>2.0.0 - depends on one of: - bluebird<=1.0.0 - bluebird>=3.0.0 - - And because you require one of: - albatross<2.0.0 - albatross>2.0.0 - and you require bluebird>=2.0.0,<3.0.0, we can conclude that the requirements are unsatisfiable. - "###); - }); - - // Only `a==1.2.0` is available since `a==1.0.0` and `a==3.0.0` require - // incompatible versions of `b`. The user has excluded that version of `a` so - // resolution fails. - assert_not_installed(&venv, "a_b6b89642", &temp_dir); - assert_not_installed(&venv, "b_b6b89642", &temp_dir); - - Ok(()) -} - -/// dependency-excludes-range-of-compatible-versions -/// -/// There is a range of compatible versions for the requested package `a`, but -/// another dependency `c` excludes that range. -/// -/// ```text -/// 1cd99bd0 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ ├── satisfied by a-1.0.0 -/// │ │ ├── satisfied by a-2.0.0 -/// │ │ ├── satisfied by a-2.1.0 -/// │ │ ├── satisfied by a-2.2.0 -/// │ │ ├── satisfied by a-2.3.0 -/// │ │ └── satisfied by a-3.0.0 -/// │ ├── requires b<3.0.0,>=2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ └── requires c -/// │ ├── satisfied by c-1.0.0 -/// │ └── satisfied by c-2.0.0 -/// ├── a -/// │ ├── a-1.0.0 -/// │ │ └── requires b==1.0.0 -/// │ │ └── satisfied by b-1.0.0 -/// │ ├── a-2.0.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ ├── a-2.1.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ ├── a-2.2.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ ├── a-2.3.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ └── a-3.0.0 -/// │ └── requires b==3.0.0 -/// │ └── satisfied by b-3.0.0 -/// ├── b -/// │ ├── b-1.0.0 -/// │ ├── b-2.0.0 -/// │ └── b-3.0.0 -/// └── c -/// ├── c-1.0.0 -/// │ └── requires a<2.0.0 -/// │ └── satisfied by a-1.0.0 -/// └── c-2.0.0 -/// └── requires a>=3.0.0 -/// └── satisfied by a-3.0.0 -/// ``` -#[test] -fn dependency_excludes_range_of_compatible_versions() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-1cd99bd0", "albatross")); - filters.push((r"b-1cd99bd0", "bluebird")); - filters.push((r"c-1cd99bd0", "crow")); - filters.push((r"-1cd99bd0", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-1cd99bd0") - .arg("b-1cd99bd0<3.0.0,>=2.0.0") - .arg("c-1cd99bd0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there are no versions of albatross that satisfy any of: - albatross<1.0.0 - albatross>1.0.0,<2.0.0 - albatross>3.0.0 - and albatross==1.0.0 depends on bluebird==1.0.0, we can conclude that albatross<2.0.0 depends on bluebird==1.0.0. 
(1) - - Because there are no versions of crow that satisfy any of: - crow<1.0.0 - crow>1.0.0,<2.0.0 - crow>2.0.0 - and crow==1.0.0 depends on albatross<2.0.0, we can conclude that crow<2.0.0 depends on albatross<2.0.0. - And because crow==2.0.0 depends on albatross>=3.0.0 we can conclude that all versions of crow depend on one of: - albatross<2.0.0 - albatross>=3.0.0 - - And because we know from (1) that albatross<2.0.0 depends on bluebird==1.0.0, we can conclude that albatross!=3.0.0, bluebird!=1.0.0, all versions of crow are incompatible. - And because albatross==3.0.0 depends on bluebird==3.0.0 we can conclude that all versions of crow depend on one of: - bluebird<=1.0.0 - bluebird>=3.0.0 - - And because you require bluebird>=2.0.0,<3.0.0 and you require crow, we can conclude that the requirements are unsatisfiable. - "###); - }); - - // Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` - // require incompatible versions of `b`, but all available versions of `c` exclude - // that range of `a` so resolution fails. - assert_not_installed(&venv, "a_1cd99bd0", &temp_dir); - assert_not_installed(&venv, "b_1cd99bd0", &temp_dir); - assert_not_installed(&venv, "c_1cd99bd0", &temp_dir); - - Ok(()) -} - -/// dependency-excludes-non-contiguous-range-of-compatible-versions -/// -/// There is a non-contiguous range of compatible versions for the requested package -/// `a`, but another dependency `c` excludes the range. This is the same as -/// `dependency-excludes-range-of-compatible-versions` but some of the versions of -/// `a` are incompatible for another reason e.g. dependency on non-existant package -/// `d`. -/// -/// ```text -/// 0fd25b39 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ ├── satisfied by a-1.0.0 -/// │ │ ├── satisfied by a-2.0.0 -/// │ │ ├── satisfied by a-2.1.0 -/// │ │ ├── satisfied by a-2.2.0 -/// │ │ ├── satisfied by a-2.3.0 -/// │ │ ├── satisfied by a-2.4.0 -/// │ │ └── satisfied by a-3.0.0 -/// │ ├── requires b<3.0.0,>=2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ └── requires c -/// │ ├── satisfied by c-1.0.0 -/// │ └── satisfied by c-2.0.0 -/// ├── a -/// │ ├── a-1.0.0 -/// │ │ └── requires b==1.0.0 -/// │ │ └── satisfied by b-1.0.0 -/// │ ├── a-2.0.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ ├── a-2.1.0 -/// │ │ ├── requires b==2.0.0 -/// │ │ │ └── satisfied by b-2.0.0 -/// │ │ └── requires d -/// │ │ └── unsatisfied: no versions for package -/// │ ├── a-2.2.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ ├── a-2.3.0 -/// │ │ ├── requires b==2.0.0 -/// │ │ │ └── satisfied by b-2.0.0 -/// │ │ └── requires d -/// │ │ └── unsatisfied: no versions for package -/// │ ├── a-2.4.0 -/// │ │ └── requires b==2.0.0 -/// │ │ └── satisfied by b-2.0.0 -/// │ └── a-3.0.0 -/// │ └── requires b==3.0.0 -/// │ └── satisfied by b-3.0.0 -/// ├── b -/// │ ├── b-1.0.0 -/// │ ├── b-2.0.0 -/// │ └── b-3.0.0 -/// └── c -/// ├── c-1.0.0 -/// │ └── requires a<2.0.0 -/// │ └── satisfied by a-1.0.0 -/// └── c-2.0.0 -/// └── requires a>=3.0.0 -/// └── satisfied by a-3.0.0 -/// ``` -#[test] -fn dependency_excludes_non_contiguous_range_of_compatible_versions() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - 
filters.push((r"a-0fd25b39", "albatross")); - filters.push((r"b-0fd25b39", "bluebird")); - filters.push((r"c-0fd25b39", "crow")); - filters.push((r"-0fd25b39", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-0fd25b39") - .arg("b-0fd25b39<3.0.0,>=2.0.0") - .arg("c-0fd25b39") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there are no versions of crow that satisfy any of: - crow<1.0.0 - crow>1.0.0,<2.0.0 - crow>2.0.0 - and crow==1.0.0 depends on albatross<2.0.0, we can conclude that crow<2.0.0 depends on albatross<2.0.0. (1) - - Because albatross==1.0.0 depends on bluebird==1.0.0 and there are no versions of albatross that satisfy any of: - albatross<1.0.0 - albatross>1.0.0,<2.0.0 - we can conclude that albatross<2.0.0 depends on bluebird==1.0.0. - And because we know from (1) that crow<2.0.0 depends on albatross<2.0.0, we can conclude that crow<2.0.0 depends on bluebird==1.0.0. - And because crow==2.0.0 depends on albatross>=3.0.0 we can conclude that all versions of crow, bluebird!=1.0.0, albatross<3.0.0 are incompatible. (2) - - Because albatross==3.0.0 depends on bluebird==3.0.0 and only albatross<=3.0.0 is available, we can conclude that albatross>=3.0.0 depends on bluebird==3.0.0. - And because we know from (2) that all versions of crow, bluebird!=1.0.0, albatross<3.0.0 are incompatible, we can conclude that all versions of crow depend on one of: - bluebird<=1.0.0 - bluebird>=3.0.0 - - And because you require crow and you require bluebird>=2.0.0,<3.0.0, we can conclude that the requirements are unsatisfiable. - "###); - }); - - // Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` - // require incompatible versions of `b`, but all available versions of `c` exclude - // that range of `a` so resolution fails. - assert_not_installed(&venv, "a_0fd25b39", &temp_dir); - assert_not_installed(&venv, "b_0fd25b39", &temp_dir); - assert_not_installed(&venv, "c_0fd25b39", &temp_dir); - - Ok(()) -} - -/// extra-required -/// -/// Optional dependencies are requested for the package. 
-/// -/// ```text -/// 76e5355c -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a[extra] -/// │ ├── satisfied by a-1.0.0 -/// │ └── satisfied by a-1.0.0[extra] -/// ├── a -/// │ ├── a-1.0.0 -/// │ └── a-1.0.0[extra] -/// │ └── requires b -/// │ └── satisfied by b-1.0.0 -/// └── b -/// └── b-1.0.0 -/// ``` -#[test] -fn extra_required() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-76e5355c", "albatross")); - filters.push((r"b-76e5355c", "bluebird")); - filters.push((r"-76e5355c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-76e5355c[extra]") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + albatross==1.0.0 - + bluebird==1.0.0 - "###); - }); - - assert_installed(&venv, "a_76e5355c", "1.0.0", &temp_dir); - assert_installed(&venv, "b_76e5355c", "1.0.0", &temp_dir); - - Ok(()) -} - -/// missing-extra -/// -/// Optional dependencies are requested for the package, but the extra does not -/// exist. -/// -/// ```text -/// 06e7489c -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a[extra] -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn missing_extra() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-06e7489c", "albatross")); - filters.push((r"-06e7489c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-06e7489c[extra]") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0 - "###); - }); - - // Missing extras are ignored during resolution. - assert_installed(&venv, "a_06e7489c", "1.0.0", &temp_dir); - - Ok(()) -} - -/// multiple-extras-required -/// -/// Multiple optional dependencies are requested for the package. 
-/// -/// ```text -/// e55f15c4 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a[extra_b,extra_c] -/// │ ├── satisfied by a-1.0.0 -/// │ ├── satisfied by a-1.0.0[extra_b] -/// │ └── satisfied by a-1.0.0[extra_c] -/// ├── a -/// │ ├── a-1.0.0 -/// │ ├── a-1.0.0[extra_b] -/// │ │ └── requires b -/// │ │ └── satisfied by b-1.0.0 -/// │ └── a-1.0.0[extra_c] -/// │ └── requires c -/// │ └── satisfied by c-1.0.0 -/// ├── b -/// │ └── b-1.0.0 -/// └── c -/// └── c-1.0.0 -/// ``` -#[test] -fn multiple_extras_required() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-e55f15c4", "albatross")); - filters.push((r"b-e55f15c4", "bluebird")); - filters.push((r"c-e55f15c4", "crow")); - filters.push((r"-e55f15c4", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-e55f15c4[extra_b,extra_c]") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 3 packages in [TIME] - Downloaded 3 packages in [TIME] - Installed 3 packages in [TIME] - + albatross==1.0.0 - + bluebird==1.0.0 - + crow==1.0.0 - "###); - }); - - assert_installed(&venv, "a_e55f15c4", "1.0.0", &temp_dir); - assert_installed(&venv, "b_e55f15c4", "1.0.0", &temp_dir); - assert_installed(&venv, "c_e55f15c4", "1.0.0", &temp_dir); - - Ok(()) -} - -/// extra-incompatible-with-extra -/// -/// Multiple optional dependencies are requested for the package, but they have -/// conflicting requirements with each other. 
-/// -/// ```text -/// 492741b0 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a[extra_b,extra_c] -/// │ ├── satisfied by a-1.0.0 -/// │ ├── satisfied by a-1.0.0[extra_b] -/// │ └── satisfied by a-1.0.0[extra_c] -/// ├── a -/// │ ├── a-1.0.0 -/// │ ├── a-1.0.0[extra_b] -/// │ │ └── requires b==1.0.0 -/// │ │ └── satisfied by b-1.0.0 -/// │ └── a-1.0.0[extra_c] -/// │ └── requires b==2.0.0 -/// │ └── satisfied by b-2.0.0 -/// └── b -/// ├── b-1.0.0 -/// └── b-2.0.0 -/// ``` -#[test] -fn extra_incompatible_with_extra() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-492741b0", "albatross")); - filters.push((r"b-492741b0", "bluebird")); - filters.push((r"-492741b0", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-492741b0[extra_b,extra_c]") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only albatross[extra-c]==1.0.0 is available and albatross[extra-c]==1.0.0 depends on bluebird==2.0.0, we can conclude that all versions of albatross[extra-c] depend on bluebird==2.0.0. - And because albatross[extra-b]==1.0.0 depends on bluebird==1.0.0 and only albatross[extra-b]==1.0.0 is available, we can conclude that all versions of albatross[extra-b] and all versions of albatross[extra-c] are incompatible. - And because you require albatross[extra-c] and you require albatross[extra-b], we can conclude that the requirements are unsatisfiable. - "###); - }); - - // Because both `extra_b` and `extra_c` are requested and they require incompatible - // versions of `b`, `a` cannot be installed. - assert_not_installed(&venv, "a_492741b0", &temp_dir); - - Ok(()) -} - -/// extra-incompatible-with-extra-not-requested -/// -/// One of two incompatible optional dependencies are requested for the package. 
-/// -/// ```text -/// f0b0089a -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a[extra_c] -/// │ ├── satisfied by a-1.0.0 -/// │ ├── satisfied by a-1.0.0[extra_b] -/// │ └── satisfied by a-1.0.0[extra_c] -/// ├── a -/// │ ├── a-1.0.0 -/// │ ├── a-1.0.0[extra_b] -/// │ │ └── requires b==1.0.0 -/// │ │ └── satisfied by b-1.0.0 -/// │ └── a-1.0.0[extra_c] -/// │ └── requires b==2.0.0 -/// │ └── satisfied by b-2.0.0 -/// └── b -/// ├── b-1.0.0 -/// └── b-2.0.0 -/// ``` -#[test] -fn extra_incompatible_with_extra_not_requested() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-f0b0089a", "albatross")); - filters.push((r"b-f0b0089a", "bluebird")); - filters.push((r"-f0b0089a", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-f0b0089a[extra_c]") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + albatross==1.0.0 - + bluebird==2.0.0 - "###); - }); - - // Because the user does not request both extras, it is okay that one is - // incompatible with the other. - assert_installed(&venv, "a_f0b0089a", "1.0.0", &temp_dir); - assert_installed(&venv, "b_f0b0089a", "2.0.0", &temp_dir); - - Ok(()) -} - -/// extra-incompatible-with-root -/// -/// Optional dependencies are requested for the package, but the extra is not -/// compatible with other requested versions. 
-/// -/// ```text -/// 9d588075 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a[extra] -/// │ │ ├── satisfied by a-1.0.0 -/// │ │ └── satisfied by a-1.0.0[extra] -/// │ └── requires b==2.0.0 -/// │ └── satisfied by b-2.0.0 -/// ├── a -/// │ ├── a-1.0.0 -/// │ └── a-1.0.0[extra] -/// │ └── requires b==1.0.0 -/// │ └── satisfied by b-1.0.0 -/// └── b -/// ├── b-1.0.0 -/// └── b-2.0.0 -/// ``` -#[test] -fn extra_incompatible_with_root() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-9d588075", "albatross")); - filters.push((r"b-9d588075", "bluebird")); - filters.push((r"-9d588075", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-9d588075[extra]") - .arg("b-9d588075==2.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because albatross[extra]==1.0.0 depends on bluebird==1.0.0 and only albatross[extra]==1.0.0 is available, we can conclude that all versions of albatross[extra] depend on bluebird==1.0.0. - And because you require albatross[extra] and you require bluebird==2.0.0, we can conclude that the requirements are unsatisfiable. - "###); - }); - - // Because the user requested `b==2.0.0` but the requested extra requires - // `b==1.0.0`, the dependencies cannot be satisfied. - assert_not_installed(&venv, "a_9d588075", &temp_dir); - assert_not_installed(&venv, "b_9d588075", &temp_dir); - - Ok(()) -} - -/// extra-does-not-exist-backtrack -/// -/// Optional dependencies are requested for the package, the extra is only available -/// on an older version. 
-/// -/// ```text -/// f1877db3 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a[extra] -/// │ ├── satisfied by a-1.0.0 -/// │ ├── satisfied by a-1.0.0[extra] -/// │ ├── satisfied by a-2.0.0 -/// │ └── satisfied by a-3.0.0 -/// ├── a -/// │ ├── a-1.0.0 -/// │ ├── a-1.0.0[extra] -/// │ │ └── requires b==1.0.0 -/// │ │ └── satisfied by b-1.0.0 -/// │ ├── a-2.0.0 -/// │ └── a-3.0.0 -/// └── b -/// └── b-1.0.0 -/// ``` -#[test] -fn extra_does_not_exist_backtrack() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-f1877db3", "albatross")); - filters.push((r"b-f1877db3", "bluebird")); - filters.push((r"-f1877db3", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-f1877db3[extra]") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==3.0.0 - "###); - }); - - // The resolver should not backtrack to `a==1.0.0` because missing extras are - // allowed during resolution. `b` should not be installed. - assert_installed(&venv, "a_f1877db3", "3.0.0", &temp_dir); - - Ok(()) -} - -/// direct-incompatible-versions -/// -/// The user requires two incompatible, existing versions of package `a` -/// -/// ```text -/// 80d82ee8 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a==1.0.0 -/// │ │ └── satisfied by a-1.0.0 -/// │ └── requires a==2.0.0 -/// │ └── satisfied by a-2.0.0 -/// └── a -/// ├── a-1.0.0 -/// └── a-2.0.0 -/// ``` -#[test] -fn direct_incompatible_versions() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-80d82ee8", "albatross")); - filters.push((r"-80d82ee8", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-80d82ee8==1.0.0") - .arg("a-80d82ee8==2.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ root dependencies are unusable: Conflicting versions for `albatross`: `albatross==1.0.0` does not intersect with `albatross==2.0.0` - "###); - }); - - assert_not_installed(&venv, "a_80d82ee8", &temp_dir); - assert_not_installed(&venv, "a_80d82ee8", &temp_dir); - - Ok(()) -} - -/// transitive-incompatible-with-root-version -/// -/// The user requires packages `a` and `b` but `a` requires a different version of -/// `b` -/// -/// 
```text -/// a967e815 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-1.0.0 -/// │ └── requires b==1.0.0 -/// │ └── satisfied by b-1.0.0 -/// ├── a -/// │ └── a-1.0.0 -/// │ └── requires b==2.0.0 -/// │ └── satisfied by b-2.0.0 -/// └── b -/// ├── b-1.0.0 -/// └── b-2.0.0 -/// ``` -#[test] -fn transitive_incompatible_with_root_version() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-a967e815", "albatross")); - filters.push((r"b-a967e815", "bluebird")); - filters.push((r"-a967e815", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-a967e815") - .arg("b-a967e815==1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because albatross==1.0.0 depends on bluebird==2.0.0 and only albatross==1.0.0 is available, we can conclude that all versions of albatross depend on bluebird==2.0.0. - And because you require bluebird==1.0.0 and you require albatross, we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_a967e815", &temp_dir); - assert_not_installed(&venv, "b_a967e815", &temp_dir); - - Ok(()) -} - -/// transitive-incompatible-with-transitive -/// -/// The user requires package `a` and `b`; `a` and `b` require different versions of -/// `c` -/// -/// ```text -/// 6866d8dc -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-1.0.0 -/// │ └── requires b -/// │ └── satisfied by b-1.0.0 -/// ├── a -/// │ └── a-1.0.0 -/// │ └── requires c==1.0.0 -/// │ └── satisfied by c-1.0.0 -/// ├── b -/// │ └── b-1.0.0 -/// │ └── requires c==2.0.0 -/// │ └── satisfied by c-2.0.0 -/// └── c -/// ├── c-1.0.0 -/// └── c-2.0.0 -/// ``` -#[test] -fn transitive_incompatible_with_transitive() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-6866d8dc", "albatross")); - filters.push((r"b-6866d8dc", "bluebird")); - filters.push((r"c-6866d8dc", "crow")); - filters.push((r"-6866d8dc", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-6866d8dc") - .arg("b-6866d8dc") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only bluebird==1.0.0 is available and bluebird==1.0.0 depends on 
crow==2.0.0, we can conclude that all versions of bluebird depend on crow==2.0.0. - And because albatross==1.0.0 depends on crow==1.0.0 and only albatross==1.0.0 is available, we can conclude that all versions of bluebird and all versions of albatross are incompatible. - And because you require bluebird and you require albatross, we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_6866d8dc", &temp_dir); - assert_not_installed(&venv, "b_6866d8dc", &temp_dir); - - Ok(()) -} - -/// package-only-prereleases -/// -/// The user requires any version of package `a` which only has prerelease versions -/// available. -/// -/// ```text -/// 9a1b3dda -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── unsatisfied: no matching version -/// └── a -/// └── a-1.0.0a1 -/// ``` -#[test] -fn package_only_prereleases() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-9a1b3dda", "albatross")); - filters.push((r"-9a1b3dda", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-9a1b3dda") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0a1 - "###); - }); - - // Since there are only prerelease versions of `a` available, it should be - // installed even though the user did not include a prerelease specifier. - assert_installed(&venv, "a_9a1b3dda", "1.0.0a1", &temp_dir); - - Ok(()) -} - -/// package-only-prereleases-in-range -/// -/// The user requires a version of package `a` which only matches prerelease -/// versions but they did not include a prerelease specifier. 
-/// -/// ```text -/// 19673198 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>0.1.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// ├── a-0.1.0 -/// └── a-1.0.0a1 -/// ``` -#[test] -fn package_only_prereleases_in_range() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-19673198", "albatross")); - filters.push((r"-19673198", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-19673198>0.1.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only albatross<=0.1.0 is available and you require albatross>0.1.0, we can conclude that the requirements are unsatisfiable. - - hint: Pre-releases are available for albatross in the requested range (e.g., 1.0.0a1), but pre-releases weren't enabled (try: `--prerelease=allow`) - "###); - }); - - // Since there are stable versions of `a` available, prerelease versions should not - // be selected without explicit opt-in. - assert_not_installed(&venv, "a_19673198", &temp_dir); - - Ok(()) -} - -/// requires-package-only-prereleases-in-range-global-opt-in -/// -/// The user requires a version of package `a` which only matches prerelease -/// versions. They did not include a prerelease specifier for the package, but they -/// opted into prereleases globally. -/// -/// ```text -/// 51f94da2 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>0.1.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// ├── a-0.1.0 -/// └── a-1.0.0a1 -/// ``` -#[test] -fn requires_package_only_prereleases_in_range_global_opt_in() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-51f94da2", "albatross")); - filters.push((r"-51f94da2", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-51f94da2>0.1.0") - .arg("--prerelease=allow") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0a1 - "###); - }); - - assert_installed(&venv, "a_51f94da2", "1.0.0a1", &temp_dir); - - Ok(()) -} - -/// requires-package-prerelease-and-final-any -/// -/// The user requires any version of package `a` has a prerelease version available -/// and an older non-prerelease version. 
-/// -/// ```text -/// eebe53a6 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-0.1.0 -/// └── a -/// ├── a-0.1.0 -/// └── a-1.0.0a1 -/// ``` -#[test] -fn requires_package_prerelease_and_final_any() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-eebe53a6", "albatross")); - filters.push((r"-eebe53a6", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-eebe53a6") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==0.1.0 - "###); - }); - - // Since the user did not provide a prerelease specifier, the older stable version - // should be selected. - assert_installed(&venv, "a_eebe53a6", "0.1.0", &temp_dir); - - Ok(()) -} - -/// package-prerelease-specified-only-final-available -/// -/// The user requires a version of `a` with a prerelease specifier and only stable -/// releases are available. -/// -/// ```text -/// 9d4725eb -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>=0.1.0a1 -/// │ ├── satisfied by a-0.1.0 -/// │ ├── satisfied by a-0.2.0 -/// │ └── satisfied by a-0.3.0 -/// └── a -/// ├── a-0.1.0 -/// ├── a-0.2.0 -/// └── a-0.3.0 -/// ``` -#[test] -fn package_prerelease_specified_only_final_available() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-9d4725eb", "albatross")); - filters.push((r"-9d4725eb", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-9d4725eb>=0.1.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==0.3.0 - "###); - }); - - // The latest stable version should be selected. - assert_installed(&venv, "a_9d4725eb", "0.3.0", &temp_dir); - - Ok(()) -} - -/// package-prerelease-specified-only-prerelease-available -/// -/// The user requires a version of `a` with a prerelease specifier and only -/// prerelease releases are available. 
-/// -/// ```text -/// 6cc95bc8 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>=0.1.0a1 -/// │ ├── satisfied by a-0.1.0a1 -/// │ ├── satisfied by a-0.2.0a1 -/// │ └── satisfied by a-0.3.0a1 -/// └── a -/// ├── a-0.1.0a1 -/// ├── a-0.2.0a1 -/// └── a-0.3.0a1 -/// ``` -#[test] -fn package_prerelease_specified_only_prerelease_available() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-6cc95bc8", "albatross")); - filters.push((r"-6cc95bc8", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-6cc95bc8>=0.1.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==0.3.0a1 - "###); - }); - - // The latest prerelease version should be selected. - assert_installed(&venv, "a_6cc95bc8", "0.3.0a1", &temp_dir); - - Ok(()) -} - -/// package-prerelease-specified-mixed-available -/// -/// The user requires a version of `a` with a prerelease specifier and both -/// prerelease and stable releases are available. -/// -/// ```text -/// c97845e2 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>=0.1.0a1 -/// │ ├── satisfied by a-0.1.0 -/// │ ├── satisfied by a-0.2.0a1 -/// │ ├── satisfied by a-0.3.0 -/// │ └── satisfied by a-1.0.0a1 -/// └── a -/// ├── a-0.1.0 -/// ├── a-0.2.0a1 -/// ├── a-0.3.0 -/// └── a-1.0.0a1 -/// ``` -#[test] -fn package_prerelease_specified_mixed_available() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-c97845e2", "albatross")); - filters.push((r"-c97845e2", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-c97845e2>=0.1.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0a1 - "###); - }); - - // Since the user provided a prerelease specifier, the latest prerelease version - // should be selected. - assert_installed(&venv, "a_c97845e2", "1.0.0a1", &temp_dir); - - Ok(()) -} - -/// package-multiple-prereleases-kinds -/// -/// The user requires `a` which has multiple prereleases available with different -/// labels. 
-/// -/// ```text -/// e290bf29 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>=1.0.0a1 -/// │ ├── satisfied by a-1.0.0a1 -/// │ ├── satisfied by a-1.0.0b1 -/// │ └── satisfied by a-1.0.0rc1 -/// └── a -/// ├── a-1.0.0a1 -/// ├── a-1.0.0b1 -/// └── a-1.0.0rc1 -/// ``` -#[test] -fn package_multiple_prereleases_kinds() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-e290bf29", "albatross")); - filters.push((r"-e290bf29", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-e290bf29>=1.0.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0rc1 - "###); - }); - - // Release candidates should be the highest precedence prerelease kind. - assert_installed(&venv, "a_e290bf29", "1.0.0rc1", &temp_dir); - - Ok(()) -} - -/// package-multiple-prereleases-numbers -/// -/// The user requires `a` which has multiple alphas available. -/// -/// ```text -/// f5948c28 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a>=1.0.0a1 -/// │ ├── satisfied by a-1.0.0a1 -/// │ ├── satisfied by a-1.0.0a2 -/// │ └── satisfied by a-1.0.0a3 -/// └── a -/// ├── a-1.0.0a1 -/// ├── a-1.0.0a2 -/// └── a-1.0.0a3 -/// ``` -#[test] -fn package_multiple_prereleases_numbers() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-f5948c28", "albatross")); - filters.push((r"-f5948c28", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-f5948c28>=1.0.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0a3 - "###); - }); - - // The latest alpha version should be selected. - assert_installed(&venv, "a_f5948c28", "1.0.0a3", &temp_dir); - - Ok(()) -} - -/// transitive-package-only-prereleases -/// -/// The user requires any version of package `a` which requires `b` which only has -/// prerelease versions available. 
-/// -/// ```text -/// 44ebef16 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-0.1.0 -/// ├── a -/// │ └── a-0.1.0 -/// │ └── requires b -/// │ └── unsatisfied: no matching version -/// └── b -/// └── b-1.0.0a1 -/// ``` -#[test] -fn transitive_package_only_prereleases() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-44ebef16", "albatross")); - filters.push((r"b-44ebef16", "bluebird")); - filters.push((r"-44ebef16", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-44ebef16") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + albatross==0.1.0 - + bluebird==1.0.0a1 - "###); - }); - - // Since there are only prerelease versions of `b` available, it should be selected - // even though the user did not opt-in to prereleases. - assert_installed(&venv, "a_44ebef16", "0.1.0", &temp_dir); - assert_installed(&venv, "b_44ebef16", "1.0.0a1", &temp_dir); - - Ok(()) -} - -/// transitive-package-only-prereleases-in-range -/// -/// The user requires package `a` which has a dependency on a package which only -/// matches prerelease versions but they did not include a prerelease specifier. -/// -/// ```text -/// 27759187 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-0.1.0 -/// ├── a -/// │ └── a-0.1.0 -/// │ └── requires b>0.1 -/// │ └── unsatisfied: no matching version -/// └── b -/// ├── b-0.1.0 -/// └── b-1.0.0a1 -/// ``` -#[test] -fn transitive_package_only_prereleases_in_range() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-27759187", "albatross")); - filters.push((r"b-27759187", "bluebird")); - filters.push((r"-27759187", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-27759187") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only bluebird<=0.1 is available and albatross==0.1.0 depends on bluebird>0.1, we can conclude that albatross==0.1.0 cannot be used. - And because only albatross==0.1.0 is available and you require albatross, we can conclude that the requirements are unsatisfiable. 
- - hint: Pre-releases are available for bluebird in the requested range (e.g., 1.0.0a1), but pre-releases weren't enabled (try: `--prerelease=allow`) - "###); - }); - - // Since there are stable versions of `b` available, the prerelease version should - // not be selected without explicit opt-in. The available version is excluded by - // the range requested by the user. - assert_not_installed(&venv, "a_27759187", &temp_dir); - - Ok(()) -} - -/// transitive-package-only-prereleases-in-range-opt-in -/// -/// The user requires package `a` which has a dependency on a package which only -/// matches prerelease versions; the user has opted into allowing prereleases in `b` -/// explicitly. -/// -/// ```text -/// 26efb6c5 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-0.1.0 -/// │ └── requires b>0.0.0a1 -/// │ └── satisfied by b-0.1.0 -/// ├── a -/// │ └── a-0.1.0 -/// │ └── requires b>0.1 -/// │ └── unsatisfied: no matching version -/// └── b -/// ├── b-0.1.0 -/// └── b-1.0.0a1 -/// ``` -#[test] -fn transitive_package_only_prereleases_in_range_opt_in() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-26efb6c5", "albatross")); - filters.push((r"b-26efb6c5", "bluebird")); - filters.push((r"-26efb6c5", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-26efb6c5") - .arg("b-26efb6c5>0.0.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + albatross==0.1.0 - + bluebird==1.0.0a1 - "###); - }); - - // Since the user included a dependency on `b` with a prerelease specifier, a - // prerelease version can be selected. 
- assert_installed(&venv, "a_26efb6c5", "0.1.0", &temp_dir); - assert_installed(&venv, "b_26efb6c5", "1.0.0a1", &temp_dir); - - Ok(()) -} - -/// transitive-prerelease-and-stable-dependency -/// -/// A transitive dependency has both a prerelease and a stable selector, but can -/// only be satisfied by a prerelease -/// -/// ```text -/// f8aeea37 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-1.0.0 -/// │ └── requires b -/// │ └── satisfied by b-1.0.0 -/// ├── a -/// │ └── a-1.0.0 -/// │ └── requires c==2.0.0b1 -/// │ └── satisfied by c-2.0.0b1 -/// ├── b -/// │ └── b-1.0.0 -/// │ └── requires c<=3.0.0,>=1.0.0 -/// │ └── satisfied by c-1.0.0 -/// └── c -/// ├── c-1.0.0 -/// └── c-2.0.0b1 -/// ``` -#[test] -fn transitive_prerelease_and_stable_dependency() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-f8aeea37", "albatross")); - filters.push((r"b-f8aeea37", "bluebird")); - filters.push((r"c-f8aeea37", "crow")); - filters.push((r"-f8aeea37", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-f8aeea37") - .arg("b-f8aeea37") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there is no version of crow==2.0.0b1 and albatross==1.0.0 depends on crow==2.0.0b1, we can conclude that albatross==1.0.0 cannot be used. - And because only albatross==1.0.0 is available and you require albatross, we can conclude that the requirements are unsatisfiable. - - hint: crow was requested with a pre-release marker (e.g., crow==2.0.0b1), but pre-releases weren't enabled (try: `--prerelease=allow`) - "###); - }); - - // Since the user did not explicitly opt-in to a prerelease, it cannot be selected. - assert_not_installed(&venv, "a_f8aeea37", &temp_dir); - assert_not_installed(&venv, "b_f8aeea37", &temp_dir); - - Ok(()) -} - -/// transitive-prerelease-and-stable-dependency-opt-in -/// -/// A transitive dependency has both a prerelease and a stable selector, but can -/// only be satisfied by a prerelease. The user includes an opt-in to prereleases of -/// the transitive dependency. 
-/// -/// ```text -/// 184fc65f -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-1.0.0 -/// │ ├── requires b -/// │ │ └── satisfied by b-1.0.0 -/// │ └── requires c>=0.0.0a1 -/// │ ├── satisfied by c-1.0.0 -/// │ └── satisfied by c-2.0.0b1 -/// ├── a -/// │ └── a-1.0.0 -/// │ └── requires c==2.0.0b1 -/// │ └── satisfied by c-2.0.0b1 -/// ├── b -/// │ └── b-1.0.0 -/// │ └── requires c<=3.0.0,>=1.0.0 -/// │ └── satisfied by c-1.0.0 -/// └── c -/// ├── c-1.0.0 -/// └── c-2.0.0b1 -/// ``` -#[test] -fn transitive_prerelease_and_stable_dependency_opt_in() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-184fc65f", "albatross")); - filters.push((r"b-184fc65f", "bluebird")); - filters.push((r"c-184fc65f", "crow")); - filters.push((r"-184fc65f", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-184fc65f") - .arg("b-184fc65f") - .arg("c-184fc65f>=0.0.0a1") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 3 packages in [TIME] - Downloaded 3 packages in [TIME] - Installed 3 packages in [TIME] - + albatross==1.0.0 - + bluebird==1.0.0 - + crow==2.0.0b1 - "###); - }); - - // Since the user explicitly opted-in to a prerelease for `c`, it can be installed. - assert_installed(&venv, "a_184fc65f", "1.0.0", &temp_dir); - assert_installed(&venv, "b_184fc65f", "1.0.0", &temp_dir); - assert_installed(&venv, "c_184fc65f", "2.0.0b1", &temp_dir); - - Ok(()) -} - -/// transitive-prerelease-and-stable-dependency-many-versions -/// -/// A transitive dependency has both a prerelease and a stable selector, but can -/// only be satisfied by a prerelease. There are many prerelease versions. 
-/// -/// ```text -/// 7017673e -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-1.0.0 -/// │ └── requires b -/// │ └── satisfied by b-1.0.0 -/// ├── a -/// │ └── a-1.0.0 -/// │ └── requires c>=2.0.0b1 -/// │ ├── satisfied by c-2.0.0b1 -/// │ ├── satisfied by c-2.0.0b2 -/// │ ├── satisfied by c-2.0.0b3 -/// │ ├── satisfied by c-2.0.0b4 -/// │ ├── satisfied by c-2.0.0b5 -/// │ ├── satisfied by c-2.0.0b6 -/// │ ├── satisfied by c-2.0.0b7 -/// │ ├── satisfied by c-2.0.0b8 -/// │ └── satisfied by c-2.0.0b9 -/// ├── b -/// │ └── b-1.0.0 -/// │ └── requires c<=3.0.0,>=1.0.0 -/// │ └── satisfied by c-1.0.0 -/// └── c -/// ├── c-1.0.0 -/// ├── c-2.0.0a1 -/// ├── c-2.0.0a2 -/// ├── c-2.0.0a3 -/// ├── c-2.0.0a4 -/// ├── c-2.0.0a5 -/// ├── c-2.0.0a6 -/// ├── c-2.0.0a7 -/// ├── c-2.0.0a8 -/// ├── c-2.0.0a9 -/// ├── c-2.0.0b1 -/// ├── c-2.0.0b2 -/// ├── c-2.0.0b3 -/// ├── c-2.0.0b4 -/// ├── c-2.0.0b5 -/// ├── c-2.0.0b6 -/// ├── c-2.0.0b7 -/// ├── c-2.0.0b8 -/// └── c-2.0.0b9 -/// ``` -#[test] -fn transitive_prerelease_and_stable_dependency_many_versions() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-7017673e", "albatross")); - filters.push((r"b-7017673e", "bluebird")); - filters.push((r"c-7017673e", "crow")); - filters.push((r"-7017673e", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-7017673e") - .arg("b-7017673e") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only bluebird==1.0.0 is available and bluebird==1.0.0 depends on crow, we can conclude that all versions of bluebird depend on crow. - And because only crow<2.0.0b1 is available we can conclude that all versions of bluebird depend on crow<2.0.0b1. - And because albatross==1.0.0 depends on crow>=2.0.0b1 and only albatross==1.0.0 is available, we can conclude that all versions of bluebird and all versions of albatross are incompatible. - And because you require bluebird and you require albatross, we can conclude that the requirements are unsatisfiable. - - hint: crow was requested with a pre-release marker (e.g., crow>=2.0.0b1), but pre-releases weren't enabled (try: `--prerelease=allow`) - "###); - }); - - // Since the user did not explicitly opt-in to a prerelease, it cannot be selected. - assert_not_installed(&venv, "a_7017673e", &temp_dir); - assert_not_installed(&venv, "b_7017673e", &temp_dir); - - Ok(()) -} - -/// transitive-prerelease-and-stable-dependency-many-versions-holes -/// -/// A transitive dependency has both a prerelease and a stable selector, but can -/// only be satisfied by a prerelease. There are many prerelease versions and some -/// are excluded. 
-/// -/// ```text -/// aaee5052 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ ├── requires a -/// │ │ └── satisfied by a-1.0.0 -/// │ └── requires b -/// │ └── satisfied by b-1.0.0 -/// ├── a -/// │ └── a-1.0.0 -/// │ └── requires c!=2.0.0a5,!=2.0.0a6,!=2.0.0a7,!=2.0.0b1,<2.0.0b5,>1.0.0 -/// │ └── unsatisfied: no matching version -/// ├── b -/// │ └── b-1.0.0 -/// │ └── requires c<=3.0.0,>=1.0.0 -/// │ └── satisfied by c-1.0.0 -/// └── c -/// ├── c-1.0.0 -/// ├── c-2.0.0a1 -/// ├── c-2.0.0a2 -/// ├── c-2.0.0a3 -/// ├── c-2.0.0a4 -/// ├── c-2.0.0a5 -/// ├── c-2.0.0a6 -/// ├── c-2.0.0a7 -/// ├── c-2.0.0a8 -/// ├── c-2.0.0a9 -/// ├── c-2.0.0b1 -/// ├── c-2.0.0b2 -/// ├── c-2.0.0b3 -/// ├── c-2.0.0b4 -/// ├── c-2.0.0b5 -/// ├── c-2.0.0b6 -/// ├── c-2.0.0b7 -/// ├── c-2.0.0b8 -/// └── c-2.0.0b9 -/// ``` -#[test] -fn transitive_prerelease_and_stable_dependency_many_versions_holes() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-aaee5052", "albatross")); - filters.push((r"b-aaee5052", "bluebird")); - filters.push((r"c-aaee5052", "crow")); - filters.push((r"-aaee5052", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-aaee5052") - .arg("b-aaee5052") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there are no versions of crow that satisfy any of: - crow>1.0.0,<2.0.0a5 - crow>2.0.0a7,<2.0.0b1 - crow>2.0.0b1,<2.0.0b5 - and albatross==1.0.0 depends on one of: - crow>1.0.0,<2.0.0a5 - crow>2.0.0a7,<2.0.0b1 - crow>2.0.0b1,<2.0.0b5 - we can conclude that albatross==1.0.0 cannot be used. - And because only albatross==1.0.0 is available and you require albatross, we can conclude that the requirements are unsatisfiable. - - hint: crow was requested with a pre-release marker (e.g., any of: - crow>1.0.0,<2.0.0a5 - crow>2.0.0a7,<2.0.0b1 - crow>2.0.0b1,<2.0.0b5 - ), but pre-releases weren't enabled (try: `--prerelease=allow`) - "###); - }); - - // Since the user did not explicitly opt-in to a prerelease, it cannot be selected. 
- assert_not_installed(&venv, "a_aaee5052", &temp_dir); - assert_not_installed(&venv, "b_aaee5052", &temp_dir); - - Ok(()) -} - -/// requires-python-version-does-not-exist -/// -/// The user requires a package which requires a Python version that does not exist -/// -/// ```text -/// 0825b69c -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a==1.0.0 -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// └── requires python>=4.0 (incompatible with environment) -/// ``` -#[test] -fn requires_python_version_does_not_exist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-0825b69c", "albatross")); - filters.push((r"-0825b69c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-0825b69c==1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only Python<4.0 is available and albatross==1.0.0 depends on Python>=4.0, we can conclude that albatross==1.0.0 cannot be used. - And because you require albatross==1.0.0 we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_0825b69c", &temp_dir); - - Ok(()) -} - -/// requires-python-version-less-than-current -/// -/// The user requires a package which requires a Python version less than the -/// current version -/// -/// ```text -/// f9296b84 -/// ├── environment -/// │ └── python3.9 -/// ├── root -/// │ └── requires a==1.0.0 -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// └── requires python<=3.8 (incompatible with environment) -/// ``` -#[test] -fn requires_python_version_less_than_current() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.9"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-f9296b84", "albatross")); - filters.push((r"-f9296b84", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-f9296b84==1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only Python>3.8 is available and albatross==1.0.0 depends on Python<=3.8, we can conclude that albatross==1.0.0 cannot be used. - And because you require albatross==1.0.0 we can conclude that the requirements are unsatisfiable. 
- "###); - }); - - assert_not_installed(&venv, "a_f9296b84", &temp_dir); - - Ok(()) -} - -/// requires-python-version-greater-than-current -/// -/// The user requires a package which requires a Python version greater than the -/// current version -/// -/// ```text -/// a11d5394 -/// ├── environment -/// │ └── python3.9 -/// ├── root -/// │ └── requires a==1.0.0 -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// └── requires python>=3.10 (incompatible with environment) -/// ``` -#[test] -fn requires_python_version_greater_than_current() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.9"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-a11d5394", "albatross")); - filters.push((r"-a11d5394", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-a11d5394==1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because only Python<3.10 is available and albatross==1.0.0 depends on Python>=3.10, we can conclude that albatross==1.0.0 cannot be used. - And because you require albatross==1.0.0 we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_a11d5394", &temp_dir); - - Ok(()) -} - -/// requires-python-version-greater-than-current-many -/// -/// The user requires a package which has many versions which all require a Python -/// version greater than the current version -/// -/// ```text -/// 02dc550c -/// ├── environment -/// │ └── python3.9 -/// ├── root -/// │ └── requires a==1.0.0 -/// │ └── unsatisfied: no matching version -/// └── a -/// ├── a-2.0.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-2.1.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-2.2.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-2.3.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-2.4.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-2.5.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-3.0.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// ├── a-3.1.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// ├── a-3.2.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// ├── a-3.3.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// ├── a-3.4.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// └── a-3.5.0 -/// └── requires python>=3.11 (incompatible with environment) -/// ``` -#[test] -fn requires_python_version_greater_than_current_many() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.9"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - 
filters.push((r"a-02dc550c", "albatross")); - filters.push((r"-02dc550c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-02dc550c==1.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there is no version of albatross==1.0.0 and you require albatross==1.0.0, we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_02dc550c", &temp_dir); - - Ok(()) -} - -/// requires-python-version-greater-than-current-backtrack -/// -/// The user requires a package where recent versions require a Python version -/// greater than the current version, but an older version is compatible. -/// -/// ```text -/// ef060cef -/// ├── environment -/// │ └── python3.9 -/// ├── root -/// │ └── requires a -/// │ ├── satisfied by a-1.0.0 -/// │ ├── satisfied by a-2.0.0 -/// │ ├── satisfied by a-3.0.0 -/// │ └── satisfied by a-4.0.0 -/// └── a -/// ├── a-1.0.0 -/// ├── a-2.0.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-3.0.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// └── a-4.0.0 -/// └── requires python>=3.12 (incompatible with environment) -/// ``` -#[test] -fn requires_python_version_greater_than_current_backtrack() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.9"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-ef060cef", "albatross")); - filters.push((r"-ef060cef", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-ef060cef") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0 - "###); - }); - - assert_installed(&venv, "a_ef060cef", "1.0.0", &temp_dir); - - Ok(()) -} - -/// requires-python-version-greater-than-current-excluded -/// -/// The user requires a package where recent versions require a Python version -/// greater than the current version, but an excluded older version is compatible. 
-/// -/// ```text -/// 1bde0c18 -/// ├── environment -/// │ └── python3.9 -/// ├── root -/// │ └── requires a>=2.0.0 -/// │ ├── satisfied by a-2.0.0 -/// │ ├── satisfied by a-3.0.0 -/// │ └── satisfied by a-4.0.0 -/// └── a -/// ├── a-1.0.0 -/// ├── a-2.0.0 -/// │ └── requires python>=3.10 (incompatible with environment) -/// ├── a-3.0.0 -/// │ └── requires python>=3.11 (incompatible with environment) -/// └── a-4.0.0 -/// └── requires python>=3.12 (incompatible with environment) -/// ``` -#[test] -fn requires_python_version_greater_than_current_excluded() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.9"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-1bde0c18", "albatross")); - filters.push((r"-1bde0c18", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-1bde0c18>=2.0.0") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - × No solution found when resolving dependencies: - ╰─▶ Because there are no versions of Python that satisfy Python>=3.10,<3.11 and only Python<3.12 is available, we can conclude that any of: - Python>=3.10,<3.11 - Python>=3.12 - are incompatible. - And because there are no versions of Python that satisfy Python>=3.11,<3.12 we can conclude that Python>=3.10 are incompatible. - And because albatross==2.0.0 depends on Python>=3.10 and there are no versions of albatross that satisfy any of: - albatross>2.0.0,<3.0.0 - albatross>3.0.0,<4.0.0 - albatross>4.0.0 - we can conclude that albatross>=2.0.0,<3.0.0 cannot be used. (1) - - Because there are no versions of Python that satisfy Python>=3.11,<3.12 and only Python<3.12 is available, we can conclude that Python>=3.11 are incompatible. - And because albatross==3.0.0 depends on Python>=3.11 we can conclude that albatross==3.0.0 cannot be used. - And because we know from (1) that albatross>=2.0.0,<3.0.0 cannot be used, we can conclude that albatross>=2.0.0,<4.0.0 cannot be used. (2) - - Because only Python<3.12 is available and albatross==4.0.0 depends on Python>=3.12, we can conclude that albatross==4.0.0 cannot be used. - And because we know from (2) that albatross>=2.0.0,<4.0.0 cannot be used, we can conclude that albatross>=2.0.0 cannot be used. - And because you require albatross>=2.0.0 we can conclude that the requirements are unsatisfiable. - "###); - }); - - assert_not_installed(&venv, "a_1bde0c18", &temp_dir); - - Ok(()) -} - -/// specific-tag-and-default -/// -/// A wheel for a specific platform is available alongside the default. 
-/// -/// ```text -/// 74e4a459 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn specific_tag_and_default() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-74e4a459", "albatross")); - filters.push((r"-74e4a459", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-74e4a459") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0 - "###); - }); - - Ok(()) -} - -/// only-wheels -/// -/// No source distributions are available, only wheels. -/// -/// ```text -/// 4f019491 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn only_wheels() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-4f019491", "albatross")); - filters.push((r"-4f019491", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-4f019491") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0 - "###); - }); - - Ok(()) -} - -/// no-wheels -/// -/// No wheels are available, only source distributions. 
-/// -/// ```text -/// 614d801c -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn no_wheels() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-614d801c", "albatross")); - filters.push((r"-614d801c", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-614d801c") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0 - "###); - }); - - Ok(()) -} - -/// no-wheels-with-matching-platform -/// -/// No wheels with valid tags are available, just source distributions. -/// -/// ```text -/// 737bbfd4 -/// ├── environment -/// │ └── python3.7 -/// ├── root -/// │ └── requires a -/// │ └── satisfied by a-1.0.0 -/// └── a -/// └── a-1.0.0 -/// ``` -#[test] -fn no_wheels_with_matching_platform() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv(&temp_dir, &cache_dir, "python3.7"); - - // In addition to the standard filters, swap out package names for more realistic messages - let mut filters = INSTA_FILTERS.to_vec(); - filters.push((r"a-737bbfd4", "albatross")); - filters.push((r"-737bbfd4", "")); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("install") - .arg("a-737bbfd4") - .arg("--extra-index-url") - .arg("https://test.pypi.org/simple") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("PUFFIN_NO_WRAP", "1") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + albatross==1.0.0 - "###); - }); - - Ok(()) -} diff --git a/crates/puffin/tests/pip_sync.rs b/crates/puffin/tests/pip_sync.rs deleted file mode 100644 index 3f2684a672d5..000000000000 --- a/crates/puffin/tests/pip_sync.rs +++ /dev/null @@ -1,3022 +0,0 @@ -#![cfg(all(feature = "python", feature = "pypi"))] - -use std::iter; -use std::path::Path; -use std::process::Command; - -use anyhow::{Context, Result}; -use assert_cmd::prelude::*; -use assert_fs::prelude::*; -use indoc::indoc; -use insta_cmd::_macro_support::insta; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; - -use common::{create_venv_py312, BIN_NAME, INSTA_FILTERS}; - -mod common; - -fn check_command(venv: &Path, command: &str, temp_dir: &Path) { - Command::new(venv.join("bin").join("python")) - // https://github.com/python/cpython/issues/75953 - .arg("-B") - .arg("-c") - .arg(command) - .current_dir(temp_dir) - .assert() - .success(); -} - -#[test] -fn missing_requirements_txt() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = 
assert_fs::TempDir::new()?; - let requirements_txt = temp_dir.child("requirements.txt"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: failed to open file `requirements.txt` - Caused by: No such file or directory (os error 2) - "###); - - requirements_txt.assert(predicates::path::missing()); - - Ok(()) -} - -#[test] -fn missing_venv() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = temp_dir.child(".venv"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: failed to open file `requirements.txt` - Caused by: No such file or directory (os error 2) - "###); - - venv.assert(predicates::path::missing()); - - Ok(()) -} - -/// Install a package into a virtual environment using the default link semantics. (On macOS, -/// this using `clone` semantics.) -#[test] -fn install() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + markupsafe==2.1.3 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment using copy semantics. -#[test] -fn install_copy() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--link-mode") - .arg("copy") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + markupsafe==2.1.3 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment using hardlink semantics. 
-#[test] -fn install_hardlink() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--link-mode") - .arg("hardlink") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + markupsafe==2.1.3 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - - Ok(()) -} - -/// Install multiple packages into a virtual environment. -#[test] -fn install_many() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + markupsafe==2.1.3 - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import markupsafe; import tomli", &cache_dir); - - Ok(()) -} - -/// Attempt to install an already-installed package into a virtual environment. -#[test] -fn noop() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment, then install the same package into a different -/// virtual environment. 
-#[test] -fn link() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv1 = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv1.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - let venv2 = temp_dir.child(".venv2"); - Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg(venv2.as_os_str()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .arg("--python") - .arg("python3.12") - .current_dir(&temp_dir) - .assert() - .success(); - venv2.assert(predicates::path::is_dir()); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv2.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Installed 1 package in [TIME] - + markupsafe==2.1.3 - "###); - }); - - check_command(&venv2, "import markupsafe", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment, then sync the virtual environment with a -/// different requirements file. -#[test] -fn add_remove() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("tomli==2.0.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Uninstalled 1 package in [TIME] - Installed 1 package in [TIME] - - markupsafe==2.1.3 - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import markupsafe") - .current_dir(&temp_dir) - .assert() - .failure(); - - Ok(()) -} - -/// Install a package into a virtual environment, then install a second package into the same -/// virtual environment. 
-#[test] -fn install_sequential() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import markupsafe; import tomli", &cache_dir); - - Ok(()) -} - -/// Install a package into a virtual environment, then install a second package into the same -/// virtual environment. -#[test] -fn upgrade() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("tomli==2.0.0")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("tomli==2.0.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Uninstalled 1 package in [TIME] - Installed 1 package in [TIME] - - tomli==2.0.0 - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment from a URL. 
-#[test] -fn install_url() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==2.0.0 (from https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment from a Git repository. -#[test] -#[cfg(feature = "git")] -fn install_git_commit() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Install a package into a virtual environment from a Git repository. 
-#[test] -#[cfg(feature = "git")] -fn install_git_tag() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@2.0.0) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Install two packages from the same Git repository. -#[test] -#[cfg(feature = "git")] -fn install_git_subdirectories() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + example-pkg-a==1 (from git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a) - + example-pkg-b==1 (from git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b) - "###); - }); - - check_command(&venv, "import example_pkg", &temp_dir); - check_command(&venv, "import example_pkg.a", &temp_dir); - check_command(&venv, "import example_pkg.b", &temp_dir); - - Ok(()) -} - -/// Install a source distribution into a virtual environment. 
-#[test] -fn install_sdist() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("Werkzeug==0.9.6")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==0.9.6 - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Install a source distribution into a virtual environment. -#[test] -fn install_sdist_url() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("Werkzeug @ https://files.pythonhosted.org/packages/63/69/5702e5eb897d1a144001e21d676676bcb87b88c0862f947509ea95ea54fc/Werkzeug-0.9.6.tar.gz")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==0.9.6 (from https://files.pythonhosted.org/packages/63/69/5702e5eb897d1a144001e21d676676bcb87b88c0862f947509ea95ea54fc/Werkzeug-0.9.6.tar.gz) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Attempt to re-install a package into a virtual environment from a URL. The second install -/// should be a no-op. 
-#[test] -fn install_url_then_install_url() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Install a package via a URL, then via a registry version. The second install _should_ remove the -/// URL-based version, but doesn't right now. -#[test] -fn install_url_then_install_version() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug==2.0.0")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Audited 1 package in [TIME] - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Install a package via a registry version, then via a direct URL version. The second install -/// should remove the registry-based version. 
-#[test] -fn install_version_then_install_url() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug==2.0.0")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?; - - insta::with_settings!({ - filters => vec![ - (r"(\d|\.)+(ms|s)", "[TIME]"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Uninstalled 1 package in [TIME] - Installed 1 package in [TIME] - - werkzeug==2.0.0 - + werkzeug==2.0.0 (from https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Test that we select the last 3.8 compatible numpy version instead of trying to compile an -/// incompatible sdist -#[test] -fn install_numpy_py38() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = temp_dir.child(".venv"); - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg(venv.as_os_str()) - .arg("--python") - // TODO(konstin): Mock the venv in the installer test so we don't need this anymore - .arg(which::which("python3.8").context("python3.8 must be installed")?) - .arg("--cache-dir") - .arg(cache_dir.path()) - .current_dir(&temp_dir) - .assert() - .success(); - venv.assert(predicates::path::is_dir()); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("numpy")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + numpy==1.24.4 - "###); - }); - - check_command(&venv, "import numpy", &temp_dir); - - Ok(()) -} - -/// Install a package without using pre-built wheels. 
-#[test] -fn install_no_binary() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--no-binary") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + markupsafe==2.1.3 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - - Ok(()) -} - -#[test] -fn warn_on_yanked_version() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.txt"); - requirements_in.touch()?; - - // This version is yanked. - requirements_in.write_str("colorama==0.4.2")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - warning: colorama==0.4.2 is yanked (reason: "Bad build, missing files, will not install"). Refresh your lockfile to pin an un-yanked version. - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + colorama==0.4.2 - "###); - }); - - Ok(()) -} - -/// Resolve a local wheel. -#[test] -fn install_local_wheel() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a wheel. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl")?; - let archive = temp_dir.child("tomli-2.0.1-py3-none-any.whl"); - let mut archive_file = std::fs::File::create(&archive)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut archive_file)?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("tomli @ file://{}", archive.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. 
- let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - // Create a new virtual environment. - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Reinstall. The wheel should come from the cache, so there shouldn't be a "download". - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Installed 1 package in [TIME] - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - // Create a new virtual environment. - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // "Modify" the wheel. - let archive_file = std::fs::File::open(&archive)?; - archive_file.set_modified(std::time::SystemTime::now())?; - - // Reinstall. The wheel should be "downloaded" again. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - // "Modify" the wheel. - let archive_file = std::fs::File::open(&archive)?; - archive_file.set_modified(std::time::SystemTime::now())?; - - // Reinstall into the same virtual environment. The wheel should be reinstalled. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Uninstalled 1 package in [TIME] - Installed 1 package in [TIME] - - tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - Ok(()) -} - -/// Install a wheel whose actual version doesn't match the version encoded in the filename. 
-#[test] -fn mismatched_version() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a wheel. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl")?; - let archive = temp_dir.child("tomli-3.7.2-py3-none-any.whl"); - let mut archive_file = std::fs::File::create(&archive)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut archive_file)?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("tomli @ file://{}", archive.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - error: Failed to install: tomli-3.7.2-py3-none-any.whl (tomli==3.7.2 (from file://[TEMP_DIR]/tomli-3.7.2-py3-none-any.whl)) - Caused by: Wheel version does not match filename: 2.0.1 != 3.7.2 - "###); - }); - - Ok(()) -} - -/// Install a wheel whose actual name doesn't match the name encoded in the filename. -#[test] -fn mismatched_name() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a wheel. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl")?; - let archive = temp_dir.child("foo-2.0.1-py3-none-any.whl"); - let mut archive_file = std::fs::File::create(&archive)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut archive_file)?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("tomli @ file://{}", archive.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - error: Failed to install: foo-2.0.1-py3-none-any.whl (foo==2.0.1 (from file://[TEMP_DIR]/foo-2.0.1-py3-none-any.whl)) - Caused by: Wheel package name does not match filename: tomli != foo - "###); - }); - - Ok(()) -} - -/// Install a local source distribution. 
-#[test] -fn install_local_source_distribution() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a source distribution. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/b0/b4/bc2baae3970c282fae6c2cb8e0f179923dceb7eaffb0e76170628f9af97b/wheel-0.42.0.tar.gz")?; - let archive = temp_dir.child("wheel-0.42.0.tar.gz"); - let mut archive_file = std::fs::File::create(&archive)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut archive_file)?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("wheel @ file://{}", archive.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + wheel==0.42.0 (from file://[TEMP_DIR]/wheel-0.42.0.tar.gz) - "###); - }); - - check_command(&venv, "import wheel", &temp_dir); - - Ok(()) -} - -/// The `ujson` package includes a `[build-system]`, but no `build-backend`. It lists some explicit -/// build requirements, but _also_ depends on `wheel` and `setuptools`: -/// ```toml -/// [build-system] -/// requires = ["setuptools>=42", "setuptools_scm[toml]>=3.4"] -/// ``` -/// -/// Like `pip` and `build`, we should use PEP 517 here and respect the `requires`, but use the -/// default build backend. -#[test] -fn install_ujson() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("ujson @ https://files.pythonhosted.org/packages/43/1a/b0a027144aa5c8f4ea654f4afdd634578b450807bb70b9f8bad00d6f6d3c/ujson-5.7.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + ujson==5.7.0 (from https://files.pythonhosted.org/packages/43/1a/b0a027144aa5c8f4ea654f4afdd634578b450807bb70b9f8bad00d6f6d3c/ujson-5.7.0.tar.gz) - "###); - }); - - check_command(&venv, "import ujson", &temp_dir); - - Ok(()) -} - -/// This package includes a `[build-system]`, but no `build-backend`. 
-/// -/// It lists some explicit build requirements that are necessary to build the distribution: -/// ```toml -/// [build-system] -/// requires = ["Cython<3", "setuptools", "wheel"] -/// ``` -/// -/// Like `pip` and `build`, we should use PEP 517 here and respect the `requires`, but use the -/// default build backend. -/// -/// The example is based `DTLSSocket==0.1.16` -#[test] -fn install_build_system_no_backend() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("build-system-no-backend @ https://files.pythonhosted.org/packages/ec/25/1e531108ca027dc3a3b37d351f4b86d811df4884c6a81cd99e73b8b589f5/build-system-no-backend-0.1.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + build-system-no-backend==0.1.0 (from https://files.pythonhosted.org/packages/ec/25/1e531108ca027dc3a3b37d351f4b86d811df4884c6a81cd99e73b8b589f5/build-system-no-backend-0.1.0.tar.gz) - "###); - }); - - check_command(&venv, "import build_system_no_backend", &temp_dir); - - Ok(()) -} - -/// Check that we show the right messages on cached, direct URL source distribution installs. -#[test] -fn install_url_source_dist_cached() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("tqdm @ https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz) - "###); - }); - - check_command(&venv, "import tqdm", &temp_dir); - - // Re-run the installation in a new virtual environment. 
- let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Installed 1 package in [TIME] - + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz) - "###); - }); - - check_command(&venv, "import tqdm", &temp_dir); - - // Clear the cache, then re-run the installation in a new virtual environment. - let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("clean") - .arg("tqdm") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Cleared 1 entry for package: tqdm - "###); - }); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz) - "###); - }); - - check_command(&venv, "import tqdm", &temp_dir); - - Ok(()) -} - -/// Check that we show the right messages on cached, Git source distribution installs. -#[test] -#[cfg(feature = "git")] -fn install_git_source_dist_cached() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - // Re-run the installation in a new virtual environment. 
- let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Installed 1 package in [TIME] - + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - // Clear the cache, then re-run the installation in a new virtual environment. - let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("clean") - .arg("werkzeug") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Cleared 1 entry for package: werkzeug - "###); - }); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74) - "###); - }); - - check_command(&venv, "import werkzeug", &temp_dir); - - Ok(()) -} - -/// Check that we show the right messages on cached, registry source distribution installs. -#[test] -fn install_registry_source_dist_cached() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("future==0.18.3")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + future==0.18.3 - "###); - }); - - check_command(&venv, "import future", &temp_dir); - - // Re-run the installation in a new virtual environment. 
- let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Installed 1 package in [TIME] - + future==0.18.3 - "###); - }); - - check_command(&venv, "import future", &temp_dir); - - // Clear the cache, then re-run the installation in a new virtual environment. - let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("clean") - .arg("future") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Cleared 2 entries for package: future - "###); - }); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + future==0.18.3 - "###); - }); - - check_command(&venv, "import future", &temp_dir); - - Ok(()) -} - -/// Check that we show the right messages on cached, local source distribution installs. -#[test] -fn install_path_source_dist_cached() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - // Download a source distribution. - let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/b0/b4/bc2baae3970c282fae6c2cb8e0f179923dceb7eaffb0e76170628f9af97b/wheel-0.42.0.tar.gz")?; - let archive = temp_dir.child("wheel-0.42.0.tar.gz"); - let mut archive_file = std::fs::File::create(&archive)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut archive_file)?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("wheel @ file://{}", archive.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. 
- let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/"))
- .chain(INSTA_FILTERS.to_vec())
- .collect();
-
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + wheel==0.42.0 (from file://[TEMP_DIR]/wheel-0.42.0.tar.gz)
- "###);
- });
-
- check_command(&venv, "import wheel", &temp_dir);
-
- // Re-run the installation in a new virtual environment.
- let parent = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&parent, &cache_dir);
-
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Installed 1 package in [TIME]
- + wheel==0.42.0 (from file://[TEMP_DIR]/wheel-0.42.0.tar.gz)
- "###);
- });
-
- check_command(&venv, "import wheel", &temp_dir);
-
- // Clear the cache, then re-run the installation in a new virtual environment.
- let parent = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&parent, &cache_dir);
-
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("clean")
- .arg("wheel")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Cleared 1 entry for package: wheel
- "###);
- });
-
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + wheel==0.42.0 (from file://[TEMP_DIR]/wheel-0.42.0.tar.gz)
- "###);
- });
-
- check_command(&venv, "import wheel", &temp_dir);
-
- Ok(())
-}
-
-/// Check that we show the right messages on cached, local built distribution installs.
-#[test]
-fn install_path_built_dist_cached() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- // Download a wheel.
- let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl")?; - let archive = temp_dir.child("tomli-2.0.1-py3-none-any.whl"); - let mut archive_file = std::fs::File::create(&archive)?; - std::io::copy(&mut response.bytes()?.as_ref(), &mut archive_file)?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("tomli @ file://{}", archive.path().display()))?; - - // In addition to the standard filters, remove the temporary directory from the snapshot. - let filters: Vec<_> = iter::once((r"file://.*/", "file://[TEMP_DIR]/")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - // Re-run the installation in a new virtual environment. - let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Installed 1 package in [TIME] - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &parent); - - // Clear the cache, then re-run the installation in a new virtual environment. - let parent = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&parent, &cache_dir); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("clean") - .arg("tomli") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Cleared 1 entry for package: tomli - "###); - }); - - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + tomli==2.0.1 (from file://[TEMP_DIR]/tomli-2.0.1-py3-none-any.whl) - "###); - }); - - check_command(&venv, "import tomli", &temp_dir); - - Ok(()) -} - -/// Check that we show the right messages on cached, direct URL built distribution installs. 
-#[test]
-fn install_url_built_dist_cached() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("tqdm @ https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl)
- "###);
- });
-
- check_command(&venv, "import tqdm", &temp_dir);
-
- // Re-run the installation in a new virtual environment.
- let parent = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&parent, &cache_dir);
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Installed 1 package in [TIME]
- + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl)
- "###);
- });
-
- check_command(&venv, "import tqdm", &temp_dir);
-
- // Clear the cache, then re-run the installation in a new virtual environment.
- let parent = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&parent, &cache_dir);
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("clean")
- .arg("tqdm")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Cleared 1 entry for package: tqdm
- "###);
- });
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + tqdm==4.66.1 (from https://files.pythonhosted.org/packages/00/e5/f12a80907d0884e6dff9c16d0c0114d81b8cd07dc3ae54c5e962cc83037e/tqdm-4.66.1-py3-none-any.whl)
- "###);
- });
-
- check_command(&venv, "import tqdm", &temp_dir);
-
- Ok(())
-}
-
-/// Verify that we fail with an appropriate error when a package is repeated.
-#[test]
-fn duplicate_package_overlap() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("MarkupSafe==2.1.3\nMarkupSafe==2.1.2")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Failed to determine installation plan
- Caused by: Detected duplicate package in requirements: markupsafe
- "###);
- });
-
- Ok(())
-}
-
-/// Verify that we allow duplicate packages when they are disjoint.
-#[test]
-fn duplicate_package_disjoint() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("MarkupSafe==2.1.3\nMarkupSafe==2.1.2 ; python_version < '3.6'")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + markupsafe==2.1.3
- "###);
- });
-
- Ok(())
-}
-
-/// Verify that we can force reinstall of packages.
-#[test]
-fn reinstall() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 2 packages in [TIME]
- Downloaded 2 packages in [TIME]
- Installed 2 packages in [TIME]
- + markupsafe==2.1.3
- + tomli==2.0.1
- "###);
- });
-
- check_command(&venv, "import markupsafe", &temp_dir);
- check_command(&venv, "import tomli", &temp_dir);
-
- // Re-run the installation with `--reinstall`.
- insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--reinstall") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Uninstalled 2 packages in [TIME] - Installed 2 packages in [TIME] - - markupsafe==2.1.3 - + markupsafe==2.1.3 - - tomli==2.0.1 - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - check_command(&venv, "import tomli", &temp_dir); - - Ok(()) -} - -/// Verify that we can force reinstall of selective packages. -#[test] -fn reinstall_package() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 2 packages in [TIME] - + markupsafe==2.1.3 - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - check_command(&venv, "import tomli", &temp_dir); - - // Re-run the installation with `--reinstall`. - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--reinstall-package") - .arg("tomli") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Uninstalled 1 package in [TIME] - Installed 1 package in [TIME] - - tomli==2.0.1 - + tomli==2.0.1 - "###); - }); - - check_command(&venv, "import markupsafe", &temp_dir); - check_command(&venv, "import tomli", &temp_dir); - - Ok(()) -} - -/// Verify that we can force reinstall of Git dependencies. 
-#[test]
-#[cfg(feature = "git")]
-fn reinstall_git() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74")?;
-
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74)
- "###);
- });
-
- check_command(&venv, "import werkzeug", &temp_dir);
-
- // Re-run the installation with `--reinstall`.
- insta::with_settings!({
- filters => INSTA_FILTERS.to_vec()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg("requirements.txt")
- .arg("--reinstall-package")
- .arg("WerkZeug")
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .current_dir(&temp_dir), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Uninstalled 1 package in [TIME]
- Installed 1 package in [TIME]
- - werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74)
- + werkzeug==2.0.0 (from git+https://github.com/pallets/werkzeug.git@af160e0b6b7ddd81c22f1652c728ff5ac72d5c74)
- "###);
- });
-
- check_command(&venv, "import werkzeug", &temp_dir);
-
- Ok(())
-}
-
-#[test]
-#[cfg(feature = "maturin")]
-fn sync_editable() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let current_dir = std::env::current_dir()?;
- let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
-
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.write_str(&indoc::formatdoc! {r"
- boltons==23.1.1
- -e ../../scripts/editable-installs/maturin_editable
- numpy==1.26.2
- # via poetry-editable
- -e file://{current_dir}/../../scripts/editable-installs/poetry_editable
- ",
- current_dir = current_dir.display(),
- })?;
-
- let filter_path = requirements_txt.display().to_string();
- let filters = INSTA_FILTERS
- .iter()
- .chain(&[
- (filter_path.as_str(), "requirements.txt"),
- (
- r"file://.*/../../scripts/editable-installs/poetry_editable",
- "file://[TEMP_DIR]/../../scripts/editable-installs/poetry_editable",
- ),
- (workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"),
- ])
- .copied()
- .collect::<Vec<_>>();
-
- // Install the editable packages.
- insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg(requirements_txt.path()) - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Built 2 editables in [TIME] - Resolved 2 packages in [TIME] - Downloaded 2 packages in [TIME] - Installed 4 packages in [TIME] - + boltons==23.1.1 - + maturin-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/maturin_editable) - + numpy==1.26.2 - + poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - "###); - }); - - // Reinstall the editable packages. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg(requirements_txt.path()) - .arg("--reinstall-package") - .arg("poetry-editable") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Built 1 editable in [TIME] - Uninstalled 1 package in [TIME] - Installed 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - + poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - "###); - }); - - // Make sure we have the right base case. - let python_source_file = - "../../scripts/editable-installs/maturin_editable/python/maturin_editable/__init__.py"; - let python_version_1 = indoc::indoc! {r" - from .maturin_editable import * - - version = 1 - "}; - fs_err::write(python_source_file, python_version_1)?; - - let command = indoc::indoc! {r#" - from maturin_editable import sum_as_string, version - - assert version == 1, version - assert sum_as_string(1, 2) == "3", sum_as_string(1, 2) - "#}; - check_command(&venv, command, &temp_dir); - - // Edit the sources. - let python_version_2 = indoc::indoc! {r" - from .maturin_editable import * - - version = 2 - "}; - fs_err::write(python_source_file, python_version_2)?; - - let command = indoc::indoc! {r#" - from maturin_editable import sum_as_string, version - from pathlib import Path - - assert version == 2, version - assert sum_as_string(1, 2) == "3", sum_as_string(1, 2) - "#}; - check_command(&venv, command, &temp_dir); - - // Don't create a git diff. 
- fs_err::write(python_source_file, python_version_1)?;
-
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg(requirements_txt.path())
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Audited 4 packages in [TIME]
- "###);
- });
-
- Ok(())
-}
-
-#[test]
-fn sync_editable_and_registry() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let cache_dir = assert_fs::TempDir::new()?;
- let venv = create_venv_py312(&temp_dir, &cache_dir);
-
- let current_dir = std::env::current_dir()?;
- let workspace_dir = current_dir.join("..").join("..").canonicalize()?;
-
- // Install the registry-based version of Black.
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.write_str(indoc::indoc! {r"
- black
- "
- })?;
-
- let filter_path = requirements_txt.display().to_string();
- let filters = INSTA_FILTERS
- .iter()
- .chain(&[
- (filter_path.as_str(), "requirements.txt"),
- (workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"),
- ])
- .copied()
- .collect::<Vec<_>>();
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg(requirements_txt.path())
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Installed 1 package in [TIME]
- + black==24.1a1
- warning: The package `black` requires `click >=8.0.0`, but it's not installed.
- warning: The package `black` requires `mypy-extensions >=0.4.3`, but it's not installed.
- warning: The package `black` requires `packaging >=22.0`, but it's not installed.
- warning: The package `black` requires `pathspec >=0.9.0`, but it's not installed.
- warning: The package `black` requires `platformdirs >=2`, but it's not installed.
- warning: The package `black` requires `aiohttp >=3.7.4 ; sys_platform != 'win32' or (implementation_name != 'pypy' and extra == 'd')`, but it's not installed.
- "###);
- });
-
- // Install the editable version of Black. This should remove the registry-based version.
- // Use the `file:` syntax for extra coverage.
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.write_str(indoc::indoc!
{r"
- -e file:../../scripts/editable-installs/black_editable
- "
- })?;
-
- let filter_path = requirements_txt.display().to_string();
- let filters = INSTA_FILTERS
- .iter()
- .chain(&[
- (filter_path.as_str(), "requirements.txt"),
- (workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"),
- ])
- .copied()
- .collect::<Vec<_>>();
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg(requirements_txt.path())
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Built 1 editable in [TIME]
- Uninstalled 1 package in [TIME]
- Installed 1 package in [TIME]
- - black==24.1a1
- + black==0.1.0+editable (from file://[WORKSPACE_DIR]/scripts/editable-installs/black_editable)
- "###);
- });
-
- // Re-install the registry-based version of Black. This should be a no-op, since we have a
- // version of Black installed (the editable version) that satisfies the requirements.
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.write_str(indoc::indoc! {r"
- black
- "
- })?;
-
- let filter_path = requirements_txt.display().to_string();
- let filters = INSTA_FILTERS
- .iter()
- .chain(&[
- (filter_path.as_str(), "requirements.txt"),
- (workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"),
- ])
- .copied()
- .collect::<Vec<_>>();
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg(requirements_txt.path())
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Audited 1 package in [TIME]
- "###);
- });
-
- // Re-install Black at a specific version. This should replace the editable version.
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.write_str(indoc::indoc! {r"
- black==23.10.0
- "
- })?;
-
- let filter_path = requirements_txt.display().to_string();
- let filters = INSTA_FILTERS
- .iter()
- .chain(&[
- (filter_path.as_str(), "requirements.txt"),
- (workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]"),
- ])
- .copied()
- .collect::<Vec<_>>();
- insta::with_settings!({
- filters => filters.clone()
- }, {
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("sync")
- .arg(requirements_txt.path())
- .arg("--strict")
- .arg("--cache-dir")
- .arg(cache_dir.path())
- .env("VIRTUAL_ENV", venv.as_os_str())
- .env("CARGO_TARGET_DIR", "../../../target/target_install_editable"), @r###"
- success: true
- exit_code: 0
- ----- stdout -----
-
- ----- stderr -----
- Resolved 1 package in [TIME]
- Downloaded 1 package in [TIME]
- Uninstalled 1 package in [TIME]
- Installed 1 package in [TIME]
- - black==0.1.0+editable (from file://[WORKSPACE_DIR]/scripts/editable-installs/black_editable)
- + black==23.10.0
- warning: The package `black` requires `click >=8.0.0`, but it's not installed.
- warning: The package `black` requires `mypy-extensions >=0.4.3`, but it's not installed.
- warning: The package `black` requires `packaging >=22.0`, but it's not installed.
- warning: The package `black` requires `pathspec >=0.9.0`, but it's not installed.
- warning: The package `black` requires `platformdirs >=2`, but it's not installed. - "###); - }); - - Ok(()) -} - -#[test] -fn incompatible_wheel() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let wheel_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let wheel = wheel_dir.child("foo-1.2.3-not-compatible-wheel.whl"); - wheel.touch()?; - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(&format!("foo @ file://{}", wheel.path().display()))?; - - let wheel_dir = wheel_dir.path().canonicalize()?.display().to_string(); - let filters: Vec<_> = iter::once((wheel_dir.as_str(), "[TEMP_DIR]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--strict") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Failed to determine installation plan - Caused by: A path dependency is incompatible with the current platform: [TEMP_DIR]/foo-1.2.3-not-compatible-wheel.whl - "###); - }); - - Ok(()) -} - -/// Install a project without a `pyproject.toml`, using the PEP 517 build backend (default). -#[test] -fn sync_legacy_sdist_pep_517() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.in") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + flake8==6.0.0 (from https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz) - "###); - }); - - Ok(()) -} - -/// Install a project without a `pyproject.toml`, using `setuptools` directly. 
-#[test] -fn sync_legacy_sdist_setuptools() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_in = temp_dir.child("requirements.in"); - requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?; - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.in") - .arg("--legacy-setup-py") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 1 package in [TIME] - Downloaded 1 package in [TIME] - Installed 1 package in [TIME] - + flake8==6.0.0 (from https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz) - "###); - }); - - Ok(()) -} - -/// Sync using `--find-links` with a local directory. -#[test] -fn find_links() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.write_str(indoc! {r" - markupsafe==2.1.3 - numpy==1.26.3 - tqdm==1000.0.0 - werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl - "})?; - - let project_root = fs_err::canonicalize(std::env::current_dir()?.join("../.."))?; - let project_root_string = project_root.display().to_string(); - let filters: Vec<_> = iter::once((project_root_string.as_str(), "[PROJECT_ROOT]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--find-links") - .arg(project_root.join("scripts/wheels/")) - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 4 packages in [TIME] - Downloaded 4 packages in [TIME] - Installed 4 packages in [TIME] - + markupsafe==2.1.3 - + numpy==1.26.3 - + tqdm==1000.0.0 - + werkzeug==3.0.1 (from https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl) - "###); - }); - - Ok(()) -} diff --git a/crates/puffin/tests/pip_uninstall.rs b/crates/puffin/tests/pip_uninstall.rs deleted file mode 100644 index ab662a74aaea..000000000000 --- a/crates/puffin/tests/pip_uninstall.rs +++ /dev/null @@ -1,546 +0,0 @@ -use std::iter; -use std::process::Command; - -use anyhow::Result; -use assert_cmd::prelude::*; -use assert_fs::prelude::*; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; - -use common::{BIN_NAME, INSTA_FILTERS}; - -use crate::common::create_venv_py312; - -mod common; - -#[test] -fn no_arguments() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - 
error: the following required arguments were not provided:
- <PACKAGE|--requirement <REQUIREMENT>|--editable <EDITABLE>>
-
- Usage: puffin pip uninstall <PACKAGE|--requirement <REQUIREMENT>|--editable <EDITABLE>>
-
- For more information, try '--help'.
- "###);
-
- Ok(())
-}
-
-#[test]
-fn invalid_requirement() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
-
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("uninstall")
- .arg("flask==1.0.x")
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Failed to parse `flask==1.0.x`
- Caused by: after parsing 1.0, found ".x" after it, which is not part of a valid version
- flask==1.0.x
- ^^^^^^^
- "###);
-
- Ok(())
-}
-
-#[test]
-fn missing_requirements_txt() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
-
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("uninstall")
- .arg("-r")
- .arg("requirements.txt")
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: failed to open file `requirements.txt`
- Caused by: No such file or directory (os error 2)
- "###);
-
- Ok(())
-}
-
-#[test]
-fn invalid_requirements_txt_requirement() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let requirements_txt = temp_dir.child("requirements.txt");
- requirements_txt.touch()?;
- requirements_txt.write_str("flask==1.0.x")?;
-
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("uninstall")
- .arg("-r")
- .arg("requirements.txt")
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Couldn't parse requirement in requirements.txt position 0 to 12
- Caused by: after parsing 1.0, found ".x" after it, which is not part of a valid version
- flask==1.0.x
- ^^^^^^^
- "###);
-
- Ok(())
-}
-
-#[test]
-fn missing_pyproject_toml() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
-
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("uninstall")
- .arg("-r")
- .arg("pyproject.toml")
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: failed to open file `pyproject.toml`
- Caused by: No such file or directory (os error 2)
- "###);
-
- Ok(())
-}
-
-#[test]
-fn invalid_pyproject_toml_syntax() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let pyproject_toml = temp_dir.child("pyproject.toml");
- pyproject_toml.touch()?;
- pyproject_toml.write_str("123 - 456")?;
-
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("uninstall")
- .arg("-r")
- .arg("pyproject.toml")
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Failed to parse `pyproject.toml`
- Caused by: TOML parse error at line 1, column 5
- |
- 1 | 123 - 456
- | ^
- expected `.`, `=`
-
- "###);
-
- Ok(())
-}
-
-#[test]
-fn invalid_pyproject_toml_schema() -> Result<()> {
- let temp_dir = assert_fs::TempDir::new()?;
- let pyproject_toml = temp_dir.child("pyproject.toml");
- pyproject_toml.touch()?;
- pyproject_toml.write_str("[project]")?;
-
- assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
- .arg("pip")
- .arg("uninstall")
- .arg("-r")
- .arg("pyproject.toml")
- .current_dir(&temp_dir), @r###"
- success: false
- exit_code: 2
- ----- stdout -----
-
- ----- stderr -----
- error: Failed to parse `pyproject.toml`
- Caused by: TOML parse error at line 1, column 1
-
| - 1 | [project] - | ^^^^^^^^^ - missing field `name` - - "###); - - Ok(()) -} - -#[test] -fn invalid_pyproject_toml_requirement() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = ["flask==1.0.x"] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .arg("-r") - .arg("pyproject.toml") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Failed to parse `pyproject.toml` - Caused by: TOML parse error at line 3, column 16 - | - 3 | dependencies = ["flask==1.0.x"] - | ^^^^^^^^^^^^^^^^ - after parsing 1.0, found ".x" after it, which is not part of a valid version - flask==1.0.x - ^^^^^^^ - - "###); - - Ok(()) -} - -#[test] -fn uninstall() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import markupsafe") - .current_dir(&temp_dir) - .assert() - .success(); - - insta::with_settings!({ - filters => INSTA_FILTERS.to_vec() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .arg("MarkupSafe") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Uninstalled 1 package in [TIME] - - markupsafe==2.1.3 - "###); - }); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import markupsafe") - .current_dir(&temp_dir) - .assert() - .failure(); - - Ok(()) -} - -#[test] -fn missing_record() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("MarkupSafe==2.1.3")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg("requirements.txt") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir) - .assert() - .success(); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import markupsafe") - .current_dir(&temp_dir) - .assert() - .success(); - - // Delete the RECORD file. 
- let dist_info = venv - .join("lib") - .join("python3.12") - .join("site-packages") - .join("MarkupSafe-2.1.3.dist-info"); - std::fs::remove_file(dist_info.join("RECORD"))?; - - let filters: Vec<_> = iter::once(( - "RECORD file not found at: .*/.venv", - "RECORD file not found at: [VENV_PATH]", - )) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - insta::with_settings!({ - filters => filters, - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .arg("MarkupSafe") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .current_dir(&temp_dir), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Cannot uninstall package; RECORD file not found at: [VENV_PATH]/lib/python3.12/site-packages/MarkupSafe-2.1.3.dist-info/RECORD - "###); - }); - - Ok(()) -} - -#[test] -fn uninstall_editable_by_name() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let current_dir = std::env::current_dir()?; - let workspace_dir = current_dir.join("..").join("..").canonicalize()?; - - let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("-e ../../scripts/editable-installs/poetry_editable")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg(requirements_txt.path()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .assert() - .success(); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import poetry_editable") - .assert() - .success(); - - // Uninstall the editable by name. 
- insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .arg("poetry-editable") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - , @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Uninstalled 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - "###); - }); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import poetry_editable") - .assert() - .failure(); - - Ok(()) -} - -#[test] -fn uninstall_editable_by_path() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let current_dir = std::env::current_dir()?; - let workspace_dir = current_dir.join("..").join("..").canonicalize()?; - - let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("-e ../../scripts/editable-installs/poetry_editable")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg(requirements_txt.path()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .assert() - .success(); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import poetry_editable") - .assert() - .success(); - - // Uninstall the editable by path. - insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .arg("-e") - .arg("../../scripts/editable-installs/poetry_editable") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Uninstalled 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - "###); - }); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import poetry_editable") - .assert() - .failure(); - - Ok(()) -} - -#[test] -fn uninstall_duplicate_editable() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let cache_dir = assert_fs::TempDir::new()?; - let venv = create_venv_py312(&temp_dir, &cache_dir); - - let current_dir = std::env::current_dir()?; - let workspace_dir = current_dir.join("..").join("..").canonicalize()?; - - let filters: Vec<_> = iter::once((workspace_dir.to_str().unwrap(), "[WORKSPACE_DIR]")) - .chain(INSTA_FILTERS.to_vec()) - .collect(); - - let requirements_txt = temp_dir.child("requirements.txt"); - requirements_txt.touch()?; - requirements_txt.write_str("-e ../../scripts/editable-installs/poetry_editable")?; - - Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("sync") - .arg(requirements_txt.path()) - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()) - .assert() - .success(); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import poetry_editable") - .assert() - .success(); - - // Uninstall the editable by both path and name. 
- insta::with_settings!({ - filters => filters.clone() - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("pip") - .arg("uninstall") - .arg("poetry-editable") - .arg("-e") - .arg("../../scripts/editable-installs/poetry_editable") - .arg("--cache-dir") - .arg(cache_dir.path()) - .env("VIRTUAL_ENV", venv.as_os_str()), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Uninstalled 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE_DIR]/scripts/editable-installs/poetry_editable) - "###); - }); - - Command::new(venv.join("bin").join("python")) - .arg("-c") - .arg("import poetry_editable") - .assert() - .failure(); - - Ok(()) -} diff --git a/crates/puffin/tests/remove.rs b/crates/puffin/tests/remove.rs deleted file mode 100644 index 42a4394886f0..000000000000 --- a/crates/puffin/tests/remove.rs +++ /dev/null @@ -1,282 +0,0 @@ -use std::process::Command; - -use anyhow::Result; -use assert_fs::prelude::*; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; - -use common::BIN_NAME; - -mod common; - -#[test] -fn missing_pyproject_toml() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - puffin::remove::workspace_not_found - - × Could not find a `pyproject.toml` file in the current directory or any of - │ its parents - "###); - - pyproject_toml.assert(predicates::path::missing()); - - Ok(()) -} - -#[test] -fn missing_project_table() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - puffin::remove::parse - - × Failed to remove `flask` from `pyproject.toml` - ╰─▶ no `[project]` table found in `pyproject.toml` - "###); - - pyproject_toml.assert(predicates::str::is_empty()); - - Ok(()) -} - -#[test] -fn missing_dependencies_array() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - puffin::remove::parse - - × Failed to remove `flask` from `pyproject.toml` - ╰─▶ no `[project.dependencies]` array found in `pyproject.toml` - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -"#, - ); - - Ok(()) -} - -#[test] -fn missing_dependency() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", -] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("requests") - .current_dir(&temp_dir), @r###" - success: false - exit_code: 1 - ----- stdout ----- - - ----- stderr ----- - puffin::remove::parse - - × Failed to remove `requests` from `pyproject.toml` - ╰─▶ unable 
to find package: `requests` - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", -] -"#, - ); - - Ok(()) -} - -#[test] -fn remove_dependency() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", - "requests", -] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("flask") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "requests", -] -"#, - ); - - Ok(()) -} - -#[test] -fn empty_array() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = [ - "requests", -] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("requests") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [] -"#, - ); - - Ok(()) -} - -#[test] -fn normalize_name() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", - "requests", -] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("Flask") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "requests", -] -"#, - ); - - Ok(()) -} - -#[test] -fn reformat_array() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let pyproject_toml = temp_dir.child("pyproject.toml"); - pyproject_toml.touch()?; - pyproject_toml.write_str( - r#"[project] -name = "project" -dependencies = ["flask==1.0.0", "requests"] -"#, - )?; - - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("remove") - .arg("requests") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - "###); - - pyproject_toml.assert( - r#"[project] -name = "project" -dependencies = [ - "flask==1.0.0", -] -"#, - ); - - Ok(()) -} diff --git a/crates/puffin/tests/venv.rs b/crates/puffin/tests/venv.rs deleted file mode 100644 index 03d88833c05c..000000000000 --- a/crates/puffin/tests/venv.rs +++ /dev/null @@ -1,111 +0,0 @@ -#![cfg(feature = "python")] - -use std::process::Command; - -use anyhow::Result; -use assert_fs::prelude::*; -use insta_cmd::_macro_support::insta; -use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; - -use common::BIN_NAME; - -mod common; - -#[test] -fn create_venv() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let venv = temp_dir.child(".venv"); - - insta::with_settings!({ - filters => vec![ - (r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"), - (temp_dir.to_str().unwrap(), "/home/ferris/project"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg(venv.as_os_str()) - .arg("--python") - 
.arg("python3.12") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Using Python [VERSION] at [PATH] - Creating virtual environment at: /home/ferris/project/.venv - "###); - }); - - venv.assert(predicates::path::is_dir()); - - Ok(()) -} - -#[test] -fn create_venv_defaults_to_cwd() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let venv = temp_dir.child(".venv"); - - insta::with_settings!({ - filters => vec![ - (r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"), - (temp_dir.to_str().unwrap(), "/home/ferris/project"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg("--python") - .arg("python3.12") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Using Python [VERSION] at [PATH] - Creating virtual environment at: .venv - "###); - }); - - venv.assert(predicates::path::is_dir()); - - Ok(()) -} - -#[test] -fn seed() -> Result<()> { - let temp_dir = assert_fs::TempDir::new()?; - let venv = temp_dir.child(".venv"); - - insta::with_settings!({ - filters => vec![ - (r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"), - (temp_dir.to_str().unwrap(), "/home/ferris/project"), - ] - }, { - assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) - .arg("venv") - .arg(venv.as_os_str()) - .arg("--seed") - .arg("--python") - .arg("python3.12") - .current_dir(&temp_dir), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Using Python [VERSION] at [PATH] - Creating virtual environment at: /home/ferris/project/.venv - + setuptools==69.0.3 - + pip==23.3.2 - + wheel==0.42.0 - "###); - }); - - venv.assert(predicates::path::is_dir()); - - Ok(()) -}