diff --git a/.authors.yml b/.authors.yml index 626c87d60d..e0a69846c8 100644 --- a/.authors.yml +++ b/.authors.yml @@ -7,20 +7,20 @@ aliases: - Mike Sarahan - Michael Sarahan - num_commits: 4000 + num_commits: 2000 first_commit: 2015-09-04 21:31:08 - name: Jonathan J. Helmus email: jjhelmus@gmail.com aliases: - Jonathan Helmus - num_commits: 109 + num_commits: 110 first_commit: 2014-06-09 17:25:05 github: jjhelmus - name: Isuru Fernando email: isuruf@gmail.com alternate_emails: - isuru.11@cse.mrt.ac.lk - num_commits: 82 + num_commits: 83 first_commit: 2017-06-16 15:14:34 github: isuruf - name: Dan Blanchard @@ -63,7 +63,7 @@ alternate_emails: - mandeep@users.noreply.github.com - mbhutani@continuum.io - num_commits: 86 + num_commits: 43 first_commit: 2017-05-17 23:54:01 github: mandeep - name: Filipe Fernandes @@ -117,7 +117,7 @@ - heather999@users.noreply.github.com aliases: - heather999 - num_commits: 4 + num_commits: 2 first_commit: 2016-04-11 12:02:50 github: heather999 - name: Ryan Grout @@ -571,7 +571,7 @@ alternate_emails: - scastellarin95@gmail.com - scastellarin@anaconda.com - num_commits: 196 + num_commits: 98 first_commit: 2016-09-06 16:58:21 github: soapy1 - name: Bruno Oliveira @@ -754,7 +754,7 @@ alternate_emails: - kirkhamj@janelia.hhmi.org - jakirkham@gmail.com - num_commits: 140 + num_commits: 73 first_commit: 2015-04-21 13:26:39 github: jakirkham - name: Anthony Scopatz @@ -873,7 +873,7 @@ alternate_emails: - 5738695+183amir@users.noreply.github.com - amir.mohammadi@idiap.ch - num_commits: 12 + num_commits: 6 first_commit: 2018-02-27 16:37:19 - name: David Li email: li.davidm96@gmail.com @@ -967,7 +967,7 @@ first_commit: 2019-01-26 13:17:33 - name: Rachel Rigdon email: rrigdon@anaconda.com - num_commits: 268 + num_commits: 134 first_commit: 2019-01-24 15:12:09 github: rrigdon aliases: @@ -1018,7 +1018,7 @@ github: theultimate1 - name: Kai Tietz email: ktietz@anaconda.com - num_commits: 8 + num_commits: 9 first_commit: 2019-04-04 02:38:29 github: 
katietz alternate_emails: @@ -1056,12 +1056,12 @@ github: spalmrot-tic - name: Daniel Bast email: 2790401+dbast@users.noreply.github.com - num_commits: 15 + num_commits: 16 first_commit: 2019-06-07 02:44:13 github: dbast - name: Duncan Macleod email: duncan.macleod@ligo.org - num_commits: 5 + num_commits: 6 first_commit: 2019-06-13 08:07:25 github: duncanmmacleod - name: Chris Osborn @@ -1118,7 +1118,7 @@ alternate_emails: - becker.mr@gmail.com - beckermr@users.noreply.github.com - num_commits: 38 + num_commits: 19 first_commit: 2019-10-17 23:05:16 github: beckermr - name: Jinzhe Zeng @@ -1199,7 +1199,7 @@ alternate_emails: - clee@anaconda.com - name: Ken Odegard - num_commits: 106 + num_commits: 133 email: kodegard@anaconda.com first_commit: 2020-09-08 19:53:41 github: kenodegard @@ -1222,7 +1222,7 @@ first_commit: 2020-11-19 10:46:41 - name: Jannis Leidel email: jannis@leidel.info - num_commits: 33 + num_commits: 30 github: jezdez first_commit: 2020-11-19 10:46:41 - name: Christof Kaufmann @@ -1237,7 +1237,7 @@ github: pre-commit-ci[bot] aliases: - pre-commit-ci[bot] - num_commits: 38 + num_commits: 52 first_commit: 2021-11-20 01:47:17 - name: Jacob Walls email: jacobtylerwalls@gmail.com @@ -1248,7 +1248,7 @@ github: beeankha alternate_emails: - beeankha@gmail.com - num_commits: 14 + num_commits: 20 first_commit: 2022-01-19 16:40:06 - name: Conda Bot email: 18747875+conda-bot@users.noreply.github.com @@ -1259,7 +1259,7 @@ alternate_emails: - ad-team+condabot@anaconda.com - 18747875+conda-bot@users.noreply.github.com - num_commits: 42 + num_commits: 38 first_commit: 2022-01-17 18:09:22 - name: Uwe L. 
Korn email: xhochy@users.noreply.github.com @@ -1268,7 +1268,7 @@ - name: Daniel Holth email: dholth@anaconda.com github: dholth - num_commits: 6 + num_commits: 12 first_commit: 2022-04-28 05:22:14 - name: Rylan Chord email: rchord@users.noreply.github.com @@ -1294,8 +1294,10 @@ first_commit: 2022-04-18 12:03:05 - name: Jürgen Gmach email: juergen.gmach@googlemail.com + alternate_emails: + - juergen.gmach@canonical.com github: jugmac00 - num_commits: 2 + num_commits: 3 first_commit: 2022-05-31 07:52:17 - name: Katherine Kinnaman email: kkinnaman@anaconda.com @@ -1305,7 +1307,7 @@ - name: dependabot[bot] email: 49699333+dependabot[bot]@users.noreply.github.com github: dependabot[bot] - num_commits: 2 + num_commits: 4 first_commit: 2022-05-31 04:34:40 - name: Serhii Kupriienko email: 79282962+skupr-anaconda@users.noreply.github.com @@ -1322,7 +1324,7 @@ - name: Jaime Rodríguez-Guerra email: jaimergp@users.noreply.github.com github: jaimergp - num_commits: 2 + num_commits: 4 first_commit: 2022-11-02 19:34:51 - name: Dave Clements email: tnabtaf@gmail.com @@ -1336,7 +1338,7 @@ first_commit: 2022-11-16 21:54:14 - name: Srivas Venkatesh email: 110486050+sven6002@users.noreply.github.com - num_commits: 1 + num_commits: 2 first_commit: 2022-12-14 19:50:36 github: sven6002 - name: Ernst Luring @@ -1364,3 +1366,52 @@ num_commits: 1 first_commit: 2023-03-22 00:34:22 github: johnnynunez +- name: Ryan Keith + email: rkeith@anaconda.com + aliases: + - Ryan + github: ryanskeith + num_commits: 5 + first_commit: 2023-03-22 03:11:02 +- name: Rishabh Singh + email: 67859818+rishabh11336@users.noreply.github.com + aliases: + - rishabh11336 + github: rishabh11336 + num_commits: 2 + first_commit: 2023-05-15 11:19:48 +- name: Ferry Firmansjah + email: 103191403+ffirmanff@users.noreply.github.com + github: ffirmanff + num_commits: 1 + first_commit: 2023-04-14 11:54:03 +- name: Riadh Fezzani + email: rfezzani@gmail.com + github: rfezzani + num_commits: 1 + first_commit: 2023-05-23 13:46:49 
+- name: Jose Diaz-Gonzalez + email: email@josediazgonzalez.com + github: josegonzalez + num_commits: 1 + first_commit: 2023-06-14 16:02:40 +- name: Jack Olivieri + email: boldorider4@gmail.com + github: boldorider4 + num_commits: 1 + first_commit: 2023-08-30 10:32:34 +- name: Wolf Vollprecht + email: w.vollprecht@gmail.com + github: wolfv + num_commits: 1 + first_commit: 2023-09-22 07:01:49 +- name: Dave Karetnyk + email: Dave.Karetnyk@gmail.com + github: DaveKaretnyk + num_commits: 1 + first_commit: 2023-09-16 05:21:09 +- name: Shaun Walbridge + email: 46331011+scdub@users.noreply.github.com + github: scdub + num_commits: 2 + first_commit: 2023-08-18 02:53:28 diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index ababc663b1..0000000000 --- a/.coveragerc +++ /dev/null @@ -1,3 +0,0 @@ -[run] -parallel=True -omit=conda_build/skeletons/_example_skeleton.py diff --git a/.codecov.yml b/.github/codecov.yml similarity index 100% rename from .codecov.yml rename to .github/codecov.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..07210519aa --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/docs/" + schedule: + interval: "weekly" + allow: + # Allow only production updates for Sphinx + - dependency-name: "sphinx" + dependency-type: "production" diff --git a/.github/labels.yml b/.github/labels.yml index cdd6853502..b072d62896 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -26,3 +26,9 @@ - name: knowledge-medium description: "[deprecated]" color: "888888" + +# Tags +- name: tag::noarch + description: related to noarch builds + color: "86C579" + aliases: [] diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index c7b933b6b9..ed22cae254 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check CLA - uses: conda/actions/check-cla@v23.5.1 + uses: conda/actions/check-cla@v23.7.0 with: # [required] # A token with ability to comment, label, and modify the commit status @@ -27,7 +27,7 @@ jobs: # (default: secrets.GITHUB_TOKEN) token: ${{ secrets.CLA_ACTION_TOKEN }} # [required] - # Label to apply to contributor's PR once CLA is singed + # Label to apply to contributor's PR once CLA is signed label: cla-signed # [required] diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index b2c2e821d8..ebfafa82a2 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -20,7 +20,7 @@ jobs: GLOBAL: https://raw.githubusercontent.com/conda/infra/main/.github/global.yml LOCAL: .github/labels.yml steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - id: has_local uses: andstor/file-existence-action@v2.0.0 with: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 06a07b6376..1e9e46e754 100644 --- a/.github/workflows/stale.yml +++ 
b/.github/workflows/stale.yml @@ -31,10 +31,10 @@ jobs: days-before-issue-close: 30 # [type::support] issues have a more aggressive stale/close timeline - only-issue-labels: type::support - days-before-issue-stale: 21 - days-before-issue-close: 7 + days-before-issue-stale: 90 + days-before-issue-close: 21 steps: - - uses: conda/actions/read-yaml@v23.4.0 + - uses: conda/actions/read-yaml@v23.7.0 id: read_yaml with: path: https://raw.githubusercontent.com/conda/infra/main/.github/messages.yml @@ -69,8 +69,6 @@ jobs: stale-pr-label: stale # Label to apply on closed PRs close-pr-label: stale::closed - # Reason to use when closing PRs - close-pr-reason: not_planned # Remove stale label from issues/PRs on updates/comments remove-stale-when-updated: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 19062be01a..9778c04d23 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -4,18 +4,24 @@ name: Tests on: - # NOTE: github.event context is push payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#push push: branches: - main - feature/** - '[0-9].*.x' # e.g., 3.24.x - # NOTE: github.event context is pull_request payload: - # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request pull_request: + # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch + workflow_dispatch: + + # no payload + schedule: + # https://crontab.guru/#37_18_*_*_* + - cron: 37 18 * * * + concurrency: # Concurrency group that uses the workflow name and PR number if available # or commit SHA as a fallback. 
If a new build is triggered under that @@ -35,9 +41,9 @@ jobs: code: ${{ steps.filter.outputs.code }} steps: - uses: actions/checkout@v3 - # dorny/paths-filter needs git clone for push events + # dorny/paths-filter needs git clone for non-PR events # https://github.com/marketplace/actions/paths-changes-filter#supported-workflows - if: github.event_name == 'push' + if: github.event_name != 'pull_request' - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 id: filter with: @@ -53,7 +59,7 @@ jobs: linux: # only run test suite if there are code changes needs: changes - if: needs.changes.outputs.code == 'true' + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' runs-on: ubuntu-latest defaults: @@ -63,14 +69,14 @@ jobs: fail-fast: false matrix: # test all lower versions (w/ stable conda) and upper version (w/ canary conda) - python-version: ['3.8', '3.9'] + python-version: ['3.8', '3.9', '3.10'] conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: serial - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: parallel env: @@ -110,7 +116,6 @@ jobs: --file ./tests/requirements.txt \ --file ./tests/requirements-linux.txt \ ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install allure-pytest pip install -e . 
- name: Show info @@ -140,18 +145,18 @@ jobs: flags: ${{ matrix.test-type }},${{ matrix.python-version }},linux-64 - name: Tar Allure Results - if: always() + if: '!cancelled()' run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - name: Upload Allure Results - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: allure-Linux-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: allure-results.tar.gz - name: Upload Pytest Replay - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} @@ -161,7 +166,7 @@ jobs: windows: # only run test suite if there are code changes needs: changes - if: needs.changes.outputs.code == 'true' + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' runs-on: windows-2019 strategy: @@ -172,10 +177,10 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: serial - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: parallel env: @@ -216,7 +221,6 @@ jobs: --file .\tests\requirements.txt ` --file .\tests\requirements-windows.txt ` ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install allure-pytest pip install -e . 
- name: Show info @@ -246,21 +250,21 @@ jobs: flags: ${{ matrix.test-type }},${{ matrix.python-version }},win-64 - name: Tar Allure Results - if: always() + if: '!cancelled()' run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" # windows-2019/powershell ships with GNU tar 1.28 which struggles with Windows paths # window-2019/cmd ships with bsdtar 3.5.2 which doesn't have this problem shell: cmd - name: Upload Allure Results - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: allure-Win-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: allure-results.tar.gz - name: Upload Pytest Replay - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: path: ${{ env.REPLAY_DIR }} @@ -270,7 +274,7 @@ jobs: macos: # only run test suite if there are code changes needs: changes - if: needs.changes.outputs.code == 'true' + if: github.event_name == 'schedule' || needs.changes.outputs.code == 'true' runs-on: macos-11 defaults: @@ -284,10 +288,10 @@ jobs: conda-version: [release] test-type: [serial, parallel] include: - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: serial - - python-version: '3.10' + - python-version: '3.11' conda-version: canary test-type: parallel env: @@ -328,7 +332,6 @@ jobs: --file ./tests/requirements.txt \ --file ./tests/requirements-macos.txt \ ${{ env.CONDA_CHANNEL_LABEL }}::conda - pip install allure-pytest pip install -e . 
- name: Show info @@ -358,18 +361,18 @@ jobs: flags: ${{ matrix.test-type }},${{ matrix.python-version }},osx-64 - name: Tar Allure Results - if: always() + if: '!cancelled()' run: tar -zcf "${{ env.ALLURE_DIR }}.tar.gz" "${{ env.ALLURE_DIR }}" - name: Upload Allure Results - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: allure-macOS-${{ matrix.conda-version }}-Py${{ matrix.python-version }}-${{ matrix.test-type }} path: allure-results.tar.gz - name: Upload Pytest Replay - if: always() + if: '!cancelled()' uses: actions/upload-artifact@v3 with: name: ${{ env.REPLAY_NAME }}-${{ matrix.test-type }} @@ -379,7 +382,12 @@ jobs: aggregate: # only aggregate test suite if there are code changes needs: [changes, linux, windows, macos] - if: needs.changes.outputs.code == 'true' && always() + if: >- + !cancelled() + && ( + github.event_name == 'schedule' + || needs.changes.outputs.code == 'true' + ) runs-on: ubuntu-latest steps: @@ -404,7 +412,7 @@ jobs: analyze: name: Analyze results needs: [linux, windows, macos, aggregate] - if: always() + if: '!cancelled()' runs-on: ubuntu-latest steps: @@ -423,7 +431,7 @@ jobs: # - this is the main repo, and # - we are on the main, feature, or release branch if: >- - success() + !cancelled() && !github.event.repository.fork && ( github.ref_name == 'main' @@ -449,11 +457,37 @@ jobs: clean: true fetch-depth: 0 + # Explicitly use Python 3.11 since each of the OSes has a different default Python + - uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Detect label + shell: python + run: | + from pathlib import Path + from re import match + from os import environ + + if "${{ github.ref_name }}" == "main": + # main branch commits are uploaded to the dev label + label = "dev" + elif "${{ github.ref_name }}".startswith("feature/"): + # feature branch commits are uploaded to a custom label + label = "${{ github.ref_name }}" + else: + # release branch commits are added to the rc label + # see 
https://github.com/conda/infrastructure/issues/760 + _, name = "${{ github.repository }}".split("/") + label = f"rc-{name}-${{ github.ref_name }}" + + Path(environ["GITHUB_ENV"]).write_text(f"ANACONDA_ORG_LABEL={label}") + - name: Create and upload canary build - uses: conda/actions/canary-release@v22.10.0 + uses: conda/actions/canary-release@v23.7.0 with: package-name: ${{ github.event.repository.name }} subdir: ${{ matrix.subdir }} anaconda-org-channel: conda-canary - anaconda-org-label: ${{ github.ref_name == 'main' && 'dev' || github.ref_name }} + anaconda-org-label: ${{ env.ANACONDA_ORG_LABEL }} anaconda-org-token: ${{ secrets.ANACONDA_ORG_CONDA_CANARY_TOKEN }} diff --git a/.mailmap b/.mailmap index 95320e0be9..2f7457f813 100644 --- a/.mailmap +++ b/.mailmap @@ -73,6 +73,7 @@ Daniel Holth Darren Dale Dave Clements Dave Hirschfeld David Hirschfeld +Dave Karetnyk David Froger David Li Derek Ludwig @@ -88,6 +89,7 @@ Ernst Luring Evan Hubinger Evan Klitzke Felix Kühnl +Ferry Firmansjah <103191403+ffirmanff@users.noreply.github.com> Filipe Fernandes ocefpaf Floris Bruynooghe Gabriel Reis @@ -106,6 +108,7 @@ Ilan Schnell Ilan Schnell Ilan Schnell Isuru Fernando Isuru Fernando Ivan Kalev ikalev +Jack Olivieri Jacob Walls Jaime Rodríguez-Guerra James Abbott @@ -129,12 +132,13 @@ John Kirkham John Kirkham John Omotani Johnny Jonathan J. 
Helmus Jonathan Helmus +Jose Diaz-Gonzalez Joseph Crail Joseph Hunkeler Juan Lasheras jlas Julian Rüth Julien Schueller -Jürgen Gmach +Jürgen Gmach Jürgen Gmach Jędrzej Nowak Jedrzej Nowak Kai Tietz Kai Tietz <47363620+katietz@users.noreply.github.com> Kale Franz Kale Franz @@ -203,10 +207,12 @@ Rachel Rigdon rrigdon <45607889+rrigdon@users.noreply.git Rachel Rigdon rrigdon Ray Donnelly Remi Chateauneu +Riadh Fezzani Riccardo Vianello Richard Frank Richard Hattersley Rick Izzo +Rishabh Singh <67859818+rishabh11336@users.noreply.github.com> rishabh11336 <67859818+rishabh11336@users.noreply.github.com> Robert Coop Robert Langlois Robert T. McGibbon Robert McGibbon @@ -216,6 +222,7 @@ Ruben Vorderman Ryan Dale daler Ryan Grout Ryan Grout Ryan Grout Ryan Grout +Ryan Keith Ryan Rylan Chord Satoshi Yagi satoshi Scheah @@ -225,6 +232,7 @@ Sean Yen seanyen Sergio Oller Serhii Kupriienko <79282962+skupr-anaconda@users.noreply.github.com> Shaun Walbridge +Shaun Walbridge <46331011+scdub@users.noreply.github.com> Siu Kwan Lam Sophia Castellarin sophia Sophia Castellarin sophia @@ -262,6 +270,7 @@ Uwe L. 
Korn Vlad Frolov Wes Turner Wim Glenn wim glenn +Wolf Vollprecht Wolfgang Ulmer Yann Yoav Ram diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 70201956a4..34f2c97018 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,8 +13,6 @@ exclude: | test-skeleton )/ | .*\.(patch|diff) | - versioneer.py | - conda_build/_version.py ) repos: # generic verification and formatting @@ -39,7 +37,7 @@ repos: - id: check-merge-conflict # Python verification and formatting - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.1 + rev: v1.5.4 hooks: # auto inject license blurb - id: insert-license @@ -47,29 +45,37 @@ repos: args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol] exclude: ^conda_build/version.py - repo: https://github.com/asottile/pyupgrade - rev: v3.3.2 + rev: v3.13.0 hooks: # upgrade standard Python codes - id: pyupgrade args: [--py38-plus] - - repo: https://github.com/pycqa/isort - rev: 5.12.0 - hooks: - # auto sort Python imports - - id: isort - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.9.1 hooks: # auto format Python codes - id: black - repo: https://github.com/asottile/blacken-docs - rev: 1.13.0 + rev: 1.16.0 hooks: # auto format Python codes within docstrings - id: blacken-docs additional_dependencies: [black] - - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.0.291 + hooks: + - id: ruff + args: [--fix] + - repo: meta + # see https://pre-commit.com/#meta-hooks + hooks: + - id: check-hooks-apply + - id: check-useless-excludes + - repo: local hooks: - # lint Python codes - - id: flake8 + - id: git-diff + name: git diff + entry: git diff --exit-code + language: system + pass_filenames: false + always_run: true diff --git a/.readthedocs.yml b/.readthedocs.yml index 72ad6563db..abdbda6254 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,6 +1,16 @@ version: 2 +build: + os: "ubuntu-22.04" + tools: + python: "3.11" 
+ python: - version: "3" install: - requirements: docs/requirements.txt + +# Build PDF, ePub and zipped HTML +formats: + - epub + - pdf + - htmlzip diff --git a/AUTHORS.md b/AUTHORS.md index 60a06dd9fe..cbfba08e20 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -56,6 +56,7 @@ Authors are sorted alphabetically. * Darren Dale * Dave Clements * Dave Hirschfeld +* Dave Karetnyk * David Froger * David Li * Derek Ludwig @@ -71,6 +72,7 @@ Authors are sorted alphabetically. * Evan Hubinger * Evan Klitzke * Felix Kühnl +* Ferry Firmansjah * Filipe Fernandes * Floris Bruynooghe * Gabriel Reis @@ -88,6 +90,7 @@ Authors are sorted alphabetically. * Ilan Schnell * Isuru Fernando * Ivan Kalev +* Jack Olivieri * Jacob Walls * Jaime Rodríguez-Guerra * James Abbott @@ -108,6 +111,7 @@ Authors are sorted alphabetically. * John Omotani * Johnny * Jonathan J. Helmus +* Jose Diaz-Gonzalez * Joseph Crail * Joseph Hunkeler * Juan Lasheras @@ -168,10 +172,12 @@ Authors are sorted alphabetically. * Rachel Rigdon * Ray Donnelly * Remi Chateauneu +* Riadh Fezzani * Riccardo Vianello * Richard Frank * Richard Hattersley * Rick Izzo +* Rishabh Singh * Robert Coop * Robert Langlois * Robert T. McGibbon @@ -180,6 +186,7 @@ Authors are sorted alphabetically. * Ruben Vorderman * Ryan Dale * Ryan Grout +* Ryan Keith * Rylan Chord * Satoshi Yagi * Scheah @@ -188,6 +195,7 @@ Authors are sorted alphabetically. * Sergio Oller * Serhii Kupriienko * Shaun Walbridge +* Shaun Walbridge * Siu Kwan Lam * Sophia Castellarin * Sophian Guidara @@ -221,6 +229,7 @@ Authors are sorted alphabetically. * Vlad Frolov * Wes Turner * Wim Glenn +* Wolf Vollprecht * Wolfgang Ulmer * Yann * Yoav Ram diff --git a/CHANGELOG.md b/CHANGELOG.md index cdcbe6e6f6..3d14e6556f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,161 @@ [//]: # (current developments) +## 3.27.0 (2023-09-26) + +### Enhancements + +* Remove `glob2` dependency. As of Python 3.5, the '**', operator was available to `glob` when using `recursive=True`. 
Builtin glob is also much faster. (#5005) +* Handle `emscripten-wasm32` and `wasi-wasm32` platforms. (#4813) + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding import-time side effects. (#4949) + +### Deprecations + +* When templating new recipes from a PyPI package, the build script `{{ PYTHON }} -m pip install . -vv` is deprecated in favor of `{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation`. (#4960) + +### Docs + +* Document `~=` (compatibility release) match spec. (#4553) +* Clarify that the `build` prefix is activated _after_ the `host` prefix. (#4942) +* Add explanation that conda-build should be run from the base environment. (#4995) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @DaveKaretnyk made their first contribution in https://github.com/conda/conda-build/pull/5004 +* @boldorider4 made their first contribution in https://github.com/conda/conda-build/pull/4960 +* @jaimergp +* @jezdez +* @jugmac00 +* @kenodegard +* @ryanskeith +* @scdub made their first contribution in https://github.com/conda/conda-build/pull/4965 +* @wolfv made their first contribution in https://github.com/conda/conda-build/pull/4813 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + +## 3.26.1 (2023-08-17) + +### Bug fixes + +* Delay imports in conda command plugin until the command is used, avoiding + import-time side effects including unwanted logging configuration. (#4949) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @jezdez +* @kenodegard +* @pre-commit-ci[bot] + + + +## 3.26.0 (2023-07-18) + +### Enhancements + +* Add `pip` to `env-doc make` command so function works correctly (`pip` is no longer added by default with the python conda package). (#4633) +* Log extra-meta data to make it easier to verify that the right extra-meta data is burned into packages (also helps to co-relate packages and their build-log). 
The feature was first introduced in #4303 and is now improved via the logging call. (#4901) +* Implement subcommands as conda plugins. (#4921) + +### Bug fixes + +* Fix handling of unknown binaries with newer `(py)lief` versions. (#4900) +* Disable `LIEF` logging to remove "Unknown format" warning message. (#4850) +* Revert `enable_static` default value in `conda_build.config` to remove "Failed to get_static_lib_exports" warning messages. (#4850) +* Avoid duplicate logging by not propagating the top-level conda-build logger. (#4903) +* Fix git cloning for repositories with submodules containing local relative paths. (#4914) + +### Deprecations + +* Mark executable invocations (e.g., `conda-build`) as pending deprecation. (#4921) +* Mark module based invocations (e.g., `python -m conda_build.cli.main_build`) as pending deprecation. (#4921) + +### Docs + +* Update `pkg-spec` docs to mention `.conda` package format. (#4633) +* Drop unnecessary Jinja package name variables from `variants.rst` docs file. (#4834) + +### Other + +* Drop duplicate `get_summary` call in `conda_build.skeletons.pypi`. (#3998) +* Fix failing `resolved_packages` test due to recent OpenSSL 3.0.8 release to defaults. (#4912) + +### Contributors + +* @beeankha +* @conda-bot +* @dbast +* @jaimergp +* @jakirkham +* @josegonzalez made their first contribution in https://github.com/conda/conda-build/pull/3998 +* @katietz +* @kenodegard +* @rfezzani made their first contribution in https://github.com/conda/conda-build/pull/4850 +* @ryanskeith +* @sven6002 +* @dependabot[bot] +* @pre-commit-ci[bot] + + + +## 3.25.0 (2023-05-22) + +### Enhancements + +* Noarch packages that use virtual packages have the virtual packages added to the hash contents of the package. This facilitates the building of noarch packages multiple times for different platforms with platform specific dependencies. (#4606) +* Add support for `svn` source credentials (`svn_username` and `svn_password`). 
(#4692) +* Depend on standalone `conda-index` instead of bundled indexing code. (#4828) +* Switch from `setup.py` to `pyproject.toml` and use [Hatchling](https://pypi.org/project/hatchling/) for our build system. (#4840) +* Add Python 3.11 support. (#4852) + +### Bug fixes + +* Ensure `tests/commands` are also run in the presence of `run_test.*` (#4429) +* Require the source when rendering a recipe that uses the `load_file_data` function. (#4817) +* Download packages during build into the correct `subdir` folder. (#4832) +* Use a unique `subdir` variable name when rebuilding the index for multi-output builds. (#4862) + +### Deprecations + +* Inline `conda index` logic is pending deprecation. `conda-build` still provides `conda-index` a.k.a. `conda index` CLI, but uses standalone `conda-index` during builds. (#4828) +* Prefer the [standalone conda-index package](https://conda.github.io/conda-index/), instead of `conda-build index` or `conda index`, to use faster indexing code. (#4828) +* Mark `conda_build.metadata.ns_cfg` as pending deprecation. Use `conda_build.get_selectors.get_selectors` instead. (#4837) +* Mark `conda_build.config.python2_fs_encode` as pending deprecation. (#4843) +* Mark `conda_build.config._ensure_dir` as pending deprecation. Use `stdlib`'s `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` instead. (#4843) + +### Other + +* Format with `black` and replaced pre-commit's `darker` hook with `black`. (#4836) +* Format with `isort` and add pre-commit `isort` hook. (#4836) +* Minor code simplification for `conda_build.index.ChannelIndex._ensuredirs`. (#4843) +* Enable `xattr` test on macOS. 
(#4845) + +### Contributors + +* @beeankha +* @conda-bot +* @dholth +* @duncanmmacleod +* @ffirmanff made their first contribution in https://github.com/conda/conda-build/pull/4692 +* @isuruf +* @jezdez +* @jakirkham +* @jjhelmus +* @kenodegard +* @rishabh11336 made their first contribution in https://github.com/conda/conda-build/pull/4782 +* @ryanskeith made their first contribution in https://github.com/conda/conda-build/pull/4843 +* @pre-commit-ci[bot] + + ## 3.24.0 (2023-03-22) ### Bug fixes @@ -2657,7 +2813,7 @@ https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#host * pyldd: disambiguate java .class files from Mach-O fat files (same magic number) #2328 * fix hash regex for downloaded files in `src_cache` #2330 * fix `zip_keys` becoming a loop dimension when variants passed as object rather than loaded from file #2333 -* fix windows always warning about old compiler activation. Now only warns if {{ compiler() }} is not used. #2333 +* fix windows always warning about old compiler activation. Now only warns if `{{ compiler() }}` is not used. #2333 * Add `LD_RUN_PATH` back into Linux variables for now (may remove later, but will have deprecation cycle) #2334 ### Contributors diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 675651e3ae..683faf9597 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,7 +50,7 @@ impact the functionality of `conda/conda-build` installed in your base environme ``` bash # create/activate standalone dev env - $ CONDA_ENV=conda-build make setup + $ ENV_NAME=conda-build make setup $ conda activate conda-build # Run all tests on Linux and Mac OS X systems (this can take a long time) diff --git a/HOW_WE_USE_GITHUB.md b/HOW_WE_USE_GITHUB.md index 507d96c35b..dc32be1cf1 100644 --- a/HOW_WE_USE_GITHUB.md +++ b/HOW_WE_USE_GITHUB.md @@ -103,7 +103,7 @@ Sorting engineers are a conda governance [sub-team][sub-team]; they are a group ### How do items show up for sorting? 
-New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/projet.yml`][workflow-project]. +New issues that are opened in any of the repositories in the [conda GitHub organization][conda-org] will show up in the "Sorting" tab of the [Planning project][project-planning]. There are two [GitHub Actions][docs-actions] workflows utilized for this purpose; [`.github/workflows/issues.yml`][workflow-issues] and [`.github/workflows/project.yml`][workflow-project]. The GitHub Actions in the [`conda/infrastructure`][infrastructure] repository are viewed as canonical; the [`.github/workflows/sync.yml` workflow][workflow-sync] sends out any modifications to other `conda` repositories from there. @@ -126,8 +126,8 @@ For more information on the sorting process, see [Issue Sorting Procedures](#iss Items move out of the ["Sorting" tab][project-sorting] once the investigatory phase described in [What is done about the issues in the "Sorting" tab?](#what-is-done-about-the-issues-in-the-sorting-tab) has concluded and the sorting engineer has enough information to make a decision about the appropriate resolution schedule for the issue. The additional tabs in the project board that the issues can be moved to include the following: -- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the https://github.com/conda/infrastructure/labels/type%3A%3Asupport label to move an issue to this tab. -- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. 
These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the https://github.com/conda/infrastructure/labels/backlog label to move an issue to this tab. +- **"Support"** - Any issue in the ["Support" tab of the Planning board][project-support] is a request for support and is not a feature request or a bug report. Add the [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) label to move an issue to this tab. +- **"Backlog"** - The issue has revealed a bug or feature request. We have collected enough details to understand the problem/request and to reproduce it on our own. These issues have been moved into the [Backlog tab of the Planning board][project-backlog] at the end of the sorting rotation during Refinement. Add the [`backlog`](https://github.com/conda/infrastructure/labels/backlog) label to move an issue to this tab. - **"Closed"** - The issue was closed due to being a duplicate, being redirected to a different project, was a user error, a question that has been resolved, etc. ### Where do work issues go after being sorted? 
@@ -143,12 +143,12 @@ Issues are "backlogged" when they have been sorted but not yet earmarked for an Global automation procedures synced out from the [`conda/infrastructure`][infrastructure] repo include: - [Marking of issues and pull requests as stale][workflow-stale], resulting in: - - issues marked as https://github.com/conda/infrastructure/labels/type%3A%3Asupport being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total) - - all other inactive issues (not labeled as https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) + - issues marked as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport) being labeled stale after 21 days of inactivity and being closed after 7 further days of inactivity (that is, closed after 30 inactive days total) + - all other inactive issues (not labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport)) being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) - all inactive pull requests being labeled stale after 365 days of inactivity and being closed after 30 further days of inactivity (that is, closed after an approximate total of 1 year and 1 month of inactivity) - [Locking of closed issues and pull requests with no further activity][workflow-lock] after 365 days - [Adding new issues and pull requests to the respective project boards][workflow-project] -- [Indicating an issue is ready for the sorting engineer's attention by toggling https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback with
https://github.com/conda/infrastructure/labels/pending%3A%3Asupport after a contributor leaves a comment][workflow-issues] +- [Indicating an issue is ready for the sorting engineer's attention][workflow-issues] by toggling [`pending::feedback`](https://github.com/conda/infrastructure/labels/pending%3A%3Afeedback) with [`pending::support`](https://github.com/conda/infrastructure/labels/pending%3A%3Asupport) after a contributor leaves a comment - [Verifying that contributors have signed the CLA][workflow-cla] before allowing pull requests to be merged; if the contributor hasn't signed the CLA previously, merging is be blocked until a manual review can be done - [Syncing out templates, labels, workflows, and documentation][workflow-sync] from [`conda/infrastructure`][infrastructure] to the other repositories @@ -166,9 +166,9 @@ Labeling is a very important means for sorting engineers to keep track of the cu Each label has an associated description that clarifies how the label should be used. Hover on the label to see its description. Label colors are used to distinguish labels by category. -Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, https://github.com/conda/infrastructure/labels/type%3A%3Abug, https://github.com/conda/infrastructure/labels/type%3A%3Afeature, and https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation, where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). 
Alternatively, with issues involving specific operating systems (_i.e._, https://github.com/conda/infrastructure/labels/os%3A%3Alinux, https://github.com/conda/infrastructure/labels/os%3A%3Amacos, and https://github.com/conda/infrastructure/labels/os%3A%3Awindows), an issue could be labeled with one or more, depending on the system(s) the issue occurs on. +Generally speaking, labels with the same category are considered mutually exclusive, but in some cases labels sharing the same category can occur concurrently, as they indicate qualifiers as opposed to types. For example, we may have the following types, [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug), [`type::feature`](https://github.com/conda/infrastructure/labels/type%3A%3Afeature), and [`type::documentation`](https://github.com/conda/infrastructure/labels/type%3A%3Adocumentation), where for any one issue there would be _at most_ **one** of these to be defined (_i.e._ an issue should not be a bug _and_ a feature request at the same time). Alternatively, with issues involving specific operating systems (_i.e._, [`os::linux`](https://github.com/conda/infrastructure/labels/os%3A%3Alinux), [`os::macos`](https://github.com/conda/infrastructure/labels/os%3A%3Amacos), and [`os::windows`](https://github.com/conda/infrastructure/labels/os%3A%3Awindows)), an issue could be labeled with one or more, depending on the system(s) the issue occurs on. -Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as https://github.com/conda/infrastructure/labels/type%3A%3Asupport, that issue will be marked https://github.com/conda/infrastructure/labels/stale after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. 
See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details. +Please note that there are also automation policies in place that are affected by labeling. For example, if an issue is labeled as [`type::support`](https://github.com/conda/infrastructure/labels/type%3A%3Asupport), that issue will be marked [`stale`](https://github.com/conda/infrastructure/labels/stale) after 21 days of inactivity and auto-closed after seven more days without activity (30 inactive days total), which is earlier than issues without this label. See [What automation procedures are currently in place?](#what-automation-procedures-are-currently-in-place) for more details. ### What labels are required for each issue? @@ -178,7 +178,7 @@ The `type` labels are exclusive of each other: each sorted issue should have exa The `source` labels are exclusive of each other: each sorted issue should have exactly one `source` label. These labels give information on the sub-group to which the issue's author belongs (_e.g._, a partner, a frequent contributor, the wider community, etc.). Through these labels, maintainers gain insight into how well we're meeting the needs of various groups. -The `severity` labels are exclusive of each other and, while required for the https://github.com/conda/infrastructure/labels/type%3A%bug label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration. +The `severity` labels are exclusive of each other and, while required for the [`type::bug`](https://github.com/conda/infrastructure/labels/type%3A%3Abug) label, they can also be applied to other types to indicate demand or need. These labels help us to prioritize our work. Severity is not the only factor for work prioritization, but it is an important consideration.
Please review the descriptions of the `type`, `source`, and `severity` labels on the [labels page][labels-page] prior to use. @@ -265,6 +265,21 @@ please post details to the [Nucleus forums](https://community.anaconda.cloud/). +
+Slow solving of conda environment + + +
+Hi [@username],
+
+Thanks for voicing your concern about the performance of the classic dependency solver. To fix this, our official recommendation is using the new default "conda-libmamba-solver" instead of the classic solver (more information about the "conda-libmamba-solver" can be found here: https://conda.github.io/conda-libmamba-solver/getting-started/).
+
+In most cases "conda-libmamba-solver" should be significantly faster than the "classic" solver. We hope it provides you with a much better experience going forward.
+
+ +
+ + In order to not have to manually type or copy/paste the above repeatedly, note that it's possible to add text for the most commonly-used responses via [GitHub's "Add Saved Reply" option][docs-saved-reply]. ## Commit Signing diff --git a/Makefile b/Makefile index 649d38b766..db5bd26292 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ SHELL := /bin/bash -o pipefail -o errexit # ENV_NAME=dev TMPDIR=$HOME make test ENV_NAME ?= conda-build DOC_ENV_NAME ?= conda-build-docs -PYTHON_VERSION ?= 3.8 +PYTHON_VERSION ?= 3.11 TMPDIR := $(shell if test -w $(TMPDIR); then echo $(TMPDIR); else echo ./tmp/ ; fi)conda-build-testing # We want to bypass the shell wrapper function and use the binary directly for conda-run specifically @@ -13,7 +13,7 @@ CONDA := $(shell which conda) # Setup env for documents env-docs: - conda create --name $(DOC_ENV_NAME) --channel defaults python=$(PYTHON_VERSION) --yes + conda create --name $(DOC_ENV_NAME) --channel defaults python=$(PYTHON_VERSION) pip --yes $(CONDA) run --name $(DOC_ENV_NAME) pip install -r ./docs/requirements.txt .PHONY: $(MAKECMDGOALS) diff --git a/README.md b/README.md index 9c4c9a2f11..cae61abbfd 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,6 @@ $ conda info $ conda install -n base conda-build ``` - ## Building Your Own Packages You can easily build your own packages for `conda`, and upload them to diff --git a/RELEASE.md b/RELEASE.md index ee0129400f..45e605e9eb 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,22 +1,26 @@ + +[epic template]: https://github.com/conda/conda/issues/new?assignees=&labels=epic&template=epic.yml +[compare]: https://github.com/conda/infrastructure/compare +[new release]: https://github.com/conda/infrastructure/releases/new -[epic template]: {{ repo.url }}/issues/new?assignees=&labels=epic&template=epic.yml [infrastructure]: https://github.com/conda/infrastructure [rever docs]: https://regro.github.io/rever-docs -[compare]: {{ repo.url }}/compare -[new release]: {{ repo.url }}/releases/new 
[release docs]: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes +[merge conflicts]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts +[Anaconda Recipes]: https://github.com/AnacondaRecipes/conda-feedstock +[conda-forge]: https://github.com/conda-forge/conda-feedstock # Release Process -> **Note** -> Throughout this document are references to the version number as `YY.M.0`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. +> **Note:** +> Throughout this document are references to the version number as `YY.M.[$patch_number]`, this should be replaced with the correct version number. Do **not** prefix the version with a lowercase `v`. -## 1. Open the Release Issue. (do this ~1 week prior to release) +## 1. Open the release issue and cut a release branch. (do this ~1 week prior to release) -> **Note** -> The [epic template][epic template] is perfect for this, just remember to remove the {{ repo.url }}/labels/epic label. +> **Note:** +> The new release branch should adhere to the naming convention of `YY.M.x` (make sure to put the `.x` at the end!). In the case of patch/hotfix releases, however, do NOT cut a new release branch; instead, use the previously-cut release branch with the appropriate `YY.M.x` version numbers. Use the issue template below to create the release issue. After creating the release issue, pin it for easy access. @@ -26,7 +30,7 @@ Use the issue template below to create the release issue. After creating the rel ```markdown ### Summary -Placeholder for `{{ repo.name }} YY.M.0` release. +Placeholder for `{{ repo.name }} YY.M.x` release. | Pilot | | |---|---| @@ -40,12 +44,11 @@ Placeholder for `{{ repo.name }} YY.M.0` release. 
[main]: https://github.com/AnacondaRecipes/{{ repo.name }}-feedstock [conda-forge]: https://github.com/conda-forge/{{ repo.name }}-feedstock [ReadTheDocs]: https://readthedocs.com/projects/continuumio-{{ repo.name }}/ -[announcement]: https://github.com/conda/communications #### The week before release week - [ ] Create release branch (named `YY.M.x`) -- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/YY.M.x`) +- [ ] Ensure release candidates are being successfully built (see `conda-canary/label/rc-{{ repo.name }}-YY.M.x`) - [ ] [Complete outstanding PRs][milestone] - [ ] Test release candidates @@ -61,21 +64,30 @@ Placeholder for `{{ repo.name }} YY.M.0` release. - [ ] Hand off to the Anaconda packaging team - [ ] Announce release - - [ ] Create release [announcement draft][announcement] - - [ ] Discourse - - [ ] Twitter - - [ ] Matrix + - Blog Post (optional) + - [ ] conda.org (link to pull request) + - Long form + - [ ] Create release [announcement draft](https://github.com/conda/communications) + - [ ] [Discourse](https://conda.discourse.group/) + - [ ] [Matrix (conda/conda)](https://matrix.to/#/#conda_conda:gitter.im) (this auto posts from Discourse) + - Summary + - [ ] [Twitter](https://twitter.com/condaproject) ``` - +> **Note:** +> The [epic template][epic template] is perfect for this; remember to remove the **`epic`** label. + +## 2. Alert various parties of the upcoming release. (do this ~1 week prior to release) + +Let various interested parties know about the upcoming release; at minimum, conda-forge maintainers should be informed. For major features, a blog post describing the new features should be prepared and posted once the release is completed (see the announcements section of the release issue). -## 2. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. +## 3. Ensure `rever.xsh` and `news/TEMPLATE` are up to date. These are synced from [`conda/infrastructure`][infrastructure].
-

3. Run Rever. (ideally done on the Monday of release week)

+

4. Run rever. (ideally done on the Monday of release week)

Currently, there are only 2 activities we use rever for, (1) aggregating the authors and (2) updating the changelog. Aggregating the authors can be an error-prone process and also suffers from builtin race conditions (_i.e._, to generate an updated `.authors.yml` we need an updated `.mailmap` but to have an updated `.mailmap` we need an updated `.authors.yml`). This is why the following steps are very heavy-handed (and potentially repetitive) in running rever commands, undoing commits, squashing/reordering commits, etc. @@ -104,12 +116,12 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut 2. Create a versioned branch, this is where rever will make its changes: ```bash - (rever) $ git checkout -b release-YY.M.0 + (rever) $ git checkout -b changelog-YY.M.[$patch_number] ``` 2. Run `rever --activities authors`: - > **Note** + > **Note:** > Include `--force` when re-running any rever commands for the same ``, otherwise, rever will skip the activity and no changes will be made (i.e., rever remembers if an activity has been run for a given version). ```bash @@ -132,7 +144,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut - Here's a sample run where we undo the commit made by rever in order to commit the changes to `.authors.yml` separately: ```bash - (rever) $ rever --activities authors --force YY.M.0 + (rever) $ rever --activities authors --force YY.M.[$patch_number] # changes were made to .authors.yml as per the prior bullet (rever) $ git diff --name-only HEAD HEAD~1 @@ -151,7 +163,7 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . 
- (rever) $ git commit -m "Updated .authors.yml" + (rever) $ git commit -m "Update .authors.yml" ``` - Rerun `rever --activities authors` and finally check that your `.mailmap` is correct by running: @@ -176,21 +188,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated .mailmap" + (rever) $ git commit -m "Update .mailmap" ``` - Continue repeating the above processes until the `.authors.yml` and `.mailmap` are corrected to your liking. After completing this, you will have at most two commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap ``` 4. Review news snippets (ensure they are all using the correct Markdown format, **not** reStructuredText) and add additional snippets for undocumented PRs/changes as necessary. - > **Note** + > **Note:** > We've found it useful to name news snippets with the following format: `-`. > > We've also found that we like to include the PR #s inline with the text itself, e.g.: @@ -209,21 +221,21 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Updated news" + (rever) $ git commit -m "Update news" ``` - After completing this, you will have at most three commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 5. 
Run `rever --activities changelog`: - > **Note** + > **Note:** > This has previously been a notoriously fickle step (likely due to incorrect regex patterns in the `rever.xsh` config file and missing `github` keys in `.authors.yml`) so beware of potential hiccups. If this fails, it's highly likely to be an innocent issue. ```bash @@ -243,9 +255,9 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news ``` 6. Now that we have successfully run the activities separately, we wish to run both together. This will ensure that the contributor list, a side-effect of the authors activity, is included in the changelog activity. @@ -258,11 +270,11 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.[$patch_number] + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.[$patch_number] ``` 7. Since rever does not include stats on first-time contributors, we will need to add this manually. 
@@ -273,25 +285,25 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut ```bash (rever) $ git add . - (rever) $ git commit -m "Added first contributions" + (rever) $ git commit -m "Add first-time contributions" ``` - After completing this, you will have at most six commits on your release branch: ```bash (rever) $ git cherry -v main - + 86957814cf235879498ed7806029b8ff5f400034 Updated .authors.yml - + 3ec7491f2f58494a62f1491987d66f499f8113ad Updated .mailmap - + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Updated news - + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Updated authorship for YY.M.0 - + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Updated CHANGELOG for YY.M.0 - + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Added first contributions + + 86957814cf235879498ed7806029b8ff5f400034 Update .authors.yml + + 3ec7491f2f58494a62f1491987d66f499f8113ad Update .mailmap + + 432a9e1b41a3dec8f95a7556632f9a93fdf029fd Update news + + a5c0db938893d2c12cab12a1f7eb3e646ed80373 Update authorship for YY.M.[$patch_number] + + 5e95169d0df4bcdc2da9a6ba4a2561d90e49f75d Update CHANGELOG for YY.M.[$patch_number] + + 93fdf029fd4cf235872c12cab12a1f7e8f95a755 Add first-time contributions ``` 8. Push this versioned branch. ```bash - (rever) $ git push -u upstream release-YY.M.0 + (rever) $ git push -u upstream changelog-YY.M.[$patch_number] ``` 9. Open the Release PR targing the `YY.M.x` branch. @@ -309,30 +321,76 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-10. Update Release Issue to include a link to the Release PR. +10. Update release issue to include a link to the release PR. 11. [Create][new release] the release and **SAVE AS A DRAFT** with the following values: - > **Note** - > Only publish the release after the Release PR is merged, until then always **save as draft**. + > **Note:** + > Only publish the release after the release PR is merged, until then always **save as draft**. | Field | Value | |---|---| - | Choose a tag | `YY.M.0` | + | Choose a tag | `YY.M.[$patch_number]` | | Target | `YY.M.x` | | Body | copy/paste blurb from `CHANGELOG.md` | -## 4. Wait for review and approval of Release PR. +## 5. Wait for review and approval of release PR. + +## 6. Manually test canary build(s). + +### Canary Builds for Manual Testing + +Once the release PRs are filed, successful canary builds will be available on `https://anaconda.org/conda-canary/conda/files?channel=rc-{{ repo.name }}-YY.M.x` for manual testing. + +> **Note:** +> You do not need to apply the `build::review` label for release PRs; every commit to the release branch builds and uploads canary builds to the respective `rc-` label. + +## 7. Merge release PR and publish release. + +To publish the release, go to the project's release page (e.g., https://github.com/conda/conda/releases) and add the release notes from `CHANGELOG.md` to the draft release you created earlier. Then publish the release. + +> **Note:** +> Release notes can be drafted and saved ahead of time. + +## 8. Merge/cherry pick the release branch over to the `main` branch. + +
+Internal process + +1. From the main "< > Code" page of the repository, select the drop down menu next to the `main` branch button and then select "View all branches" at the very bottom. + +2. Find the applicable `YY.M.x` branch and click the "New pull request" button. + +3. "Base" should point to `main` while "Compare" should point to `YY.M.x`. + +4. Ensure that all of the commits being pulled in look accurate, then select "Create pull request". + +> **Note:** +> Make sure NOT to push the "Update Branch" button. If there are [merge conflicts][merge conflicts], create a temporary "connector branch" dedicated to fixing merge conflicts separately from the `YY.M.x` and `main` branches. + +5. Review and merge the pull request the same as any code change pull request. + +> **Note:** +> The commits from the release branch need to be retained in order to be able to compare individual commits; in other words, a "merge commit" is required when merging the resulting pull request vs. a "squash merge". Protected branches will require permissions to be temporarily relaxed in order to enable this action. + +
-## 5. Merge Release PR and Publish Release. +## 9. Open PRs to bump [Anaconda Recipes][Anaconda Recipes] and [conda-forge][conda-forge] feedstocks to use `YY.M.[$patch_number]`. -## 6. Merge/cherry pick the release branch over to the `main` branch. +> **Note:** +> Conda-forge's PRs will be auto-created via the `regro-cf-autotick-bot`. Follow the instructions below if any changes need to be made to the recipe that were not automatically added (these instructions are only necessary for anyone who is _not_ a conda-forge feedstock maintainer, since maintainers can push changes directly to the autotick branch): +> - Create a new branch based off of autotick's branch (autotick's branches usually use the `regro-cf-autotick-bot:XX.YY.[$patch_number]_[short hash]` syntax) +> - Add any changes via commits to that new branch +> - Open a new PR and push it against the `main` branch +> +> Make sure to include a comment on the original `autotick-bot` PR that a new pull request has been created, in order to avoid duplicating work! `regro-cf-autotick-bot` will close the auto-created PR once the new PR is merged. +> +> For more information about this process, please read the ["Pushing to regro-cf-autotick-bot branch" section of the conda-forge documentation](https://conda-forge.org/docs/maintainer/updating_pkgs.html#pushing-to-regro-cf-autotick-bot-branch). -## 7. Open PRs to bump main and conda-forge feedstocks to use `YY.M.0`. -## 8. Hand off to Anaconda's packaging team. +## 10. Hand off to Anaconda's packaging team.
Internal process @@ -343,6 +401,6 @@ Currently, there are only 2 activities we use rever for, (1) aggregating the aut
-## 9. Continue championing and shepherding. +## 11. Continue championing and shepherding. -Remember to continue updating the Release Issue with the latest details as tasks are completed. +Remember to make all relevant announcements and continue to update the release issue with the latest details as tasks are completed. diff --git a/conda_build/__init__.py b/conda_build/__init__.py index 943084b6f4..91367d0d86 100644 --- a/conda_build/__init__.py +++ b/conda_build/__init__.py @@ -12,5 +12,6 @@ "index", "inspect", "metapackage", - "render" "skeleton", + "render", + "skeleton", ] diff --git a/conda_build/api.py b/conda_build/api.py index cc31f6e339..2d5fa7ee7d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -23,6 +23,8 @@ from conda_build.utils import expand_globs as _expand_globs from conda_build.utils import get_logger as _get_logger +from .deprecations import deprecated + def render( recipe_path, @@ -518,6 +520,7 @@ def create_metapackage( ) +@deprecated("3.25.0", "4.0.0", addendum="Use standalone conda-index.") def update_index( dir_paths, config=None, @@ -538,7 +541,7 @@ def update_index( import yaml - from conda_build.index import update_index + from conda_build.index import update_index as legacy_update_index from conda_build.utils import ensure_list dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)] @@ -548,7 +551,7 @@ def update_index( current_index_versions = yaml.safe_load(f) for path in dir_paths: - update_index( + legacy_update_index( path, check_md5=check_md5, channel_name=channel_name, @@ -556,7 +559,6 @@ def update_index( threads=threads, verbose=verbose, progress=progress, - hotfix_source_repo=hotfix_source_repo, subdirs=ensure_list(subdir), current_index_versions=current_index_versions, index_file=kwargs.get("index_file", None), diff --git a/conda_build/bdist_conda.py b/conda_build/bdist_conda.py index 9e9d29e162..6e4a5335b9 100644 --- a/conda_build/bdist_conda.py +++ b/conda_build/bdist_conda.py @@ -8,9 +8,10 @@ 
import sys import time from collections import defaultdict -from distutils.command.install import install -from distutils.dist import Distribution -from distutils.errors import DistutilsGetoptError, DistutilsOptionError + +from setuptools.command.install import install +from setuptools.dist import Distribution +from setuptools.errors import BaseError, OptionError from conda_build import api from conda_build.build import handle_anaconda_upload @@ -22,6 +23,10 @@ # TODO: Add support for all the options that conda build has +class GetoptError(BaseError): + """The option table provided to 'fancy_getopt()' is bogus.""" + + class CondaDistribution(Distribution): """ Distribution subclass that supports bdist_conda options @@ -29,9 +34,6 @@ class CondaDistribution(Distribution): This class is required if you want to pass any bdist_conda specific options to setup(). To use, set distclass=CondaDistribution in setup(). - **NOTE**: If you use setuptools, you must import setuptools before - importing distutils.commands.bdist_conda. - Options that can be passed to setup() (must include distclass=CondaDistribution): @@ -115,7 +117,7 @@ def initialize_options(self): def finalize_options(self): opt_dict = self.distribution.get_option_dict("install") if self.prefix: - raise DistutilsOptionError("--prefix is not allowed") + raise OptionError("--prefix is not allowed") opt_dict["prefix"] = ("bdist_conda", self.config.host_prefix) super().finalize_options() @@ -184,7 +186,7 @@ def run(self): c.read_file(StringIO(newstr)) except Exception as err: # This seems to be the best error here - raise DistutilsGetoptError( + raise GetoptError( "ERROR: entry-points not understood: " + str(err) + "\nThe string was" @@ -203,7 +205,7 @@ def run(self): entry_points[section] = None if not isinstance(entry_points, dict): - raise DistutilsGetoptError( + raise GetoptError( "ERROR: Could not add entry points. 
They were:\n" + entry_points ) else: diff --git a/conda_build/build.py b/conda_build/build.py index cd416fb14e..fa62a238d3 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -25,7 +25,6 @@ from os.path import dirname, isdir, isfile, islink, join import conda_package_handling.api -import glob2 import yaml from bs4 import UnicodeDammit from conda import __version__ as conda_version @@ -37,7 +36,7 @@ from conda_build.config import Config from conda_build.create_test import create_all_test_files from conda_build.exceptions import CondaBuildException, DependencyNeedsBuildingError -from conda_build.index import get_build_index, update_index +from conda_build.index import _delegated_update_index, get_build_index from conda_build.metadata import FIELDS, MetaData from conda_build.post import ( fix_permissions, @@ -1203,11 +1202,11 @@ def get_files_with_prefix(m, replacements, files_in, prefix): for index, replacement in enumerate(replacements): all_matches = have_regex_files( files=[ - f - for f in files + file + for file in files if any( - glob2.fnmatch.fnmatch(f, r) - for r in replacement["glob_patterns"] + fnmatch.fnmatch(file, pattern) + for pattern in replacement["glob_patterns"] ) ], prefix=prefix, @@ -1445,6 +1444,11 @@ def write_about_json(m): extra = m.get_section("extra") # Add burn-in information to extra if m.config.extra_meta: + log = utils.get_logger(__name__) + log.info( + "Adding the following extra-meta data to about.json: %s", + m.config.extra_meta, + ) extra.update(m.config.extra_meta) env = environ.Environment(root_dir) d["root_pkgs"] = env.package_specs() @@ -2105,7 +2109,7 @@ def bundle_conda(output, metadata, env, stats, **kw): tmp_path, final_output, metadata.config.timeout, locking=False ) final_outputs.append(final_output) - update_index( + _delegated_update_index( os.path.dirname(output_folder), verbose=metadata.config.debug, threads=1 ) @@ -2602,6 +2606,7 @@ def build( utils.rm_rf(m.config.info_dir) files1 = 
utils.prefix_files(prefix=m.config.host_prefix) + os.makedirs(m.config.build_folder, exist_ok=True) with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f: f.write("\n".join(sorted(list(files1)))) f.write("\n") @@ -2911,6 +2916,7 @@ def build( locking=m.config.locking, timeout=m.config.timeout, clear_cache=True, + omit_defaults=False, ) get_build_index( subdir=index_subdir, @@ -2922,6 +2928,7 @@ def build( locking=m.config.locking, timeout=m.config.timeout, clear_cache=True, + omit_defaults=False, ) else: if not provision_only: @@ -3052,7 +3059,7 @@ def _construct_metadata_for_test_from_package(package, config): local_channel = os.path.dirname(local_pkg_location) # update indices in the channel - update_index(local_channel, verbose=config.debug, threads=1) + _delegated_update_index(local_channel, verbose=config.debug, threads=1) try: metadata = render_recipe( @@ -3670,7 +3677,7 @@ def tests_failed(package_or_metadata, move_broken, broken_dir, config): ) except OSError: pass - update_index( + _delegated_update_index( os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1 ) sys.exit("TESTS FAILED: " + os.path.basename(pkg)) @@ -4191,10 +4198,12 @@ def clean_build(config, folders=None): def is_package_built(metadata, env, include_local=True): + # bldpkgs_dirs is typically {'$ENVIRONMENT/conda-bld/noarch', '$ENVIRONMENT/conda-bld/osx-arm64'} + # could pop subdirs (last path element) and call update_index() once for d in metadata.config.bldpkgs_dirs: if not os.path.isdir(d): os.makedirs(d) - update_index(d, verbose=metadata.config.debug, warn=False, threads=1) + _delegated_update_index(d, verbose=metadata.config.debug, warn=False, threads=1) subdir = getattr(metadata.config, f"{env}_subdir") urls = [url_path(metadata.config.output_folder), "local"] if include_local else [] diff --git a/conda_build/cli/main_build.py b/conda_build/cli/main_build.py index ca3bb8a3cf..cba6fec6ff 100644 --- a/conda_build/cli/main_build.py +++ 
b/conda_build/cli/main_build.py @@ -4,6 +4,7 @@ import logging import sys import warnings +from glob import glob from itertools import chain from os.path import abspath, expanduser, expandvars from pathlib import Path @@ -11,25 +12,19 @@ import filelock from conda.auxlib.ish import dals from conda.common.io import dashlist -from glob2 import glob -import conda_build.api as api -import conda_build.build as build -import conda_build.source as source -import conda_build.utils as utils -from conda_build.cli.actions import KeyValueAction -from conda_build.cli.main_render import get_render_parser -from conda_build.conda_interface import ( - add_parser_channels, - binstar_upload, - cc_conda_build, -) -from conda_build.config import Config, get_channel_urls, zstd_compression_level_default -from conda_build.utils import LoggingContext +from .. import api, build, source, utils +from ..conda_interface import add_parser_channels, binstar_upload, cc_conda_build +from ..config import Config, get_channel_urls, zstd_compression_level_default +from ..deprecations import deprecated +from ..utils import LoggingContext +from .actions import KeyValueAction +from .main_render import get_render_parser def parse_args(args): p = get_render_parser() + p.prog = "conda build" p.description = dals( """ Tool for building conda packages. 
A conda package is a binary tarball @@ -547,7 +542,7 @@ def execute(args): outputs = [] failed_recipes = [] recipes = chain.from_iterable( - glob(abspath(recipe)) if "*" in recipe else [recipe] + glob(abspath(recipe), recursive=True) if "*" in recipe else [recipe] for recipe in args.recipe ) for recipe in recipes: @@ -588,6 +583,7 @@ def execute(args): return outputs +@deprecated("3.26.0", "4.0.0", addendum="Use `conda build` instead.") def main(): try: execute(sys.argv[1:]) diff --git a/conda_build/cli/main_convert.py b/conda_build/cli/main_convert.py index 43006a1e3b..4c09cfc1da 100644 --- a/conda_build/cli/main_convert.py +++ b/conda_build/cli/main_convert.py @@ -4,8 +4,9 @@ import sys from os.path import abspath, expanduser -from conda_build import api -from conda_build.conda_interface import ArgumentParser +from .. import api +from ..conda_interface import ArgumentParser +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) @@ -36,6 +37,7 @@ def parse_args(args): p = ArgumentParser( + prog="conda convert", description=""" Various tools to convert conda packages. Takes a pure Python package build for one platform and converts it to work on one or more other platforms, or @@ -127,5 +129,6 @@ def execute(args): api.convert(f, **args.__dict__) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda convert` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_debug.py b/conda_build/cli/main_debug.py index 702fbfc798..00c6eeb230 100644 --- a/conda_build/cli/main_debug.py +++ b/conda_build/cli/main_debug.py @@ -4,13 +4,11 @@ import sys from argparse import ArgumentParser -from conda_build import api -from conda_build.cli import validators as valid - -# we extend the render parser because we basically need to render the recipe before -# we can say what env to create. This is not really true for debugging tests, but meh... 
-from conda_build.cli.main_render import get_render_parser -from conda_build.utils import on_win +from .. import api +from ..deprecations import deprecated +from ..utils import on_win +from . import validators as valid +from .main_render import get_render_parser logging.basicConfig(level=logging.INFO) @@ -18,6 +16,7 @@ def get_parser() -> ArgumentParser: """Returns a parser object for this command""" p = get_render_parser() + p.prog = "conda debug" p.description = """ Set up environments and activation scripts to debug your build or test phase. @@ -87,9 +86,9 @@ def get_parser() -> ArgumentParser: return p -def execute(): +def execute(args): parser = get_parser() - args = parser.parse_args() + args = parser.parse_args(args) try: activation_string = api.debug( @@ -119,5 +118,6 @@ def execute(): sys.exit(1) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda debug` instead.") def main(): - return execute() + return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_develop.py b/conda_build/cli/main_develop.py index ec33555748..a7a202e5ff 100644 --- a/conda_build/cli/main_develop.py +++ b/conda_build/cli/main_develop.py @@ -5,14 +5,16 @@ from conda.base.context import context, determine_target_prefix -from conda_build import api -from conda_build.conda_interface import ArgumentParser, add_parser_prefix +from .. import api +from ..conda_interface import ArgumentParser, add_parser_prefix +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( + prog="conda develop", description=""" Install a Python package in 'development mode'. 
@@ -86,5 +88,6 @@ def execute(args): ) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda develop` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_index.py b/conda_build/cli/main_index.py index fe504d4dc4..b0eefa8aa7 100644 --- a/conda_build/cli/main_index.py +++ b/conda_build/cli/main_index.py @@ -4,17 +4,21 @@ import os import sys -from conda_build import api -from conda_build.conda_interface import ArgumentParser -from conda_build.index import MAX_THREADS_DEFAULT -from conda_build.utils import DEFAULT_SUBDIRS +from conda_index.index import MAX_THREADS_DEFAULT +from conda_index.utils import DEFAULT_SUBDIRS + +from .. import api +from ..conda_interface import ArgumentParser +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( - description="Update package index metadata files in given directories." + prog="conda index", + description="Update package index metadata files in given directories. 
" + "Pending deprecated, please use the standalone conda-index project.", ) p.add_argument( @@ -96,6 +100,13 @@ def parse_args(args): def execute(args): _, args = parse_args(args) + deprecated.topic( + "3.25.0", + "4.0.0", + topic="`conda index` and `conda-index`", + addendum="Use the `conda-index` project instead.", + ) + api.update_index( args.dir, check_md5=args.check_md5, @@ -105,11 +116,11 @@ def execute(args): patch_generator=args.patch_generator, verbose=args.verbose, progress=args.progress, - hotfix_source_repo=args.hotfix_source_repo, current_index_versions=args.current_index_versions_file, index_file=args.file, ) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda index` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_inspect.py b/conda_build/cli/main_inspect.py index 472bd2f3af..79e0594a4f 100644 --- a/conda_build/cli/main_inspect.py +++ b/conda_build/cli/main_inspect.py @@ -7,14 +7,16 @@ from conda.base.context import context, determine_target_prefix -from conda_build import api -from conda_build.conda_interface import ArgumentParser, add_parser_prefix +from .. 
import api +from ..conda_interface import ArgumentParser, add_parser_prefix +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( + prog="conda inspect", description="Tools for inspecting conda packages.", epilog=""" Run --help on the subcommands like 'conda inspect linkages --help' to see the @@ -220,5 +222,6 @@ def execute(args): raise ValueError(f"Unrecognized subcommand: {args.subcommand}.") +@deprecated("3.26.0", "4.0.0", addendum="Use `conda inspect` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_metapackage.py b/conda_build/cli/main_metapackage.py index 7657432fdc..71be2e7d3d 100644 --- a/conda_build/cli/main_metapackage.py +++ b/conda_build/cli/main_metapackage.py @@ -4,18 +4,16 @@ import logging import sys -from conda_build import api -from conda_build.conda_interface import ( - ArgumentParser, - add_parser_channels, - binstar_upload, -) +from .. import api +from ..conda_interface import ArgumentParser, add_parser_channels, binstar_upload +from ..deprecations import deprecated logging.basicConfig(level=logging.INFO) def parse_args(args): p = ArgumentParser( + prog="conda metapackage", description=""" Tool for building conda metapackages. A metapackage is a package with no files, only metadata. 
They are typically used to collect several packages @@ -114,5 +112,6 @@ def execute(args): api.create_metapackage(channel_urls=channel_urls, **args.__dict__) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda metapackage` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index 15cb3bdc4b..a563e87c1b 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -8,15 +8,12 @@ import yaml from yaml.parser import ParserError -from conda_build import __version__, api -from conda_build.conda_interface import ( - ArgumentParser, - add_parser_channels, - cc_conda_build, -) -from conda_build.config import get_channel_urls, get_or_merge_config -from conda_build.utils import LoggingContext -from conda_build.variants import get_package_variants, set_language_env_vars +from .. import __version__, api +from ..conda_interface import ArgumentParser, add_parser_channels, cc_conda_build +from ..config import get_channel_urls, get_or_merge_config +from ..deprecations import deprecated +from ..utils import LoggingContext +from ..variants import get_package_variants, set_language_env_vars on_win = sys.platform == "win32" log = logging.getLogger(__name__) @@ -44,6 +41,7 @@ def __call__(self, parser, namespace, values, option_string=None): def get_render_parser(): p = ArgumentParser( + prog="conda render", description=""" Tool for expanding the template meta.yml file (containing Jinja syntax and selectors) into the rendered meta.yml files. 
The template meta.yml file is @@ -245,6 +243,7 @@ def execute(args, print_results=True): return metadata_tuples +@deprecated("3.26.0", "4.0.0", addendum="Use `conda render` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/cli/main_skeleton.py b/conda_build/cli/main_skeleton.py index 7642bc14da..7bb9e3369f 100644 --- a/conda_build/cli/main_skeleton.py +++ b/conda_build/cli/main_skeleton.py @@ -6,9 +6,10 @@ import pkgutil import sys -import conda_build.api as api -from conda_build.conda_interface import ArgumentParser -from conda_build.config import Config +from .. import api +from ..conda_interface import ArgumentParser +from ..config import Config +from ..deprecations import deprecated thisdir = os.path.dirname(os.path.abspath(__file__)) logging.basicConfig(level=logging.INFO) @@ -16,6 +17,7 @@ def parse_args(args): p = ArgumentParser( + prog="conda skeleton", description=""" Generates a boilerplate/skeleton recipe, which you can then edit to create a full recipe. Some simple skeleton recipes may not even need edits. 
@@ -60,6 +62,7 @@ def execute(args): ) +@deprecated("3.26.0", "4.0.0", addendum="Use `conda skeleton` instead.") def main(): return execute(sys.argv[1:]) diff --git a/conda_build/conda_interface.py b/conda_build/conda_interface.py index 93996332ca..92d5ba0678 100644 --- a/conda_build/conda_interface.py +++ b/conda_build/conda_interface.py @@ -4,7 +4,6 @@ import configparser # noqa: F401 import os -import warnings from functools import partial from importlib import import_module # noqa: F401 @@ -78,6 +77,8 @@ from conda.models.channel import get_conda_build_local_url # noqa: F401 from conda.models.dist import Dist, IndexRecord # noqa: F401 +from .deprecations import deprecated + # TODO: Go to references of all properties below and import them from `context` instead binstar_upload = context.binstar_upload default_python = context.default_python @@ -104,21 +105,19 @@ class CrossPlatformStLink: def __call__(self, path: str | os.PathLike) -> int: return self.st_nlink(path) - @classmethod - def st_nlink(cls, path: str | os.PathLike) -> int: - warnings.warn( - "`conda_build.conda_interface.CrossPlatformStLink` is pending deprecation and will be removed in a " - "future release. Please use `os.stat().st_nlink` instead.", - PendingDeprecationWarning, - ) + @staticmethod + @deprecated("3.24.0", "4.0.0", addendum="Use `os.stat().st_nlink` instead.") + def st_nlink(path: str | os.PathLike) -> int: return os.stat(path).st_nlink +@deprecated("3.28.0", "4.0.0") class SignatureError(Exception): # TODO: What is this? 
🤔 pass +@deprecated("3.28.0", "4.0.0") def which_package(path): """ Given the path (of a (presumably) conda installed file) iterate over @@ -137,6 +136,7 @@ def which_package(path): yield dist +@deprecated("3.28.0", "4.0.0") def which_prefix(path): """ Given the path (to a (presumably) conda installed file) return the @@ -159,6 +159,7 @@ def which_prefix(path): return prefix +@deprecated("3.28.0", "4.0.0") def get_installed_version(prefix, pkgs): """ Primarily used by conda-forge, but may be useful in general for checking when diff --git a/conda_build/config.py b/conda_build/config.py index fad7744d29..e1bba06518 100644 --- a/conda_build/config.py +++ b/conda_build/config.py @@ -3,7 +3,6 @@ """ Module to store conda build settings. """ - import copy import math import os @@ -11,7 +10,6 @@ import shutil import sys import time -import warnings from collections import namedtuple from os.path import abspath, expanduser, expandvars, join @@ -24,6 +22,7 @@ subdir, url_path, ) +from .deprecations import deprecated from .utils import get_build_folders, get_conda_operation_locks, get_logger, rm_rf from .variants import get_default_variant @@ -50,7 +49,7 @@ def set_invocation_time(): error_overlinking_default = "false" error_overdepending_default = "false" noarch_python_build_age_default = 0 -enable_static_default = "true" +enable_static_default = "false" no_rewrite_stdout_env_default = "false" ignore_verify_codes_default = [] exit_on_verify_error_default = False @@ -58,28 +57,20 @@ def set_invocation_time(): zstd_compression_level_default = 19 +@deprecated("3.25.0", "4.0.0") def python2_fs_encode(strin): - warnings.warn( - "`conda_build.config.python2_fs_encode` is pending deprecation and will be removed in a future release.", - PendingDeprecationWarning, - ) return strin +@deprecated( + "3.25.0", + "4.0.0", + addendum=( + "Use `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` " + "instead." 
+ ), +) def _ensure_dir(path: os.PathLike): - """Try to ensure a directory exists - - Args: - path (os.PathLike): Path to directory - """ - # this can fail in parallel operation, depending on timing. Just try to make the dir, - # but don't bail if fail. - warnings.warn( - "`conda_build.config._ensure_dir` is pending deprecation and will be removed " - "in a future release. Please use `pathlib.Path.mkdir(exist_ok=True)` or " - "`os.makedirs(exist_ok=True)` instead", - PendingDeprecationWarning, - ) os.makedirs(path, exist_ok=True) @@ -262,19 +253,6 @@ def _get_default_settings(): ] -def print_function_deprecation_warning(func): - def func_wrapper(*args, **kw): - log = get_logger(__name__) - log.warn( - "WARNING: attribute {} is deprecated and will be removed in conda-build 4.0. " - "Please update your code - file issues on the conda-build issue tracker " - "if you need help.".format(func.__name__) - ) - return func(*args, **kw) - - return func_wrapper - - class Config: __file__ = __path__ = __file__ __package__ = __package__ @@ -516,56 +494,56 @@ def build_folder(self): # back compat for conda-build-all - expects CONDA_* vars to be attributes of the config object @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_LUA(self): return self.variant.get("lua", get_default_variant(self)["lua"]) @CONDA_LUA.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_LUA(self, value): self.variant["lua"] = value @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PY(self): value = self.variant.get("python", get_default_variant(self)["python"]) return int("".join(value.split("."))) @CONDA_PY.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PY(self, value): value = str(value) self.variant["python"] = ".".join((value[0], value[1:])) @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_NPY(self): 
value = self.variant.get("numpy", get_default_variant(self)["numpy"]) return int("".join(value.split("."))) @CONDA_NPY.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_NPY(self, value): value = str(value) self.variant["numpy"] = ".".join((value[0], value[1:])) @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PERL(self): return self.variant.get("perl", get_default_variant(self)["perl"]) @CONDA_PERL.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_PERL(self, value): self.variant["perl"] = value @property - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_R(self): return self.variant.get("r_base", get_default_variant(self)["r_base"]) @CONDA_R.setter - @print_function_deprecation_warning + @deprecated("3.0.28", "4.0.0") def CONDA_R(self, value): self.variant["r_base"] = value diff --git a/conda_build/deprecations.py b/conda_build/deprecations.py new file mode 100644 index 0000000000..e81f6e654d --- /dev/null +++ b/conda_build/deprecations.py @@ -0,0 +1,275 @@ +# Copyright (C) 2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +from __future__ import annotations + +import warnings +from functools import wraps +from types import ModuleType +from typing import Any, Callable + +from packaging.version import Version, parse + +from .__version__ import __version__ + + +class DeprecatedError(RuntimeError): + pass + + +# inspired by deprecation (https://deprecation.readthedocs.io/en/latest/) and +# CPython's warnings._deprecated +class DeprecationHandler: + _version: Version + + def __init__(self, version: Version | str): + """Factory to create a deprecation handle for the specified version. + + :param version: The version to compare against when checking deprecation statuses. 
+ """ + try: + self._version = parse(version) + except TypeError: + self._version = parse("0.0.0.dev0+placeholder") + + def __call__( + self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[(Callable), Callable]: + """Deprecation decorator for functions, methods, & classes. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + + def deprecated_decorator(func: Callable) -> Callable: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, + remove_in, + f"{func.__module__}.{func.__qualname__}", + addendum=addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args, **kwargs): + warnings.warn(message, category, stacklevel=2 + stack) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def argument( + self, + deprecate_in: str, + remove_in: str, + argument: str, + *, + rename: str | None = None, + addendum: str | None = None, + stack: int = 0, + ) -> Callable[(Callable), Callable]: + """Deprecation decorator for keyword arguments. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param argument: The argument to deprecate. + :param rename: Optional new argument name. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. 
+ """ + + def deprecated_decorator(func: Callable) -> Callable: + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, + remove_in, + f"{func.__module__}.{func.__qualname__}({argument})", + # provide a default addendum if renaming and no addendum is provided + addendum=f"Use '{rename}' instead." + if rename and not addendum + else addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + @wraps(func) + def inner(*args, **kwargs): + # only warn about argument deprecations if the argument is used + if argument in kwargs: + warnings.warn(message, category, stacklevel=2 + stack) + + # rename argument deprecations as needed + value = kwargs.pop(argument, None) + if rename: + kwargs.setdefault(rename, value) + + return func(*args, **kwargs) + + return inner + + return deprecated_decorator + + def module( + self, + deprecate_in: str, + remove_in: str, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for modules. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + self.topic( + deprecate_in=deprecate_in, + remove_in=remove_in, + topic=self._get_module(stack)[1], + addendum=addendum, + stack=2 + stack, + ) + + def constant( + self, + deprecate_in: str, + remove_in: str, + constant: str, + value: Any, + *, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for module constant/global. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. 
+ :param constant: + :param value: + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect calling module + module, fullname = self._get_module(stack) + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, + remove_in, + f"{fullname}.{constant}", + addendum, + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # patch module level __getattr__ to alert user that it's time to remove something + super_getattr = getattr(module, "__getattr__", None) + + def __getattr__(name: str) -> Any: + if name == constant: + warnings.warn(message, category, stacklevel=2 + stack) + return value + + if super_getattr: + return super_getattr(name) + + raise AttributeError(f"module '{fullname}' has no attribute '{name}'") + + module.__getattr__ = __getattr__ + + def topic( + self, + deprecate_in: str, + remove_in: str, + *, + topic: str, + addendum: str | None = None, + stack: int = 0, + ) -> None: + """Deprecation function for a topic. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param topic: The topic being deprecated. + :param addendum: Optional additional messaging. Useful to indicate what to do instead. + :param stack: Optional stacklevel increment. + """ + # detect function name and generate message + category, message = self._generate_message( + deprecate_in, remove_in, topic, addendum + ) + + # alert developer that it's time to remove something + if not category: + raise DeprecatedError(message) + + # alert user that it's time to remove something + warnings.warn(message, category, stacklevel=2 + stack) + + def _get_module(self, stack: int) -> tuple[ModuleType, str]: + """Detect the module from which we are being called. + + :param stack: The stacklevel increment. 
+ :return: The module and module name. + """ + import inspect # expensive + + try: + frame = inspect.stack()[2 + stack] + module = inspect.getmodule(frame[0]) + return (module, module.__name__) + except (IndexError, AttributeError): + raise DeprecatedError("unable to determine the calling module") from None + + def _generate_message( + self, deprecate_in: str, remove_in: str, prefix: str, addendum: str + ) -> tuple[type[Warning] | None, str]: + """Deprecation decorator for functions, methods, & classes. + + :param deprecate_in: Version in which code will be marked as deprecated. + :param remove_in: Version in which code is expected to be removed. + :param prefix: The message prefix, usually the function name. + :param addendum: Additional messaging. Useful to indicate what to do instead. + :return: The warning category (if applicable) and the message. + """ + deprecate_version = parse(deprecate_in) + remove_version = parse(remove_in) + + if self._version < deprecate_version: + category = PendingDeprecationWarning + warning = f"is pending deprecation and will be removed in {remove_in}." + elif self._version < remove_version: + category = DeprecationWarning + warning = f"is deprecated and will be removed in {remove_in}." + else: + category = None + warning = f"was slated for removal in {remove_in}." 
+ + return ( + category, + " ".join(filter(None, [prefix, warning, addendum])), # message + ) + + +deprecated = DeprecationHandler(__version__) diff --git a/conda_build/environ.py b/conda_build/environ.py index 853da44e02..7ef1b2a33d 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -580,7 +580,7 @@ def get_shlib_ext(host_platform): return ".dll" elif host_platform in ["osx", "darwin"]: return ".dylib" - elif host_platform.startswith("linux"): + elif host_platform.startswith("linux") or host_platform.endswith("-wasm32"): return ".so" elif host_platform == "noarch": # noarch packages should not contain shared libraries, use the system diff --git a/conda_build/index.py b/conda_build/index.py index 32eea4bc8e..690673f0c9 100644 --- a/conda_build/index.py +++ b/conda_build/index.py @@ -1,6 +1,5 @@ # Copyright (C) 2014 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import bz2 import copy import fnmatch @@ -40,6 +39,7 @@ # BAD BAD BAD - conda internals from conda.core.subdir_data import SubdirData from conda.models.channel import Channel +from conda_index.index import update_index as _update_index from conda_package_handling.api import InvalidArchiveError from jinja2 import Environment, PackageLoader from tqdm import tqdm @@ -62,6 +62,7 @@ human_bytes, url_path, ) +from .deprecations import deprecated from .utils import ( CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, @@ -142,8 +143,13 @@ def get_build_index( channel_urls=None, debug=False, verbose=True, - **kwargs, + locking=None, + timeout=None, ): + """ + Used during package builds to create/get a channel including any local or + newly built packages. This function both updates and gets index data. 
+ """ global local_index_timestamp global local_subdir global local_output_folder @@ -194,7 +200,7 @@ def get_build_index( if local_path not in urls: urls.insert(0, local_path) _ensure_valid_channel(output_folder, subdir) - update_index(output_folder, verbose=debug) + _delegated_update_index(output_folder, verbose=debug) # replace noarch with native subdir - this ends up building an index with both the # native content and the noarch content. @@ -202,6 +208,8 @@ def get_build_index( if subdir == "noarch": subdir = conda_interface.subdir try: + # get_index() is like conda reading the index, not conda_index + # creating a new index. cached_index = get_index( channel_urls=urls, prepend=not omit_defaults, @@ -280,6 +288,49 @@ def _ensure_valid_channel(local_folder, subdir): os.makedirs(path) +def _delegated_update_index( + dir_path, + check_md5=False, + channel_name=None, + patch_generator=None, + threads=1, + verbose=False, + progress=False, + subdirs=None, + warn=True, + current_index_versions=None, + debug=False, +): + """ + update_index as called by conda-build, delegating to standalone conda-index. + Needed to allow update_index calls on single subdir. + """ + # conda-build calls update_index on a single subdir internally, but + # conda-index expects to index every subdir under dir_path + parent_path, dirname = os.path.split(dir_path) + if dirname in utils.DEFAULT_SUBDIRS: + dir_path = parent_path + subdirs = [dirname] + + return _update_index( + dir_path, + check_md5=check_md5, + channel_name=channel_name, + patch_generator=patch_generator, + threads=threads, + verbose=verbose, + progress=progress, + subdirs=subdirs, + warn=warn, + current_index_versions=current_index_versions, + debug=debug, + ) + + +# Everything below is deprecated to maintain API/feature compatibility. 
+ + +@deprecated("3.25.0", "4.0.0", addendum="Use standalone conda-index.") def update_index( dir_path, check_md5=False, diff --git a/conda_build/metadata.py b/conda_build/metadata.py index ed2b716084..47f3166727 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -import contextlib import copy import hashlib import json @@ -41,9 +40,37 @@ ) try: - loader = yaml.CLoader -except: - loader = yaml.Loader + Loader = yaml.CLoader +except AttributeError: + Loader = yaml.Loader + + +class StringifyNumbersLoader(Loader): + @classmethod + def remove_implicit_resolver(cls, tag): + if "yaml_implicit_resolvers" not in cls.__dict__: + cls.yaml_implicit_resolvers = { + k: v[:] for k, v in cls.yaml_implicit_resolvers.items() + } + for ch in tuple(cls.yaml_implicit_resolvers): + resolvers = [(t, r) for t, r in cls.yaml_implicit_resolvers[ch] if t != tag] + if resolvers: + cls.yaml_implicit_resolvers[ch] = resolvers + else: + del cls.yaml_implicit_resolvers[ch] + + @classmethod + def remove_constructor(cls, tag): + if "yaml_constructors" not in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + if tag in cls.yaml_constructors: + del cls.yaml_constructors[tag] + + +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_implicit_resolver("tag:yaml.org,2002:int") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:float") +StringifyNumbersLoader.remove_constructor("tag:yaml.org,2002:int") on_win = sys.platform == "win32" @@ -97,14 +124,17 @@ def get_selectors(config: Config) -> dict[str, bool]: linux=plat.startswith("linux-"), linux32=bool(plat == "linux-32"), linux64=bool(plat == "linux-64"), + emscripten=plat.startswith("emscripten-"), + wasi=plat.startswith("wasi-"), arm=plat.startswith("linux-arm"), osx=plat.startswith("osx-"), - unix=plat.startswith(("linux-", "osx-")), + 
unix=plat.startswith(("linux-", "osx-", "emscripten-")), win=plat.startswith("win-"), win32=bool(plat == "win-32"), win64=bool(plat == "win-64"), x86=plat.endswith(("-32", "-64")), x86_64=plat.endswith("-64"), + wasm32=bool(plat.endswith("-wasm32")), os=os, environ=os.environ, nomkl=bool(int(os.environ.get("FEATURE_NOMKL", False))), @@ -261,9 +291,7 @@ def select_lines(data, namespace, variants_in_place): def yamlize(data): try: - with stringify_numbers(): - loaded_data = yaml.load(data, Loader=loader) - return loaded_data + return yaml.load(data, Loader=StringifyNumbersLoader) except yaml.error.YAMLError as e: if "{{" in data: try: @@ -1056,23 +1084,7 @@ def _hash_dependencies(hashing_dependencies, hash_length): return f"h{hash_.hexdigest()}"[: hash_length + 1] -@contextlib.contextmanager -def stringify_numbers(): - # ensure that numbers are not interpreted as ints or floats. That trips up versions - # with trailing zeros. - implicit_resolver_backup = loader.yaml_implicit_resolvers.copy() - for ch in list("0123456789"): - if ch in loader.yaml_implicit_resolvers: - del loader.yaml_implicit_resolvers[ch] - yield - for ch in list("0123456789"): - if ch in implicit_resolver_backup: - loader.yaml_implicit_resolvers[ch] = implicit_resolver_backup[ch] - - class MetaData: - __hash__ = None # declare as non-hashable to avoid its use with memoization - def __init__(self, path, config=None, variant=None): self.undefined_jinja_vars = [] self.config = get_or_merge_config(config, variant=variant) @@ -1583,9 +1595,17 @@ def get_hash_contents(self): # if dependencies are only 'target_platform' then ignore that. 
if dependencies == ["target_platform"]: - return {} + hash_contents = {} else: - return {key: self.config.variant[key] for key in dependencies} + hash_contents = {key: self.config.variant[key] for key in dependencies} + + # include virtual packages in run + run_reqs = self.meta.get("requirements", {}).get("run", []) + virtual_pkgs = [req for req in run_reqs if req.startswith("__")] + + # add name -> match spec mapping for virtual packages + hash_contents.update({pkg.split(" ")[0]: pkg for pkg in virtual_pkgs}) + return hash_contents def hash_dependencies(self): """With arbitrary pinning, we can't depend on the build string as done in diff --git a/conda_build/os_utils/external.py b/conda_build/os_utils/external.py index 18190aba5d..215f395f00 100644 --- a/conda_build/os_utils/external.py +++ b/conda_build/os_utils/external.py @@ -3,10 +3,9 @@ import os import stat import sys +from glob import glob from os.path import expanduser, isfile, join -from glob2 import glob - from conda_build.conda_interface import root_dir @@ -55,7 +54,7 @@ def find_executable(executable, prefix=None, all_matches=False): result = path break if not result and any([f in executable for f in ("*", "?", ".")]): - matches = glob(os.path.join(dir_path, executable)) + matches = glob(os.path.join(dir_path, executable), recursive=True) if matches: if all_matches: all_matches_found.extend(matches) diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index 9b01e5d07d..2cf6ce92ad 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -11,11 +11,10 @@ import struct import sys import threading +from fnmatch import fnmatch from functools import partial from subprocess import PIPE, Popen -import glob2 - from .external import find_executable # lief cannot handle files it doesn't know about gracefully @@ -29,6 +28,7 @@ try: import lief + lief.logging.disable() have_lief = True except: pass @@ -145,7 +145,7 @@ def _set_elf_rpathy_thing(binary, 
old_matching, new_rpath, set_rpath, set_runpat if ( set_runpath and e.tag == lief.ELF.DYNAMIC_TAGS.RUNPATH - and glob2.fnmatch.fnmatch(e.runpath, old_matching) + and fnmatch(e.runpath, old_matching) and e.runpath != new_rpath ): e.runpath = new_rpath @@ -153,7 +153,7 @@ def _set_elf_rpathy_thing(binary, old_matching, new_rpath, set_rpath, set_runpat elif ( set_rpath and e.tag == lief.ELF.DYNAMIC_TAGS.RPATH - and glob2.fnmatch.fnmatch(e.rpath, old_matching) + and fnmatch(e.rpath, old_matching) and e.rpath != new_rpath ): e.rpath = new_rpath @@ -231,6 +231,8 @@ def get_runpaths_or_rpaths_raw(file): def set_rpath(old_matching, new_rpath, file): binary = ensure_binary(file) + if not binary: + return if binary.format == lief.EXE_FORMATS.ELF and ( binary.type == lief.ELF.ELF_CLASS.CLASS32 or binary.type == lief.ELF.ELF_CLASS.CLASS64 @@ -342,7 +344,9 @@ def _get_path_dirs(prefix): def get_uniqueness_key(file): binary = ensure_binary(file) - if binary.format == lief.EXE_FORMATS.MACHO: + if not binary: + return lief.EXE_FORMATS.UNKNOWN + elif binary.format == lief.EXE_FORMATS.MACHO: return binary.name elif binary.format == lief.EXE_FORMATS.ELF and ( # noqa binary.type == lief.ELF.ELF_CLASS.CLASS32 @@ -462,7 +466,9 @@ def inspect_linkages_lief( sysroot = _trim_sysroot(sysroot) default_paths = [] - if binary.format == lief.EXE_FORMATS.ELF: + if not binary: + default_paths = [] + elif binary.format == lief.EXE_FORMATS.ELF: if binary.type == lief.ELF.ELF_CLASS.CLASS64: default_paths = [ "$SYSROOT/lib64", @@ -490,6 +496,8 @@ def inspect_linkages_lief( filename2 = element[0] binary = element[1] uniqueness_key = get_uniqueness_key(binary) + if not binary: + continue if uniqueness_key not in already_seen: parent_exe_dirname = None if binary.format == lief.EXE_FORMATS.PE: diff --git a/conda_build/plugin.py b/conda_build/plugin.py new file mode 100644 index 0000000000..eddb85fe66 --- /dev/null +++ b/conda_build/plugin.py @@ -0,0 +1,108 @@ +# Copyright (C) 2014 Anaconda, Inc +# 
SPDX-License-Identifier: BSD-3-Clause +import conda.plugins + + +# lazy-import to avoid nasty import-time side effects when not using conda-build +def build(*args, **kwargs): + from .cli.main_build import execute + + execute(*args, **kwargs) + + +def convert(*args, **kwargs): + from .cli.main_convert import execute + + execute(*args, **kwargs) + + +def debug(*args, **kwargs): + from .cli.main_debug import execute + + execute(*args, **kwargs) + + +def develop(*args, **kwargs): + from .cli.main_develop import execute + + execute(*args, **kwargs) + + +def index(*args, **kwargs): + # deprecated! use conda-index! + from .cli.main_index import execute + + execute(*args, **kwargs) + + +def inspect(*args, **kwargs): + from .cli.main_inspect import execute + + execute(*args, **kwargs) + + +def metapackage(*args, **kwargs): + from .cli.main_metapackage import execute + + execute(*args, **kwargs) + + +def render(*args, **kwargs): + from .cli.main_render import execute + + execute(*args, **kwargs) + + +def skeleton(*args, **kwargs): + from .cli.main_skeleton import execute + + execute(*args, **kwargs) + + +@conda.plugins.hookimpl +def conda_subcommands(): + yield conda.plugins.CondaSubcommand( + name="build", + summary="Build conda packages from a conda recipe.", + action=build, + ) + yield conda.plugins.CondaSubcommand( + name="convert", + summary="Convert pure Python packages to other platforms (a.k.a., subdirs).", + action=convert, + ) + yield conda.plugins.CondaSubcommand( + name="debug", + summary="Debug the build or test phases of conda recipes.", + action=debug, + ) + yield conda.plugins.CondaSubcommand( + name="develop", + summary="Install a Python package in 'development mode'. Similar to `pip install --editable`.", + action=develop, + ) + yield conda.plugins.CondaSubcommand( + name="index", + summary="Update package index metadata files. 
Pending deprecation, use https://github.com/conda/conda-index instead.", + action=index, + ) + yield conda.plugins.CondaSubcommand( + name="inspect", + summary="Tools for inspecting conda packages.", + action=inspect, + ) + yield conda.plugins.CondaSubcommand( + name="metapackage", + summary="Specialty tool for generating conda metapackage.", + action=metapackage, + ) + yield conda.plugins.CondaSubcommand( + name="render", + summary="Expand a conda recipe into a platform-specific recipe.", + action=render, + ) + yield conda.plugins.CondaSubcommand( + name="skeleton", + summary="Generate boilerplate conda recipes.", + action=skeleton, + ) diff --git a/conda_build/render.py b/conda_build/render.py index 1acc0aaf8c..881898dc9d 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -18,10 +18,10 @@ import yaml +import conda_build.index import conda_build.source as source from conda_build import environ, exceptions, utils from conda_build.exceptions import DependencyNeedsBuildingError -from conda_build.index import get_build_index from conda_build.metadata import MetaData, combine_top_level_metadata_with_output from conda_build.variants import ( filter_by_key_value, @@ -318,10 +318,13 @@ def _read_specs_from_package(pkg_loc, pkg_dist): def execute_download_actions(m, actions, env, package_subset=None, require_files=False): - index, _, _ = get_build_index( - getattr(m.config, f"{env}_subdir"), + subdir = getattr(m.config, f"{env}_subdir") + index, _, _ = conda_build.index.get_build_index( + subdir=subdir, bldpkgs_dir=m.config.bldpkgs_dir, output_folder=m.config.output_folder, + clear_cache=False, + omit_defaults=False, channel_urls=m.config.channel_urls, debug=m.config.debug, verbose=m.config.verbose, diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index 9ebcb4aa13..b1194e6a8b 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -431,7 +431,9 @@ def skeletonize( if noarch_python: 
ordered_recipe["build"]["noarch"] = "python" - recipe_script_cmd = ["{{ PYTHON }} -m pip install . -vv"] + recipe_script_cmd = [ + "{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation" + ] ordered_recipe["build"]["script"] = " ".join( recipe_script_cmd + setup_options ) @@ -866,7 +868,6 @@ def get_package_metadata( if not metadata.get("summary"): metadata["summary"] = get_summary(pkginfo) - metadata["summary"] = get_summary(pkginfo) license_name = get_license_name(package, pkginfo, no_prompt, data) metadata["license"] = clean_license_name(license_name) diff --git a/conda_build/source.py b/conda_build/source.py index dc90054744..c8d21a4c2e 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -400,7 +400,15 @@ def git_mirror_checkout_recursive( # Now that all relative-URL-specified submodules are locally mirrored to # relatively the same place we can go ahead and checkout the submodules. check_call_env( - [git, "submodule", "update", "--init", "--recursive"], + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "update", + "--init", + "--recursive", + ], cwd=checkout_dir, stdout=stdout, stderr=stderr, diff --git a/conda_build/utils.py b/conda_build/utils.py index 58f33eecde..a62d6700be 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -34,6 +34,7 @@ ) from pathlib import Path from threading import Thread +from typing import Iterable import libarchive @@ -62,7 +63,8 @@ import urllib.parse as urlparse import urllib.request as urllib -from glob import glob as glob_glob +from contextlib import ExitStack # noqa: F401 +from glob import glob from conda.api import PackageCacheData # noqa @@ -90,15 +92,6 @@ win_path_to_unix, ) - -# stdlib glob is less feature-rich but considerably faster than glob2 -def glob(pathname, recursive=True): - return glob_glob(pathname, recursive=recursive) - - -# NOQA because it is not used in this file. 
-from contextlib import ExitStack # NOQA - PermissionError = PermissionError # NOQA FileNotFoundError = FileNotFoundError @@ -112,6 +105,8 @@ def glob(pathname, recursive=True): mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE DEFAULT_SUBDIRS = { + "emscripten-wasm32", + "wasi-wasm32", "linux-64", "linux-32", "linux-s390x", @@ -941,7 +936,7 @@ def file_info(path): } -def comma_join(items): +def comma_join(items: Iterable[str], conjunction: str = "and") -> str: """ Like ', '.join(items) but with and @@ -954,11 +949,10 @@ def comma_join(items): >>> comma_join(['a', 'b', 'c']) 'a, b, and c' """ - return ( - " and ".join(items) - if len(items) <= 2 - else ", ".join(items[:-1]) + ", and " + items[-1] - ) + items = tuple(items) + if len(items) <= 2: + return f"{items[0]} {conjunction} {items[1]}" + return f"{', '.join(items[:-1])}, {conjunction} {items[-1]}" def safe_print_unicode(*args, **kwargs): @@ -1035,7 +1029,7 @@ def get_stdlib_dir(prefix, py_ver): lib_dir = os.path.join(prefix, "Lib") else: lib_dir = os.path.join(prefix, "lib") - python_folder = glob(os.path.join(lib_dir, "python?.*")) + python_folder = glob(os.path.join(lib_dir, "python?.*"), recursive=True) python_folder = sorted(filterfalse(islink, python_folder)) if python_folder: lib_dir = os.path.join(lib_dir, python_folder[0]) @@ -1050,7 +1044,7 @@ def get_site_packages(prefix, py_ver): def get_build_folders(croot): # remember, glob is not a regex. 
- return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*")) + return glob(os.path.join(croot, "*" + "[0-9]" * 10 + "*"), recursive=True) def prepend_bin_path(env, prefix, prepend_prefix=False): @@ -1083,7 +1077,7 @@ def sys_path_prepended(prefix): sys.path.insert(1, os.path.join(prefix, "lib", "site-packages")) else: lib_dir = os.path.join(prefix, "lib") - python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*")) + python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*"), recursive=True) if python_dir: python_dir = python_dir[0] sys.path.insert(1, os.path.join(python_dir, "site-packages")) @@ -1268,7 +1262,7 @@ def islist(arg, uniform=False, include_dict=True): :return: Whether `arg` is a `list` :rtype: bool """ - if isinstance(arg, str) or not hasattr(arg, "__iter__"): + if isinstance(arg, str) or not isinstance(arg, Iterable): # str and non-iterables are not lists return False elif not include_dict and isinstance(arg, dict): @@ -1279,6 +1273,7 @@ def islist(arg, uniform=False, include_dict=True): return True # NOTE: not checking for Falsy arg since arg may be a generator + # WARNING: if uniform != False and arg is a generator then arg will be consumed if uniform is True: arg = iter(arg) @@ -1288,7 +1283,7 @@ def islist(arg, uniform=False, include_dict=True): # StopIteration: list is empty, an empty list is still uniform return True # check for explicit type match, do not allow the ambiguity of isinstance - uniform = lambda e: type(e) == etype + uniform = lambda e: type(e) == etype # noqa: E721 try: return all(uniform(e) for e in arg) @@ -1324,7 +1319,7 @@ def expand_globs(path_list, root_dir): files.append(os.path.join(root, folder)) else: # File compared to the globs use / as separator independently of the os - glob_files = glob(path) + glob_files = glob(path, recursive=True) if not glob_files: log = get_logger(__name__) log.error(f"Glob {path} did not match in root_dir {root_dir}") @@ -1404,6 +1399,9 @@ class LoggingContext: "conda_build.index", 
"conda_build.noarch_python", "urllib3.connectionpool", + "conda_index", + "conda_index.index", + "conda_index.index.convert_cache", ] def __init__(self, level=logging.WARN, handler=None, close=True, loggers=None): @@ -1451,7 +1449,7 @@ def get_installed_packages(path): Files are assumed to be in 'index.json' format. """ installed = dict() - for filename in glob(os.path.join(path, "conda-meta", "*.json")): + for filename in glob(os.path.join(path, "conda-meta", "*.json"), recursive=True): with open(filename) as file: data = json.load(file) installed[data["name"]] = data @@ -1707,6 +1705,7 @@ def filter(self, record): dedupe_filter = DuplicateFilter() info_debug_stdout_filter = LessThanFilter(logging.WARNING) warning_error_stderr_filter = GreaterThanFilter(logging.INFO) +level_formatter = logging.Formatter("%(levelname)s: %(message)s") # set filelock's logger to only show warnings by default logging.getLogger("filelock").setLevel(logging.WARN) @@ -1743,11 +1742,17 @@ def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers log.addFilter(dedupe_filter) # these are defaults. They can be overridden by configuring a log config yaml file. 
- if not log.handlers and add_stdout_stderr_handlers: + top_pkg = name.split(".")[0] + if top_pkg == "conda_build": + # we don't want propagation in CLI, but we do want it in tests + # this is a pytest limitation: https://github.com/pytest-dev/pytest/issues/3697 + logging.getLogger(top_pkg).propagate = "PYTEST_CURRENT_TEST" in os.environ + if add_stdout_stderr_handlers and not log.handlers: stdout_handler = logging.StreamHandler(sys.stdout) stderr_handler = logging.StreamHandler(sys.stderr) stdout_handler.addFilter(info_debug_stdout_filter) stderr_handler.addFilter(warning_error_stderr_filter) + stderr_handler.setFormatter(level_formatter) stdout_handler.setLevel(level) stderr_handler.setLevel(level) log.addHandler(stdout_handler) diff --git a/conda_build/variants.py b/conda_build/variants.py index 289a61385c..d7c6841238 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -19,7 +19,7 @@ DEFAULT_VARIANTS = { "python": f"{sys.version_info.major}.{sys.version_info.minor}", - "numpy": "1.21", + "numpy": "1.22", # this one actually needs to be pretty specific. The reason is that cpan skeleton uses the # version to say what's in their standard library. "perl": "5.26.2", diff --git a/conda_build/windows.py b/conda_build/windows.py index c2497580f0..1639c554b9 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -10,8 +10,10 @@ # Leverage the hard work done by setuptools/distutils to find vcvarsall using # either the registry or the VS**COMNTOOLS environment variable try: - from distutils.msvc9compiler import WINSDK_BASE, Reg - from distutils.msvc9compiler import find_vcvarsall as distutils_find_vcvarsall + from setuptools._distutils.msvc9compiler import WINSDK_BASE, Reg + from setuptools._distutils.msvc9compiler import ( + find_vcvarsall as distutils_find_vcvarsall, + ) except: # Allow some imports to work for cross or CONDA_SUBDIR usage. 
pass diff --git a/docs/requirements.txt b/docs/requirements.txt index c30be920a9..b4590377d8 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,20 +1,20 @@ -linkify-it-py==1.0.1 -myst-parser==0.15.2 -Pillow==9.3.0 -PyYAML==5.4.1 -requests==2.26.0 -ruamel.yaml==0.17.16 -Sphinx==4.2.0 -sphinx-argparse==0.3.1 +linkify-it-py==2.0.2 +myst-parser==2.0.0 +Pillow==10.0.1 +PyYAML==6.0.1 +requests==2.31.0 +ruamel.yaml==0.17.32 +Sphinx==7.2.6 +sphinx-argparse==0.4.0 sphinx-autobuild==2021.3.14 -sphinx-rtd-theme==1.0.0 -sphinx-sitemap==2.2.0 -sphinxcontrib-applehelp==1.0.2 -sphinxcontrib-devhelp==1.0.2 -sphinxcontrib-htmlhelp==2.0.0 +sphinx-rtd-theme==1.3.0 +sphinx-sitemap==2.5.1 +sphinxcontrib-applehelp==1.0.7 +sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-htmlhelp==2.0.4 sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-plantuml==0.21 +sphinxcontrib-plantuml==0.26 sphinxcontrib-programoutput==0.17 -sphinxcontrib-qthelp==1.0.3 -sphinxcontrib-serializinghtml==1.1.5 -pylint==2.11.1 +sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-serializinghtml==1.1.9 +pylint==2.17.5 diff --git a/docs/source/conf.py b/docs/source/conf.py index eba0f759da..99a7e5974e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -88,7 +88,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -206,7 +206,7 @@ # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = {"https://docs.python.org/": None} +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} # -- Options for todo extension ---------------------------------------------- diff --git a/docs/source/install-conda-build.rst b/docs/source/install-conda-build.rst index e8cc6c9425..dd52e930ff 100644 --- a/docs/source/install-conda-build.rst +++ b/docs/source/install-conda-build.rst @@ -9,6 +9,42 @@ To enable building conda packages: * update conda and conda-build +.. _-conda-build-wow: + +Way of working +============== + +For proper functioning, it is strongly recommended to install conda-build in +the conda ``base`` environment. Not doing so may lead to problems. + +Explanation +----------- + +With earlier conda / conda-build versions, it was possible to build packages in +your own defined environment, e.g. ``my_build_env``. This was partly driven by +the general conda recommendation not to use the ``base`` env for normal work; +see `Conda Managing Environments`_ for instance. However, conda-build is better +viewed as part of the conda infrastructure, and not as a normal package. Hence, +installing it in the ``base`` env makes more sense. More information: +`Must conda-build be installed in the base envt?`_ + +Other considerations +-------------------- + +* An installation or update of conda-build (in fact, of any package) in the ``base`` + environment needs to be run from an account with the proper permissions + (i.e., the same permissions as were used to install conda and the base env in + the first place via the Miniconda or Anaconda installers). For example, on + Windows that might mean an account with administrator privileges. + +* `conda-verfiy`_ is a useful package that can also be added to the base + environment in order to remove some warnings generated when conda-build runs. + +* For critical CI/CD projects, you might want to pin to an explicit (but recent) + version of conda-build, i.e. 
only update to a newer version of conda-build + and conda once they have been first verified "offline". + + .. _install-conda-build: Installing conda-build @@ -18,6 +54,7 @@ To install conda-build, in your terminal window or an Anaconda Prompt, run: .. code-block:: bash + conda activate base conda install conda-build @@ -31,8 +68,14 @@ To update conda and conda-build, in your terminal window or an Anaconda Prompt, .. code-block:: bash + conda activate base conda update conda conda update conda-build For release notes, see the `conda-build GitHub page `_. + + +.. _`Conda Managing Environments`: https://conda.io/projects/conda/en/latest/user-guide/getting-started.html#managing-environments +.. _`conda-verfiy`: https://github.com/conda/conda-verify +.. _`Must conda-build be installed in the base envt?`: https://github.com/conda/conda-build/issues/4995 diff --git a/docs/source/resources/define-metadata.rst b/docs/source/resources/define-metadata.rst index 891dd759ef..d314349b2b 100644 --- a/docs/source/resources/define-metadata.rst +++ b/docs/source/resources/define-metadata.rst @@ -992,9 +992,10 @@ words, a Python package would list ``python`` here and an R package would list The PREFIX environment variable points to the host prefix. With respect to activation during builds, both the host and build environments are activated. -The build prefix is activated before the host prefix so that the host prefix -has priority over the build prefix. Executables that don't exist in the host -prefix should be found in the build prefix. +The build prefix is activated *after* the host prefix so that the build prefix, +which always contains native executables for the running platform, has priority +over the host prefix, which is not guaranteed to provide native executables (e.g. +when cross-compiling). As of conda-build 3.1.4, the build and host prefixes are always separate when both are defined, or when ``{{ compiler() }}`` Jinja2 functions are used. 
The @@ -1478,10 +1479,9 @@ explicitly in the script section: script: run_test.py -Test requirements for subpackages are not supported. Instead, -subpackage tests install their runtime requirements---but not the -run requirements for the top-level package---and the test-time -requirements of the top-level package. +Test requirements for subpackages can be specified using the optional +`test/requires` section of subpackage tests. Subpackage tests install +their runtime requirements during the test as well. EXAMPLE: In this example, the test for ``subpackage-name`` installs ``some-test-dep`` and ``subpackage-run-req``, but not @@ -1493,16 +1493,15 @@ installs ``some-test-dep`` and ``subpackage-run-req``, but not run: - some-top-level-run-req - test: - requires: - - some-test-dep - outputs: - name: subpackage-name requirements: - subpackage-run-req test: script: run_test.py + requires: + - some-test-dep + Output type @@ -1749,7 +1748,7 @@ practice means changing the conda-build source code. See the `_. For more information, see the `Jinja2 template -documentation `_ +documentation `_ and :ref:`the list of available environment variables `. diff --git a/docs/source/resources/package-spec.rst b/docs/source/resources/package-spec.rst index 601b79b6dd..a3f0c98ac9 100644 --- a/docs/source/resources/package-spec.rst +++ b/docs/source/resources/package-spec.rst @@ -6,14 +6,26 @@ Conda package specification :local: :depth: 1 -A conda package is a bzipped tar archive---.tar.bz2---that -contains: +A conda package is an archive file that contains: * Metadata under the ``info/`` directory. * A collection of files that are installed directly into an install prefix. -The format is identical across platforms and operating systems. +There are currently two formats of archives that are supported: + +.. list-table:: + :widths: 15 70 + + * - **Type** + - **Description** + + * - .tar.bz2 + - The original format of conda packages. Is the default output of conda-build. 
+ * - .conda + - 2nd Gen. This is a more compact and thus faster. Can be outputed from conda-build by setting output in ``.condarc`` file. + +The formats are identical across platforms and operating systems. During the install process, all files are extracted into the install prefix, with the exception of the ones in ``info/``. Installing a conda package into an environment is similar to @@ -47,10 +59,10 @@ file is stored in ``repodata.json``, which is the repository index file, hence the name ``index.json``. The JSON object is a dictionary containing the keys shown below. The filename of the conda package is composed of the first 3 values, as in: -``--.tar.bz2``. +``--.tar.bz2`` or ``--.conda``. .. list-table:: - :widths: 15 15 70 + :widths: 15 15 45 * - **Key** - **Type** @@ -64,8 +76,7 @@ conda package is composed of the first 3 values, as in: * - version - string - The package version. May not contain "-". Conda - acknowledges `PEP 440 - `_. + acknowledges `PEP 440 `_. * - build - string @@ -84,15 +95,13 @@ conda package is composed of the first 3 values, as in: * - build_number - integer - - A non-negative integer representing the build number of - the package. + - A non-negative integer representing the build number of the package. + + Unlike the build string, the ``build_number`` is inspected by conda. + + Conda uses it to sort packages that have otherwise identical names and versions to determine the latest one. - Unlike the build string, the ``build_number`` is inspected by - conda. Conda uses it to sort packages that have otherwise - identical names and versions to determine the latest one. - This is important because new builds that contain bug - fixes for the way a package is built may be added to a - repository. + This is important because new builds that contain bug fixes for the way a package is built may be added to a repository. 
* - depends - list of strings @@ -113,10 +122,12 @@ conda package is composed of the first 3 values, as in: EXAMPLE: ``osx`` - Conda currently does not use this key. Packages for a - specific architecture and platform are usually - distinguished by the repository subdirectory that contains - them---see :ref:`repo-si`. + Conda currently does not use this key. + + Packages for a specific architecture and platform are usually distinguished by the repository subdirectory that contains + them. + + See :ref:`repo-si`. info/files ---------- @@ -263,7 +274,7 @@ the command line with ``conda install``, such as ``conda install python=3.4``. Internally, conda translates the command line syntax to the spec defined in this section. -EXAMPLE: python=3.4 is translated to python 3.4*. +EXAMPLE: python=3.4 is translated to python 3.4.*. ``conda search 'python=3.1'`` does NOT bring up Python 3.10, only Python 3.1.*. Package dependencies are specified using a match specification. A match specification is a space-separated string of 1, 2, or 3 @@ -272,85 +283,73 @@ parts: * The first part is always the exact name of the package. * The second part refers to the version and may contain special - characters: + characters. See table below. - * \| means OR. +* The third part is always the exact build string. When there are + three parts, the second part must be the exact version. - EXAMPLE: ``1.0|1.2`` matches version 1.0 or 1.2. +.. list-table:: Version Special Characters + :widths: 10, 40, 40 + :header-rows: 1 - * \* matches 0 or more characters in the version string. In - terms of regular expressions, it is the same as ``r'.*'``. + * - Symbol + - Meaning + - Example - EXAMPLE: 1.0|1.4* matches 1.0, 1.4 and 1.4.1b2, but not 1.2. + * - <, >, <=, >= + - Relational operators on versions, - * <, >, <=, >=, ==, and != are relational operators on versions, - which are compared using - `PEP-440 `_. For example, - ``<=1.0`` matches ``0.9``, ``0.9.1``, and ``1.0``, but not ``1.0.1``. 
- ``==`` and ``!=`` are exact equality. + which are compared using `PEP-440 `_. + - ``<=1.0`` matches 0.9, 0.9.1, and 1.0, but not 1.0.1. - Pre-release versioning is also supported such that ``>1.0b4`` will match - ``1.0b5`` and ``1.0rc1`` but not ``1.0b4`` or ``1.0a5``. + * - ==, and != + - Exact equality and not equalities. + - ``==0.5.1`` matches 0.5.1 and not anything else while ``!=0.5.1`` matches everything but. - EXAMPLE: <=1.0 matches 0.9, 0.9.1, and 1.0, but not 1.0.1. + * - ~= + - Compatibility Release + - ``~=0.5.3`` is equivalent to ``>=0.5.3, <0.6.0a`` - * , means AND. + * - \| + - OR + - ``1.0|1.2`` matches version 1.0 or 1.2. - EXAMPLE: >=2,<3 matches all packages in the 2 series. 2.0, - 2.1, and 2.9 all match, but 3.0 and 1.0 do not. + * - \* + - Matches 0 or more characters in the version string. - * , has higher precedence than \|, so >=1,<2|>3 means greater - than or equal to 1 AND less than 2 or greater than 3, which - matches 1, 1.3 and 3.0, but not 2.2. + In terms of regular expressions, it is the same as ``r'.*'``. + - ``1.0|1.4*`` matches 1.0, 1.4 and 1.4.1b2, but not 1.2. - Conda parses the version by splitting it into parts separated - by \|. If the part begins with <, >, =, or !, it is parsed as a - relational operator. Otherwise, it is parsed as a version, - possibly containing the "*" operator. + * - , + - AND + - ``>=2,<3`` matches all packages in the 2 series. -* The third part is always the exact build string. When there are - 3 parts, the second part must be the exact version. + 2.0, 2.1, and 2.9 all match, but 3.0 and 1.0 do not. + +.. hint:: + ``,`` has higher precedence than \|, so >=1,<2|>3 means greater than or equal to 1 AND less than 2 or greater than 3, which matches 1, 1.3 and 3.0, but not 2.2. + +.. note:: + For package match specifications, pre-release versioning is also supported such that ``>1.0b4`` will match ``1.0b5`` and ``1.0rc1`` but not ``1.0b4`` or ``1.0a5``. 
+ +Conda parses the version by splitting it into parts separated +by \|. If the part begins with <, >, =, or !, it is parsed as a +relational operator. Otherwise, it is parsed as a version, +possibly containing the "*" operator. Remember that the version specification cannot contain spaces, as spaces are used to delimit the package, version, and build string in the whole match specification. ``python >= 2.7`` is an -invalid match specification. Furthermore, ``python>=2.7`` is +invalid match specification. However, ``"python >= 2.7"`` (with double or single quotes) is matched as any version of a package named ``python>=2.7``. -When using the command line, put double quotes around any package -version specification that contains the space character or any of -the following characters: <, >, \*, or \|. - -EXAMPLE:: - - conda install numpy=1.11 - conda install numpy==1.11 - conda install "numpy>1.11" - conda install "numpy=1.11.1|1.11.3" - conda install "numpy>=1.8,<2" - - -Examples --------- - -The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or -1.11.3. +Examples of Package Specs +------------------------- -The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but -not 2.0. - -The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, -1.11.2, 1.11.18, and so on. - -The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, -and so on. - -The build string constraint "numpy=1.11.2=*nomkl*" matches the -NumPy 1.11.2 packages without MKL but not the normal MKL NumPy +The build string constraint "numpy=1.11.2=*nomkl*" matches the NumPy 1.11.2 packages without MKL, but not the normal MKL NumPy 1.11.2 packages. -The build string constraint "numpy=1.11.1|1.11.3=py36_0" matches -NumPy 1.11.1 or 1.11.3 built for Python 3.6 but not any versions +The build string constraint "numpy=1.11.1|1.11.3=py36_0" matches NumPy 1.11.1 or 1.11.3 built for Python 3.6, but not any versions of NumPy built for Python 3.5 or Python 2.7. 
The following are all valid match specifications for @@ -366,3 +365,32 @@ numpy-1.8.1-py27_0: * numpy >=1.8,<2|1.9 * numpy 1.8.1 py27_0 * numpy=1.8.1=py27_0 + +Command Line Match Spec Examples +-------------------------------- + +When using the command line, put double or single quotes around any package +version specification that contains the space character or any of +the following characters: <, >, \*, or \|. + +.. list-table:: Examples + :widths: 30 60 + :header-rows: 1 + + * - Example + - Meaning + + * - conda install numpy=1.11 + - The fuzzy constraint numpy=1.11 matches 1.11, 1.11.0, 1.11.1, 1.11.2, 1.11.18, and so on. + + * - conda install numpy==1.11 + - The exact constraint numpy==1.11 matches 1.11, 1.11.0, 1.11.0.0, and so on. + + * - conda install "numpy=1.11.1|1.11.3" + - The OR constraint "numpy=1.11.1|1.11.3" matches with 1.11.1 or 1.11.3. + + * - conda install "numpy>1.11" + - Any numpy version 1.12.0a or greater. + + * - conda install "numpy>=1.8,<2" + - The AND constraint "numpy>=1.8,<2" matches with 1.8 and 1.9 but not 2.0. diff --git a/docs/source/resources/variants.rst b/docs/source/resources/variants.rst index d46bdf13e2..3209fd3620 100644 --- a/docs/source/resources/variants.rst +++ b/docs/source/resources/variants.rst @@ -49,7 +49,7 @@ meta.yaml contents like: requirements: build: - - python {{ python }} + - python run: - python @@ -89,7 +89,7 @@ map for the content below. requirements: build: - - boost {{ boost }} + - boost run: - boost @@ -154,9 +154,9 @@ map for the content below. requirements: build: - - numpy {{ numpy }} + - numpy run: - - numpy {{ numpy }} + - numpy For legacy compatibility, Python is pinned implicitly without specifying ``{{ python }}`` in your recipe. 
This is generally intractable to extend to @@ -261,12 +261,12 @@ First, the ``meta.yaml`` file: - name: py-xgboost requirements: - {{ pin_subpackage('libxgboost', exact=True) }} - - python {{ python }} + - python - name: r-xgboost requirements: - {{ pin_subpackage('libxgboost', exact=True) }} - - r-base {{ r_base }} + - r-base Next, the ``conda_build_config.yaml`` file, specifying our build matrix: @@ -343,9 +343,9 @@ Again, with ``meta.yaml`` contents like: requirements: build: - - python {{ python }} + - python run: - - python {{ python }} + - python You could supply a variant to build this recipe like so: @@ -569,7 +569,7 @@ requirements, and a variant that includes 2 NumPy versions: requirements: build: - - numpy {{ numpy }} + - numpy run: - numpy @@ -844,7 +844,7 @@ An example variant/recipe is shown here: requirements: build: - - boost {{ boost }} + - boost run: - boost @@ -890,7 +890,7 @@ function. requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', max_pin='x.x') }} @@ -911,7 +911,7 @@ Each can be passed independently of the other. An example of specifying both: requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', min_pin='x.x', max_pin='x.x') }} @@ -933,7 +933,7 @@ You can also pass the minimum or maximum version directly. 
These arguments super requirements: build: - - numpy {{ numpy }} + - numpy run: - {{ pin_compatible('numpy', lower_bound='1.10', upper_bound='3.0') }} diff --git a/news/4429-ensure-test-commands-and-run_test-both-work b/news/4429-ensure-test-commands-and-run_test-both-work deleted file mode 100644 index 0015ec7cd8..0000000000 --- a/news/4429-ensure-test-commands-and-run_test-both-work +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Ensure that `tests/commands` get run also in the presence of `run_test.*` (#4427) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4692-add-svn-source-credential-support b/news/4692-add-svn-source-credential-support deleted file mode 100644 index d8ac4c37d8..0000000000 --- a/news/4692-add-svn-source-credential-support +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Add support for svn source credential (`svn_username` and `svn_password`). (#4692) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4817-require-source-when-load-file-data-used b/news/4817-require-source-when-load-file-data-used deleted file mode 100644 index a1c5ac52cd..0000000000 --- a/news/4817-require-source-when-load-file-data-used +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Require the source when rendering a recipe that uses the load_file_data function (#4817, fixes #4807) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4832-subdir-downloads b/news/4832-subdir-downloads deleted file mode 100644 index 4b0e344f4c..0000000000 --- a/news/4832-subdir-downloads +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Download packages during build into the correct subdir folder. 
(#4750) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4836-auto-format b/news/4836-auto-format deleted file mode 100644 index 60660bdd5e..0000000000 --- a/news/4836-auto-format +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* Format with black and replaced pre-commit's darker hook with black. (#4836) -* Format with isort and add pre-commit isort hook. (#4836) diff --git a/news/4840-hatchling-build-system b/news/4840-hatchling-build-system deleted file mode 100644 index 40c849137b..0000000000 --- a/news/4840-hatchling-build-system +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* Switch from `setup.py` to `pyproject.toml` and use [Hatchling](https://pypi.org/project/hatchling/) for our build system. (#4840) - -### Bug fixes - -* - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4843-config-cleanup b/news/4843-config-cleanup deleted file mode 100644 index 9db85efc15..0000000000 --- a/news/4843-config-cleanup +++ /dev/null @@ -1,20 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* - -### Deprecations - -* Mark `conda_build.config.python2_fs_encode` as pending deprecation. (#4843) -* Mark `conda_build.config._ensure_dir` as pending deprecation. Use stdlib's `pathlib.Path.mkdir(exist_ok=True)` or `os.makedirs(exist_ok=True)` instead. (#4843) - -### Docs - -* - -### Other - -* Minor code simplification for `conda_build.index.ChannelIndex._ensuredirs`. 
(#4843) diff --git a/news/4862-multi-output-subdir-variable b/news/4862-multi-output-subdir-variable deleted file mode 100644 index 811fe525ac..0000000000 --- a/news/4862-multi-output-subdir-variable +++ /dev/null @@ -1,19 +0,0 @@ -### Enhancements - -* - -### Bug fixes - -* Use a unique subdir variable name when rebuilding the index for multi-output builds (#4862, fixes #4855) - -### Deprecations - -* - -### Docs - -* - -### Other - -* diff --git a/news/4845-enable-xattr-test-macos b/news/5015-ruff similarity index 70% rename from news/4845-enable-xattr-test-macos rename to news/5015-ruff index a1110a6a44..a2ae3705d1 100644 --- a/news/4845-enable-xattr-test-macos +++ b/news/5015-ruff @@ -16,4 +16,4 @@ ### Other -* Enable `xattr` test on macOS. (#4845) +* Use Ruff linter in pre-commit configuration. (#5015) diff --git a/pyproject.toml b/pyproject.toml index d71c386c2b..93522bca6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy" ] @@ -29,9 +30,9 @@ dependencies = [ "beautifulsoup4", "chardet", "conda >=4.13", + "conda-index", "conda-package-handling >=1.3", "filelock", - "glob2 >=0.6", "jinja2", "libarchive-c", "packaging", @@ -65,6 +66,9 @@ conda-debug = "conda_build.cli.main_debug:main" [project.entry-points."distutils.commands"] bdist_conda = "conda_build.bdist_conda:bdist_conda" +[project.entry-points.conda] +conda-build = "conda_build.plugin" + [tool.hatch.version] source = "vcs" @@ -78,7 +82,54 @@ include = ["conda_build", "conda_build/templates/*", "conda_build/cli-*.exe"] version-file = "conda_build/__version__.py" [tool.black] -target-version = ['py38', 'py39', 'py310'] +target-version = ['py38', 'py39', 'py310', 'py311'] + 
+[tool.coverage.run] +# store relative paths in coverage information +relative_files = true + +[tool.coverage.report] +show_missing = true +sort = "Miss" +skip_covered = true +omit = ["conda_build/skeletons/_example_skeleton.py"] -[tool.isort] -profile = "black" +[tool.ruff] +line-length = 180 +# E, W = pycodestyle errors and warnings +# F = pyflakes +# I = isort +# D = pydocstyle +select = ["E", "W", "F", "I", "D1"] +# E402 module level import not at top of file +# E722 do not use bare 'except' +# E731 do not assign a lambda expression, use a def +ignore = ["E402", "E722", "E731"] +# Use PEP 257-style docstrings. +pydocstyle = {convention = "pep257"} + +[tool.pytest.ini_options] +minversion = 3.0 +testpaths = ["tests"] +norecursedirs = ["tests/test-recipes/*"] +addopts = [ + "--color=yes", + # "--cov=conda_build", # passed in test runner scripts instead (avoid debugger) + "--cov-append", + "--cov-branch", + "--cov-report=term-missing", + "--cov-report=xml", + "--durations=16", + "--junitxml=junit.xml", + "--splitting-algorithm=least_duration", + "--store-durations", + "--strict-markers", + "--tb=native", + "-vv", +] +markers = [ + "serial: execute test serially (to avoid race conditions)", + "slow: execute the slow tests if active", + "sanity: execute the sanity tests", + "no_default_testing_config: used internally to disable monkeypatching for testing_config", +] diff --git a/recipe/bld.bat b/recipe/bld.bat deleted file mode 100644 index ea289b187c..0000000000 --- a/recipe/bld.bat +++ /dev/null @@ -1 +0,0 @@ -"$PYTHON" -m pip install . -vv diff --git a/recipe/build.sh b/recipe/build.sh deleted file mode 100644 index 9d7b162c92..0000000000 --- a/recipe/build.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -"$PYTHON" -m pip install . 
-vv diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml new file mode 100644 index 0000000000..a75aff37d1 --- /dev/null +++ b/recipe/conda_build_config.yaml @@ -0,0 +1,5 @@ +python: + - 3.8 + - 3.9 + - 3.10 + - 3.11 diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 0be0f99167..6a3ed0ea27 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -3,9 +3,11 @@ package: version: {{ GIT_DESCRIBE_TAG }}.{{ GIT_BUILD_STR }} source: + # git_url only captures committed code git_url: ../ build: + script: {{ PYTHON }} -m pip install . --no-deps --no-build-isolation -vv entry_points: - conda-build = conda_build.cli.main_build:main - conda-convert = conda_build.cli.main_convert:main @@ -19,90 +21,68 @@ build: requirements: build: - - git + - git # for source/git_url above host: - python - - setuptools + - pip - hatchling >=1.12.2 - hatch-vcs >=0.2.0 + - wheel run: - beautifulsoup4 - chardet - - conda >=4.13 + - conda >=22.11.0 + - conda-index + - conda-package-handling >=1.3 - filelock - jinja2 + - m2-patch >=2.6 # [win] - packaging - - patchelf # [linux] - patch >=2.6 # [not win] - - m2-patch >=2.6 # [win] + - patchelf # [linux] - pkginfo - psutil - py-lief # [not win] - python + - python-libarchive-c + - pytz - pyyaml - requests - six - - glob2 >=0.6 - - pytz - tomli # [py<311] - tqdm - - conda-package-handling >=1.3 - - python-libarchive-c run_constrained: - - conda-verify >=3.0.2 + - conda-verify >=3.1.0 test: + imports: + # high-level import + - conda_build + # new/updated submodules (can be dropped after 1-2 releases) + - conda_build.index files: - test_bdist_conda_setup.py - requires: - - pytest - - pytest-cov - - pytest-mock - # Optional: you can use pytest-xdist to run the tests in parallel - # - pytest-env # [win] - # - pytest-xdist - # - conda-verify >=3.0.3 # todo once it is released commands: - - type -P conda-build # [unix] - - where conda-build # [win] - - conda build -h - - type -P conda-convert # [unix] - - where conda-convert # 
[win] - - conda convert -h - - type -P conda-develop # [unix] - - where conda-develop # [win] - - conda develop -h - - type -P conda-index # [unix] - - where conda-index # [win] - - conda index -h - - type -P conda-inspect # [unix] - - where conda-inspect # [win] - - conda inspect -h - - conda inspect linkages -h \| grep "--name ENVIRONMENT" # [unix] - - conda inspect objects -h \| grep "--name ENVIRONMENT" # [osx] - - type -P conda-metapackage # [unix] - - where conda-metapackage # [win] - - conda metapackage -h - - type -P conda-render # [unix] - - where conda-render # [win] - - conda render -h - - type -P conda-skeleton # [unix] - - where conda-skeleton # [win] - - conda skeleton -h - - where conda-debug # [win] - - conda debug -h - # test that conda sees entry points appropriately in help + # builtin subcommands - conda --help - - # Check for bdist_conda + - conda build --help + - conda convert --help + - conda develop --help + - conda index --help + - conda inspect --help + - conda inspect linkages --help # [unix] + - conda inspect objects --help # [osx] + - conda metapackage --help + - conda render --help + - conda skeleton --help + - conda debug --help + # bdist_conda - python test_bdist_conda_setup.py bdist_conda --help - imports: - - conda_build - source_files: - - tests about: - home: https://github.com/conda/conda-build + home: https://conda.org license: BSD-3-Clause license_file: LICENSE summary: Canary release of conda-build + doc_url: https://conda.io/projects/conda-build/en/latest/ + dev_url: https://github.com/conda/conda-build diff --git a/recipe/run_test.py b/recipe/run_test.py deleted file mode 100644 index d9a40df806..0000000000 --- a/recipe/run_test.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -import conda_build - -print("conda_build.__version__: %s" % conda_build.__version__) diff --git a/rever.xsh b/rever.xsh index 644107dfd9..577ecfa980 100644 --- a/rever.xsh +++ b/rever.xsh 
@@ -26,3 +26,9 @@ $CHANGELOG_CATEGORIES = [ $CHANGELOG_CATEGORY_TITLE_FORMAT = "### {category}\n\n" $CHANGELOG_AUTHORS_TITLE = "Contributors" $CHANGELOG_AUTHORS_FORMAT = "* @{github}\n" + +try: + # allow repository to customize synchronized-from-infa rever config + from rever_overrides import * +except ImportError: + pass diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 867ecb1f15..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,37 +0,0 @@ -[flake8] -# leave 180 line length for historical/incremental pre-commit purposes -# rely on black/darker to enforce 88 standard -max-line-length = 180 -# E122: Continuation line missing indentation or outdented -# E123: Closing bracket does not match indentation of opening bracket's line -# E126: Continuation line over-indented for hanging indent -# E127: Continuation line over-indented for visual indent -# E128: Continuation line under-indented for visual indent -# E203: Whitespace before ':' [required by black/darker] -# E722: Do not use bare except, specify exception instead -# E731: Do not assign a lambda expression, use a def -# W503: Line break occurred before a binary operator -# W504: Line break occurred after a binary operator -# W605: Invalid escape sequence 'x' -ignore = E122,E123,E126,E127,E128,E203,E731,E722,W503,W504,W605 -exclude = build,conda_build/_version.py,tests,recipe,.git,versioneer.py,conda,relative,benchmarks,.asv,docs,rever - -[tool:pytest] -norecursedirs= tests/test-recipes .* *.egg* build dist recipe -addopts = - --junitxml=junit.xml - --ignore setup.py - --ignore run_test.py - --cov-report term-missing - --tb native - --strict - --strict-markers - --durations=16 -log_level = DEBUG -env = - PYTHONHASHSEED=0 -markers = - serial: execute test serially (to avoid race conditions) - slow: execute the slow tests if active - sanity: execute the sanity tests - no_default_testing_config: used internally to disable monkeypatching for testing_config diff --git a/tests/cli/test_main_render.py 
b/tests/cli/test_main_render.py index 6c946c68ba..7f385118cc 100644 --- a/tests/cli/test_main_render.py +++ b/tests/cli/test_main_render.py @@ -44,27 +44,26 @@ def test_render_add_channel(): ), f"Expected version number 1.0 on successful rendering, but got {required_package_details[1]}" -def test_render_without_channel_fails(): +def test_render_without_channel_fails(tmp_path): # do make extra channel available, so the required package should not be found - with TemporaryDirectory() as tmpdir: - rendered_filename = os.path.join(tmpdir, "out.yaml") - args = [ - "--override-channels", - os.path.join(metadata_dir, "_recipe_requiring_external_channel"), - "--file", - rendered_filename, - ] - main_render.execute(args) - with open(rendered_filename) as rendered_file: - rendered_meta = yaml.safe_load(rendered_file) - required_package_string = [ - pkg - for pkg in rendered_meta.get("requirements", {}).get("build", []) - if "conda_build_test_requirement" in pkg - ][0] - assert ( - required_package_string == "conda_build_test_requirement" - ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" + rendered_filename = tmp_path / "out.yaml" + args = [ + "--override-channels", + os.path.join(metadata_dir, "_recipe_requiring_external_channel"), + "--file", + str(rendered_filename), + ] + main_render.execute(args) + with open(rendered_filename) as rendered_file: + rendered_meta = yaml.safe_load(rendered_file) + required_package_string = [ + pkg + for pkg in rendered_meta.get("requirements", {}).get("build", []) + if "conda_build_test_requirement" in pkg + ][0] + assert ( + required_package_string == "conda_build_test_requirement" + ), f"Expected to get only base package name because it should not be found, but got :{required_package_string}" def test_render_output_build_path(testing_workdir, testing_metadata, capfd, caplog): diff --git a/tests/requirements.txt b/tests/requirements.txt index fe7c767c60..b0ac07be77 100644 --- 
a/tests/requirements.txt +++ b/tests/requirements.txt @@ -4,13 +4,13 @@ anaconda-client beautifulsoup4 chardet conda >=4.13 +conda-index conda-package-handling conda-verify contextlib2 cytoolz filelock git -glob2 >=0.6 jinja2 numpy perl @@ -18,7 +18,6 @@ pip pkginfo psutil py-lief -pycrypto pyflakes pytest pytest-cov @@ -28,6 +27,7 @@ pytest-replay pytest-rerunfailures pytest-xdist python-libarchive-c +conda-forge::allure-pytest pytz requests ripgrep diff --git a/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml b/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml new file mode 100644 index 0000000000..e636c4152c --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_with_no_platform_deps/meta.yaml @@ -0,0 +1,14 @@ +package: + name: test-noarch-with-no-platform-deps + version: 0.0.1 + +build: + number: 0 + noarch: python + +requirements: + build: + host: + - python >=3.7 + run: + - python >=3.7 diff --git a/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml b/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml new file mode 100644 index 0000000000..b0e2f804e2 --- /dev/null +++ b/tests/test-recipes/metadata/_noarch_with_platform_deps/meta.yaml @@ -0,0 +1,20 @@ +package: + name: test-noarch-with-platform-deps + version: 0.0.1 + +build: + number: 0 + noarch: python + +requirements: + build: + host: + - python >=3.7 + run: + - python >=3.7 + - colorama # [win] + - __win # [win] + - appnope # [osx] + - __osx # [osx] + - __archspec * ppc64le # [ppc64le] + - __linux # [linux] diff --git a/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml b/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml index 7619b42085..663d173590 100644 --- a/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml +++ b/tests/test-recipes/metadata/_resolved_packages_host_build/meta.yaml @@ -6,8 +6,10 @@ requirements: build: - numpy - nomkl # [unix] + - openssl host: - curl + - {{ 
pin_compatible('openssl', exact=True) }} run: {% for package in resolved_packages('build') %} - {{ package }} diff --git a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py index 1411131d15..041334fbd1 100644 --- a/tests/test-recipes/metadata/_script_win_creates_exe/setup.py +++ b/tests/test-recipes/metadata/_script_win_creates_exe/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup(name='foobar', version='1.0', scripts=['test-script'] diff --git a/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py b/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py index 1411131d15..041334fbd1 100644 --- a/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py +++ b/tests/test-recipes/metadata/_script_win_creates_exe_garbled/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup(name='foobar', version='1.0', scripts=['test-script'] diff --git a/tests/test-recipes/metadata/jinja_load_setuptools/setup.py b/tests/test-recipes/metadata/jinja_load_setuptools/setup.py index 5a17ae9b0a..8e09c8abe4 100644 --- a/tests/test-recipes/metadata/jinja_load_setuptools/setup.py +++ b/tests/test-recipes/metadata/jinja_load_setuptools/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup VERSION = '1.test' diff --git a/tests/test-recipes/metadata/jinja_load_yaml/environment.yml b/tests/test-recipes/metadata/jinja_load_yaml/environment.yml index 79859474e3..3bf9010a3b 100644 --- a/tests/test-recipes/metadata/jinja_load_yaml/environment.yml +++ b/tests/test-recipes/metadata/jinja_load_yaml/environment.yml @@ -2,6 +2,6 @@ name: foo channels: - defaults dependencies: - - python=3.10 + - python - tqdm prefix: /home/abraham/.conda/envs/foo diff --git a/tests/test-recipes/metadata/state_variables/setup.py b/tests/test-recipes/metadata/state_variables/setup.py index 
2f8660659e..ecd50c54c6 100644 --- a/tests/test-recipes/metadata/state_variables/setup.py +++ b/tests/test-recipes/metadata/state_variables/setup.py @@ -1,6 +1,5 @@ import os from setuptools import setup -# from distutils.core import setup if not os.getenv("CONDA_BUILD_STATE") == "RENDER": raise ValueError("Conda build state not set correctly") diff --git a/tests/test-recipes/test-package/setup.py b/tests/test-recipes/test-package/setup.py index f3ec4e663c..b0f90841cb 100644 --- a/tests/test-recipes/test-package/setup.py +++ b/tests/test-recipes/test-package/setup.py @@ -1,6 +1,5 @@ import sys from setuptools import setup -# from distutils.core import setup # test with an old version of Python that we'll never normally use if sys.version_info[:2] == (3, 5): diff --git a/tests/test-recipes/variants/numpy_used/conda_build_config.yaml b/tests/test-recipes/variants/numpy_used/conda_build_config.yaml index 6a2ce5d722..56a761a011 100644 --- a/tests/test-recipes/variants/numpy_used/conda_build_config.yaml +++ b/tests/test-recipes/variants/numpy_used/conda_build_config.yaml @@ -2,5 +2,5 @@ python: - 3.8 - 3.9 numpy: - - 1.16 - 1.19 + - 1.22 diff --git a/tests/test_api_build.py b/tests/test_api_build.py index 15e1e641d7..e0c786dcc8 100644 --- a/tests/test_api_build.py +++ b/tests/test_api_build.py @@ -430,7 +430,7 @@ def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatc platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = sorted({"32", *platforms}) - compilers = ["3.9", "3.10"] + compilers = ["3.10", "3.11"] msvc_vers = ["14.0"] else: msvc_vers = [] @@ -684,6 +684,8 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke check_call_env( [ git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), "submodule", "add", convert_path_for_cygwin_or_msys2(git, absolute_sub), @@ -692,14 +694,33 @@ def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monke 
env=sys_git_env, ) check_call_env( - [git, "submodule", "add", "../relative_sub", "relative"], + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "add", + "../relative_sub", + "relative", + ], env=sys_git_env, ) else: # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we # can change this to `git submodule update --recursive`. gits = git.replace("\\", "/") - check_call_env([git, "submodule", "foreach", gits, "pull"], env=sys_git_env) + check_call_env( + [ + git, + # CVE-2022-39253 + *("-c", "protocol.file.allow=always"), + "submodule", + "foreach", + gits, + "pull", + ], + env=sys_git_env, + ) check_call_env( [git, "commit", "-am", f"added submodules@{tag}"], env=sys_git_env ) @@ -1826,12 +1847,17 @@ def test_ignore_verify_codes(testing_config): @pytest.mark.sanity -def test_extra_meta(testing_config): +def test_extra_meta(testing_config, caplog): recipe_dir = os.path.join(metadata_dir, "_extra_meta") - testing_config.extra_meta = {"foo": "bar"} + extra_meta_data = {"foo": "bar"} + testing_config.extra_meta = extra_meta_data outputs = api.build(recipe_dir, config=testing_config) about = json.loads(package_has_file(outputs[0], "info/about.json")) assert "foo" in about["extra"] and about["extra"]["foo"] == "bar" + assert ( + f"Adding the following extra-meta data to about.json: {extra_meta_data}" + in caplog.text + ) def test_symlink_dirs_in_always_include_files(testing_config): diff --git a/tests/test_api_consistency.py b/tests/test_api_consistency.py index 15e4321d5d..502046e5ad 100644 --- a/tests/test_api_consistency.py +++ b/tests/test_api_consistency.py @@ -3,6 +3,7 @@ # This file makes sure that our API has not changed. Doing so can not be accidental. Whenever it # happens, we should bump our major build number, because we may have broken someone. 
+import inspect import sys from inspect import getfullargspec as getargspec @@ -185,8 +186,9 @@ def test_api_create_metapackage(): def test_api_update_index(): - argspec = getargspec(api.update_index) - assert argspec.args == [ + # getfullargspec() isn't friends with functools.wraps + argspec = inspect.signature(api.update_index) + assert list(argspec.parameters) == [ "dir_paths", "config", "force", @@ -200,8 +202,10 @@ def test_api_update_index(): "progress", "hotfix_source_repo", "current_index_versions", + "kwargs", ] - assert argspec.defaults == ( + assert tuple(parameter.default for parameter in argspec.parameters.values()) == ( + inspect._empty, None, False, False, @@ -214,4 +218,5 @@ def test_api_update_index(): False, None, None, + inspect._empty, ) diff --git a/tests/test_api_render.py b/tests/test_api_render.py index c2cd8999e2..a68f69135e 100644 --- a/tests/test_api_render.py +++ b/tests/test_api_render.py @@ -182,10 +182,38 @@ def test_cross_info_index_platform(testing_config): assert metadata.config.host_platform == info_index["platform"] +def test_noarch_with_platform_deps(testing_workdir, testing_config): + recipe_path = os.path.join(metadata_dir, "_noarch_with_platform_deps") + build_ids = {} + for subdir_ in ["linux-64", "linux-aarch64", "linux-ppc64le", "osx-64", "win-64"]: + platform, arch = subdir_.split("-") + m = api.render( + recipe_path, config=testing_config, platform=platform, arch=arch + )[0][0] + build_ids[subdir_] = m.build_id() + + # one hash for each platform, plus one for the archspec selector + assert len(set(build_ids.values())) == 4 + assert build_ids["linux-64"] == build_ids["linux-aarch64"] + assert ( + build_ids["linux-64"] != build_ids["linux-ppc64le"] + ) # not the same due to archspec + + +def test_noarch_with_no_platform_deps(testing_workdir, testing_config): + recipe_path = os.path.join(metadata_dir, "_noarch_with_no_platform_deps") + build_ids = set() + for platform in ["osx", "linux", "win"]: + m = 
api.render(recipe_path, config=testing_config, platform=platform)[0][0] + build_ids.add(m.build_id()) + + assert len(build_ids) == 1 + + def test_setting_condarc_vars_with_env_var_expansion(testing_workdir): os.makedirs("config") # python won't be used - the stuff in the recipe folder will override it - python_versions = ["2.6", "3.4", "3.10"] + python_versions = ["2.6", "3.4", "3.11"] config = {"python": python_versions, "bzip2": ["0.9", "1.0"]} with open(os.path.join("config", "conda_build_config.yaml"), "w") as f: yaml.dump(config, f, default_flow_style=False) diff --git a/tests/test_api_skeleton.py b/tests/test_api_skeleton.py index 10d9a6973e..514d469c56 100644 --- a/tests/test_api_skeleton.py +++ b/tests/test_api_skeleton.py @@ -28,13 +28,19 @@ from conda_build.utils import on_win from conda_build.version import _parse as parse_version -SYMPY_URL = "https://pypi.python.org/packages/source/s/sympy/sympy-1.10.tar.gz#md5=b3f5189ad782bbcb1bedc1ec2ca12f29" +SYMPY_URL = ( + "https://files.pythonhosted.org/packages/7d/23/70fa970c07f0960f7543af982d2554be805e1034b9dcee9cb3082ce80f80/sympy-1.10.tar.gz" + "#sha256=6cf85a5cfe8fff69553e745b05128de6fc8de8f291965c63871c79701dc6efc9" +) PYLINT_VERSION = "2.3.1" PYLINT_HASH_TYPE = "sha256" PYLINT_HASH_VALUE = "723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1" +PYLINT_HASH_VALUE_BLAKE2 = ( + "018b538911c0ebc2529f15004f4cb07e3ca562bb9aacea5df89cc25b62e01891" +) PYLINT_FILENAME = f"pylint-{PYLINT_VERSION}.tar.gz" -PYLINT_URL = f"https://pypi.python.org/packages/source/p/pylint/{PYLINT_FILENAME}#{PYLINT_HASH_TYPE}={PYLINT_HASH_VALUE}" +PYLINT_URL = f"https://files.pythonhosted.org/packages/{PYLINT_HASH_VALUE_BLAKE2[:2]}/{PYLINT_HASH_VALUE_BLAKE2[2:4]}/{PYLINT_HASH_VALUE_BLAKE2[4:]}/{PYLINT_FILENAME}" @pytest.fixture diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py new file mode 100644 index 0000000000..3df998fe1f --- /dev/null +++ b/tests/test_deprecations.py @@ -0,0 +1,161 @@ +# Copyright (C) 
2014 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (C) 2012 Anaconda, Inc +# SPDX-License-Identifier: BSD-3-Clause +import sys + +import pytest +from conda.deprecations import DeprecatedError, DeprecationHandler + + +@pytest.fixture(scope="module") +def deprecated_v1(): + return DeprecationHandler("1.0") + + +@pytest.fixture(scope="module") +def deprecated_v2(): + return DeprecationHandler("2.0") + + +@pytest.fixture(scope="module") +def deprecated_v3(): + return DeprecationHandler("3.0") + + +def test_pending(deprecated_v1): + @deprecated_v1("2.0", "3.0") + def foo(): + return True + + # alerting user that a function will be unavailable + with pytest.deprecated_call(match="pending deprecation"): + assert foo() + + +def test_deprecated(deprecated_v2): + @deprecated_v2("2.0", "3.0") + def foo(): + return True + + # alerting user that a function will be unavailable + with pytest.deprecated_call(match="deprecated"): + assert foo() + + +def test_remove(deprecated_v3): + # alerting developer that a function needs to be removed + with pytest.raises(DeprecatedError): + + @deprecated_v3("2.0", "3.0") + def foo(): + return True + + +def test_arguments_pending(deprecated_v1): + @deprecated_v1.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + assert foo(1, 2, three=3) + + # normal usage not needing deprecation + assert foo(1, 2) + + +def test_arguments_deprecated(deprecated_v2): + @deprecated_v2.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + # too many arguments, can only deprecate keyword arguments + with pytest.raises(TypeError): + assert foo(1, 2, 3) + + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + assert foo(1, 2, three=3) + + # normal usage not 
needing deprecation + assert foo(1, 2) + + +def test_arguments_remove(deprecated_v3): + # alerting developer that a keyword argument needs to be removed + with pytest.raises(DeprecatedError): + + @deprecated_v3.argument("2.0", "3.0", "three") + def foo(one, two): + return True + + +def test_module_pending(deprecated_v1): + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + deprecated_v1.module("2.0", "3.0") + + +def test_module_deprecated(deprecated_v2): + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + deprecated_v2.module("2.0", "3.0") + + +def test_module_remove(deprecated_v3): + # alerting developer that a module needs to be removed + with pytest.raises(DeprecatedError): + deprecated_v3.module("2.0", "3.0") + + +def test_constant_pending(deprecated_v1): + deprecated_v1.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + module.SOME_CONSTANT + + +def test_constant_deprecated(deprecated_v2): + deprecated_v2.constant("2.0", "3.0", "SOME_CONSTANT", 42) + module = sys.modules[__name__] + + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + module.SOME_CONSTANT + + +def test_constant_remove(deprecated_v3): + # alerting developer that a module needs to be removed + with pytest.raises(DeprecatedError): + deprecated_v3.constant("2.0", "3.0", "SOME_CONSTANT", 42) + + +def test_topic_pending(deprecated_v1): + # alerting user to pending deprecation + with pytest.deprecated_call(match="pending deprecation"): + deprecated_v1.topic("2.0", "3.0", topic="Some special topic") + + +def test_topic_deprecated(deprecated_v2): + # alerting user to pending deprecation + with pytest.deprecated_call(match="deprecated"): + deprecated_v2.topic("2.0", "3.0", topic="Some special topic") + + +def test_topic_remove(deprecated_v3): + 
# alerting developer that a module needs to be removed + with pytest.raises(DeprecatedError): + deprecated_v3.topic("2.0", "3.0", topic="Some special topic") + + +def test_version_fallback(): + """Test that conda can run even if deprecations can't parse the version.""" + version = DeprecationHandler(None)._version # type: ignore + assert version.major == version.minor == version.micro == 0 diff --git a/tests/test_index.py b/tests/test_index.py deleted file mode 100644 index 263147fd87..0000000000 --- a/tests/test_index.py +++ /dev/null @@ -1,1197 +0,0 @@ -# Copyright (C) 2014 Anaconda, Inc -# SPDX-License-Identifier: BSD-3-Clause -""" -See also https://github.com/conda-incubator/conda-index -""" -import json -import os -import shutil -import tarfile -from logging import getLogger -from os.path import dirname, isdir, isfile, join -from unittest import mock - -import conda_package_handling.api -import pytest - -import conda_build.api -import conda_build.index -from conda_build.conda_interface import context -from conda_build.utils import copy_into, rm_rf - -from .utils import archive_dir - -log = getLogger(__name__) - -here = os.path.dirname(__file__) - -# NOTE: The recipes for test packages used in this module are at https://github.com/kalefranz/conda-test-packages - -# match ./index_hotfix_pkgs/ -TEST_SUBDIR = "osx-64" - - -def download(url, local_path): - # NOTE: The tests in this module used to download packages from the - # conda-test channel. These packages are small and are now included. 
- if not isdir(dirname(local_path)): - os.makedirs(dirname(local_path)) - - archive_path = join(here, "archives", url.rsplit("/", 1)[-1]) - - shutil.copy(archive_path, local_path) - return local_path - - -def test_index_on_single_subdir_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - 
- with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "subdirs": [ - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a", - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "recipe_origin": None, - "source_url": None, - "tags": None, - "timestamp": 1508520039, - } - }, - "subdirs": ["noarch", "osx-64"], - } - assert actual_channeldata_json == expected_channeldata_json - - -def test_file_index_on_single_subdir_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = 
"https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # download two packages here, put them both in the same subdir - test_package_path = join(testing_workdir, "osx-64", "fly-2.5.2-0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/fly-2.5.2-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "osx-64", "nano-2.4.1-0-tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/nano-2.4.1-0.tar.bz2" - ) - 
download(test_package_url, test_package_path) - - updated_packages = expected_repodata_json.get("packages") - - expected_repodata_json["packages"] = updated_packages - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "osx-64", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - assert actual_pkg_repodata_json - - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. 
The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "subdirs": [ - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a", - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "recipe_origin": None, - "source_url": None, - "tags": None, - "timestamp": 1508520039, - }, - }, - "subdirs": ["noarch", "osx-64"], - } - - assert actual_channeldata_json == expected_channeldata_json - - -def test_index_noarch_osx64_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - download(test_package_url, test_package_path) - - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", 
"index.html")) - assert isfile( - join(testing_workdir, "osx-64", "repodata.json") - ) # repodata is tested in test_index_on_single_subdir_1 - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - # ####################################### - # tests for noarch subdir - # ####################################### - assert isfile(join(testing_workdir, "noarch", "index.html")) - assert isfile(join(testing_workdir, "noarch", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "noarch", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": { - "subdir": "noarch", - }, - "packages": { - "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": { - "build": "pyhed9eced_1", - "build_number": 1, - "depends": ["python"], - "license": "BSD", - "md5": "56b5f6b7fb5583bccfc4489e7c657484", - "name": "conda-index-pkg-a", - "noarch": "python", - "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6", - "size": 7882, - "subdir": "noarch", - "timestamp": 1508520204768, - "version": "1.0", - }, - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.loads(fh.read()) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": 
"Description field for conda-index-pkg-a. Actually, this is just the python description. " - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "source_url": None, - "subdirs": [ - "noarch", - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a. 
This is the python noarch version.", # <- tests that the higher noarch build number is the data collected - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "tags": None, - "timestamp": 1508520039, - "keywords": None, - "recipe_origin": None, - } - }, - "subdirs": [ - "noarch", - "osx-64", - ], - } - assert actual_channeldata_json == expected_channeldata_json - - -def test_file_index_noarch_osx64_1(testing_workdir): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2" - download(test_package_url, test_package_path) - - # test threads=1 flow - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", threads=1 - ) - - # ####################################### - # tests for osx-64 subdir - # ####################################### - assert isfile(join(testing_workdir, "osx-64", "index.html")) - assert isfile( - join(testing_workdir, "osx-64", "repodata.json") - ) # repodata is tested in test_index_on_single_subdir_1 - assert isfile(join(testing_workdir, "osx-64", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json")) - assert isfile(join(testing_workdir, "osx-64", "repodata_from_packages.json.bz2")) - - # ####################################### - # tests for noarch subdir - # ####################################### - assert isfile(join(testing_workdir, 
"noarch", "index.html")) - assert isfile(join(testing_workdir, "noarch", "repodata.json.bz2")) - assert isfile(join(testing_workdir, "noarch", "repodata_from_packages.json.bz2")) - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - expected_repodata_json = { - "info": {"subdir": "noarch"}, - "packages": { - "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": { - "build": "pyhed9eced_1", - "build_number": 1, - "depends": ["python"], - "license": "BSD", - "md5": "56b5f6b7fb5583bccfc4489e7c657484", - "name": "conda-index-pkg-a", - "noarch": "python", - "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6", - "size": 7882, - "subdir": "noarch", - "timestamp": 1508520204768, - "version": "1.0", - } - }, - "packages.conda": {}, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # download two packages per subdir here, put them both in the same subdir - test_package_path = join(testing_workdir, "osx-64", "fly-2.5.2-0.tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/fly-2.5.2-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "osx-64", "nano-2.4.1-0-tar.bz2") - test_package_url = ( - "https://conda.anaconda.org/conda-test/osx-64/nano-2.4.1-0.tar.bz2" - ) - download(test_package_url, test_package_path) - - test_package_path = join( - testing_workdir, "noarch", "spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/noarch/spiffy-test-app-0.5-pyh6afbcc8_0.tar.bz2" - download(test_package_url, test_package_path) - - test_package_path = join(testing_workdir, "noarch", "flask-0.11.1-py_0.tar.bz2") - 
test_package_url = ( - "https://conda.anaconda.org/conda-test/noarch/flask-0.11.1-py_0.tar.bz2" - ) - download(test_package_url, test_package_path) - - # only tell index to index one of them and then assert that it was added - p = os.path.join(testing_workdir, "index_file") - with open(p, "a+") as fh: - fh.write("noarch/flask-0.11.1-py_0.tar.bz2\n") - fh.write("osx/fly-2.5.2-0.tar.bz2\n") - - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", index_file=p - ) - - updated_packages = expected_repodata_json.get("packages", {}) - updated_packages["flask-0.11.1-py_0.tar.bz2"] = { - "build": "py_0", - "build_number": 0, - "depends": [ - "click >=2.0", - "itsdangerous >=0.21", - "jinja2 >=2.4", - "python", - "werkzeug >=0.7", - ], - "license": "BSD", - "md5": "f53df88de4ba505aadbcf42ff310a18d", - "name": "flask", - "noarch": "python", - "sha256": "20bb13679a48679964cd84571c8dd1aa110f8366565f5d82a8f4efa8dd8b160c", - "size": 5334, - "subdir": "noarch", - "version": "0.11.1", - } - - expected_repodata_json["packages"] = updated_packages - - with open(join(testing_workdir, "noarch", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - assert actual_repodata_json - with open(join(testing_workdir, "noarch", "repodata_from_packages.json")) as fh: - actual_pkg_repodata_json = json.loads(fh.read()) - assert actual_pkg_repodata_json - - assert actual_repodata_json == expected_repodata_json - assert actual_pkg_repodata_json == expected_repodata_json - - # ####################################### - # tests for full channel - # ####################################### - - with open(join(testing_workdir, "channeldata.json")) as fh: - actual_channeldata_json = json.load(fh) - expected_channeldata_json = { - "channeldata_version": 1, - "packages": { - "conda-index-pkg-a": { - "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. 
" - "Python is a widely used high-level, general-purpose, interpreted, dynamic " - "programming language. Its design philosophy emphasizes code " - "readability, and its syntax allows programmers to express concepts in " - "fewer lines of code than would be possible in languages such as C++ or " - "Java. The language provides constructs intended to enable clear programs " - "on both a small and large scale.", - "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml", - "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md", - "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a", - "home": "https://anaconda.org/conda-test/conda-index-pkg-a", - "license": "BSD", - "source_git_url": "https://github.com/kalefranz/conda-test-packages.git", - "source_url": None, - "subdirs": [ - "noarch", - "osx-64", - ], - "summary": "Summary field for conda-index-pkg-a. This is the python noarch version.", # <- tests that the higher noarch build number is the data collected - "version": "1.0", - "activate.d": False, - "deactivate.d": False, - "post_link": True, - "pre_link": False, - "pre_unlink": False, - "binary_prefix": False, - "text_prefix": True, - "run_exports": {}, - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "tags": None, - "timestamp": 1508520204, - "keywords": None, - "recipe_origin": None, - }, - "flask": { - "activate.d": False, - "binary_prefix": False, - "deactivate.d": False, - "description": "Flask is a microframework for Python based on Werkzeug and Jinja2. 
" - "It's intended for getting started very quickly and was developed with best intentions in mind.", - "dev_url": "https://github.com/mitsuhiko/flask", - "doc_source_url": None, - "doc_url": "http://flask.pocoo.org/docs/0.10/", - "home": "http://flask.pocoo.org/", - "icon_hash": None, - "icon_url": None, - "identifiers": None, - "keywords": None, - "license": "BSD", - "post_link": False, - "pre_link": False, - "pre_unlink": False, - "recipe_origin": None, - "run_exports": {}, - "source_git_url": None, - "source_url": None, - "subdirs": ["noarch"], - "summary": "A microframework based on Werkzeug, Jinja2 and good intentions", - "tags": None, - "text_prefix": False, - "timestamp": 0, - "version": "0.11.1", - }, - }, - "subdirs": [ - "noarch", - "osx-64", - ], - } - assert actual_channeldata_json == expected_channeldata_json - - -def _build_test_index(workdir): - """ - Copy repodata.json, packages to workdir for testing. - """ - # Python 3.7 workaround "no dirs_exist_ok flag" - index_hotfix_pkgs = join(here, "index_hotfix_pkgs") - for path in os.scandir(index_hotfix_pkgs): - if path.is_dir(): - shutil.copytree( - join(here, "index_hotfix_pkgs", path.name), join(workdir, path.name) - ) - elif path.is_file(): - shutil.copyfile( - join(here, "index_hotfix_pkgs", path.name), join(workdir, path.name) - ) - - with open(os.path.join(workdir, TEST_SUBDIR, "repodata.json")) as f: - original_metadata = json.load(f) - - pkg_list = original_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - 
"package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" in pkg_list - - -# SLOW -def test_gen_patch_py(testing_workdir): - """ - This is a channel-wide file that applies to many subdirs. It must have a function with this signature: - - def _patch_repodata(repodata, subdir): - - That function must return a dictionary of patch instructions, of the form: - - { - "patch_instructions_version": 1, - "packages": defaultdict(dict), - "revoke": [], - "remove": [], - } - - revoke and remove are lists of filenames. remove makes the file not show up - in the index (it may still be downloadable with a direct URL to the file). - revoke makes packages uninstallable by adding an unsatisfiable dependency. - This can be made installable by including a channel that has that package - (to be created by @jjhelmus). - - packages is a dictionary, where keys are package filenames. Values are - dictionaries similar to the contents of each package in repodata.json. Any - values in provided in packages here overwrite the values in repodata.json. - Any value set to None is removed. 
- """ - _build_test_index(testing_workdir) - - func = """ -def _patch_repodata(repodata, subdir): - pkgs = repodata["packages"] - import fnmatch - replacement_dict = {} - if "track_features_test-1.0-0.tar.bz2" in pkgs: - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - if "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkgs: - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": pkgs["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] + ["dummy"], - "features": None} - revoke_list = [pkg for pkg in pkgs if fnmatch.fnmatch(pkg, "revoke_test*")] - remove_list = [pkg for pkg in pkgs if fnmatch.fnmatch(pkg, "remove_test*")] - return { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": revoke_list, - "remove": remove_list, - } -""" - patch_file = os.path.join(testing_workdir, "repodata_patch.py") - with open(patch_file, "w") as f: - f.write(func) - - # indexing a second time with the same patchset should keep the removals - for i in (1, 2): - conda_build.index.update_index( - testing_workdir, - patch_generator=patch_file, - verbose=True, - ) - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - pkg_list = patched_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"] - print("pass %s track features ok" % i) - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - print("pass %s hotfix ok" % i) - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - in 
pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - print("pass %s revoke ok" % i) - - assert "remove_test-1.0-0.tar.bz2" not in pkg_list - assert "remove_test-1.0-0.tar.bz2" in patched_metadata["removed"], ( - "removed list not populated in run %d" % i - ) - print("pass %s remove ok" % i) - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_metadata = json.load(f) - - pkg_list = pkg_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert ( - pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - ) - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert ( - pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - ) - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - -def test_channel_patch_instructions_json(testing_workdir): - _build_test_index(testing_workdir) - - replacement_dict = {} - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": ["zlib", "dummy"], - "features": None, - } - - patch = { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": ["revoke_test-1.0-0.tar.bz2"], - "remove": ["remove_test-1.0-0.tar.bz2"], - } - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "patch_instructions.json"), "w" - ) as f: - json.dump(patch, f) - - conda_build.index.update_index(testing_workdir) - - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - formats = (("packages", ".tar.bz2"), ("packages.conda", ".conda")) - - for key, ext in formats: - pkg_list = 
patched_metadata[key] - assert "track_features_test-1.0-0" + ext in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0" + ext] - - assert "hotfix_depends_test-1.0-dummy_0" + ext in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - - assert "revoke_test-1.0-0" + ext in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - assert ( - "package_has_been_revoked" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - ) - - assert "remove_test-1.0-0" + ext not in pkg_list - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_repodata = json.load(f) - - pkg_list = pkg_repodata[key] - assert "track_features_test-1.0-0" + ext in pkg_list - assert pkg_list["track_features_test-1.0-0" + ext]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0" + ext in pkg_list - assert pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0" + ext]["depends"] - - assert "revoke_test-1.0-0" + ext in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0" + ext]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0" + ext]["depends"] - ) - - assert "remove_test-1.0-0" + ext in pkg_list - - -def test_patch_from_tarball(testing_workdir): - """This is how we expect external communities to provide patches to us. - We can't let them just give us Python files for us to run, because of the - security risk of arbitrary code execution.""" - _build_test_index(testing_workdir) - - # our hotfix metadata can be generated any way you want. Hard-code this here, but in general, - # people will use some python file to generate this. 
- - replacement_dict = {} - replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None} - replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = { - "depends": ["zlib", "dummy"], - "features": None, - } - - patch = { - "patch_instructions_version": 1, - "packages": replacement_dict, - "revoke": ["revoke_test-1.0-0.tar.bz2"], - "remove": ["remove_test-1.0-0.tar.bz2"], - } - with open("patch_instructions.json", "w") as f: - json.dump(patch, f) - - with tarfile.open("patch_archive.tar.bz2", "w:bz2") as archive: - archive.add( - "patch_instructions.json", "%s/patch_instructions.json" % TEST_SUBDIR - ) - - conda_build.index.update_index( - testing_workdir, patch_generator="patch_archive.tar.bz2" - ) - - with open(os.path.join(testing_workdir, TEST_SUBDIR, "repodata.json")) as f: - patched_metadata = json.load(f) - - pkg_list = patched_metadata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"] - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"] - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" not in pkg_list - - with open( - os.path.join(testing_workdir, TEST_SUBDIR, "repodata_from_packages.json") - ) as f: - pkg_repodata = json.load(f) - - pkg_list = pkg_repodata["packages"] - assert "track_features_test-1.0-0.tar.bz2" in pkg_list - assert pkg_list["track_features_test-1.0-0.tar.bz2"]["track_features"] == "dummy" - - assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list - assert 
pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["features"] == "dummy" - assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"] - - assert "revoke_test-1.0-0.tar.bz2" in pkg_list - assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - assert ( - "package_has_been_revoked" - not in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"] - ) - - assert "remove_test-1.0-0.tar.bz2" in pkg_list - - -def test_index_of_removed_pkg(testing_metadata): - archive_name = "test_index_of_removed_pkg-1.0-1.tar.bz2" - archive_destination = os.path.join( - testing_metadata.config.croot, TEST_SUBDIR, archive_name - ) - - # copy the package - os.makedirs(os.path.join(testing_metadata.config.croot, TEST_SUBDIR)) - shutil.copy(os.path.join(here, "archives", archive_name), archive_destination) - - conda_build.api.update_index(testing_metadata.config.croot) - - # repodata.json should exist here - with open( - os.path.join(testing_metadata.config.croot, TEST_SUBDIR, "repodata.json") - ) as f: - repodata = json.load(f) - assert repodata["packages"] - - for f in [archive_destination]: - os.remove(f) - - # repodata.json should be empty here - conda_build.api.update_index(testing_metadata.config.croot) - with open( - os.path.join(testing_metadata.config.croot, TEST_SUBDIR, "repodata.json") - ) as f: - repodata = json.load(f) - assert not repodata["packages"] - with open( - os.path.join( - testing_metadata.config.croot, TEST_SUBDIR, "repodata_from_packages.json" - ) - ) as f: - repodata = json.load(f) - assert not repodata["packages"] - - -def test_patch_instructions_with_missing_subdir(): - os.makedirs("linux-64") - os.makedirs("zos-z") - conda_build.api.update_index(".") # what is the current working directory? 
- # we use conda-forge's patch instructions because they don't have zos-z data, and that triggers an error - pkg = "conda-forge-repodata-patches" - url = "https://anaconda.org/conda-forge/{0}/20180828/download/noarch/{0}-20180828-0.tar.bz2".format( - pkg - ) - patch_instructions = download(url, os.path.join(os.getcwd(), "patches.tar.bz2")) - conda_build.api.update_index(".", patch_generator=patch_instructions) - - -def test_stat_cache_used(testing_workdir, mocker): - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - ) - test_package_url = "https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2" - download(test_package_url, test_package_path) - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - cph_extract.assert_not_called() - - -def test_new_pkg_format_preferred(testing_workdir, mocker): - """Test that in one pass, the .conda file is extracted before the .tar.bz2, and the .tar.bz2 uses the cache""" - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0" - ) - exts = (".tar.bz2", ".conda") - for ext in exts: - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ext), - test_package_path + ext, - ) - # mock the extract function, so that we can assert that it is not called - # with the .tar.bz2, because the .conda should be preferred - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", debug=True - ) - # extract should get called once by default. Within a channel, we assume that a .tar.bz2 and .conda have the same contents. 
- cph_extract.assert_called_once_with(test_package_path + ".conda", mock.ANY, "info") - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": { - "conda-index-pkg-a-1.0-py27h5e241af_0.conda": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "4ed4b435f400dac1aabdc1fff06f78ff", - "name": "conda-index-pkg-a", - "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19", - "size": 9296, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - - # if we clear the stat cache, we force a re-examination. This re-examination will load files - # from the cache. This has been a source of bugs in the past, where the wrong cached file - # being loaded resulted in incorrect hashes/sizes for either the .tar.bz2 or .conda, depending - # on which of those 2 existed in the cache. 
- rm_rf(os.path.join(testing_workdir, "osx-64", "stat.json")) - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", verbose=True, debug=True - ) - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - assert actual_repodata_json == expected_repodata_json - - -def test_new_pkg_format_stat_cache_used(testing_workdir, mocker): - # if we have old .tar.bz2 index cache stuff, assert that we pick up correct md5, sha26 and size for .conda - test_package_path = join( - testing_workdir, "osx-64", "conda-index-pkg-a-1.0-py27h5e241af_0" - ) - copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ".tar.bz2"), - test_package_path + ".tar.bz2", - ) - conda_build.index.update_index(testing_workdir, channel_name="test-channel") - - # mock the extract function, so that we can assert that it is not called, because the stat cache should exist - # if this doesn't work, something about the stat cache is confused. It's a little convoluted, because - # the index has keys for .tar.bz2's, but the stat cache comes from .conda files when they are available - # because extracting them is much, much faster. 
- copy_into( - os.path.join(archive_dir, "conda-index-pkg-a-1.0-py27h5e241af_0" + ".conda"), - test_package_path + ".conda", - ) - cph_extract = mocker.spy(conda_package_handling.api, "extract") - conda_build.index.update_index( - testing_workdir, channel_name="test-channel", debug=True - ) - cph_extract.assert_not_called() - - with open(join(testing_workdir, "osx-64", "repodata.json")) as fh: - actual_repodata_json = json.loads(fh.read()) - - expected_repodata_json = { - "info": { - "subdir": "osx-64", - }, - "packages": { - "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "37861df8111170f5eed4bff27868df59", - "name": "conda-index-pkg-a", - "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30", - "size": 8733, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "packages.conda": { - "conda-index-pkg-a-1.0-py27h5e241af_0.conda": { - "build": "py27h5e241af_0", - "build_number": 0, - "depends": ["python >=2.7,<2.8.0a0"], - "license": "BSD", - "md5": "4ed4b435f400dac1aabdc1fff06f78ff", - "name": "conda-index-pkg-a", - "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19", - "size": 9296, - "subdir": "osx-64", - "timestamp": 1508520039632, - "version": "1.0", - }, - }, - "removed": [], - "repodata_version": 1, - } - assert actual_repodata_json == expected_repodata_json - - -@pytest.mark.skipif( - not hasattr(context, "use_only_tar_bz2") or getattr(context, "use_only_tar_bz2"), - reason="conda is set to auto-disable .conda for old conda-build.", -) -def test_current_index_reduces_space(): - repodata = os.path.join( - os.path.dirname(__file__), "index_data", "time_cut", "repodata.json" - ) - with open(repodata) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 7 - assert len(repodata["packages.conda"]) == 3 - trimmed_repodata = 
conda_build.index._build_current_repodata( - "linux-64", repodata, None - ) - - tar_bz2_keys = { - "two-because-satisfiability-1.2.11-h7b6447c_3.tar.bz2", - "two-because-satisfiability-1.2.10-h7b6447c_3.tar.bz2", - "depends-on-older-1.2.10-h7b6447c_3.tar.bz2", - "ancient-package-1.2.10-h7b6447c_3.tar.bz2", - "one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2", - } - # conda 4.7 removes .tar.bz2 files in favor of .conda files - tar_bz2_keys.remove("one-gets-filtered-1.3.10-h7b6447c_3.tar.bz2") - - # .conda files will replace .tar.bz2 files. Older packages that are necessary for satisfiability will remain - assert set(trimmed_repodata["packages"].keys()) == tar_bz2_keys - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.3.10-h7b6447c_3.conda" - } - - # we can keep more than one version series using a collection of keys - trimmed_repodata = conda_build.index._build_current_repodata( - "linux-64", repodata, {"one-gets-filtered": ["1.2", "1.3"]} - ) - assert set(trimmed_repodata["packages.conda"].keys()) == { - "one-gets-filtered-1.2.11-h7b6447c_3.conda", - "one-gets-filtered-1.3.10-h7b6447c_3.conda", - } - - -def test_current_index_version_keys_keep_older_packages(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") - - # pass no version file - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - # only the newest version is kept - assert len(repodata["packages"]) == 1 - assert list(repodata["packages"].values())[0]["version"] == "2.0" - - # pass version file - conda_build.api.update_index( - pkg_dir, current_index_versions=os.path.join(pkg_dir, "versions.yml") - ) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 2 - - # pass dict that is equivalent to version file - conda_build.api.update_index( - pkg_dir, 
current_index_versions={"dummy-package": ["1.0"]} - ) - with open(os.path.join(pkg_dir, "osx-64", "current_repodata.json")) as f: - repodata = json.load(f) - assert list(repodata["packages"].values())[0]["version"] == "1.0" - - -def test_channeldata_picks_up_all_versions_of_run_exports(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "packages") - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "channeldata.json")) as f: - repodata = json.load(f) - run_exports = repodata["packages"]["run_exports_versions"]["run_exports"] - assert len(run_exports) == 2 - assert "1.0" in run_exports - assert "2.0" in run_exports - - -def test_index_invalid_packages(): - pkg_dir = os.path.join(os.path.dirname(__file__), "index_data", "corrupt") - conda_build.api.update_index(pkg_dir) - with open(os.path.join(pkg_dir, "channeldata.json")) as f: - repodata = json.load(f) - assert len(repodata["packages"]) == 0 diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 99545c50c9..b5a696ff6f 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -8,7 +8,7 @@ import pytest from conda_build import api -from conda_build.metadata import MetaData, _hash_dependencies, select_lines +from conda_build.metadata import MetaData, _hash_dependencies, select_lines, yamlize from conda_build.utils import DEFAULT_SUBDIRS from .utils import metadata_dir, thisdir @@ -260,3 +260,66 @@ def test_config_member_decoupling(testing_metadata): b = testing_metadata.copy() b.config.some_member = "123" assert b.config.some_member != testing_metadata.config.some_member + + +# ensure that numbers are not interpreted as ints or floats, doing so trips up versions +# with trailing zeros +def test_yamlize_zero(): + yml = yamlize( + """ + - 0 + - 0. + - 0.0 + - .0 + """ + ) + + assert yml == ["0", "0.", "0.0", ".0"] + + +def test_yamlize_positive(): + yml = yamlize( + """ + - +1 + - +1. 
+ - +1.2 + - +.2 + """ + ) + + assert yml == ["+1", "+1.", "+1.2", "+.2"] + + +def test_yamlize_negative(): + yml = yamlize( + """ + - -1 + - -1. + - -1.2 + - -.2 + """ + ) + + assert yml == ["-1", "-1.", "-1.2", "-.2"] + + +def test_yamlize_numbers(): + yml = yamlize( + """ + - 1 + - 1.2 + """ + ) + + assert yml == ["1", "1.2"] + + +def test_yamlize_versions(): + yml = yamlize( + """ + - 1.2.3 + - 1.2.3.4 + """ + ) + + assert yml == ["1.2.3", "1.2.3.4"] diff --git a/tests/test_pypi_skeleton.py b/tests/test_pypi_skeleton.py index 6562a50f14..20581ef14d 100644 --- a/tests/test_pypi_skeleton.py +++ b/tests/test_pypi_skeleton.py @@ -58,7 +58,7 @@ def test_print_dict(): }, "build": { "number": 0, - "script": "{{ PYTHON }} -m pip install . -vv", + "script": "{{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation", }, } recipe_order = ["package", "source", "build", "about"] @@ -74,7 +74,7 @@ def test_print_dict(): build: number: 0 - script: "{{ PYTHON }} -m pip install . -vv" + script: "{{ PYTHON }} -m pip install . 
-vv --no-deps --no-build-isolation" about: home: "https://conda.io" diff --git a/tests/test_variants.py b/tests/test_variants.py index 4df2c3f768..3e7ba621a5 100644 --- a/tests/test_variants.py +++ b/tests/test_variants.py @@ -59,7 +59,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): python 3.5 -> python >=3.5,<3.6.0a0 otherPackages 3.5 -> otherPackages 3.5 """ - variants = {"python": ["3.9", "3.10"]} + variants = {"python": ["3.10", "3.11"]} testing_config.ignore_system_config = True # write variants to disk @@ -86,7 +86,7 @@ def test_python_variants(testing_workdir, testing_config, as_yaml): assert { *metadata[0][0].meta["requirements"]["run"], *metadata[1][0].meta["requirements"]["run"], - } == {"python >=3.9,<3.10.0a0", "python >=3.10,<3.11.0a0"} + } == {"python >=3.10,<3.11.0a0", "python >=3.11,<3.12.0a0"} def test_use_selectors_in_variants(testing_workdir, testing_config): @@ -493,11 +493,6 @@ def test_target_platform_looping(): assert len(outputs) == 2 -@pytest.mark.skipif( - on_mac and platform.machine() == "arm64", - reason="Unsatisfiable dependencies for M1 MacOS systems: {'numpy=1.16'}", -) -# TODO Remove the above skip decorator once https://github.com/conda/conda-build/issues/4717 is resolved def test_numpy_used_variable_looping(): outputs = api.get_output_file_paths(os.path.join(variants_dir, "numpy_used")) assert len(outputs) == 4