diff --git a/.dockerignore b/.dockerignore index c44c0349b2..f9dda275b8 100644 --- a/.dockerignore +++ b/.dockerignore @@ -8,4 +8,5 @@ !.git !.gitignore !Makefile +!gunicorn.conf.py .git/config diff --git a/.github/workflows/acceptance-tests.yml b/.github/workflows/acceptance-tests.yml index e1f70034d2..ec78faab96 100644 --- a/.github/workflows/acceptance-tests.yml +++ b/.github/workflows/acceptance-tests.yml @@ -26,12 +26,13 @@ jobs: renku-graph: ${{ steps.deploy-comment.outputs.renku-graph}} renku-notebooks: ${{ steps.deploy-comment.outputs.renku-notebooks}} renku-ui: ${{ steps.deploy-comment.outputs.renku-ui}} + renku-data-services: ${{ steps.deploy-comment.outputs.renku-data-services}} test-enabled: ${{ steps.deploy-comment.outputs.test-enabled}} extra-values: ${{ steps.deploy-comment.outputs.extra-values}} persist: ${{ steps.deploy-comment.outputs.persist}} steps: - id: deploy-comment - uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.4.5 + uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.7.0 with: string: /deploy pr_ref: ${{ github.event.number }} @@ -43,7 +44,7 @@ jobs: name: renku-ci-rp-${{ github.event.number }} steps: - name: deploy-pr - uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.4.5 + uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.7.0 env: DOCKER_PASSWORD: ${{ secrets.RENKU_DOCKER_PASSWORD }} DOCKER_USERNAME: ${{ secrets.RENKU_DOCKER_USERNAME }} @@ -52,7 +53,7 @@ jobs: RANCHER_PROJECT_ID: ${{ secrets.CI_RANCHER_PROJECT }} RENKU_RELEASE: renku-ci-rp-${{ github.event.number }} RENKU_VALUES_FILE: "${{ github.workspace }}/values.yaml" - RENKU_VALUES: ${{ secrets.CI_RENKU_VALUES }} + RENKU_VALUES: ${{ secrets.COMBINED_CHARTS_CI_RENKU_VALUES }} RENKUBOT_KUBECONFIG: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} RENKUBOT_RANCHER_BEARER_TOKEN: ${{ secrets.RENKUBOT_RANCHER_BEARER_TOKEN }} RANCHER_DEV_API_ENDPOINT: ${{ secrets.RANCHER_DEV_API_ENDPOINT }} @@ -66,6 +67,7 @@ jobs: renku_graph: "${{ needs.check-deploy.outputs.renku-graph }}" renku_notebooks: "${{ needs.check-deploy.outputs.renku-notebooks }}" renku_ui: "${{ needs.check-deploy.outputs.renku-ui }}" + renku_data_services: "${{ needs.check-deploy.outputs.renku-data-services }}" extra_values: "${{ needs.check-deploy.outputs.extra-values }}" - name: Check existing renkubot comment uses: peter-evans/find-comment@v2 @@ -88,7 +90,7 @@ jobs: if: ${{ github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-enabled == 'true' }} needs: [check-deploy, deploy-pr] steps: - - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.4.5 + - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.7.0 with: kubeconfig: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} renku-release: renku-ci-rp-${{ github.event.number }} @@ -103,7 +105,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: renku teardown - uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.4.5 + uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.7.0 env: HELM_RELEASE_REGEX: "^renku-ci-rp-${{ github.event.number }}$" GITLAB_TOKEN: ${{ secrets.DEV_GITLAB_TOKEN }} diff --git a/.github/workflows/test_deploy.yml b/.github/workflows/test_deploy.yml index 929f45632f..f58aaede2f 100644 --- a/.github/workflows/test_deploy.yml +++ b/.github/workflows/test_deploy.yml @@ -496,9 +496,7 @@ jobs: CLOUD_STORAGE_AZURE_KEY: ${{ secrets.CLOUD_STORAGE_AZURE_KEY }} CLOUD_STORAGE_S3_ACCESS_KEY_ID: ${{ secrets.CLOUD_STORAGE_S3_ACCESS_KEY_ID }} 
CLOUD_STORAGE_S3_SECRET_ACCESS_KEY: ${{ secrets.CLOUD_STORAGE_S3_SECRET_ACCESS_KEY }} - run: pytest -m "integration and not serial and not service" -v - - name: Start Redis - uses: supercharge/redis-github-action@1.5.0 + run: pytest -m "integration and not serial and not service and not redis" -v - name: Test with pytest (serial) env: POETRY_VIRTUALENVS_CREATE: false @@ -589,19 +587,16 @@ jobs: run: | echo "GIT_USER=Renku Bot" >> $GITHUB_ENV echo "GIT_EMAIL=renku@datascience.ch" >> $GITHUB_ENV - - name: Push chart and images - uses: SwissDataScienceCenter/renku-actions/publish-chart@v1.4.5 + - name: Build and push images + uses: SwissDataScienceCenter/renku-actions/publish-chartpress-images@v1.7.0 env: - CHART_NAME: renku-core GITHUB_TOKEN: ${{ secrets.RENKUBOT_GITHUB_TOKEN }} DOCKER_USERNAME: ${{ secrets.RENKU_DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.RENKU_DOCKER_PASSWORD }} - - name: Wait for chart to be available - run: sleep 120 - name: Update component version - uses: SwissDataScienceCenter/renku-actions/update-component-version@v1.4.5 + uses: SwissDataScienceCenter/renku-actions/update-component-version@v1.7.0 env: - CHART_NAME: renku-core + COMPONENT_NAME: renku-core GITHUB_TOKEN: ${{ secrets.RENKUBOT_GITHUB_TOKEN }} coveralls-final: diff --git a/.gitignore b/.gitignore index d383f25628..1fba0f841a 100644 --- a/.gitignore +++ b/.gitignore @@ -35,6 +35,7 @@ var/ *.egg-info/ .installed.cfg *.egg +.python-version # PyInstaller # Usually these files are written by a python script from a template diff --git a/AUTHORS.rst b/AUTHORS.rst index 095712d698..26ac559f98 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/CHANGES.rst b/CHANGES.rst index 0acd4de792..cd7bd92c38 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). 
@@ -18,6 +18,57 @@ Changes ======= +`2.8.0 `__ (2023-11-17) +------------------------------------------------------------------------------------------------------- + +Bug Fixes +~~~~~~~~~ + +- **cli:** do not error unnecessarily on session stop + (`#3642 `__) + (`4726f66 `__) +- **cli:** do not pass the force-build flag to docker + (`#3641 `__) + (`ff5e8f6 `__) +- **cli:** do not start a session when in detached HEAD state + (`#3636 `__) + (`80f70f5 `__) +- **core:** do not clone submodules in renku clone command + (`#3630 `__) + (`e9986e0 `__) +- don’t fail on exotic git diff types + (`#3632 `__) + (`160620c `__) +- make usage of name and slug consistent + (`#3620 `__) + (`b31ade0 `__) +- **service:** set job timeouts correctly + (`#3651 `__) + (`db0f8cf `__) +- warn user about old version of git when using mergetool + (`#3637 `__) + (`5df0ce1 `__) + +Features +~~~~~~~~ + +- add prometheus metrics + (`#3640 `__) + (`d3eedb5 `__) +- **cli, service:** support for project image + (`#3623 `__) + (`db9f93b `__) +- **cli:** session pause and resume + (`#3633 `__) + (`f4b6480 `__) +- **service:** accept commit SHA in read endpoints + (`#3608 `__) + (`01a0798 `__) +- **svc:** rename git_url to template_git_url and return git_url on all + endpoints + (`#3646 `__) + (`314786e `__) + `2.7.0 `__ (2023-09-27) ------------------------------------------------------------------------------------------------------- diff --git a/DEVELOPING.rst b/DEVELOPING.rst index b62fe6a0d0..594c4a07e5 100644 --- a/DEVELOPING.rst +++ b/DEVELOPING.rst @@ -261,7 +261,7 @@ number. - Once the release PR has been merged, publish the github release. This creates the tag on master that kicks off the publishing CI. - Keep an eye on CI, make sure that the `publish-pypi`, `build-images` and - `publish-chart` finish successfully. + `publish-chartpress-images` finish successfully. * If any of them don't finish successfully, ask for help. - Go to the `Renku` repository and approve/merge the automatically created PR there. diff --git a/Dockerfile b/Dockerfile index 3fb5d9d40c..76bb02e244 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,7 +15,7 @@ RUN apt-get install --no-install-recommends -y build-essential && \ # time the code changes # set the BUILD_CORE_SERVICE to non null to install additional service dependencies ARG BUILD_CORE_SERVICE -COPY pyproject.toml poetry.lock README.rst CHANGES.rst Makefile /code/renku/ +COPY pyproject.toml poetry.lock README.rst CHANGES.rst Makefile gunicorn.conf.py /code/renku/ COPY .git /code/renku/.git COPY renku /code/renku/renku WORKDIR /code/renku @@ -46,6 +46,7 @@ RUN addgroup -gid 1000 shuhitsu && \ if [ -n "${BUILD_CORE_SERVICE}" ]; then mkdir /svc && chown shuhitsu:shuhitsu /svc ; fi COPY --from=builder /code/renku /code/renku +WORKDIR /code/renku ENV PATH="${PATH}:/code/renku/.venv/bin" # shuhitsu (執筆): The "secretary" of the renga, as it were, who is responsible for @@ -55,5 +56,6 @@ USER shuhitsu ENV RENKU_SVC_NUM_WORKERS 4 ENV RENKU_SVC_NUM_THREADS 8 ENV RENKU_DISABLE_VERSION_CHECK=1 +ENV PROMETHEUS_MULTIPROC_DIR /tmp ENTRYPOINT ["tini", "-g", "--", "renku"] diff --git a/LICENSE b/LICENSE index d645695673..989ccc577f 100644 --- a/LICENSE +++ b/LICENSE @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2019-2023 - Swiss Data Science Center Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
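The Dockerfile changes above, together with the new gunicorn.conf.py added further down in this diff, wire up multi-process Prometheus metrics: PROMETHEUS_MULTIPROC_DIR gives prometheus_flask_exporter a writable directory shared by all gunicorn workers, and the config's when_ready() hook serves the aggregated samples on a separate port (METRICS_PORT, default 8765). A minimal sketch of the Flask-side counterpart, assuming the service registers the exporter along these lines (the route is illustrative, not this PR's code):

    from flask import Flask

    from prometheus_flask_exporter.multiprocess import GunicornPrometheusMetrics

    app = Flask(__name__)
    # Writes per-worker samples into PROMETHEUS_MULTIPROC_DIR and exposes the
    # aggregate via the metrics server started in gunicorn.conf.py's when_ready().
    metrics = GunicornPrometheusMetrics(app)

    @app.route("/health")
    def health():
        # Request metrics for this endpoint are recorded automatically.
        return "ok"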
diff --git a/Makefile b/Makefile index c1cf1f4556..b4cced7c10 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/QA_PROTOCOL.rst b/QA_PROTOCOL.rst index c588408fa8..2d8d04e80f 100644 --- a/QA_PROTOCOL.rst +++ b/QA_PROTOCOL.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/README.rst b/README.rst index 3e6d0556a9..32c1bdd504 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/chartpress.yaml b/chartpress.yaml index bdab0ab67e..a60c81f5df 100644 --- a/chartpress.yaml +++ b/chartpress.yaml @@ -11,8 +11,7 @@ charts: renku-core: contextPath: . dockerfilePath: Dockerfile - valuesPath: versions.latest.image + valuesPath: global.core.versions.latest.image buildArgs: CLEAN_INSTALL: "1" BUILD_CORE_SERVICE: "1" - diff --git a/docs/_ext/cheatsheet.py b/docs/_ext/cheatsheet.py index 0c4f18c4e7..c5f5bdb9c4 100644 --- a/docs/_ext/cheatsheet.py +++ b/docs/_ext/cheatsheet.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/docs/_static/cheatsheet/cheatsheet.json b/docs/_static/cheatsheet/cheatsheet.json index e9b4933770..f1a8709a81 100644 --- a/docs/_static/cheatsheet/cheatsheet.json +++ b/docs/_static/cheatsheet/cheatsheet.json @@ -325,6 +325,20 @@ "rp" ] }, + { + "command": "$ renku session pause ", + "description": "Pause the specified session.", + "target": [ + "rp" + ] + }, + { + "command": "$ renku session resume ", + "description": "Resume the specified paused session.", + "target": [ + "rp" + ] + }, { "command": "$ renku session stop ", "description": "Stop the specified session.", diff --git a/docs/_static/cheatsheet/cheatsheet.pdf b/docs/_static/cheatsheet/cheatsheet.pdf index 520ff7e15b..4246dc60c9 100644 Binary files a/docs/_static/cheatsheet/cheatsheet.pdf and b/docs/_static/cheatsheet/cheatsheet.pdf differ diff --git a/docs/changes.rst b/docs/changes.rst index 0dcc47fa07..d4afa345d7 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). 
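The cheatsheet entries above document the session pause/resume commands shipped in 2.8.0. A typical flow, with <name> as an assumed placeholder for the session identifier:

    $ renku session start
    $ renku session pause <name>
    $ renku session resume <name>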
diff --git a/docs/cheatsheet/conf.py b/docs/cheatsheet/conf.py index 5757630679..681d03a446 100644 --- a/docs/cheatsheet/conf.py +++ b/docs/cheatsheet/conf.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/docs/cheatsheet_hash b/docs/cheatsheet_hash index 430a963389..050ca46521 100644 --- a/docs/cheatsheet_hash +++ b/docs/cheatsheet_hash @@ -1,2 +1,2 @@ -ad86ac1d0614ccb692c96e893db4d20d cheatsheet.tex +5316163d742bdb6792ed8bcb35031f6c cheatsheet.tex c70c179e07f04186ec05497564165f11 sdsc_cheatsheet.cls diff --git a/docs/cheatsheet_json_hash b/docs/cheatsheet_json_hash index 002fc23dbd..7bd1476dcb 100644 --- a/docs/cheatsheet_json_hash +++ b/docs/cheatsheet_json_hash @@ -1 +1 @@ -1ac51267cefdf4976c29c9d7657063b8 cheatsheet.json +1856fb451165d013777c7c4cdd56e575 cheatsheet.json diff --git a/docs/conf.py b/docs/conf.py index b17724eefc..51aaf643d0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/docs/gettingstarted.rst b/docs/gettingstarted.rst index e9d83f19bc..e7917117e5 100644 --- a/docs/gettingstarted.rst +++ b/docs/gettingstarted.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/index.rst b/docs/index.rst index 51a5b41e3d..ae51cb9692 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,7 +1,7 @@ :orphan: .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/installation.rst b/docs/installation.rst index 7ce08c351e..e6f9599c31 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/introduction.rst b/docs/introduction.rst index c480de4620..ee71d61b5e 100644 --- a/docs/introduction.rst +++ b/docs/introduction.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/license.rst b/docs/license.rst index 01aa4d8887..dee6639217 100644 --- a/docs/license.rst +++ b/docs/license.rst @@ -3,7 +3,7 @@ License .. 
code-block:: text - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/plugins.rst b/docs/plugins.rst index 2037656567..c8e700b256 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). @@ -33,6 +33,25 @@ data science pipeline. `Documentation `_ `Example Project `_ +renku-graph-vis +--------------- + +`renku-graph-vis `_ is a plugin that +provides a graphical representation of the renku repository's knowledge graph. +In particular, the plugin enables an interactive graph visualization feature +for real-time monitoring during a renku session, giving a live overview of the +ongoing development. It also provides two CLI commands: + +* ``display`` to generate a representation of the graph as a png output image +* ``show-graph`` to start an interactive visualization of the graph in the browser + +renku-aqs-annotation +-------------------- + +`renku-aqs-annotation `_ is a plugin that +intercepts several key astroquery methods and stores dedicated annotations +with information about these calls (such as the arguments used) +in the project's Knowledge Graph. Developing a plugin? -------------------- diff --git a/docs/reference/api.rst b/docs/reference/api.rst index 5a7902438c..ba30cb9fe6 100644 --- a/docs/reference/api.rst +++ b/docs/reference/api.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/commands/index.rst b/docs/reference/commands/index.rst index 99c8550ead..407f809244 100644 --- a/docs/reference/commands/index.rst +++ b/docs/reference/commands/index.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/core.rst b/docs/reference/core.rst index 19aba89606..2238e68edd 100644 --- a/docs/reference/core.rst +++ b/docs/reference/core.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). @@ -65,6 +65,10 @@ Schema classes used to serialize domain models to JSON-LD. :members: :show-inheritance: +.. automodule:: renku.command.schema.image + :members: + :show-inheritance: + .. automodule:: renku.command.schema.parameter :members: :show-inheritance: @@ -105,10 +109,6 @@ Datasets :members: :show-inheritance: -.. automodule:: renku.core.dataset.request_model - :members: - :show-inheritance: - .. automodule:: renku.core.dataset.tag :members: :show-inheritance: @@ -237,6 +237,17 @@ Errors that can be raised by ``renku.core``. :members: :show-inheritance: +Project/Dataset Images +---------------------- + +..
automodule:: renku.core.image + :members: + :show-inheritance: + +.. automodule:: renku.domain_model.image + :members: + + Utilities --------- diff --git a/docs/reference/database.rst b/docs/reference/database.rst index 6b99c8dc7a..a7dfa043fc 100644 --- a/docs/reference/database.rst +++ b/docs/reference/database.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/gateways.rst b/docs/reference/gateways.rst index e1f2542148..2a273641bd 100644 --- a/docs/reference/gateways.rst +++ b/docs/reference/gateways.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/index.rst b/docs/reference/index.rst index 54fbd2d0aa..d2a1170a05 100644 --- a/docs/reference/index.rst +++ b/docs/reference/index.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/models/dataset_provider.rst b/docs/reference/models/dataset_provider.rst index 64fd8347ba..4e8429ed64 100644 --- a/docs/reference/models/dataset_provider.rst +++ b/docs/reference/models/dataset_provider.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/models/datasets.rst b/docs/reference/models/datasets.rst index 68b6f0e29d..c6ccab4856 100644 --- a/docs/reference/models/datasets.rst +++ b/docs/reference/models/datasets.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/models/projects.rst b/docs/reference/models/projects.rst index eddeb08930..d46031fd52 100644 --- a/docs/reference/models/projects.rst +++ b/docs/reference/models/projects.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/models/provenance.rst b/docs/reference/models/provenance.rst index c99947e1fb..005e4d52a8 100644 --- a/docs/reference/models/provenance.rst +++ b/docs/reference/models/provenance.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/models/workflow.rst b/docs/reference/models/workflow.rst index 64cdb9c444..2b0c43eeaa 100644 --- a/docs/reference/models/workflow.rst +++ b/docs/reference/models/workflow.rst @@ -1,5 +1,5 @@ .. 
- Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/plugins.rst b/docs/reference/plugins.rst index 8f9a52da55..cbad35f8a5 100644 --- a/docs/reference/plugins.rst +++ b/docs/reference/plugins.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/service.rst b/docs/reference/service.rst index fdc066297d..409ea9fde9 100644 --- a/docs/reference/service.rst +++ b/docs/reference/service.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/reference/service_errors.rst b/docs/reference/service_errors.rst index a67b2e5271..9569ccc7eb 100644 --- a/docs/reference/service_errors.rst +++ b/docs/reference/service_errors.rst @@ -1,5 +1,5 @@ .. - Copyright 2017-2023 - Swiss Data Science Center (SDSC) + Copyright Swiss Data Science Center (SDSC). A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and Eidgenössische Technische Hochschule Zürich (ETHZ). diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index eef8849ab2..668d437f04 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -5,9 +5,11 @@ Amalthea analytics api app +aqs args argv ascii +astroquery async attrs auditability @@ -173,6 +175,7 @@ params PIDs pipenv PNG +png Postgresql powerline pre @@ -182,6 +185,7 @@ prepending preprocessed preprocessing programmatically +prometheus py pyshacl rclone @@ -296,6 +300,7 @@ versioning Versioning vertices viewmodel +vis vm wasDerivedFrom webhook diff --git a/git-deploy-chart.sh b/git-deploy-chart.sh deleted file mode 100755 index 4fc340f23a..0000000000 --- a/git-deploy-chart.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env bash -# -# Copyright 2018 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -ex - -# get ssh key to use for docker hub login -chmod 600 deploy_rsa -eval "$(ssh-agent -s)" -ssh-add deploy_rsa - -make docker-login - -# build charts/images and push -helm repo update -helm dependency update helm-chart/renku-core -chartpress --push --publish-chart -git diff - -# push also images tagged with "latest" -chartpress --tag latest --push - -# if it's a tag, push the tagged chart -if [[ -n $TAG ]]; then - git clean -dff - chartpress --tag "$TAG" --push --publish-chart -fi diff --git a/gunicorn.conf.py b/gunicorn.conf.py new file mode 100644 index 0000000000..6f524217ff --- /dev/null +++ b/gunicorn.conf.py @@ -0,0 +1,14 @@ +"""Gunicorn Configuration.""" +import os + +from prometheus_flask_exporter.multiprocess import GunicornPrometheusMetrics + + +def when_ready(server): + """Run metrics server on separate port.""" + GunicornPrometheusMetrics.start_http_server_when_ready(int(os.getenv("METRICS_PORT", "8765"))) + + +def child_exit(server, worker): + """Properly exit when metrics server stops.""" + GunicornPrometheusMetrics.mark_process_dead_on_child_exit(worker.pid) diff --git a/helm-chart/renku-core/Chart.yaml b/helm-chart/renku-core/Chart.yaml index c78fdaeb30..ae2e0c7d14 100644 --- a/helm-chart/renku-core/Chart.yaml +++ b/helm-chart/renku-core/Chart.yaml @@ -3,4 +3,4 @@ appVersion: "1.0" description: A Helm chart for Kubernetes name: renku-core icon: https://avatars0.githubusercontent.com/u/53332360?s=400&u=a4311d22842343604ef61a8c8a1e5793209a67e9&v=4 -version: 2.7.0 +version: 2.8.0 diff --git a/helm-chart/renku-core/requirements.yaml b/helm-chart/renku-core/requirements.yaml deleted file mode 100644 index 3bd290aa59..0000000000 --- a/helm-chart/renku-core/requirements.yaml +++ /dev/null @@ -1,4 +0,0 @@ -dependencies: -- name: certificates - version: 0.0.3 - repository: "https://swissdatasciencecenter.github.io/helm-charts/" diff --git a/helm-chart/renku-core/templates/_helpers.tpl b/helm-chart/renku-core/templates/_helpers.tpl deleted file mode 100644 index 9d6d734fc9..0000000000 --- a/helm-chart/renku-core/templates/_helpers.tpl +++ /dev/null @@ -1,45 +0,0 @@ -{{/* vim: set filetype=mustache: */}} -{{/* -Expand the name of the chart. -*/}} -{{- define "renku-core.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. -*/}} -{{- define "renku-core.fullname" -}} -{{- if .Values.fullnameOverride -}} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- $name := default .Chart.Name .Values.nameOverride -}} -{{- if contains $name .Release.Name -}} -{{- .Release.Name | trunc 63 | trimSuffix "-" -}} -{{- else -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "renku-core.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Common labels -*/}} -{{- define "renku-core.labels" -}} -app.kubernetes.io/name: {{ include "renku-core.name" . }} -helm.sh/chart: {{ include "renku-core.chart" . 
}} -app.kubernetes.io/instance: {{ .Release.Name }} -{{- if .Chart.AppVersion }} -app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} -{{- end }} -app.kubernetes.io/managed-by: {{ .Release.Service }} -{{- end -}} diff --git a/helm-chart/renku-core/templates/cache-cleanup-job.yaml b/helm-chart/renku-core/templates/cache-cleanup-job.yaml deleted file mode 100644 index 162b8f4b92..0000000000 --- a/helm-chart/renku-core/templates/cache-cleanup-job.yaml +++ /dev/null @@ -1,27 +0,0 @@ -{{- range $version := .Values.versions }} -{{ if ne $version.name "v9"}} ---- -apiVersion: batch/v1 -kind: CronJob -metadata: - name: {{ include "renku-core.fullname" $ }}-cleanup-{{ $version.name }} - labels: - app.kubernetes.io/deploymentVersion: {{ $version.name }} -spec: - schedule: "*/5 * * * *" - concurrencyPolicy: Forbid - jobTemplate: - spec: - template: - spec: - containers: - - name: {{ include "renku-core.fullname" $ }}-cache-cleanup-{{ $version.name }} - image: renku/renku-core-cleanup:v1 - imagePullPolicy: IfNotPresent - args: - - {{ $version.name | quote}} - - {{ $.Release.Namespace }} - restartPolicy: OnFailure - serviceAccountName: {{ include "renku-core.fullname" $ }}-cleanup -{{ end }} -{{ end }} diff --git a/helm-chart/renku-core/templates/configmap.yaml b/helm-chart/renku-core/templates/configmap.yaml deleted file mode 100644 index abd664ba8d..0000000000 --- a/helm-chart/renku-core/templates/configmap.yaml +++ /dev/null @@ -1,21 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "renku-core.fullname" . }}-metadata-versions -data: - metadata-versions.json: | - { - "name": "renku-core", - "versions": [ - {{- $printComma := false -}} - {{- range $key, $version := .Values.versions }} - {{- if $printComma }},{{ else }} {{- $printComma = true }} {{ end }} - { - "version": "{{ $version.image.tag }}", - "data": { - "metadata_version": "{{ $version.prefix }}" - } - } - {{- end }} - ] - } diff --git a/helm-chart/renku-core/templates/cronjob-serviceaccount.yaml b/helm-chart/renku-core/templates/cronjob-serviceaccount.yaml deleted file mode 100644 index 3b94c995d6..0000000000 --- a/helm-chart/renku-core/templates/cronjob-serviceaccount.yaml +++ /dev/null @@ -1,36 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "renku-core.fullname" $ }}-cleanup - labels: -{{ include "renku-core.labels" $ | indent 4 }} ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: {{ include "renku-core.fullname" $ }}-cleanup - labels: -{{ include "renku-core.labels" $ | indent 4 }} -rules: -- apiGroups: - - "" - resources: - - pods - verbs: - - get - - list ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: {{ include "renku-core.fullname" $ }}-cleanup - labels: -{{ include "renku-core.labels" $ | indent 4 }} -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: {{ include "renku-core.fullname" $ }}-cleanup -subjects: -- kind: ServiceAccount - name: {{ include "renku-core.fullname" $ }}-cleanup - namespace: {{ $.Release.Namespace }} diff --git a/helm-chart/renku-core/templates/deployment.yaml b/helm-chart/renku-core/templates/deployment.yaml deleted file mode 100644 index 08ccf913a0..0000000000 --- a/helm-chart/renku-core/templates/deployment.yaml +++ /dev/null @@ -1,289 +0,0 @@ -{{- range $version := .Values.versions }} ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "renku-core.fullname" $ }}-{{ $version.name }} - labels: - app.kubernetes.io/deploymentVersion: {{ $version.name 
}} -{{ include "renku-core.labels" $ | indent 4 }} -spec: - replicas: {{ $.Values.replicaCount }} - strategy: - type: RollingUpdate - rollingUpdate: - maxUnavailable: 0 - maxSurge: 1 - selector: - matchLabels: - app.kubernetes.io/name: {{ include "renku-core.name" $ }} - app.kubernetes.io/instance: {{ $.Release.Name }} - app.kubernetes.io/deploymentVersion: {{ $version.name }} - template: - metadata: - annotations: - {{ if $.Values.metrics.enabled }} - prometheus.io/scrape: 'true' - prometheus.io/path: '/metrics' - prometheus.io/port: '8765' - {{ end }} - labels: - app.kubernetes.io/name: {{ include "renku-core.name" $ }} - app.kubernetes.io/instance: {{ $.Release.Name }} - app.kubernetes.io/deploymentVersion: {{ $version.name }} - {{ $.Values.global.redis.clientLabel | toYaml | nindent 8 }} - spec: - {{- with $.Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . | nindent 8 }} - {{- end }} - volumes: - - name: shared-volume - emptyDir: {} - - name: metadata-versions - configMap: - name: {{ include "renku-core.fullname" $ }}-metadata-versions - {{- include "certificates.volumes" $ | nindent 8 }} - initContainers: - {{- include "certificates.initContainer" $ | nindent 8 }} - securityContext: - {{- toYaml $.Values.podSecurityContext | nindent 8 }} - automountServiceAccountToken: {{ $.Values.global.debug }} - containers: - {{ if $.Values.metrics.enabled }} - - name: {{ $.Chart.Name}}-rqmetrics - image: "{{ $.Values.metrics.image.repository }}:{{ $.Values.metrics.image.tag }}" - imagePullPolicy: {{ $.Values.metrics.image.pullPolicy }} - securityContext: - {{- toYaml $.Values.securityContext | nindent 12 }} - env: - - name: RQ_REDIS_HOST - value: {{ $.Values.global.redis.host | quote }} - - name: RQ_REDIS_PORT - value: {{ $.Values.global.redis.port | quote }} - - name: RQ_REDIS_DB - value: {{ $.Values.global.redis.dbIndex.coreService | quote }} - - name: RQ_REDIS_IS_SENTINEL - value: {{ $.Values.global.redis.sentinel.enabled | quote }} - - name: RQ_REDIS_MASTER_SET - value: {{ $.Values.global.redis.sentinel.masterSet | quote }} - - name: RQ_REDIS_PASS - valueFrom: - secretKeyRef: - name: {{ $.Values.global.redis.existingSecret }} - key: {{ $.Values.global.redis.existingSecretPasswordKey }} - # TODO: Deal with redis namespace properly in rqmetrics, then pass that on as well - {{ end }} - - name: {{ $.Chart.Name }} - image: "{{ $version.image.repository }}:{{ $version.image.tag }}" - imagePullPolicy: {{ $version.image.pullPolicy }} - securityContext: - {{- toYaml $.Values.securityContext | nindent 12 }} - args: ["service", "api"] - env: - - name: REDIS_HOST - value: {{ $.Values.global.redis.host | quote }} - - name: REDIS_PORT - value: {{ $.Values.global.redis.port | quote }} - - name: REDIS_DATABASE - value: {{ $.Values.global.redis.dbIndex.coreService | quote }} - - name: REDIS_IS_SENTINEL - value: {{ $.Values.global.redis.sentinel.enabled | quote }} - - name: REDIS_MASTER_SET - value: {{ $.Values.global.redis.sentinel.masterSet | quote }} - - name: REDIS_PASSWORD - valueFrom: - secretKeyRef: - name: {{ $.Values.global.redis.existingSecret }} - key: {{ $.Values.global.redis.existingSecretPasswordKey }} - - name: REDIS_NAMESPACE - value: {{ $version.name }} - - name: CACHE_DIR - value: {{ $.Values.cacheDirectory }} - - name: PROJECT_CLONE_DEPTH_DEFAULT - value: {{ $.Values.projectCloneDepth | quote }} - - name: TEMPLATE_CLONE_DEPTH_DEFAULT - value: {{ $.Values.templateCloneDepth | quote }} - - name: MAX_CONTENT_LENGTH - value: {{ $.Values.maximumUploadSizeBytes | quote }} - - name: 
REQUEST_TIMEOUT - value: {{ $.Values.requestTimeout | quote }} - - name: CORE_SERVICE_PREFIX - value: /renku - - name: CORE_SERVICE_API_BASE_PATH - value: {{ $.Values.apiBasePath }} - - name: RENKU_SVC_SWAGGER_URL - value: /renku/openapi.json - - name: SERVICE_LOG_LEVEL - value: {{ $.Values.logLevel }} - - name: SENTRY_ENABLED - value: {{ $.Values.sentry.enabled | quote }} - - name: SENTRY_DSN - value: {{ $.Values.sentry.dsn }} - - name: SENTRY_SAMPLE_RATE - value: {{ $.Values.sentry.sampleRate | quote }} - - name: SENTRY_ENV - value: {{ $.Values.sentry.environment }} - {{ if $.Values.nWorkers }} - - name: RENKU_SVC_NUM_WORKERS - value: {{ $.Values.nWorkers | quote }} - {{ end }} - {{ if $.Values.nThreads }} - - name: RENKU_SVC_NUM_THREADS - value: {{ $.Values.nThreads | quote }} - {{ end }} - - name: GIT_LFS_SKIP_SMUDGE - value: {{ $.Values.gitLFSSkipSmudge | quote }} - - name: RENKU_DOMAIN - value: {{ $.Values.global.renku.domain }} - - name: RENKU_PROJECT_DEFAULT_CLI_VERSION - value: {{ $.Values.global.renku.cli_version | default "" | quote }} - - name: METADATA_VERSIONS_LIST - value: /svc/config/metadata-versions/metadata-versions.json - {{- include "certificates.env.python" $ | nindent 12 }} - volumeMounts: - - name: shared-volume - mountPath: {{ $.Values.cacheDirectory }} - - name: metadata-versions - mountPath: /svc/config/metadata-versions - {{- include "certificates.volumeMounts.system" $ | nindent 12 }} - ports: - - name: http - containerPort: 8080 - protocol: TCP - livenessProbe: - httpGet: - path: /health - port: http - readinessProbe: - httpGet: - path: /health - port: http - resources: - {{- toYaml $.Values.resources.core | nindent 12 }} - - name: {{ $.Chart.Name }}-datasets-workers - image: "{{ $version.image.repository }}:{{ $version.image.tag }}" - imagePullPolicy: {{ $version.image.pullPolicy }} - securityContext: - {{- toYaml $.Values.securityContext | nindent 12 }} - args: ["service", "worker"] - env: - - name: REDIS_HOST - value: {{ $.Values.global.redis.host | quote }} - - name: REDIS_PORT - value: {{ $.Values.global.redis.port | quote }} - - name: REDIS_DATABASE - value: {{ $.Values.global.redis.dbIndex.coreService | quote }} - - name: REDIS_IS_SENTINEL - value: {{ $.Values.global.redis.sentinel.enabled | quote }} - - name: REDIS_MASTER_SET - value: {{ $.Values.global.redis.sentinel.masterSet | quote }} - - name: REDIS_PASSWORD - valueFrom: - secretKeyRef: - name: {{ $.Values.global.redis.existingSecret }} - key: {{ $.Values.global.redis.existingSecretPasswordKey }} - - name: REDIS_NAMESPACE - value: {{ $version.name }} - - name: RENKU_JWT_TOKEN_SECRET - value: {{ $.Values.jwtTokenSecret }} - - name: CACHE_DIR - value: {{ $.Values.cacheDirectory }} - - name: RENKU_SVC_CLEANUP_INTERVAL - value: {{ $.Values.cleanupInterval | quote }} - - name: RENKU_SVC_WORKER_QUEUES - value: {{ $.Values.datasetsWorkerQueues}} - - name: RENKU_SVC_CLEANUP_TTL_FILES - value: {{ $.Values.cleanupFilesTTL | quote }} - - name: RENKU_SVC_CLEANUP_TTL_PROJECTS - value: {{ $.Values.cleanupProjectsTTL | quote }} - - name: DEPLOYMENT_LOG_LEVEL - value: {{ $.Values.logLevel }} - - name: SENTRY_ENABLED - value: {{ $.Values.sentry.enabled | quote }} - - name: SENTRY_DSN - value: {{ $.Values.sentry.dsn }} - - name: SENTRY_SAMPLE_RATE - value: {{ $.Values.sentry.sampleRate | quote }} - - name: SENTRY_ENV - value: {{ $.Values.sentry.environment }} - - name: GIT_LFS_SKIP_SMUDGE - value: {{ $.Values.gitLFSSkipSmudge | quote }} - - name: RENKU_DOMAIN - value: {{ $.Values.global.renku.domain }} - {{- 
include "certificates.env.python" $ | nindent 12 }} - volumeMounts: - - name: shared-volume - mountPath: {{ $.Values.cacheDirectory }} - {{- include "certificates.volumeMounts.system" $ | nindent 12 }} - resources: - {{- toYaml $.Values.resources.datasetsWorkers | nindent 12 }} - - - name: {{ $.Chart.Name }}-management-workers - image: "{{ $version.image.repository }}:{{ $version.image.tag }}" - imagePullPolicy: {{ $version.image.pullPolicy }} - securityContext: - {{- toYaml $.Values.securityContext | nindent 12 }} - args: ["service", "worker"] - env: - - name: REDIS_HOST - value: {{ $.Values.global.redis.host | quote }} - - name: REDIS_PORT - value: {{ $.Values.global.redis.port | quote }} - - name: REDIS_DATABASE - value: {{ $.Values.global.redis.dbIndex.coreService | quote }} - - name: REDIS_IS_SENTINEL - value: {{ $.Values.global.redis.sentinel.enabled | quote }} - - name: REDIS_MASTER_SET - value: {{ $.Values.global.redis.sentinel.masterSet | quote }} - - name: REDIS_PASSWORD - valueFrom: - secretKeyRef: - name: {{ $.Values.global.redis.existingSecret }} - key: {{ $.Values.global.redis.existingSecretPasswordKey }} - - name: REDIS_NAMESPACE - value: {{ $version.name }} - - name: CACHE_DIR - value: {{ $.Values.cacheDirectory }} - - name: RENKU_SVC_CLEANUP_INTERVAL - value: {{ $.Values.cleanupInterval | quote }} - - name: RENKU_SVC_WORKER_QUEUES - value: {{ $.Values.managementWorkerQueues }} - - name: RENKU_SVC_CLEANUP_TTL_FILES - value: {{ $.Values.cleanupFilesTTL | quote }} - - name: RENKU_SVC_CLEANUP_TTL_PROJECTS - value: {{ $.Values.cleanupProjectsTTL | quote }} - - name: DEPLOYMENT_LOG_LEVEL - value: {{ $.Values.logLevel }} - - name: SENTRY_ENABLED - value: {{ $.Values.sentry.enabled | quote }} - - name: SENTRY_DSN - value: {{ $.Values.sentry.dsn }} - - name: SENTRY_SAMPLE_RATE - value: {{ $.Values.sentry.sampleRate | quote }} - - name: SENTRY_ENV - value: {{ $.Values.sentry.environment }} - - name: GIT_LFS_SKIP_SMUDGE - value: {{ $.Values.gitLFSSkipSmudge | quote }} - - name: RENKU_DOMAIN - value: {{ $.Values.global.renku.domain }} - {{- include "certificates.env.python" $ | nindent 12 }} - volumeMounts: - - name: shared-volume - mountPath: {{ $.Values.cacheDirectory }} - {{- include "certificates.volumeMounts.system" $ | nindent 12 }} - resources: - {{- toYaml $.Values.resources.scheduler | nindent 12 }} - {{- with $.Values.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with $.Values.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with $.Values.tolerations }} - tolerations: - {{- toYaml . 
| nindent 8 }} - {{- end }} -{{ end }} diff --git a/helm-chart/renku-core/templates/hpa.yaml b/helm-chart/renku-core/templates/hpa.yaml deleted file mode 100644 index 3ce0b9ee87..0000000000 --- a/helm-chart/renku-core/templates/hpa.yaml +++ /dev/null @@ -1,29 +0,0 @@ -{{- range $version := .Values.versions }} ---- -{{- if $.Capabilities.APIVersions.Has "autoscaling/v2" }} -apiVersion: autoscaling/v2 -{{- else if $.Capabilities.APIVersions.Has "autoscaling/v2beta2" }} -apiVersion: autoscaling/v2beta2 -{{- else if $.Capabilities.APIVersions.Has "autoscaling/v2beta1" }} -apiVersion: autoscaling/v2beta1 -{{- else }} - {{- fail "ERROR: You must have at least autoscaling/v2beta1 to use HorizontalPodAutoscaler" }} -{{- end }} -kind: HorizontalPodAutoscaler -metadata: - name: {{ include "renku-core.fullname" $ }}-{{ $version.name }} -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: {{ include "renku-core.fullname" $ }}-{{ $version.name }} - minReplicas: {{ $.Values.horizontalPodAutoscaling.minReplicas }} - maxReplicas: {{ $.Values.horizontalPodAutoscaling.maxReplicas }} - metrics: - - type: Resource - resource: - name: memory - target: - type: Utilization - averageUtilization: {{ $.Values.horizontalPodAutoscaling.averageMemoryUtilization }} -{{ end }} diff --git a/helm-chart/renku-core/templates/pdb.yaml b/helm-chart/renku-core/templates/pdb.yaml deleted file mode 100644 index 6bbba04907..0000000000 --- a/helm-chart/renku-core/templates/pdb.yaml +++ /dev/null @@ -1,12 +0,0 @@ -{{- range $version := .Values.versions }} ---- -apiVersion: policy/v1 -kind: PodDisruptionBudget -metadata: - name: {{ include "renku-core.fullname" $ }}-{{ $version.name }} -spec: - minAvailable: 1 - selector: - matchLabels: - app.kubernetes.io/deploymentVersion: {{ $version.name }} -{{ end }} diff --git a/helm-chart/renku-core/templates/service.yaml b/helm-chart/renku-core/templates/service.yaml deleted file mode 100644 index a85f29fc0c..0000000000 --- a/helm-chart/renku-core/templates/service.yaml +++ /dev/null @@ -1,21 +0,0 @@ -{{- range $version := .Values.versions }} ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "renku-core.fullname" $ }}-{{ $version.name }} - labels: - app.kubernetes.io/deploymentVersion: {{ $version.name }} -{{ include "renku-core.labels" $ | indent 4 }} -spec: - type: {{ $.Values.service.type }} - ports: - - port: {{ $.Values.service.port }} - targetPort: http - protocol: TCP - name: http - selector: - app.kubernetes.io/name: {{ include "renku-core.name" $ }} - app.kubernetes.io/instance: {{ $.Release.Name }} - app.kubernetes.io/deploymentVersion: {{ $version.name }} -{{ end }} diff --git a/helm-chart/renku-core/values.schema.json b/helm-chart/renku-core/values.schema.json deleted file mode 100644 index b814c0f133..0000000000 --- a/helm-chart/renku-core/values.schema.json +++ /dev/null @@ -1,258 +0,0 @@ -{ - "$schema": "https://json-schema.org/draft-07/schema#", - "properties": { - "replicaCount": { - "description": "The number of replicas of the service workers to deploy", - "type": "integer", - "minimum": 0 - }, - "cacheDirectory": { - "description": "The directory the project cache is in", - "type": "string", - "pattern": "^(/[^/]+)+$" - }, - "cleanupInterval": { - "description": "Interval for cache cleanup", - "type": "integer", - "minimum": 0 - }, - "projectCloneDepth": { - "description": "Depth of git clone in cache", - "type": "integer", - "minimum": 1 - }, - "templateCloneDepth": { - "description": "Depth of git clone for templates", - "type": 
"integer", - "minimum": 1 - }, - "maximumUploadSizeBytes": { - "description": "Maximum allowed file upload size.", - "type": "string", - "minimum": 1, - "pattern": "^\\d+" - }, - "requestTimeout": { - "description": "Time before requests time out.", - "type": "integer" - }, - "datasetsWorkerQueues": { - "description": "Name of the worker queue for dataset jobs", - "type": "string" - }, - "managementWorkerQueues": { - "description": "Name of the worker queue for management jobs", - "type": "string" - }, - "cleanupFilesTTL": { - "description": "Time to live for uploaded files", - "type": "integer", - "minimum": 0 - }, - "cleanupProjectsTTL": { - "description": "Time to live for projects in cache", - "type": "integer", - "minimum": 0 - }, - "logLevel": { - "description": "Logging level", - "type": "string" - }, - "gitLFSSkipSmudge": { - "description": "Whether to download files from LFS when cloning a project", - "type": "integer", - "minimum": 0, - "maximum": 1 - }, - "nWorkers": { - "description": "Number of gunicorn workers for the service", - "type": "integer", - "minimum": 1 - }, - "nThreads": { - "description": "Number of gunicorn threads per worker for the service", - "type": "integer", - "minimum": 1 - }, - "jwtTokenSecret": { - "description": "Secret to encrypt jwt tokens", - "type": "string", - "minLength": 32 - }, - "imagePullSecrets": { - "description": "Secrets necessary for pulling the image", - "type": "array", - "items": { - "type": "object" - } - }, - "service": { - "description": "Kubernetes service definition", - "properties": { - "type": { - "description": "The type of kubernetes service", - "type": "string" - }, - "port": { - "description": "The port of the kubernetes service", - "type": "integer", - "minimum": 1 - } - }, - "type": "object" - }, - "metrics": { - "description": "Definition of Redis Queue metrics", - "properties": { - "enabled":{ - "description": "whether to enable redis queue metrics", - "type":"boolean" - }, - "image": { - "description": "Image settings for rq metrics", - "properties": { - "repository": { - "type": "string" - }, - "tag": { - "type": "string" - }, - "pullPolicy": { - "type": "string" - } - }, - "type": "object" - } - }, - "type": "object" - }, - "resources": { - "description": "Kubenetes resource requests/limites (cpu/memory/etc.) 
to request", - "type": "object" - }, - "nodeSelector": { - "description": "Node selector for deployment", - "type": "object" - }, - "horizontalPodAutoscaling": { - "description": "Setup for scaling the core service", - "type": "object", - "properties": { - "minReplicas": { - "description": "Flag to turn on/off Sentry", - "type": "integer", - "minimum": 1 - }, - "maxReplicas": { - "description": "URI of the sentry Data Source Name", - "type": "integer", - "minimum": 1 - }, - "averageMemoryUtilization": { - "description": "Name of the sentry environment to post to", - "type": "integer", - "exclusiveMinimum": 0, - "exclusiveMaximum": 100 - } - } - }, - "tolerations": { - "description": "Tolerations for deployment", - "items": { - "type": "object" - }, - "type": "array" - }, - "affinity": { - "description": "Affinity for deployment", - "type": "object" - }, - "sentry": { - "description": "Definition of sentry instance to use to log warnings/errors", - "properties": { - "enabled": { - "description": "Flag to turn on/off Sentry", - "type": "boolean" - }, - "dsn": { - "description": "URI of the sentry Data Source Name", - "type": ["string", "null"] - }, - "environment": { - "description": "Name of the sentry environment to post to", - "type": ["string", "null"] - }, - "sampleRate": { - "description": "Portion of requests to track in Sentry performance tracing", - "type": ["number", "null"] - } - }, - "type": "object" - }, - "versions": { - "description": "A map of different service deployments", - "type": "object", - "required": ["latest"], - "additionalProperties": { - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "prefix": { - "description": "Prefix for deployment's API route", - "type": "string" - }, - "nameOverride": { - "description": "Override for the chart name", - "type": "string" - }, - "fullnameOverride": { - "description": "Override for the chart fullname", - "type": "string" - }, - "image": { - "description": "Docker image settings", - "properties": { - "repository": { - "type": "string" - }, - "tag": { - "type": "string" - }, - "pullPolicy": { - "type": "string" - } - }, - "type": "object" - } - }, - "required": [ - "name", - "prefix", - "image" - ] - } - } - }, - "required": [ - "replicaCount", - "cacheDirectory", - "cleanupInterval", - "projectCloneDepth", - "templateCloneDepth", - "datasetsWorkerQueues", - "managementWorkerQueues", - "cleanupFilesTTL", - "cleanupProjectsTTL", - "logLevel", - "gitLFSSkipSmudge", - "jwtTokenSecret", - "service", - "metrics", - "sentry", - "versions" - ], - "title": "Values", - "type": "object" - } diff --git a/helm-chart/renku-core/values.yaml b/helm-chart/renku-core/values.yaml index e5477fc60b..3f234d3e66 100644 --- a/helm-chart/renku-core/values.yaml +++ b/helm-chart/renku-core/values.yaml @@ -1,112 +1,11 @@ -# Default values for renku-core. -# This is a YAML-formatted file. -# Declare variables to be passed into your templates. -replicaCount: 2 global: - ## Specify a secret that containes the certificate - ## if you would like to use a custom CA. The key for the secret - ## should have the .crt extension otherwise it is ignored. The - ## keys across all secrets are mounted as files in one location so - ## the keys across all secrets have to be unique. - certificates: - image: - repository: renku/certificates - tag: '0.0.2' - customCAs: [] - # - secret: - ## Redis configuration. This is where renku-core expects to find - ## a functioning redis instance and credentials to connect to it. 
- redis: - sentinel: - enabled: true - masterSet: mymaster - dbIndex: - coreService: "1" - host: renku-redis - port: 26379 - clientLabel: - renku-redis-host: "true" - existingSecret: redis-secret - existingSecretPasswordKey: redis-password - debug: false -# base path - this is the reverse proxy base path -apiBasePath: /api -cacheDirectory: /svc/cache -cleanupInterval: 60 # NOTE: This needs to be a divisor of, and less than cleanupFilesTTL|cleanupProjectsTTL. -projectCloneDepth: 1 -templateCloneDepth: 1 -maximumUploadSizeBytes: "1073741824" # 1 Gigabyte, store as string to keep Helm from converting it to scientific notation -requestTimeout: 600 -datasetsWorkerQueues: datasets.jobs,delayed.ctrl.DatasetsCreateCtrl,delayed.ctrl.DatasetsAddFileCtrl,delayed.ctrl.DatasetsRemoveCtrl,delayed.ctrl.DatasetsImportCtrl,delayed.ctrl.DatasetsEditCtrl,delayed.ctrl.DatasetsUnlinkCtrl -managementWorkerQueues: cache.cleanup.files,cache.cleanup.projects,delayed.ctrl.MigrateProjectCtrl,delayed.ctrl.SetConfigCtrl -cleanupFilesTTL: 1800 -cleanupProjectsTTL: 1800 -logLevel: INFO -# override to automatically pull LFS data on clone -gitLFSSkipSmudge: 1 -# Concurrency settings for the main service: -# the default it 4 workers with 8 threads set in the Dockerfile -nWorkers: 4 -nThreads: 8 -# NOTE: Make sure token secret is greater or equal to 32 bytes. -jwtTokenSecret: bW9menZ3cnh6cWpkcHVuZ3F5aWJycmJn -imagePullSecrets: [] -nameOverride: "" -fullnameOverride: "" -service: - type: ClusterIP - port: 80 -metrics: - enabled: false - image: - repository: renku/rqmetrics - tag: 0.0.2 - pullPolicy: IfNotPresent -resources: - core: {} - rqmetrics: {} - datasetsWorkers: {} - managementWorkers: {} - scheduler: {} -# nodeSelector: {} - -# tolerations: [] - -# affinity: {} - -## Add sentry configuration -sentry: - enabled: false - dsn: - environment: - sampleRate: 0.2 -# versions is the list of different deployment that support different metadata versions. + core: + versions: + latest: + image: + repository: renku/renku-core + tag: "v2.7.0" versions: latest: - name: v10 - prefix: "10" - nameOverride: "" - fullnameOverride: "" - image: - repository: renku/renku-core - tag: "v2.7.0" - pullPolicy: IfNotPresent - v9: - name: v9 - prefix: "9" - nameOverride: "" - fullnameOverride: "" image: - repository: renku/renku-core - tag: "v1.11.4" - pullPolicy: IfNotPresent -podSecurityContext: - runAsUser: 1000 - runAsGroup: 1000 - fsGroup: 100 -securityContext: - allowPrivilegeEscalation: false -horizontalPodAutoscaling: - minReplicas: 2 - maxReplicas: 10 - averageMemoryUtilization: 50 + tag: v2.8.0 diff --git a/poetry.lock b/poetry.lock index 8c9fdcea1d..8b13e0673f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "addict" @@ -749,8 +749,8 @@ files = [ [package.dependencies] "ruamel.yaml" = [ {version = ">=0.16.0,<0.18", markers = "python_version >= \"3.10\""}, - {version = ">=0.15.78,<0.18", markers = "python_version >= \"3.8\""}, - {version = ">=0.15.98,<0.18", markers = "python_version >= \"3.9\""}, + {version = ">=0.15.98,<0.18", markers = "python_version >= \"3.9\" and python_version < \"3.10\""}, + {version = ">=0.15.78,<0.18", markers = "python_version >= \"3.8\" and python_version < \"3.9\""}, ] schema-salad = "*" setuptools = "*" @@ -1040,7 +1040,7 @@ files = [ name = "flask" version = "2.2.5" description = "A simple framework for building complex web applications." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "Flask-2.2.5-py3-none-any.whl", hash = "sha256:58107ed83443e86067e41eff4631b058178191a355886f8e479e347fa1285fdf"}, @@ -1357,7 +1357,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, @@ -2390,6 +2390,35 @@ wcwidth = "*" [package.extras] tests = ["pytest", "pytest-cov", "pytest-lazy-fixture"] +[[package]] +name = "prometheus-client" +version = "0.17.1" +description = "Python client for the Prometheus monitoring system." +optional = false +python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prometheus-flask-exporter" +version = "0.22.4" +description = "Prometheus metrics exporter for Flask" +optional = false +python-versions = "*" +files = [ + {file = "prometheus_flask_exporter-0.22.4-py3-none-any.whl", hash = "sha256:e130179c26d5a9b903c12c0d8826127ae491b04b298cae0b92b98677dcf2c06f"}, + {file = "prometheus_flask_exporter-0.22.4.tar.gz", hash = "sha256:959b69f1e740b6736ea53fe5f28dc2ab6229b2ebeade6582b3dbb5d74c7d58e4"}, +] + +[package.dependencies] +flask = "*" +prometheus-client = "*" + [[package]] name = "prov" version = "1.5.1" @@ -4582,4 +4611,4 @@ service = ["apispec", "apispec-oneofschema", "apispec-webframeworks", "circus", [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.12" -content-hash = "a75f7197bf53bde1db069a16c1192aa982dbdf84d6c864bb032f8f764c34f154" +content-hash = "1649b695abc65d476919f07e320a2675210a1232d59897783aa66359f0163af3" diff --git a/pyproject.toml b/pyproject.toml index 20579a9d9c..a2de1b7a46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -116,6 +114,7 @@ redis = { version = ">=3.5.3,<4.6.0,!=4.5.5", optional = true } rq = { version = "==1.15.0", optional = true } sentry-sdk = { version = ">=1.5.11,<1.26.0", extras = ["flask"], optional = true } walrus = { version = ">=0.8.2,<0.10.0", optional = true } +prometheus-flask-exporter = "^0.22.4" [tool.poetry.group.dev.dependencies] black = "==23.1.0" @@ -303,6 +302,7 @@ module = [ "pexpect", "PIL", "pluggy", + "prometheus_flask_exporter.*", "psutil", "pyld", "pyshacl", diff --git a/renku/command/__init__.py b/renku/command/__init__.py index ef1ee6e5be..c4bc9ac824 100644 --- a/renku/command/__init__.py +++ b/renku/command/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/checks/datasets.py b/renku/command/checks/datasets.py index 511bf9405a..96bea6281f 100644 --- a/renku/command/checks/datasets.py +++ b/renku/command/checks/datasets.py @@ -77,17 +77,17 @@ def check_missing_files(dataset_gateway: IDatasetGateway, **_): path = project_context.path / file_.entity.path file_exists = path.exists() or (file_.is_external and os.path.lexists(path)) if not file_exists: - missing[dataset.name].append(file_.entity.path) + missing[dataset.slug].append(file_.entity.path) if not missing: return True, False, None problems = WARNING + "There are missing files in datasets." - for dataset_name, files in missing.items(): + for dataset_slug, files in missing.items(): problems += ( "\n\t" - + click.style(dataset_name, fg="yellow") + + click.style(dataset_slug, fg="yellow") + ":\n\t " + "\n\t ".join(click.style(path, fg="red") for path in files) ) @@ -114,9 +114,9 @@ def fix_or_report(dataset): dataset.unfreeze() dataset.derived_from = None dataset.freeze() - communication.info(f"Fixing dataset '{dataset.name}'") + communication.info(f"Fixing dataset '{dataset.slug}'") else: - invalid_datasets.append(dataset.name) + invalid_datasets.append(dataset.slug) for dataset in dataset_gateway.get_provenance_tails(): while dataset.derived_from is not None and dataset.derived_from.url_id is not None: @@ -137,7 +137,7 @@ def fix_or_report(dataset): WARNING + "There are invalid dataset metadata in the project (use 'renku doctor --fix' to fix them):" + "\n\n\t" - + "\n\t".join(click.style(name, fg="yellow") for name in invalid_datasets) + + "\n\t".join(click.style(slug, fg="yellow") for slug in invalid_datasets) + "\n" ) @@ -177,12 +177,12 @@ def check_dataset_files_outside_datadir(fix, dataset_gateway: IDatasetGateway, * continue if fix: - communication.info(f"Fixing dataset '{dataset.name}' files.") + communication.info(f"Fixing dataset '{dataset.slug}' files.") dataset.unfreeze() for file in detected_files: dataset.unlink_file(file.entity.path) dataset.freeze() - add_to_dataset(dataset.name, urls=[file.entity.path for file in detected_files], link=True) + add_to_dataset(dataset.slug, urls=[file.entity.path for file in detected_files], link=True) else: invalid_files.extend(detected_files) @@ -220,7 +220,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): for file in dataset.files: if file.is_external: 
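The dataset.name to dataset.slug changes above are part of a rename that runs through the rest of this diff: the machine-friendly identifier formerly called name becomes slug, and the human-readable title becomes name (see the renku.slug/schema.name field swap in renku/command/schema/dataset.py further below). Schematically, with placeholder values and other constructor arguments omitted:

    # Field mapping after this diff (illustrative, not a complete call):
    #   slug -> machine-friendly identifier, was `name` (renku:slug)
    #   name -> human-readable name, was `title` (schema:name)
    from renku.domain_model.dataset import Dataset

    dataset = Dataset(identifier=None, slug="flight-data", name="Flight Data 2023")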
external_files.append(file.entity.path) - datasets[dataset.name].append(file) + datasets[dataset.slug].append(file) if not external_files: return True, False, None @@ -240,7 +240,7 @@ def check_external_files(fix, dataset_gateway: IDatasetGateway, **_): f"dataset with an external storage backend:\n\t{external_files_str}" ) - for name, files in datasets.items(): - file_unlink(name=name, yes=True, dataset_files=files) + for slug, files in datasets.items(): + file_unlink(slug=slug, yes=True, dataset_files=files) return True, False, None diff --git a/renku/command/checks/validate_shacl.py b/renku/command/checks/validate_shacl.py index 69c031a024..ab3951ebbc 100644 --- a/renku/command/checks/validate_shacl.py +++ b/renku/command/checks/validate_shacl.py @@ -104,7 +104,7 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_): try: conform, graph, t = _check_shacl_structure(data) except (Exception, BaseException) as e: - problems.append(f"Couldn't validate dataset '{dataset.name}': {e}\n\n") + problems.append(f"Couldn't validate dataset '{dataset.slug}': {e}\n\n") continue if conform: @@ -112,7 +112,7 @@ def check_datasets_structure(dataset_gateway: IDatasetGateway, **_): ok = False - problems.append(f"{dataset.name}\n\t{_shacl_graph_to_string(graph)}\n") + problems.append(f"{dataset.slug}\n\t{_shacl_graph_to_string(graph)}\n") if ok: return True, False, None diff --git a/renku/command/clone.py b/renku/command/clone.py index f4f26c6ba5..24b7cdbf70 100644 --- a/renku/command/clone.py +++ b/renku/command/clone.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -33,7 +32,7 @@ def _project_clone( install_githooks: bool = True, install_mergetool: bool = True, skip_smudge: bool = True, - recursive: bool = True, + recursive: bool = False, depth: Optional[int] = None, progress: Optional[RemoteProgress] = None, config: Optional[Dict[str, Any]] = None, @@ -49,7 +48,7 @@ def _project_clone( install_githooks(bool): Whether to install the pre-commit hook or not (Default value = True). install_mergetool(bool): Whether to install the renku metadata git mergetool or not (Default value = True). skip_smudge(bool): Whether to skip pulling files from LFS (Default value = True). - recursive(bool): Recursively clone (Default value = True). + recursive(bool): Recursively clone (Default value = False). depth(Optional[int]): Clone depth (commits from HEAD) (Default value = None). progress(Optional[RemoteProgress]): Git progress object (Default value = None). config(Optional[Dict[str, Any]]): Initial config (Default value = None). diff --git a/renku/command/config.py b/renku/command/config.py index be44fd7bb1..f8f10a1c5d 100644 --- a/renku/command/config.py +++ b/renku/command/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
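Note the behavioral change in renku/command/clone.py above: recursive now defaults to False, so git submodules are no longer cloned unless a caller opts in. A call-site sketch with a placeholder URL:

    # Callers that depended on submodule cloning must now opt in explicitly.
    from renku.command.clone import _project_clone

    _project_clone(
        url="https://gitlab.example.com/user/project.git",  # placeholder
        recursive=True,  # previously the default
    )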
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/dataset.py b/renku/command/dataset.py index dc1fb86238..103eea7233 100644 --- a/renku/command/dataset.py +++ b/renku/command/dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -39,7 +38,7 @@ def search_datasets_command(): - """Command to get all the datasets whose name starts with the given string.""" + """Command to get all the datasets whose slug starts with the given string.""" return Command().command(search_datasets).require_migration().with_database() diff --git a/renku/command/doctor.py b/renku/command/doctor.py index 9150352934..e5ac5edc0d 100644 --- a/renku/command/doctor.py +++ b/renku/command/doctor.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/format/dataset_files.py b/renku/command/format/dataset_files.py index ff2b32b644..bbaaab60d2 100644 --- a/renku/command/format/dataset_files.py +++ b/renku/command/format/dataset_files.py @@ -73,7 +73,10 @@ def get_lfs_tracking(): record.is_lfs = False def naturalize(value) -> str: - return naturalsize(value).upper().replace("BYTES", " B") + try: + return naturalsize(value).upper().replace("BYTES", " B") + except ValueError: + return str(value) def get_file_sizes(): if not any(r for r in records if r.size is None): # All records already have a size @@ -181,8 +184,8 @@ def json(records, **_): "dataset": ("title", "dataset"), "full_path": ("full_path", None), "path": ("path", None), - "short_name": ("dataset_name", "dataset name"), - "dataset_name": ("dataset_name", "dataset name"), + "short_name": ("dataset_slug", "dataset slug"), + "dataset_slug": ("dataset_slug", "dataset slug"), "size": ("size", None), "lfs": ("is_lfs", "lfs"), "source": ("source", None), diff --git a/renku/command/format/datasets.py b/renku/command/format/datasets.py index 40da217ab0..1fcc9b16be 100644 --- a/renku/command/format/datasets.py +++ b/renku/command/format/datasets.py @@ -90,13 +90,14 @@ def json(datasets, **kwargs): "id": ("identifier", "id"), "created": ("date_created", None), "date_created": ("date_created", None), - "short_name": ("name", None), + "short_name": ("slug", None), "name": ("name", None), + "slug": ("slug", None), "creators": ("creators_csv", "creators"), "creators_full": ("creators_full_csv", "creators"), "tags": ("tags_csv", "tags"), "version": ("version", None), - "title": ("title", "title"), + "title": ("name", None), "keywords": ("keywords_csv", "keywords"), "description": ("short_description", "description"), "storage": ("storage", None), diff --git a/renku/command/gc.py b/renku/command/gc.py index 9d56bf2399..1cccf74739 100644 --- a/renku/command/gc.py +++ b/renku/command/gc.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne 
(EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/githooks.py b/renku/command/githooks.py index 0680a9521a..aaaad0b581 100644 --- a/renku/command/githooks.py +++ b/renku/command/githooks.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/graph.py b/renku/command/graph.py index 7799299018..0efe73e726 100644 --- a/renku/command/graph.py +++ b/renku/command/graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/group.py b/renku/command/group.py index 150aeb88fd..9c4fbb85d5 100644 --- a/renku/command/group.py +++ b/renku/command/group.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/init.py b/renku/command/init.py index 41cf4e97e2..3b94d27e5a 100644 --- a/renku/command/init.py +++ b/renku/command/init.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/log.py b/renku/command/log.py index 78d0d70668..3ba15da3ec 100644 --- a/renku/command/log.py +++ b/renku/command/log.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/login.py b/renku/command/login.py index 1a380a9360..cb51f8b2f7 100644 --- a/renku/command/login.py +++ b/renku/command/login.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/mergetool.py b/renku/command/mergetool.py index d1397150ff..c2d209680d 100644 --- a/renku/command/mergetool.py +++ b/renku/command/mergetool.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/migrate.py b/renku/command/migrate.py index df57d07dcc..a46ac34d3c 100644 --- a/renku/command/migrate.py +++ b/renku/command/migrate.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/move.py b/renku/command/move.py index ab0cea8585..6c48278da4 100644 --- a/renku/command/move.py +++ b/renku/command/move.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -57,7 +56,7 @@ def _move(sources: List[str], destination: str, force: bool, verbose: bool, to_d absolute_sources = [_get_absolute_path(src) for src in sources] if to_dataset: - target_dataset = DatasetsProvenance().get_by_name(to_dataset, strict=True) + target_dataset = DatasetsProvenance().get_by_slug(to_dataset, strict=True) if not is_subpath(absolute_destination, _get_absolute_path(target_dataset.get_datadir())): raise errors.ParameterError( f"Destination {destination} must be in {target_dataset.get_datadir()} when moving to a dataset." @@ -112,7 +111,7 @@ def _move(sources: List[str], destination: str, force: bool, verbose: bool, to_d # NOTE: Force-add to include possible ignored files repository.add(*files.values(), force=True) - move_files(files=files, to_dataset_name=to_dataset) + move_files(files=files, to_dataset_slug=to_dataset) if verbose: _show_moved_files(project_context.path, files) diff --git a/renku/command/options.py b/renku/command/options.py index 54551c7fe6..cf06f087e0 100644 --- a/renku/command/options.py +++ b/renku/command/options.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
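With the move.py hunks above, moving files into a dataset resolves the target by slug: DatasetsProvenance().get_by_slug replaces get_by_name, and move_files takes the to_dataset_slug keyword. A call-site sketch with placeholder paths:

    # Placeholder slug and paths; mirrors the updated lookup and keyword above.
    from renku.core.dataset.dataset import move_files
    from renku.core.dataset.datasets_provenance import DatasetsProvenance

    target_dataset = DatasetsProvenance().get_by_slug("my-dataset", strict=True)
    move_files(files={src_path: dst_path}, to_dataset_slug="my-dataset")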
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/project.py b/renku/command/project.py index d38c9bbb30..663e4cdba0 100644 --- a/renku/command/project.py +++ b/renku/command/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,14 +16,14 @@ """Project management.""" from renku.command.command_builder.command import Command -from renku.core.constant import DATABASE_METADATA_PATH +from renku.core.constant import PROJECT_METADATA_PATH from renku.core.project import edit_project, show_project def edit_project_command(): """Command for editing project metadata.""" command = Command().command(edit_project).lock_project().with_database(write=True) - return command.require_migration().with_commit(commit_only=DATABASE_METADATA_PATH) + return command.require_migration().with_commit(commit_only=PROJECT_METADATA_PATH) def show_project_command(): diff --git a/renku/command/remove.py b/renku/command/remove.py index 7bd0d22778..7fc89f6789 100644 --- a/renku/command/remove.py +++ b/renku/command/remove.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/rerun.py b/renku/command/rerun.py index 6b06a7ff1e..3ef65fccc6 100644 --- a/renku/command/rerun.py +++ b/renku/command/rerun.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/rollback.py b/renku/command/rollback.py index fd067a4739..5cf223a978 100644 --- a/renku/command/rollback.py +++ b/renku/command/rollback.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/run.py b/renku/command/run.py index ed258eefa8..323f916cb0 100644 --- a/renku/command/run.py +++ b/renku/command/run.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
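The switch from DATABASE_METADATA_PATH to PROJECT_METADATA_PATH in renku/command/project.py above widens the commit filter of the project edit command. As the constants added to renku/core/constant.py later in this diff show, project edits now also commit files under the new images directory:

    # PROJECT_METADATA_PATH as defined later in this diff in renku/core/constant.py.
    from pathlib import Path

    from renku.core.constant import DATABASE_PATH, IMAGES, RENKU_HOME

    PROJECT_METADATA_PATH = [
        Path(RENKU_HOME) / DATABASE_PATH,  # .renku/metadata
        Path(RENKU_HOME) / IMAGES,         # .renku/images
    ]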
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/schema/dataset.py b/renku/command/schema/dataset.py index 6987f58b14..e82e55bc7d 100644 --- a/renku/command/schema/dataset.py +++ b/renku/command/schema/dataset.py @@ -21,7 +21,8 @@ from renku.command.schema.annotation import AnnotationSchema from renku.command.schema.calamus import DateTimeList, JsonLDSchema, Nested, Uri, fields, oa, prov, renku, schema from renku.command.schema.entity import CollectionSchema, EntitySchema -from renku.domain_model.dataset import Dataset, DatasetFile, DatasetTag, ImageObject, Language, RemoteEntity, Url +from renku.command.schema.image import ImageObjectSchema +from renku.domain_model.dataset import Dataset, DatasetFile, DatasetTag, Language, RemoteEntity, Url def dump_dataset_as_jsonld(dataset: Dataset) -> dict: @@ -104,21 +105,6 @@ class Meta: name = fields.String(schema.name) -class ImageObjectSchema(JsonLDSchema): - """ImageObject schema.""" - - class Meta: - """Meta class.""" - - rdf_type = schema.ImageObject - model = ImageObject - unknown = EXCLUDE - - content_url = fields.String(schema.contentUrl) - id = fields.Id(load_default=None) - position = fields.Integer(schema.position) - - class RemoteEntitySchema(JsonLDSchema): """RemoteEntity schema.""" @@ -202,11 +188,11 @@ class Meta: in_language = Nested(schema.inLanguage, LanguageSchema, load_default=None) keywords = fields.List(schema.keywords, fields.String(), load_default=None) license = Uri(schema.license, load_default=None) - name = fields.String(renku.slug) + slug = fields.String(renku.slug) initial_identifier = fields.String(renku.originalIdentifier) project_id = fields.IRI(renku.hasDataset, reverse=True) same_as = Nested(schema.sameAs, UrlSchema, load_default=None) - title = fields.String(schema.name) + name = fields.String(schema.name) version = fields.String(schema.version, load_default=None) @pre_dump(pass_many=True) diff --git a/renku/command/schema/image.py b/renku/command/schema/image.py new file mode 100644 index 0000000000..a8f38d9a34 --- /dev/null +++ b/renku/command/schema/image.py @@ -0,0 +1,36 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Image JSON-LD schema.""" + +from marshmallow import EXCLUDE + +from renku.command.schema.calamus import JsonLDSchema, fields, schema +from renku.domain_model.image import ImageObject + + +class ImageObjectSchema(JsonLDSchema): + """ImageObject schema.""" + + class Meta: + """Meta class.""" + + rdf_type = schema.ImageObject + model = ImageObject + unknown = EXCLUDE + + content_url = fields.String(schema.contentUrl) + id = fields.Id(load_default=None) + position = fields.Integer(schema.position) diff --git a/renku/command/schema/project.py b/renku/command/schema/project.py index e7833e8a93..4d34cf9838 100644 --- a/renku/command/schema/project.py +++ b/renku/command/schema/project.py @@ -20,6 +20,7 @@ from renku.command.schema.agent import PersonSchema from renku.command.schema.annotation import AnnotationSchema from renku.command.schema.calamus import DateTimeList, JsonLDSchema, Nested, StringList, fields, oa, prov, renku, schema +from renku.command.schema.image import ImageObjectSchema from renku.domain_model.project import Project @@ -39,6 +40,7 @@ class Meta: date_created = DateTimeList(schema.dateCreated, load_default=None, format="iso", extra_formats=("%Y-%m-%d",)) description = fields.String(schema.description, load_default=None) id = fields.Id(load_default=None) + image = fields.Nested(schema.image, ImageObjectSchema, load_default=None) immutable_template_files = fields.List( renku.immutableTemplateFiles, fields.String(), diff --git a/renku/command/session.py b/renku/command/session.py index 12bd063c9a..62a0d1cd4f 100644 --- a/renku/command/session.py +++ b/renku/command/session.py @@ -17,10 +17,13 @@ from renku.command.command_builder.command import Command from renku.core.session.session import ( + search_hibernating_session_providers, search_session_providers, search_sessions, session_list, session_open, + session_pause, + session_resume, session_start, session_stop, ssh_setup, @@ -37,6 +40,11 @@ def search_session_providers_command(): return Command().command(search_session_providers).require_migration().with_database(write=False) +def search_hibernating_session_providers_command(): + """Get all the session provider names that support hibernation and match a pattern.""" + return Command().command(search_hibernating_session_providers).require_migration().with_database(write=False) + + def session_list_command(): """List all the running interactive sessions.""" return Command().command(session_list).with_database(write=False) @@ -49,14 +57,24 @@ def session_start_command(): def session_stop_command(): """Stop a running an interactive session.""" - return Command().command(session_stop) + return Command().command(session_stop).with_database(write=False) def session_open_command(): """Open a running interactive session.""" - return Command().command(session_open) + return Command().command(session_open).with_database(write=False) def ssh_setup_command(): """Setup SSH keys for SSH connections to sessions.""" return Command().command(ssh_setup) + + +def session_pause_command(): + """Pause a running interactive session.""" + return Command().command(session_pause).with_database(write=False) + + +def session_resume_command(): + """Resume a paused session.""" + return Command().command(session_resume).with_database(write=False) diff --git a/renku/command/status.py b/renku/command/status.py index d92698fbaf..6e01a24879 100644 --- a/renku/command/status.py +++ b/renku/command/status.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between 
École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/storage.py b/renku/command/storage.py index 3522ccf877..575153c586 100644 --- a/renku/command/storage.py +++ b/renku/command/storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/template.py b/renku/command/template.py index 908dc7ab43..b013a58534 100644 --- a/renku/command/template.py +++ b/renku/command/template.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/update.py b/renku/command/update.py index 3201d5f617..4ac3196d9a 100644 --- a/renku/command/update.py +++ b/renku/command/update.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/util.py b/renku/command/util.py index 5575a882d6..b61775b4d9 100644 --- a/renku/command/util.py +++ b/renku/command/util.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/command/view_model/dataset.py b/renku/command/view_model/dataset.py index 25c7ed3a53..1c1b33cd51 100644 --- a/renku/command/view_model/dataset.py +++ b/renku/command/view_model/dataset.py @@ -23,14 +23,14 @@ class DatasetViewModel: """A view model for a ``Dataset``.""" - def __init__(self, name: str, same_as: Optional[str]): - self.name: str = name + def __init__(self, slug: str, same_as: Optional[str]): + self.slug: str = slug self.same_as: Optional[str] = same_as @classmethod def from_dataset(cls, dataset: Dataset) -> "DatasetViewModel": """Create view model from ``Dataset``.""" - return cls(name=dataset.name, same_as=dataset.same_as.value if dataset.same_as else None) + return cls(slug=dataset.slug, same_as=dataset.same_as.value if dataset.same_as else None) class DatasetFileViewModel: diff --git a/renku/command/view_model/log.py b/renku/command/view_model/log.py index c9d2f90c83..488a45979c 100644 --- a/renku/command/view_model/log.py +++ b/renku/command/view_model/log.py @@ -90,7 +90,7 @@ class DatasetChangeDetailsViewModel: modified: bool = False files_added: Optional[List[str]] = None files_removed: Optional[List[str]] = None - title_changed: Optional[str] = None + name_changed: Optional[str] = None description_changed: Optional[str] = None creators_added: Optional[List[str]] = None creators_removed: Optional[List[str]] = None @@ -181,7 +181,7 @@ def from_dataset(cls, dataset: "Dataset") -> "DatasetLogViewModel": dataset_gateway = inject.instance(IDatasetGateway) - descriptions = [f"Dataset '{dataset.name}': "] + descriptions = [f"Dataset '{dataset.slug}': "] details = DatasetChangeDetailsViewModel() if not dataset.derived_from and not dataset.same_as: @@ -231,8 +231,8 @@ def from_dataset(cls, dataset: "Dataset") -> "DatasetLogViewModel": if not previous_dataset: # NOTE: Check metadata changes on create/import - if dataset.title: - details.title_changed = dataset.title + if dataset.name: + details.name_changed = dataset.name if dataset.description: details.description_changed = dataset.description @@ -248,8 +248,8 @@ def from_dataset(cls, dataset: "Dataset") -> "DatasetLogViewModel": elif not details.deleted: # NOTE: Check metadata changes to previous dataset modified = False - if dataset.title != previous_dataset.title: - details.title_changed = dataset.title + if dataset.name != previous_dataset.name: + details.name_changed = dataset.name modified = True if dataset.description != previous_dataset.description: details.description_changed = dataset.description @@ -286,7 +286,7 @@ def from_dataset(cls, dataset: "Dataset") -> "DatasetLogViewModel": descriptions.append("metadata modified") return DatasetLogViewModel( - id=dataset.name, + id=dataset.slug, date=dataset.date_removed if dataset.date_removed else ( diff --git a/renku/command/workflow.py b/renku/command/workflow.py index c5d85936ee..30d57f480f 100644 --- a/renku/command/workflow.py +++ b/renku/command/workflow.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/constant.py b/renku/core/constant.py index bde8581c07..3cd4b634bc 100644 --- a/renku/core/constant.py +++ b/renku/core/constant.py @@ -19,6 +19,9 @@ from enum import IntEnum from pathlib import Path +FILESYSTEM_ROOT = os.path.abspath(os.sep) +"""Path to the root of the filesystem.""" + APP_NAME = "Renku" """Application name for storing configuration.""" @@ -41,6 +44,9 @@ DATASET_IMAGES = "dataset_images" """Directory for dataset images.""" +IMAGES = "images" +"""Path for images/icons.""" + DEFAULT_DATA_DIR = "data" DOCKERFILE = "Dockerfile" @@ -79,6 +85,11 @@ Path(RENKU_HOME) / DATABASE_PATH, ] +PROJECT_METADATA_PATH = [ + Path(RENKU_HOME) / DATABASE_PATH, + Path(RENKU_HOME) / IMAGES, +] + DATASET_METADATA_PATHS = [ Path(RENKU_HOME) / DATABASE_PATH, Path(RENKU_HOME) / DATASET_IMAGES, diff --git a/renku/core/dataset/context.py b/renku/core/dataset/context.py index f7db5d7ba0..4909724743 100644 --- a/renku/core/dataset/context.py +++ b/renku/core/dataset/context.py @@ -31,14 +31,14 @@ class DatasetContext: def __init__( self, - name: str, + slug: str, create: Optional[bool] = False, commit_database: Optional[bool] = False, creator: Optional[Person] = None, datadir: Optional[Path] = None, storage: Optional[str] = None, ) -> None: - self.name = name + self.slug = slug self.create = create self.commit_database = commit_database self.creator = creator @@ -49,17 +49,17 @@ def __init__( def __enter__(self): """Enter context.""" - self.dataset = self.dataset_provenance.get_by_name(name=self.name) + self.dataset = self.dataset_provenance.get_by_slug(slug=self.slug) if self.dataset is None: if not self.create: - raise errors.DatasetNotFound(name=self.name) + raise errors.DatasetNotFound(slug=self.slug) # NOTE: Don't update provenance when creating here because it will be updated later self.dataset = create_dataset( - name=self.name, update_provenance=False, datadir=self.datadir, storage=self.storage + slug=self.slug, update_provenance=False, datadir=self.datadir, storage=self.storage ) elif self.create: - raise errors.DatasetExistsError(self.name) + raise errors.DatasetExistsError(self.slug) return self.dataset diff --git a/renku/core/dataset/dataset.py b/renku/core/dataset/dataset.py index c8456ee8e8..fc7fefc2de 100644 --- a/renku/core/dataset/dataset.py +++ b/renku/core/dataset/dataset.py @@ -15,6 +15,7 @@ # limitations under the License. 
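DatasetContext above is likewise re-keyed to slugs, raising DatasetNotFound or DatasetExistsError based on the create flag. A usage sketch with a placeholder slug:

    # Create a dataset by slug; with create=True this raises DatasetExistsError
    # if the slug is already taken, per the __enter__ logic above.
    from renku.core.dataset.context import DatasetContext

    with DatasetContext(slug="my-dataset", create=True) as dataset:
        print(dataset.slug)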
"""Dataset business logic.""" +import imghdr import os import shutil import urllib @@ -35,8 +36,8 @@ from renku.core.dataset.providers.factory import ProviderFactory from renku.core.dataset.providers.git import GitProvider from renku.core.dataset.providers.models import DatasetUpdateAction, ProviderDataset -from renku.core.dataset.request_model import ImageRequestModel from renku.core.dataset.tag import get_dataset_by_tag, prompt_access_token, prompt_tag_selection +from renku.core.image import ImageObjectRequest from renku.core.interface.dataset_gateway import IDatasetGateway from renku.core.storage import check_external_storage, track_paths_in_storage from renku.core.util import communication @@ -50,6 +51,7 @@ get_absolute_path, get_file_size, get_files, + get_relative_path, get_safe_relative_path, hash_file, is_path_empty, @@ -60,7 +62,7 @@ from renku.core.util.urls import get_slug from renku.core.util.util import parallel_execute from renku.domain_model.constant import NO_VALUE, NON_EXISTING_ENTITY_CHECKSUM, NoValueType -from renku.domain_model.dataset import Dataset, DatasetDetailsJson, DatasetFile, RemoteEntity, is_dataset_name_valid +from renku.domain_model.dataset import Dataset, DatasetDetailsJson, DatasetFile, RemoteEntity, is_dataset_slug_valid from renku.domain_model.entity import Entity from renku.domain_model.enums import ConfigFilter from renku.domain_model.project_context import project_context @@ -73,17 +75,17 @@ @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def search_datasets(name: str) -> List[str]: - """Get all the datasets whose name starts with the given string. +def search_datasets(slug: str) -> List[str]: + """Get all the datasets whose slug starts with the given string. Args: - name(str): Beginning of dataset name to search for. + slug(str): Beginning of dataset slug to search for. Returns: - List[str]: List of found dataset names. + List[str]: List of found dataset slugs. """ datasets_provenance = DatasetsProvenance() - return list(filter(lambda x: x.startswith(name), map(lambda x: x.name, datasets_provenance.datasets))) + return list(filter(lambda x: x.startswith(slug), map(lambda x: x.slug, datasets_provenance.datasets))) def list_datasets(): @@ -104,12 +106,12 @@ def list_datasets(): @validate_arguments(config=dict(arbitrary_types_allowed=True)) def create_dataset( - name: str, - title: Optional[str] = None, + slug: str, + name: Optional[str] = None, description: Optional[str] = None, creators: Optional[List[Person]] = None, keywords: Optional[List[str]] = None, - images: Optional[List[ImageRequestModel]] = None, + images: Optional[List[ImageObjectRequest]] = None, update_provenance: bool = True, custom_metadata: Optional[Dict[str, Any]] = None, storage: Optional[str] = None, @@ -118,12 +120,12 @@ def create_dataset( """Create a dataset. Args: - name(str): Name of the dataset - title(Optional[str], optional): Dataset title (Default value = None). + slug(str): Slug of the dataset + name(Optional[str], optional): Dataset name (Default value = None). description(Optional[str], optional): Dataset description (Default value = None). creators(Optional[List[Person]], optional): Dataset creators (Default value = None). keywords(Optional[List[str]], optional): Dataset keywords (Default value = None). - images(Optional[List[ImageRequestModel]], optional): Dataset images (Default value = None). + images(Optional[List[ImageObjectRequest]], optional): Dataset images (Default value = None). 
update_provenance(bool, optional): Whether to add this dataset to dataset provenance (Default value = True). custom_metadata(Optional[Dict[str, Any]], optional): Custom JSON-LD metadata (Default value = None). @@ -140,17 +142,17 @@ def create_dataset( if user: creators.append(user) - if not is_dataset_name_valid(name): - valid_name = get_slug(name, lowercase=False) - raise errors.ParameterError(f"Dataset name '{name}' is not valid (Hint: '{valid_name}' is valid).") + if not is_dataset_slug_valid(slug): + valid_slug = get_slug(slug, lowercase=False) + raise errors.ParameterError(f"Dataset slug '{slug}' is not valid (Hint: '{valid_slug}' is valid).") datasets_provenance = DatasetsProvenance() - if datasets_provenance.get_by_name(name=name): - raise errors.DatasetExistsError(name) + if datasets_provenance.get_by_slug(slug=slug): + raise errors.DatasetExistsError(slug) - if not title: - title = name + if not name: + name = slug keywords = keywords or [] @@ -166,8 +168,8 @@ def create_dataset( dataset = Dataset( identifier=None, + slug=slug, name=name, - title=title, description=description, creators=creators, keywords=keywords, @@ -194,24 +196,24 @@ def create_dataset( @validate_arguments(config=dict(arbitrary_types_allowed=True)) def edit_dataset( - name: str, - title: Optional[Union[str, NoValueType]], + slug: str, + name: Optional[Union[str, NoValueType]], description: Optional[Union[str, NoValueType]], creators: Optional[Union[List[Person], NoValueType]], keywords: Optional[Union[List[str], NoValueType]] = NO_VALUE, - images: Optional[Union[List[ImageRequestModel], NoValueType]] = NO_VALUE, + images: Optional[Union[List[ImageObjectRequest], NoValueType]] = NO_VALUE, custom_metadata: Optional[Union[Dict, List[Dict], NoValueType]] = NO_VALUE, custom_metadata_source: Optional[Union[str, NoValueType]] = NO_VALUE, ): """Edit dataset metadata. Args: - name(str): Name of the dataset to edit - title(Optional[Union[str, NoValueType]]): New title for the dataset. + slug(str): Slug of the dataset to edit + name(Optional[Union[str, NoValueType]]): New name for the dataset. description(Optional[Union[str, NoValueType]]): New description for the dataset. creators(Optional[Union[List[Person], NoValueType]]): New creators for the dataset. keywords(Optional[Union[List[str], NoValueType]]): New keywords for dataset (Default value = ``NO_VALUE``). - images(Optional[Union[List[ImageRequestModel], NoValueType]]): New images for dataset + images(Optional[Union[List[ImageObjectRequest], NoValueType]]): New images for dataset (Default value = ``NO_VALUE``). custom_metadata(Optional[Union[Dict, List[Dict], NoValueType]]): Custom JSON-LD metadata (Default value = ``NO_VALUE``). @@ -221,21 +223,21 @@ def edit_dataset( Returns: bool: True if updates were performed. 
""" - if isinstance(title, str): - title = title.strip() + if isinstance(name, str): + name = name.strip() - if title is None: - title = "" + if name is None: + name = "" possible_updates = { "creators": creators, "description": description, "keywords": keywords, - "title": title, + "name": name, } dataset_provenance = DatasetsProvenance() - dataset = dataset_provenance.get_by_name(name=name) + dataset = dataset_provenance.get_by_slug(slug=slug) if dataset is None: raise errors.ParameterError("Dataset does not exist.") @@ -243,12 +245,12 @@ def edit_dataset( updated: Dict[str, Any] = {k: v for k, v in possible_updates.items() if v != NO_VALUE} if updated: - dataset.update_metadata(creators=creators, description=description, keywords=keywords, title=title) + dataset.update_metadata(creators=creators, description=description, keywords=keywords, name=name) if images == NO_VALUE: images_updated = False else: - images_updated = set_dataset_images(dataset=dataset, images=cast(Optional[List[ImageRequestModel]], images)) + images_updated = set_dataset_images(dataset=dataset, images=cast(Optional[List[ImageObjectRequest]], images)) if images_updated: updated["images"] = ( @@ -295,11 +297,11 @@ def list_dataset_files( from renku.command.format.dataset_files import get_lfs_tracking_and_file_sizes records = filter_dataset_files( - names=datasets, tag=tag, creators=creators, include=include, exclude=exclude, immutable=True + slugs=datasets, tag=tag, creators=creators, include=include, exclude=exclude, immutable=True ) for record in records: - record.title = record.dataset.title - record.dataset_name = record.dataset.name + record.title = record.dataset.name + record.dataset_slug = record.dataset.slug record.dataset_id = record.dataset.id record.creators_csv = record.dataset.creators_csv record.creators_full_csv = record.dataset.creators_full_csv @@ -315,7 +317,7 @@ def list_dataset_files( @validate_arguments(config=dict(arbitrary_types_allowed=True)) def file_unlink( - name: str, + slug: str, include: Optional[List[str]] = None, exclude: Optional[List[str]] = None, yes: bool = False, @@ -324,7 +326,7 @@ def file_unlink( """Remove matching files from a dataset. Args: - name(str): Dataset name. + slug(str): Dataset slug. include(Optional[List[str]]): Include filter for files (Default value = None). exclude(Optional[List[str]]): Exclude filter for files (Default value = None). yes(bool): Whether to skip user confirmation or not (Default value = False). @@ -341,21 +343,21 @@ def file_unlink( datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(name=name) + dataset = datasets_provenance.get_by_slug(slug=slug) if not dataset: raise errors.ParameterError("Dataset does not exist.") records = [] if not dataset_files: - records = filter_dataset_files(names=[name], include=include, exclude=exclude) + records = filter_dataset_files(slugs=[slug], include=include, exclude=exclude) if not records: raise errors.ParameterError("No records found.") dataset_files = [cast(DatasetFile, r) for r in records] if not yes: prompt_text = ( - f'You are about to remove following from "{name}" dataset.' + f'You are about to remove following from "{slug}" dataset.' + "\n" + "\n".join([str(record.entity.path) for record in dataset_files]) + "\nDo you wish to continue?" @@ -394,23 +396,23 @@ def file_unlink( @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def remove_dataset(name: str): +def remove_dataset(slug: str): """Delete a dataset. Args: - name(str): Name of dataset to delete. 
+ slug(str): Slug of dataset to delete. """ datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(name=name, strict=True) + dataset = datasets_provenance.get_by_slug(slug=slug, strict=True) datasets_provenance.remove(dataset=dataset) @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def export_dataset(name: str, provider_name: str, tag: Optional[str], **kwargs): +def export_dataset(slug: str, provider_name: str, tag: Optional[str], **kwargs): """Export data to 3rd party provider. Args: - name(str): Name of dataset to export. + slug(str): Slug of dataset to export. provider_name(str): Provider to use for export. tag(str): Dataset tag from which to export. """ @@ -421,7 +423,7 @@ def export_dataset(name: str, provider_name: str, tag: Optional[str], **kwargs): # TODO: all these callbacks are ugly, improve in #737 config_key_secret = "access_token" # nosec - dataset: Optional[Dataset] = datasets_provenance.get_by_name(name, strict=True, immutable=True) + dataset: Optional[Dataset] = datasets_provenance.get_by_slug(slug, strict=True, immutable=True) provider = ProviderFactory.get_export_provider(provider_name=provider_name) @@ -432,7 +434,7 @@ def export_dataset(name: str, provider_name: str, tag: Optional[str], **kwargs): selected_tag = next((t for t in tags if t.name == tag), None) if not selected_tag: - raise errors.ParameterError(f"Tag '{tag}' not found for dataset '{name}'") + raise errors.ParameterError(f"Tag '{tag}' not found for dataset '{slug}'") elif tags: selected_tag = prompt_tag_selection(tags) @@ -471,7 +473,7 @@ def export_dataset(name: str, provider_name: str, tag: Optional[str], **kwargs): @validate_arguments(config=dict(arbitrary_types_allowed=True)) def import_dataset( uri: str, - name: Optional[str] = "", + slug: Optional[str] = "", extract: bool = False, yes: bool = False, datadir: Optional[Path] = None, @@ -484,7 +486,7 @@ def import_dataset( Args: uri(str): DOI or URL of dataset to import. - name(str): Name to give imported dataset (Default value = ""). + slug(str): Slug to give to the imported dataset (Default value = ""). extract(bool): Whether to extract compressed dataset data (Default value = False). yes(bool): Whether to skip user confirmation (Default value = False). datadir(Optional[Path]): Dataset's data directory (Default value = None). @@ -555,10 +557,10 @@ def remove_files(dataset): except ValueError as e: raise errors.ParameterError("Datadir must be inside repository.") from e - name = name or provider_dataset.name + slug = slug or provider_dataset.slug new_dataset = add_to_dataset( - dataset_name=name, + dataset_slug=slug, urls=[], importer=importer, create=not previous_dataset, @@ -577,7 +579,7 @@ def remove_files(dataset): remove_files(new_dataset) - importer.tag_dataset(name) + importer.tag_dataset(slug) importer.copy_extra_metadata(new_dataset) project_context.database.commit() @@ -586,7 +588,7 @@ def remove_files(dataset): @inject.autoparams() @validate_arguments(config=dict(arbitrary_types_allowed=True)) def update_datasets( - names: List[str], + slugs: List[str], creators: Optional[str], include: Optional[List[str]], exclude: Optional[List[str]], @@ -603,7 +605,7 @@ def update_datasets( """Update dataset files. Args: - names(List[str]): Names of datasets to update. + slugs(List[str]): Slugs of datasets to update. creators(Optional[str]): Creators to filter dataset files by. include(Optional[List[str]]): Include filter for paths to update. exclude(Optional[List[str]]): Exclude filter for paths to update. 
@@ -619,7 +621,7 @@ def update_datasets( """ from renku.core.dataset.providers.renku import RenkuProvider - if not update_all and not names and not include and not exclude and not dry_run: + if not update_all and not slugs and not include and not exclude and not dry_run: raise errors.ParameterError("No update criteria is specified") imported_dataset_updates: List[Dataset] = [] @@ -627,21 +629,21 @@ def update_datasets( all_datasets = dataset_gateway.get_all_active_datasets() imported_datasets = [d for d in all_datasets if d.same_as] - if names and update_all: - raise errors.ParameterError("Cannot pass dataset names when updating all datasets") + if slugs and update_all: + raise errors.ParameterError("Cannot pass dataset slugs when updating all datasets") elif (include or exclude) and update_all: raise errors.ParameterError("Cannot specify include and exclude filters when updating all datasets") - elif (include or exclude) and names and any(d for d in imported_datasets if d.name in names): + elif (include or exclude) and slugs and any(d for d in imported_datasets if d.slug in slugs): raise errors.IncompatibleParametersError(first_param="--include/--exclude", second_param="imported datasets") - names = names or [d.name for d in all_datasets] + slugs = slugs or [d.slug for d in all_datasets] # NOTE: update imported datasets if not include and not exclude and not no_remote: must_match_records = False for dataset in imported_datasets: - if dataset.name not in names: + if dataset.slug not in slugs: continue uri = dataset.same_as.value # type: ignore @@ -658,13 +660,13 @@ def update_datasets( # NOTE: Do not update Renku dataset that are imported from a specific version if tag is not None and tag.dataset_id.value == dataset.id: communication.echo( - f"Skipped updating imported Renku dataset '{dataset.name}' with tag '{tag.name}'" + f"Skipped updating imported Renku dataset '{dataset.slug}' with tag '{tag.name}'" ) - names.remove(dataset.name) + slugs.remove(dataset.slug) continue if record.is_latest_version() and record.is_version_equal_to(dataset): - names.remove(dataset.name) + slugs.remove(dataset.slug) continue if not dry_run: @@ -682,28 +684,28 @@ def update_datasets( break import_dataset( - uri=uri, name=dataset.name, extract=extract, yes=True, previous_dataset=dataset, delete=delete + uri=uri, slug=dataset.slug, extract=extract, yes=True, previous_dataset=dataset, delete=delete ) - communication.echo(f"Updated dataset '{dataset.name}' from remote provider") + communication.echo(f"Updated dataset '{dataset.slug}' from remote provider") - names.remove(dataset.name) + slugs.remove(dataset.slug) imported_dataset_updates.append(dataset) else: must_match_records = True imported_dataset_updates_view_models = [DatasetViewModel.from_dataset(d) for d in imported_dataset_updates] - if not names: + if not slugs: return imported_dataset_updates_view_models, [] # NOTE: Exclude all imported dataset from individual file filter records = filter_dataset_files( - names=names, + slugs=slugs, creators=creators, include=include, exclude=exclude, - ignore=[d.name for d in imported_datasets], + ignore=[d.slug for d in imported_datasets], check_data_directory=check_data_directory, ) @@ -731,7 +733,7 @@ def update_datasets( ProviderFactory.get_add_provider(uri), ) except errors.DatasetProviderNotFound: - communication.warn(f"Couldn't find provider for file {file.path} in dataset {file.dataset.name}") + communication.warn(f"Couldn't find provider for file {file.path} in dataset {file.dataset.slug}") continue 
provider_files[file.provider].append(file) @@ -799,18 +801,18 @@ @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def show_dataset(name: str, tag: Optional[str] = None): +def show_dataset(slug: str, tag: Optional[str] = None): """Show detailed dataset information. Args: - name(str): Name of dataset to show details for. + slug(str): Slug of dataset to show details for. tag(str, optional): Tags for which to get the metadata (Default value = None). Returns: dict: JSON dictionary of dataset details. """ datasets_provenance = DatasetsProvenance() - dataset: Optional[Dataset] = datasets_provenance.get_by_name(name, strict=True) + dataset: Optional[Dataset] = datasets_provenance.get_by_slug(slug, strict=True) if tag is None: return DatasetDetailsJson().dump(dataset) @@ -855,12 +857,12 @@ def add_datadir_files_to_dataset(dataset: Dataset) -> None: dataset.add_or_update_files(dataset_files) -def set_dataset_images(dataset: Dataset, images: Optional[List[ImageRequestModel]]): +def set_dataset_images(dataset: Dataset, images: Optional[List[ImageObjectRequest]]): """Set a dataset's images. Args: dataset(Dataset): The dataset to set images on. - images(List[ImageRequestModel]): The images to set. + images(List[ImageObjectRequest]): The images to set. Returns: True if images were set/modified. @@ -875,10 +877,30 @@ dataset.images = [] images_updated = False for img in images: - img_object = img.to_image_object(dataset) + image_folder = project_context.dataset_images_path / dataset.initial_identifier + try: + img_object = img.to_image_object(owner_id=dataset.id) + except errors.ImageError as e: + raise errors.DatasetImageError(e) from e + + path = img_object.content_url + + if not img_object.is_remote: + # NOTE: only copy dataset image if it's not in .renku/datasets//images/ already + if not path.startswith(str(image_folder)): + image_type = imghdr.what(path) + if image_type: + ext = f".{image_type}" + else: + _, ext = os.path.splitext(path) + target_image_path: Union[Path, str] = image_folder / f"{img_object.position}{ext}" - if not img_object: - continue + image_folder.mkdir(parents=True, exist_ok=True) + shutil.copy(path, target_image_path) + else: + target_image_path = path + + img_object.content_url = get_relative_path(target_image_path, base=project_context.path) # type: ignore if any(i.position == img_object.position for i in dataset.images): raise errors.DatasetImageError(f"Duplicate dataset image specified for position {img_object.position}") @@ -927,20 +949,20 @@ def update_dataset_custom_metadata( @inject.autoparams("dataset_gateway") -def move_files(dataset_gateway: IDatasetGateway, files: Dict[Path, Path], to_dataset_name: Optional[str] = None): +def move_files(dataset_gateway: IDatasetGateway, files: Dict[Path, Path], to_dataset_slug: Optional[str] = None): """Move files and their metadata from one or more datasets to a target dataset. Args: dataset_gateway(IDatasetGateway):Injected dataset gateway.
files(Dict[Path, Path]): Files to move - to_dataset_name(Optional[str], optional): Target dataset (Default value = None) + to_dataset_slug(Optional[str], optional): Target dataset (Default value = None) """ datasets = [d.copy() for d in dataset_gateway.get_all_active_datasets()] to_dataset: Optional[Dataset] = None - if to_dataset_name: + if to_dataset_slug: # NOTE: Use the same dataset object or otherwise a race happens if dataset is in both source and destination - to_dataset = next(d for d in datasets if d.name == to_dataset_name) + to_dataset = next(d for d in datasets if d.slug == to_dataset_slug) modified_datasets: Dict[str, Dataset] = {} progress_name = "Updating dataset metadata" @@ -958,7 +980,7 @@ def move_files(dataset_gateway: IDatasetGateway, files: Dict[Path, Path], to_dat for dataset in datasets: removed = dataset.unlink_file(src, missing_ok=True) if removed: - modified_datasets[dataset.name] = dataset + modified_datasets[dataset.slug] = dataset new_dataset_file.based_on = removed.based_on new_dataset_file.source = removed.source @@ -972,7 +994,7 @@ def move_files(dataset_gateway: IDatasetGateway, files: Dict[Path, Path], to_dat modified = dataset.find_file(dst) added = is_subpath(project_context.path / dst, project_context.path / dataset.get_datadir()) if modified or added: - modified_datasets[dataset.name] = dataset + modified_datasets[dataset.slug] = dataset dataset.add_or_update_files(new_dataset_file) if to_dataset: @@ -998,14 +1020,14 @@ def _update_datasets_files_metadata(updated_files: List[DynamicProxy], deleted_f new_file = DatasetFile.from_path( path=file.entity.path, based_on=file.based_on, source=file.source, checksum=checksums.get(file.entity.path) ) - modified_datasets[file.dataset.name] = ( + modified_datasets[file.dataset.slug] = ( file.dataset._subject if isinstance(file.dataset, DynamicProxy) else file.dataset ) file.dataset.add_or_update_files(new_file) if delete: for file in deleted_files: - modified_datasets[file.dataset.name] = ( + modified_datasets[file.dataset.slug] = ( file.dataset._subject if isinstance(file.dataset, DynamicProxy) else file.dataset ) file.dataset.unlink_file(file.entity.path) @@ -1045,7 +1067,7 @@ def update_linked_files(records: List[DynamicProxy], dry_run: bool) -> List[Dyna @inject.autoparams("dataset_gateway") def filter_dataset_files( dataset_gateway: IDatasetGateway, - names: Optional[List[str]] = None, + slugs: Optional[List[str]] = None, tag: Optional[str] = None, creators: Optional[Union[str, List[str], Tuple[str]]] = None, include: Optional[List[str]] = None, @@ -1058,7 +1080,7 @@ def filter_dataset_files( Args: dataset_gateway(IDatasetGateway):Injected dataset gateway. - names(Optional[List[str]]): Filter by specified dataset names (Default value = None). + slugs(Optional[List[str]]): Filter by specified dataset slugs (Default value = None). tag(Optional[str]): Filter by specified tag (Default value = None). creators(Optional[Union[str, List[str], Tuple[str]]]): Filter by creators (Default value = None). include(Optional[List[str]]): Tuple containing patterns to which include from result (Default value = None). 
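filter_dataset_files is re-keyed from names to slugs as well; slugs that match no dataset are collected and reported, as the following hunk shows. A call-site sketch with placeholder values:

    # Filter one dataset's files by slug and an include pattern (placeholders),
    # mirroring the updated signature above.
    from renku.core.dataset.dataset import filter_dataset_files

    records = filter_dataset_files(slugs=["my-dataset"], include=["data/*.csv"])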
@@ -1093,13 +1115,13 @@ def should_include(filepath: Path) -> bool: creators_set = set(creators) records = [] - unused_names = set(names) if names is not None else set() + unused_slugs = set(slugs) if slugs is not None else set() if ignore: - unused_names = unused_names - set(ignore) + unused_slugs = unused_slugs - set(ignore) for dataset in dataset_gateway.get_all_active_datasets(): - if (names and dataset.name not in names) or (ignore and dataset.name in ignore): + if (slugs and dataset.slug not in slugs) or (ignore and dataset.slug in ignore): continue if tag: @@ -1110,8 +1132,8 @@ def should_include(filepath: Path) -> bool: if not immutable: dataset = dataset.copy() - if unused_names: - unused_names.remove(dataset.name) + if unused_slugs: + unused_slugs.remove(dataset.slug) if creators_set: dataset_creators = {creator.name for creator in dataset.creators} @@ -1139,9 +1161,9 @@ def should_include(filepath: Path) -> bool: record.dataset = dataset records.append(record) - if unused_names: - unused_names_str = ", ".join(unused_names) - raise errors.ParameterError(f"These datasets don't exist: {unused_names_str}") + if unused_slugs: + unused_slugs_str = ", ".join(unused_slugs) + raise errors.ParameterError(f"These datasets don't exist: {unused_slugs_str}") return sorted(records, key=lambda r: r.date_added) @@ -1200,14 +1222,14 @@ def download_file(file: DatasetFile, storage: "IStorage") -> List[DatasetFile]: @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def pull_cloud_storage(name: str, location: Optional[Path] = None) -> None: +def pull_cloud_storage(slug: str, location: Optional[Path] = None) -> None: """Pull/copy data for a cloud storage to a dataset's data directory or a specified location. Args: - name(str): Name of the dataset + slug(str): Slug of the dataset location(Optional[Path]): A directory to copy data to (Default value = None). """ - dataset, datadir = _get_dataset_with_cloud_storage(name=name) + dataset, datadir = _get_dataset_with_cloud_storage(slug=slug) # NOTE: Try to unmount the path in case it was mounted before unmount_path(datadir) @@ -1235,7 +1257,7 @@ def pull_cloud_storage(name: str, location: Optional[Path] = None) -> None: def store_dataset_data_location(dataset: Dataset, location: Optional[Path]) -> None: """Store data location for a dataset in the config file.""" section = "dataset-locations" - key = dataset.name + key = dataset.slug if not location: remove_value(section=section, key=key) @@ -1245,19 +1267,19 @@ def store_dataset_data_location(dataset: Dataset, location: Optional[Path]) -> N def read_dataset_data_location(dataset: Dataset) -> Optional[str]: """Read data location for a dataset in the config file.""" - return get_value(section="dataset-locations", key=dataset.name, config_filter=ConfigFilter.LOCAL_ONLY) + return get_value(section="dataset-locations", key=dataset.slug, config_filter=ConfigFilter.LOCAL_ONLY) @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def mount_cloud_storage(name: str, existing: Optional[Path], yes: bool) -> None: +def mount_cloud_storage(slug: str, existing: Optional[Path], yes: bool) -> None: """Mount a cloud storage to a dataset's data directory. Args: - name(str): Name of the dataset + slug(str): Slug of the dataset existing(Optional[Path]): An existing mount point to use instead of actually mounting the backend storage. yes(bool): Don't prompt when removing non-empty dataset's data directory. 
""" - dataset, datadir = _get_dataset_with_cloud_storage(name=name) + dataset, datadir = _get_dataset_with_cloud_storage(slug=slug) # NOTE: Try to unmount the path in case it was mounted before unmount_path(datadir) @@ -1286,23 +1308,23 @@ def mount_cloud_storage(name: str, existing: Optional[Path], yes: bool) -> None: @validate_arguments(config=dict(arbitrary_types_allowed=True)) -def unmount_cloud_storage(name: str) -> None: +def unmount_cloud_storage(slug: str) -> None: """Mount a cloud storage to a dataset's data directory. Args: - name(str): Name of the dataset + slug(str): Slug of the dataset """ - _, datadir = _get_dataset_with_cloud_storage(name=name) + _, datadir = _get_dataset_with_cloud_storage(slug=slug) unmount_path(datadir) -def _get_dataset_with_cloud_storage(name: str) -> Tuple[Dataset, Path]: +def _get_dataset_with_cloud_storage(slug: str) -> Tuple[Dataset, Path]: datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(name=name, strict=True) + dataset = datasets_provenance.get_by_slug(slug=slug, strict=True) if not dataset.storage: - raise errors.ParameterError(f"Dataset '{name}' doesn't have a storage backend") + raise errors.ParameterError(f"Dataset '{slug}' doesn't have a storage backend") datadir = project_context.path / dataset.get_datadir() diff --git a/renku/core/dataset/dataset_add.py b/renku/core/dataset/dataset_add.py index 34328aaa7a..46985a1eaf 100644 --- a/renku/core/dataset/dataset_add.py +++ b/renku/core/dataset/dataset_add.py @@ -45,7 +45,7 @@ def add_to_dataset( - dataset_name: str, + dataset_slug: str, urls: List[str], *, importer: Optional[ImporterApi] = None, @@ -69,7 +69,7 @@ def add_to_dataset( raise errors.ParameterError("Storage can be set only when creating a dataset") try: - with DatasetContext(name=dataset_name, create=create, datadir=datadir, storage=storage) as dataset: + with DatasetContext(slug=dataset_slug, create=create, datadir=datadir, storage=storage) as dataset: destination_path = _create_destination_directory(dataset, destination) check_external_storage() @@ -115,7 +115,7 @@ def add_to_dataset( raise errors.DatasetNotFound( message="Dataset '{0}' does not exist.\n" "Use 'renku dataset create {0}' to create the dataset or retry 'renku dataset add {0}' command " - "with '--create' option for automatic dataset creation.".format(dataset_name) + "with '--create' option for automatic dataset creation.".format(dataset_slug) ) except (FileNotFoundError, errors.GitCommandError) as e: raise errors.ParameterError("Could not find paths/URLs: \n{}".format("\n".join(urls))) from e @@ -189,7 +189,7 @@ def get_files_metadata( @inject.autoparams("dataset_gateway") def has_cloud_storage(dataset_gateway: IDatasetGateway) -> bool: """Return if a project has any dataset with cloud storage with its data directory mounted or pulled.""" - # NOTE: ``exists`` return False for symlinks if their target doesn't exists, but it's fine here since it means the + # NOTE: ``exists`` return False for symlinks if their target doesn't exist, but it's fine here since it means the # dataset's mounted/pulled location doesn't exist. return any( dataset @@ -221,8 +221,8 @@ def get_cloud_dataset_from_path( datadir = project_context.path / dataset.get_datadir() resolved_path = path.resolve() - # NOTE: Resolve ``path`` because ``datadir`` is resolved and resolved paths might have be on a different - # location (e.g. 
on macos /tmp resolves to /private/tmp) + # NOTE: Resolve ``path`` because ``datadir`` is resolved and resolved paths might be in a different + # location (e.g. on macOS /tmp resolves to /private/tmp) resolved_relative_path = get_relative_path(resolved_path, base=datadir.resolve()) if is_subpath(path, base=datadir) or resolved_relative_path is not None: diff --git a/renku/core/dataset/datasets_provenance.py b/renku/core/dataset/datasets_provenance.py index 2402e804e2..c9a12ebdf8 100644 --- a/renku/core/dataset/datasets_provenance.py +++ b/renku/core/dataset/datasets_provenance.py @@ -56,31 +56,31 @@ def get_by_id(self, id: str, immutable: bool = False) -> Optional["Dataset"]: return None @overload - def get_by_name(self, name: str, *, immutable: bool = False, strict: Literal[False] = False) -> Optional["Dataset"]: + def get_by_slug(self, slug: str, *, immutable: bool = False, strict: Literal[False] = False) -> Optional["Dataset"]: ... @overload - def get_by_name(self, name: str, *, immutable: bool = False, strict: Literal[True]) -> "Dataset": + def get_by_slug(self, slug: str, *, immutable: bool = False, strict: Literal[True]) -> "Dataset": ... - def get_by_name( - self, name: str, immutable: bool = False, strict: bool = False + def get_by_slug( + self, slug: str, immutable: bool = False, strict: bool = False ) -> Union[Optional["Dataset"], "Dataset"]: - """Return a dataset by its name. + """Return a dataset by its slug. Args: - name(str): Name of the dataset + slug(str): Slug of the dataset immutable(bool): Whether the dataset will be used as an immutable instance or will be modified (Default value = False). strict(bool): Whether to raise an exception if the dataset doesn't exist or not (Default value = False) Returns: - Optional[Dataset]: Dataset with the specified name if exists. + Optional[Dataset]: Dataset with the specified slug if it exists. """ - dataset = self.dataset_gateway.get_by_name(name) + dataset = self.dataset_gateway.get_by_slug(slug) if not dataset: if strict: - raise errors.DatasetNotFound(name=name) + raise errors.DatasetNotFound(slug=slug) return None if not dataset.immutable or immutable: @@ -108,13 +108,13 @@ def add_or_update(self, dataset: "Dataset", date: Optional[datetime] = None, cre assert isinstance(dataset, Dataset) - # NOTE: Dataset's name never changes, so, we use it to detect if a dataset should be mutated. - current_dataset = self.get_by_name(dataset.name) + # NOTE: Dataset's slug never changes, so we use it to detect if a dataset should be mutated. + current_dataset = self.get_by_slug(dataset.slug) if current_dataset: assert ( not current_dataset.is_removed() - ), f"Adding/Updating a removed dataset '{dataset.name}:{dataset.identifier}'" + ), f"Adding/Updating a removed dataset '{dataset.slug}:{dataset.identifier}'" dataset.update_files_from(current_dataset, date=date) @@ -123,7 +123,7 @@ else: assert ( dataset.derived_from is None - ), f"Parent dataset {dataset.derived_from} not found for '{dataset.name}:{dataset.identifier}'" + ), f"Parent dataset {dataset.derived_from} not found for '{dataset.slug}:{dataset.identifier}'" self.dataset_gateway.add_or_remove(dataset) @@ -133,18 +133,18 @@ def remove(self, dataset, date: Optional[datetime] = None, creator: Optional["Pe assert isinstance(dataset, Dataset) - # NOTE: Dataset's name never changes, so, we use it to detect if a dataset should be mutated.
- current_dataset = self.dataset_gateway.get_by_name(dataset.name) + # NOTE: Dataset's slug never changes, so we use it to detect if a dataset should be mutated. + current_dataset = self.dataset_gateway.get_by_slug(dataset.slug) if current_dataset: - assert not current_dataset.is_removed(), f"Removing a removed dataset '{dataset.name}:{dataset.identifier}'" + assert not current_dataset.is_removed(), f"Removing a removed dataset '{dataset.slug}:{dataset.identifier}'" # NOTE: We always assign a new identifier to make sure an old identifier is not reused dataset.derive_from(current_dataset, creator=creator) else: assert ( dataset.derived_from is None - ), f"Parent dataset {dataset.derived_from} not found for '{dataset.name}:{dataset.identifier}'" + ), f"Parent dataset {dataset.derived_from} not found for '{dataset.slug}:{dataset.identifier}'" dataset.remove(date) self.dataset_gateway.add_or_remove(dataset) @@ -171,8 +171,8 @@ def update_dataset(existing, new) -> "Dataset": existing.dataset_files = new.dataset_files return existing - # NOTE: Dataset's name never changes, so, we use it to detect if a dataset should be mutated. - current_dataset = self.get_by_name(dataset.name, immutable=True) + # NOTE: Dataset's slug never changes, so we use it to detect if a dataset should be mutated. + current_dataset = self.get_by_slug(dataset.slug, immutable=True) new_identifier = self._create_dataset_identifier(commit_sha, dataset.identifier) dataset_with_same_id = self.get_by_id(dataset.id, immutable=True) @@ -193,11 +193,11 @@ def update_dataset(existing, new) -> "Dataset": dataset.derive_from(current_dataset, creator=None, identifier=identifier, date_created=date_created) else: if remove: - communication.warn(f"Deleting non-existing dataset '{dataset.name}'") + communication.warn(f"Deleting non-existing dataset '{dataset.slug}'") if dataset.derived_from: communication.warn( - f"Parent dataset {dataset.derived_from} not found for '{dataset.name}:{dataset.identifier}'" + f"Parent dataset {dataset.derived_from} not found for '{dataset.slug}:{dataset.identifier}'" ) dataset.derived_from = None diff --git a/renku/core/dataset/providers/api.py b/renku/core/dataset/providers/api.py index 02ab716315..8e0c657a2a 100644 --- a/renku/core/dataset/providers/api.py +++ b/renku/core/dataset/providers/api.py @@ -169,7 +169,7 @@ def update_files( """Update dataset files from the remote provider.""" from renku.core.dataset.providers.models import DatasetUpdateAction, DatasetUpdateMetadata - progress_text = f"Checking remote files for updates in dataset {files[0].dataset.name}" + progress_text = f"Checking remote files for updates in dataset {files[0].dataset.slug}" results: List[DatasetUpdateMetadata] = [] diff --git a/renku/core/dataset/providers/dataverse.py b/renku/core/dataset/providers/dataverse.py index 22b938d43e..b53f72e0e2 100644 --- a/renku/core/dataset/providers/dataverse.py +++ b/renku/core/dataset/providers/dataverse.py @@ -228,7 +228,7 @@ def fetch_provider_dataset(self) -> "ProviderDataset": from renku.command.schema.agent import PersonSchema from renku.core.dataset.providers.models import ProviderDataset, ProviderDatasetFile, ProviderDatasetSchema - from renku.domain_model.dataset import Url, generate_default_name + from renku.domain_model.dataset import Url, generate_default_slug class DataverseDatasetSchema(ProviderDatasetSchema): """Schema for Dataverse datasets.""" @@ -268,7 +268,7 @@ def fix_timezone(self, obj, **kwargs): files = self.get_files() dataset =
ProviderDataset.from_jsonld(data=self._json, schema_class=DataverseDatasetSchema) dataset.version = self.version - dataset.name = generate_default_name(title=dataset.title or "", version=dataset.version) + dataset.slug = generate_default_slug(name=dataset.name or "", version=dataset.version) dataset.same_as = ( Url(url_str=get_doi_url(dataset.identifier)) if is_doi(dataset.identifier) @@ -391,7 +391,7 @@ def _get_dataset_metadata(self): keywords = self._get_keywords() metadata_template = Template(DATASET_METADATA_TEMPLATE) metadata = metadata_template.substitute( - name=_escape_json_string(self.dataset.title), + name=_escape_json_string(self.dataset.name), authors=json.dumps(authors), contacts=json.dumps(contacts), description=_escape_json_string(self.dataset.description), diff --git a/renku/core/dataset/providers/doi.py b/renku/core/dataset/providers/doi.py index eb3ee19f96..e32138f99b 100644 --- a/renku/core/dataset/providers/doi.py +++ b/renku/core/dataset/providers/doi.py @@ -107,7 +107,7 @@ def __init__( self.issued = issued self.language = language self.publisher = publisher - self.title = title + self.name = title self.type = type self._version = version diff --git a/renku/core/dataset/providers/local.py b/renku/core/dataset/providers/local.py index e537e77958..538b9f8576 100644 --- a/renku/core/dataset/providers/local.py +++ b/renku/core/dataset/providers/local.py @@ -328,7 +328,7 @@ def export(self, **kwargs) -> str: if self._path: dst_root = project_context.path / self._path else: - dataset_dir = f"{self._dataset.name}-{self._tag.name}" if self._tag else self._dataset.name + dataset_dir = f"{self._dataset.slug}-{self._tag.name}" if self._tag else self._dataset.slug dst_root = project_context.path / project_context.datadir / dataset_dir if dst_root.exists() and not dst_root.is_dir(): diff --git a/renku/core/dataset/providers/models.py b/renku/core/dataset/providers/models.py index 4a9f57bf44..055a20f5ed 100644 --- a/renku/core/dataset/providers/models.py +++ b/renku/core/dataset/providers/models.py @@ -143,10 +143,10 @@ def from_dataset(cls, dataset: "Dataset") -> "ProviderDataset": initial_identifier=dataset.initial_identifier, keywords=dataset.keywords, license=dataset.license, - name=dataset.name, + slug=dataset.slug, project_id=dataset.project_id, same_as=dataset.same_as, - title=dataset.title, + name=dataset.name, version=dataset.version, storage=dataset.storage, ) diff --git a/renku/core/dataset/providers/olos.py b/renku/core/dataset/providers/olos.py index 72bd742db0..73242e61e6 100644 --- a/renku/core/dataset/providers/olos.py +++ b/renku/core/dataset/providers/olos.py @@ -128,7 +128,7 @@ def _get_dataset_metadata(self): "description": self.dataset.description, "identifier": identifier, "keywords": self.dataset.keywords, - "title": self.dataset.title, + "title": self.dataset.name, "access": "CLOSED", "dataSensitivity": "CRIMSON", "year": datetime.datetime.today().year, diff --git a/renku/core/dataset/providers/renku.py b/renku/core/dataset/providers/renku.py index 76feb8549c..90398d022f 100644 --- a/renku/core/dataset/providers/renku.py +++ b/renku/core/dataset/providers/renku.py @@ -56,7 +56,7 @@ def __init__(self, uri: str, **_): @staticmethod def supports(uri): - """Whether or not this provider supports a given URI.""" + """Whether this provider supports a given URI.""" parsed_url = urllib.parse.urlparse(uri) if not parsed_url.netloc: @@ -87,13 +87,13 @@ def get_importer(self, tag: Optional[str] = None, gitlab_token: Optional[str] = self._prepare_auth(self.uri) - name, 
identifier, latest_version_uri, kg_url = self._fetch_dataset_info(self.uri) + slug, identifier, latest_version_uri, kg_url = self._fetch_dataset_info(self.uri) project_url_ssh, project_url_http = self._get_project_urls(kg_url) return RenkuImporter( uri=self.uri, - name=name, + slug=slug, identifier=identifier, tag=self._tag, latest_version_uri=latest_version_uri, @@ -107,23 +107,23 @@ def _fetch_dataset_info(self, uri): """Return initial dataset identifier and urls of all projects that contain the dataset.""" parsed_url = urllib.parse.urlparse(uri) - project_id, dataset_name_or_id = RenkuProvider._extract_project_and_dataset_ids(parsed_url) - if not project_id and not dataset_name_or_id: + project_id, dataset_slug_or_id = RenkuProvider._extract_project_and_dataset_ids(parsed_url) + if not project_id and not dataset_slug_or_id: raise errors.ParameterError("Invalid URI", param_hint=uri) - kg_path = f"/knowledge-graph/datasets/{dataset_name_or_id}" + kg_path = f"/knowledge-graph/datasets/{dataset_slug_or_id}" dataset_kg_url = parsed_url._replace(path=kg_path).geturl() try: dataset_info = self._query_knowledge_graph(dataset_kg_url) except errors.NotFound: - # NOTE: If URI is not found we assume that it contains dataset's name instead of its id - dataset_name = dataset_name_or_id + # NOTE: If URI is not found we assume that it contains dataset's slug instead of its id + dataset_slug = dataset_slug_or_id identifier = None dataset_info = None else: # name was renamed to slug, name kept for backwards compatibility - dataset_name = dataset_info.get("slug", dataset_info.get("name")) + dataset_slug = dataset_info.get("slug", dataset_info.get("name")) identifier = dataset_info["identifier"] if project_id: @@ -142,21 +142,21 @@ def _fetch_dataset_info(self, uri): if not project_kg_url: raise errors.ParameterError("Cannot find project's KG URL from URI", param_hint=uri) - latest_identifier, latest_version_uri = self._fetch_dataset_info_from_project(project_kg_url, dataset_name) + latest_identifier, latest_version_uri = self._fetch_dataset_info_from_project(project_kg_url, dataset_slug) identifier = identifier or latest_identifier - return dataset_name, identifier, latest_version_uri, project_kg_url + return dataset_slug, identifier, latest_version_uri, project_kg_url - def _fetch_dataset_info_from_project(self, project_kg_url, dataset_name): + def _fetch_dataset_info_from_project(self, project_kg_url, dataset_slug): datasets_kg_url = f"{project_kg_url}/datasets" try: response = self._query_knowledge_graph(datasets_kg_url) except errors.NotFound: raise errors.NotFound(f"Cannot find project in the knowledge graph: {project_kg_url}") - dataset = next((d for d in response if d.get("name") == dataset_name), None) + dataset = next((d for d in response if d.get("slug") == dataset_slug), None) if not dataset: - raise errors.OperationError(f"Cannot fetch dataset with name '{dataset_name}' from '{project_kg_url}'") + raise errors.OperationError(f"Cannot fetch dataset with slug '{dataset_slug}' from '{project_kg_url}'") links = dataset.get("_links", []) latest_version_uri = next((link["href"] for link in links if link["rel"] == "details"), None) @@ -167,12 +167,12 @@ @staticmethod def _extract_project_and_dataset_ids(parsed_url): - # https://<host>/projects/:namespace/:0-or-more-subgroups/:name/datasets/:dataset-name + # https://<host>/projects/:namespace/:0-or-more-subgroups/:name/datasets/:dataset-slug #
https://<host>/projects/:namespace/:0-or-more-subgroups/:name/datasets/:id # https://<host>/datasets/:id match = re.match(r"(?:/projects/((?:[^/]+/)+[^/]+))?/datasets/([^/]+)/?$", parsed_url.path) - project_id, dataset_name_or_id = match.groups() if match else (None, None) - return project_id, dataset_name_or_id + project_id, dataset_slug_or_id = match.groups() if match else (None, None) + return project_id, dataset_slug_or_id def _query_knowledge_graph(self, url): from renku.core.util import requests @@ -223,7 +223,7 @@ class RenkuImporter(ImporterApi): def __init__( self, uri, - name, + slug, identifier, tag, latest_version_uri, @@ -235,7 +235,7 @@ def __init__( """Create a RenkuImporter from a Dataset.""" super().__init__(uri=uri, original_uri=uri) - self._name = name + self._slug = slug self._identifier = identifier self._tag = tag self._latest_version_uri = latest_version_uri @@ -368,19 +368,19 @@ def add_file(src_entity_path: str, content_path: Path, checksum) -> None: return results - def tag_dataset(self, name: str) -> None: - """Create a tag for the dataset ``name`` if the remote dataset has a tag/version.""" + def tag_dataset(self, slug: str) -> None: + """Create a tag for the dataset ``slug`` if the remote dataset has a tag/version.""" from renku.core.dataset.tag import add_dataset_tag if self.provider_dataset.tag: add_dataset_tag( - dataset_name=name, + dataset_slug=slug, tag=self.provider_dataset.tag.name, description=self.provider_dataset.tag.description, ) elif self.provider_dataset.version: add_dataset_tag( - dataset_name=name, + dataset_slug=slug, tag=self.provider_dataset.version, description=f"Tag {self.provider_dataset.version} created by renku import", ) @@ -485,16 +485,16 @@ def _fetch_dataset(self): datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(self._name) + dataset = datasets_provenance.get_by_slug(self._slug) if not dataset: - raise errors.ParameterError(f"Cannot find dataset '{self._name}' in project '{self._project_url}'") + raise errors.ParameterError(f"Cannot find dataset '{self._slug}' in project '{self._project_url}'") if self._tag: tags = datasets_provenance.get_all_tags(dataset=dataset) tag = next((t for t in tags if t.name == self._tag), None) if tag is None: - raise errors.ParameterError(f"Cannot find tag '{self._tag}' for dataset '{self._name}'") + raise errors.ParameterError(f"Cannot find tag '{self._tag}' for dataset '{self._slug}'") dataset = datasets_provenance.get_by_id(tag.dataset_id.value) else: diff --git a/renku/core/dataset/providers/repository.py b/renku/core/dataset/providers/repository.py index f8c7fd5ea9..2b82b6fd4e 100644 --- a/renku/core/dataset/providers/repository.py +++ b/renku/core/dataset/providers/repository.py @@ -44,7 +44,7 @@ def tag_dataset(self, name: str) -> None: if self.provider_dataset.version: add_dataset_tag( - dataset_name=name, + dataset_slug=name, tag=re.sub("[^a-zA-Z0-9.-_]", "_", self.provider_dataset.version), description=f"Tag {self.provider_dataset.version} created by renku import", ) diff --git a/renku/core/dataset/providers/zenodo.py b/renku/core/dataset/providers/zenodo.py index 26daf7452d..5b9d399eda 100644 --- a/renku/core/dataset/providers/zenodo.py +++ b/renku/core/dataset/providers/zenodo.py @@ -170,7 +170,7 @@ def fetch_provider_dataset(self) -> "ProviderDataset": from renku.command.schema.agent import PersonSchema from renku.core.dataset.providers.models import ProviderDataset, ProviderDatasetFile, ProviderDatasetSchema - from renku.domain_model.dataset import Url,
generate_default_name + from renku.domain_model.dataset import Url, generate_default_slug class ZenodoDatasetSchema(ProviderDatasetSchema): """Schema for Zenodo datasets.""" @@ -204,7 +204,7 @@ def fix_data(self, data, **kwargs): files = self.get_files() metadata = self.get_jsonld() dataset = ProviderDataset.from_jsonld(metadata, schema_class=ZenodoDatasetSchema) - dataset.name = generate_default_name(title=dataset.title or "", version=dataset.version) + dataset.slug = generate_default_slug(name=dataset.name or "", version=dataset.version) dataset.same_as = Url(url_id=remove_credentials(self.original_uri)) if is_doi(dataset.identifier): dataset.same_as = Url(url_str=urllib.parse.urljoin("https://doi.org", dataset.identifier)) @@ -473,7 +473,7 @@ def attach_metadata(self, dataset, tag): request_payload = { "metadata": { - "title": dataset.title, + "title": dataset.name, "upload_type": "dataset", "description": dataset.description if dataset.description else None, "creators": [ diff --git a/renku/core/dataset/request_model.py b/renku/core/dataset/request_model.py deleted file mode 100644 index 1b27c8d72b..0000000000 --- a/renku/core/dataset/request_model.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright Swiss Data Science Center (SDSC). A partnership between -# École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-"""Renku management dataset request models.""" - -import imghdr -import os -import shutil -import urllib -from pathlib import Path -from typing import List, Optional, Union, cast -from urllib.request import urlretrieve - -from renku.core import errors -from renku.domain_model.dataset import Dataset, ImageObject -from renku.domain_model.project_context import project_context - - -class ImageRequestModel: - """Model for passing image information to dataset use-cases.""" - - def __init__( - self, - content_url: str, - position: int, - mirror_locally: bool = False, - safe_image_paths: Optional[List[str]] = None, - ) -> None: - self.content_url = content_url - self.position = position - self.mirror_locally = mirror_locally - self.safe_image_paths: List[Union[str, Path]] = cast(List[Union[str, Path]], safe_image_paths) or [] - - def to_image_object(self, dataset: Dataset) -> ImageObject: - """Convert request model to ``ImageObject``.""" - image_type = None - self.safe_image_paths.append(project_context.path) - - image_folder = project_context.dataset_images_path / dataset.initial_identifier - image_folder.mkdir(exist_ok=True, parents=True) - - if urllib.parse.urlparse(self.content_url).netloc: - # NOTE: absolute url - if not self.mirror_locally: - return ImageObject( - content_url=self.content_url, - position=self.position, - id=ImageObject.generate_id(dataset_id=dataset.id, position=self.position), - ) - - # NOTE: mirror the image locally - try: - path, _ = urlretrieve(self.content_url) - except urllib.error.URLError as e: - raise errors.DatasetImageError(f"Dataset image with url {self.content_url} couldn't be mirrored") from e - - image_type = imghdr.what(path) - if image_type: - image_type = f".{image_type}" - - self.content_url = path - self.safe_image_paths.append(Path(path).parent) - - path = self.content_url - if not os.path.isabs(path): - path = os.path.normpath(os.path.join(project_context.path, path)) - - if not os.path.exists(path) or not any( - os.path.commonprefix([path, p]) == str(p) for p in self.safe_image_paths - ): - # NOTE: make sure files exists and prevent path traversal - raise errors.DatasetImageError(f"Dataset image with relative path {self.content_url} not found") - - if not path.startswith(str(image_folder)): - # NOTE: only copy dataset image if it's not in .renku/datasets//images/ already - if image_type: - ext = image_type - else: - _, ext = os.path.splitext(self.content_url) - - img_path = image_folder / f"{self.position}{ext}" - shutil.copy(path, img_path) - else: - img_path = Path(path) - - return ImageObject( - content_url=str(img_path.relative_to(project_context.path)), - position=self.position, - id=ImageObject.generate_id(dataset_id=dataset.id, position=self.position), - ) diff --git a/renku/core/dataset/tag.py b/renku/core/dataset/tag.py index 1ca5a7c678..4226aa7165 100644 --- a/renku/core/dataset/tag.py +++ b/renku/core/dataset/tag.py @@ -27,7 +27,7 @@ from renku.infrastructure.immutable import DynamicProxy -def add_dataset_tag(dataset_name: str, tag: str, description="", force=False): +def add_dataset_tag(dataset_slug: str, tag: str, description="", force=False): """Adds a new tag to a dataset. Validates if the tag already exists and that the tag follows the same rules as docker tags. @@ -45,7 +45,7 @@ def add_dataset_tag(dataset_name: str, tag: str, description="", force=False): "Only characters a-z, A-Z, 0-9, ., - and _ are allowed.\nTag can't start with a . 
or -" ) datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(dataset_name, strict=True) + dataset = datasets_provenance.get_by_slug(dataset_slug, strict=True) assert dataset is not None tags = datasets_provenance.get_all_tags(dataset) @@ -60,25 +60,25 @@ def add_dataset_tag(dataset_name: str, tag: str, description="", force=False): datasets_provenance.add_tag(dataset, new_tag) -def list_dataset_tags(dataset_name, format): +def list_dataset_tags(dataset_slug, format): """List all tags for a dataset.""" datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(dataset_name, strict=True) + dataset = datasets_provenance.get_by_slug(dataset_slug, strict=True) assert dataset is not None tags = datasets_provenance.get_all_tags(dataset) tags = sorted(tags, key=lambda t: t.date_created) tags = [cast(Dataset, DynamicProxy(t)) for t in tags] for tag in tags: - tag.dataset = dataset.title + tag.dataset = dataset.name return DATASET_TAGS_FORMATS[format](tags) -def remove_dataset_tags(dataset_name: str, tags: List[str]): +def remove_dataset_tags(dataset_slug: str, tags: List[str]): """Removes tags from a dataset.""" datasets_provenance = DatasetsProvenance() - dataset = datasets_provenance.get_by_name(dataset_name, strict=True) + dataset = datasets_provenance.get_by_slug(dataset_slug, strict=True) assert dataset is not None dataset_tags = datasets_provenance.get_all_tags(dataset) diff --git a/renku/core/errors.py b/renku/core/errors.py index 0da28254c1..6216e9d07d 100644 --- a/renku/core/errors.py +++ b/renku/core/errors.py @@ -271,12 +271,12 @@ def __init__(self, return_code, success_codes=None, message=None): class DatasetNotFound(DatasetException): """Raise when dataset is not found.""" - def __init__(self, *, name=None, message=None): + def __init__(self, *, slug=None, message=None): """Build a custom message.""" if message: msg = message - elif name: - msg = f"Dataset '{name}' is not found." + elif slug: + msg = f"Dataset '{slug}' is not found." else: msg = "Dataset is not found." super().__init__(msg) @@ -512,7 +512,11 @@ class RenkuSaveError(RenkuException): """Raised when renku save doesn't work.""" -class DatasetImageError(DatasetException): +class ImageError(RenkuException): + """Raised when an image for a project/dataset is not accessible.""" + + +class DatasetImageError(DatasetException, ImageError): """Raised when a local dataset image is not accessible.""" diff --git a/renku/core/image.py b/renku/core/image.py new file mode 100644 index 0000000000..6e875912f1 --- /dev/null +++ b/renku/core/image.py @@ -0,0 +1,82 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku project/dataset image management.""" + +import imghdr +import os +import urllib +from pathlib import Path +from typing import List, Optional, Union, cast +from urllib.request import urlretrieve + +from renku.core import errors +from renku.core.constant import FILESYSTEM_ROOT +from renku.core.util.os import is_subpath +from renku.core.util.urls import is_remote +from renku.domain_model.image import ImageObject +from renku.domain_model.project_context import project_context + + +class ImageObjectRequest: + """Model for passing image information.""" + + def __init__( + self, + content_url: str, + position: int = 0, + mirror_locally: bool = True, + safe_image_paths: Optional[List[str]] = None, + ) -> None: + self.content_url = content_url + self.position = position + self.mirror_locally = mirror_locally + self.safe_image_paths: List[Union[str, Path]] = cast(List[Union[str, Path]], safe_image_paths) or [] + + def to_image_object(self, owner_id: str) -> ImageObject: + """Convert request model to ``ImageObject`` and download the image if requested and return its path.""" + self.safe_image_paths.append(project_context.path) + + if is_remote(self.content_url): + if not self.mirror_locally: + return ImageObject( + content_url=self.content_url, + position=self.position, + id=ImageObject.generate_id(owner_id=owner_id, position=self.position), + ) + + # NOTE: Download the image + try: + tmp_path, _ = urlretrieve(self.content_url) + except urllib.error.URLError as e: + raise errors.ImageError(f"Cannot download image with url {self.content_url}: {e}") from e + + path = Path(tmp_path) + else: + path = Path(self.content_url).resolve() + + if not os.path.exists(path): + raise errors.ImageError(f"Image with local path '{self.content_url}' not found") + # NOTE: Prevent path traversal or usage of non-image files + elif (FILESYSTEM_ROOT in self.safe_image_paths and imghdr.what(path) is None) or not any( + is_subpath(path, base=p) for p in self.safe_image_paths + ): + raise errors.ImageError(f"'{self.content_url}' isn't a valid image file") + + return ImageObject( + content_url=path.as_posix(), + position=self.position, + id=ImageObject.generate_id(owner_id=owner_id, position=self.position), + ) diff --git a/renku/core/init.py b/renku/core/init.py index 18f12ef560..f62cd4cc27 100644 --- a/renku/core/init.py +++ b/renku/core/init.py @@ -30,8 +30,10 @@ from renku.core.constant import DATA_DIR_CONFIG_KEY, RENKU_HOME from renku.core.git import with_worktree from renku.core.githooks import install_githooks +from renku.core.image import ImageObjectRequest from renku.core.interface.database_gateway import IDatabaseGateway from renku.core.migration.utils import OLD_METADATA_PATH +from renku.core.project import set_project_image from renku.core.storage import init_external_storage, storage_installed from renku.core.template.template import ( FileAction, @@ -101,6 +103,7 @@ def init_project( name: Optional[str], description: Optional[str], keywords: Optional[List[str]], + image_request: Optional[ImageObjectRequest], template_id: Optional[str], template_source: Optional[str], template_ref: Optional[str], @@ -114,11 +117,12 @@ def init_project( """Initialize a renku project. Args: - external_storage_requested: Whether or not external storage should be used. + external_storage_requested: Whether external storage should be used. path: Path to initialize repository at. name: Name of the project. description: Description of the project. keywords: keywords for the project. 
+ image_request(Optional[ImageObjectRequest]): Project's image. template_id: id of the template to use. template_source: Source to get the template from. template_ref: Reference to use to get the template. @@ -211,6 +215,7 @@ def init_project( description=description, keywords=keywords, install_mergetool=install_mergetool, + image_request=image_request, ) except FileExistsError as e: raise errors.InvalidFileOperation(e) @@ -264,6 +269,7 @@ def create_from_template( commit_message: Optional[str] = None, description: Optional[str] = None, keywords: Optional[List[str]] = None, + image_request: Optional[ImageObjectRequest] = None, install_mergetool: bool = False, ): """Initialize a new project from a template. @@ -278,7 +284,8 @@ def create_from_template( commit_message(Optional[str]): Message for initial commit (Default value = None). description(Optional[str]): Description of the project (Default value = None). keywords(Optional[List[str]]): Keywords for project (Default value = None). - install_mergetool(bool): Whether to setup renku metadata mergetool (Default value = False). + image_request(Optional[ImageObjectRequest]): Project's image (Default value = None). + install_mergetool(bool): Whether to set up renku metadata mergetool (Default value = False). """ commit_only = [f"{RENKU_HOME}/", str(project_context.template_checksums_path)] + list(rendered_template.get_files()) @@ -312,6 +319,9 @@ def create_from_template( ) as project: copy_template_to_project(rendered_template=rendered_template, project=project, actions=actions) + # NOTE: Copy image to project + set_project_image(image_request=image_request) + if install_mergetool: setup_mergetool() @@ -337,6 +347,7 @@ def create_from_template_local( keywords: Optional[List[str]] = None, data_dir: Optional[str] = None, ssh_supported: bool = False, + image_request: Optional[ImageObjectRequest] = None, ): """Initialize a new project from a template. @@ -356,6 +367,7 @@ def create_from_template_local( description(Optional[str]): Project description (Default value = None). keywords(Optional[List[str]]): Project keywords (Default value = None). data_dir(Optional[str]): Project base data directory (Default value = None). + image_request(Optional[ImageObjectRequest]): Project's image (Default value = None). """ metadata = metadata or {} default_metadata = default_metadata or {} @@ -410,4 +422,5 @@ def create_from_template_local( description=description, keywords=keywords, data_dir=data_dir, + image_request=image_request, ) diff --git a/renku/core/interface/__init__.py b/renku/core/interface/__init__.py index d045a4a0cb..790542c27e 100644 --- a/renku/core/interface/__init__.py +++ b/renku/core/interface/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/interface/activity_gateway.py b/renku/core/interface/activity_gateway.py index db4d5579ca..a5e2469dd9 100644 --- a/renku/core/interface/activity_gateway.py +++ b/renku/core/interface/activity_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/interface/database_gateway.py b/renku/core/interface/database_gateway.py index 4bae4e3ab9..182ed114a5 100644 --- a/renku/core/interface/database_gateway.py +++ b/renku/core/interface/database_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/interface/dataset_gateway.py b/renku/core/interface/dataset_gateway.py index e169919075..ec5ece49b4 100644 --- a/renku/core/interface/dataset_gateway.py +++ b/renku/core/interface/dataset_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -30,8 +29,8 @@ def get_by_id(self, id: str) -> "Dataset": """Get a dataset by id.""" raise NotImplementedError - def get_by_name(self, name: str) -> Optional["Dataset"]: - """Get a dataset by id.""" + def get_by_slug(self, slug: str) -> Optional["Dataset"]: + """Get a dataset by slug.""" raise NotImplementedError def get_all_active_datasets(self) -> List["Dataset"]: diff --git a/renku/core/interface/plan_gateway.py b/renku/core/interface/plan_gateway.py index 85749be54b..c00b3feb22 100644 --- a/renku/core/interface/plan_gateway.py +++ b/renku/core/interface/plan_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/interface/project_gateway.py b/renku/core/interface/project_gateway.py index fc5fb5e68b..366704f875 100644 --- a/renku/core/interface/project_gateway.py +++ b/renku/core/interface/project_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/interface/storage.py b/renku/core/interface/storage.py index f77d05f131..ac57b3de36 100644 --- a/renku/core/interface/storage.py +++ b/renku/core/interface/storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/interface/workflow_file_parser.py b/renku/core/interface/workflow_file_parser.py index 5388212bb8..726fdfcbaf 100644 --- a/renku/core/interface/workflow_file_parser.py +++ b/renku/core/interface/workflow_file_parser.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/__init__.py b/renku/core/migration/__init__.py index 27f0bf1142..37105b625f 100644 --- a/renku/core/migration/__init__.py +++ b/renku/core/migration/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0003__0_pyld2.py b/renku/core/migration/m_0003__0_pyld2.py index 6b0a7699f0..22c3af3874 100644 --- a/renku/core/migration/m_0003__0_pyld2.py +++ b/renku/core/migration/m_0003__0_pyld2.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0003__1_jsonld.py b/renku/core/migration/m_0003__1_jsonld.py index 380425eb7b..bfaae12cfa 100644 --- a/renku/core/migration/m_0003__1_jsonld.py +++ b/renku/core/migration/m_0003__1_jsonld.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0003__2_initial.py b/renku/core/migration/m_0003__2_initial.py index 9bf9b0c073..47d9c3262f 100644 --- a/renku/core/migration/m_0003__2_initial.py +++ b/renku/core/migration/m_0003__2_initial.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -36,7 +35,7 @@ from renku.core.util.contexts import with_project_metadata from renku.core.util.git import get_in_submodules from renku.core.util.urls import url_to_string -from renku.domain_model.dataset import generate_default_name +from renku.domain_model.dataset import generate_default_slug from renku.domain_model.project_context import project_context @@ -88,7 +87,7 @@ def _migrate_datasets_pre_v0_3(): dataset = Dataset.from_yaml(old_path) dataset.title = name - dataset.name = generate_default_name(name) + dataset.name = generate_default_slug(name) new_path = get_datasets_path() / dataset.identifier / OLD_METADATA_PATH new_path.parent.mkdir(parents=True, exist_ok=True) @@ -123,9 +122,9 @@ def _migrate_broken_dataset_paths(migration_context): """Ensure all paths are using correct directory structure.""" for dataset in get_project_datasets(): if not dataset.name: - dataset.name = generate_default_name(dataset.title, dataset.version) + dataset.name = generate_default_slug(dataset.title, dataset.version) else: - dataset.name = generate_default_name(dataset.name) + dataset.name = generate_default_slug(dataset.name) expected_path = get_datasets_path() / dataset.identifier diff --git a/renku/core/migration/m_0004__0_pyld2.py b/renku/core/migration/m_0004__0_pyld2.py index 6b0a7699f0..22c3af3874 100644 --- a/renku/core/migration/m_0004__0_pyld2.py +++ b/renku/core/migration/m_0004__0_pyld2.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0004__submodules.py b/renku/core/migration/m_0004__submodules.py index 10148034c1..0fae9324c1 100644 --- a/renku/core/migration/m_0004__submodules.py +++ b/renku/core/migration/m_0004__submodules.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0005__1_pyld2.py b/renku/core/migration/m_0005__1_pyld2.py index 078af4ba61..5266b96705 100644 --- a/renku/core/migration/m_0005__1_pyld2.py +++ b/renku/core/migration/m_0005__1_pyld2.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0005__2_cwl.py b/renku/core/migration/m_0005__2_cwl.py index 77fba95f57..cfdf33402e 100644 --- a/renku/core/migration/m_0005__2_cwl.py +++ b/renku/core/migration/m_0005__2_cwl.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0006__dataset_context.py b/renku/core/migration/m_0006__dataset_context.py index 47d9d76e86..b050576375 100644 --- a/renku/core/migration/m_0006__dataset_context.py +++ b/renku/core/migration/m_0006__dataset_context.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0007__source_url.py b/renku/core/migration/m_0007__source_url.py index 2e3d5f1978..1a2a1df722 100644 --- a/renku/core/migration/m_0007__source_url.py +++ b/renku/core/migration/m_0007__source_url.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0008__dataset_metadata.py b/renku/core/migration/m_0008__dataset_metadata.py index 4a3e8ab052..31581a2f49 100644 --- a/renku/core/migration/m_0008__dataset_metadata.py +++ b/renku/core/migration/m_0008__dataset_metadata.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0009__new_metadata_storage.py b/renku/core/migration/m_0009__new_metadata_storage.py index d1cdaad811..11cdd8ebd2 100644 --- a/renku/core/migration/m_0009__new_metadata_storage.py +++ b/renku/core/migration/m_0009__new_metadata_storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/m_0010__metadata_fixes.py b/renku/core/migration/m_0010__metadata_fixes.py index 11f7d49035..8b88c1093b 100644 --- a/renku/core/migration/m_0010__metadata_fixes.py +++ b/renku/core/migration/m_0010__metadata_fixes.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -238,7 +237,7 @@ def migrate_project_template_data(project_gateway: IProjectGateway): @inject.autoparams("plan_gateway") def fix_plan_times(plan_gateway: IPlanGateway): - """Add timezone to plan invalidations.""" + """Rename plan's date-related attributes and add timezone to invalidation time.""" plans: List[AbstractPlan] = plan_gateway.get_all_plans() for plan in plans: @@ -248,6 +247,8 @@ def fix_plan_times(plan_gateway: IPlanGateway): del plan.invalidated_at elif not hasattr(plan, "date_removed"): plan.date_removed = None + if not hasattr(plan, "date_created"): + plan.date_created = getattr(plan, "date_modified", None) or plan.date_removed or local_now() if plan.date_removed is not None: if plan.date_removed.tzinfo is None: diff --git a/renku/core/migration/migrate.py b/renku/core/migration/migrate.py index 586da9fc49..30e0e37a17 100644 --- a/renku/core/migration/migrate.py +++ b/renku/core/migration/migrate.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/__init__.py b/renku/core/migration/models/__init__.py index cdeb33eedb..155ebc084a 100644 --- a/renku/core/migration/models/__init__.py +++ b/renku/core/migration/models/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/migration.py b/renku/core/migration/models/migration.py index d2cab7421a..86678043d3 100644 --- a/renku/core/migration/models/migration.py +++ b/renku/core/migration/models/migration.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/refs.py b/renku/core/migration/models/refs.py index 9e579c681f..f1cabeb0d9 100644 --- a/renku/core/migration/models/refs.py +++ b/renku/core/migration/models/refs.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/v10.py b/renku/core/migration/models/v10.py index ed9fb27d93..9fc871fe8a 100644 --- a/renku/core/migration/models/v10.py +++ b/renku/core/migration/models/v10.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/v3.py b/renku/core/migration/models/v3.py index 8423cdf7b7..8afe0b4e2a 100644 --- a/renku/core/migration/models/v3.py +++ b/renku/core/migration/models/v3.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/v7.py b/renku/core/migration/models/v7.py index 99641c7617..37909cbd59 100644 --- a/renku/core/migration/models/v7.py +++ b/renku/core/migration/models/v7.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/v8.py b/renku/core/migration/models/v8.py index 535d415d6b..b27399059d 100644 --- a/renku/core/migration/models/v8.py +++ b/renku/core/migration/models/v8.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/models/v9.py b/renku/core/migration/models/v9.py index 9b902aeac9..0eb510f891 100644 --- a/renku/core/migration/models/v9.py +++ b/renku/core/migration/models/v9.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -67,7 +66,7 @@ from renku.core.util.doi import extract_doi, is_doi from renku.core.util.git import get_in_submodules from renku.core.util.urls import get_host, get_slug -from renku.domain_model.dataset import generate_default_name +from renku.domain_model.dataset import generate_default_slug from renku.domain_model.project_context import project_context from renku.infrastructure.repository import Commit from renku.version import __version__, version_url @@ -1691,7 +1690,7 @@ def __attrs_post_init__(self): pass if not self.name: - self.name = generate_default_name(self.title, self.version) + self.name = generate_default_slug(self.title, self.version) @classmethod def from_yaml(cls, path, commit=None): diff --git a/renku/core/migration/utils/__init__.py b/renku/core/migration/utils/__init__.py index e81ef7fa90..68e685acf3 100644 --- a/renku/core/migration/utils/__init__.py +++ b/renku/core/migration/utils/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/migration/utils/conversion.py b/renku/core/migration/utils/conversion.py index 7e86e261aa..85c4880897 100644 --- a/renku/core/migration/utils/conversion.py +++ b/renku/core/migration/utils/conversion.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
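(Aside: the generate_default_name import above becomes generate_default_slug. Its definition, added further down in renku/domain_model/dataset.py in this same diff, derives a slug roughly as sketched here; the 24- and 10-character caps come straight from that hunk, and get_slug and is_dataset_slug_valid are the renku helpers this diff provides:)

from typing import Optional

from renku.core.util.urls import get_slug
from renku.domain_model.dataset import is_dataset_slug_valid


def derive_slug(name: str, version: Optional[str] = None) -> str:
    # Reuse the name when it is already a valid slug, for backwards compatibility.
    if is_dataset_slug_valid(name):
        return name
    # Otherwise slugify and cap at 24 characters.
    slug = get_slug(name)[:24]
    if version:
        version_slug = get_slug(version)[:10]
        # Trim the base so the slug plus "_" plus the version suffix still fits the cap.
        slug = f"{slug[:-(len(version_slug) + 1)]}_{version_slug}"
    return get_slug(slug)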
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -28,12 +27,12 @@ Dataset, DatasetFile, DatasetTag, - ImageObject, Language, RemoteEntity, Url, - is_dataset_name_valid, + is_dataset_slug_valid, ) +from renku.domain_model.image import ImageObject from renku.domain_model.project_context import project_context from renku.domain_model.provenance import agent as new_agents @@ -65,7 +64,7 @@ def _convert_image_object(image_object: Optional[old_datasets.ImageObject], data """Create from old ImageObject instance.""" if not image_object: return - id = ImageObject.generate_id(dataset_id=dataset_id, position=image_object.position) + id = ImageObject.generate_id(owner_id=dataset_id, position=image_object.position) return ImageObject(content_url=image_object.content_url, position=image_object.position, id=id) @@ -184,7 +183,7 @@ def convert_license(license): return str(license) tags = [_convert_dataset_tag(tag) for tag in (dataset.tags or [])] - name = get_slug(dataset.name) if not is_dataset_name_valid(dataset.name) else dataset.name + slug = get_slug(dataset.name) if not is_dataset_slug_valid(dataset.name) else dataset.name identifier = _convert_dataset_identifier(dataset.identifier) id = Dataset.generate_id(identifier=identifier) @@ -205,11 +204,11 @@ def convert_license(license): in_language=_convert_language(dataset.in_language), keywords=dataset.keywords, license=convert_license(dataset.license), - name=name, + slug=slug, project_id=project_context.project.id, initial_identifier=_convert_dataset_identifier(dataset.initial_identifier), same_as=_convert_same_as(dataset.same_as), - title=dataset.title, + name=dataset.title, version=dataset.version, ), tags, diff --git a/renku/core/plugin/__init__.py b/renku/core/plugin/__init__.py index e675cbd87b..4460f6bf17 100644 --- a/renku/core/plugin/__init__.py +++ b/renku/core/plugin/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/dataset_provider.py b/renku/core/plugin/dataset_provider.py index 231d72fbbb..be3df271c1 100644 --- a/renku/core/plugin/dataset_provider.py +++ b/renku/core/plugin/dataset_provider.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/implementations/__init__.py b/renku/core/plugin/implementations/__init__.py index f19b97f010..45253b80c0 100644 --- a/renku/core/plugin/implementations/__init__.py +++ b/renku/core/plugin/implementations/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
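(Aside on the dataset_id to owner_id rename in the conversion above: an image's id is now keyed to whatever entity owns it, projects included. Per the new renku/domain_model/image.py introduced later in this diff, the id is just the owner id plus an images suffix; the owner ids below are made-up examples:)

from renku.domain_model.image import ImageObject  # module added by this diff

# The same helper now serves dataset and project images alike.
assert ImageObject.generate_id(owner_id="/datasets/abc123", position=1) == "/datasets/abc123/images/1"
assert ImageObject.generate_id(owner_id="/projects/ns/my-project", position=0) == "/projects/ns/my-project/images/0"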
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/pluginmanager.py b/renku/core/plugin/pluginmanager.py index 455c98f79e..5c2c861406 100644 --- a/renku/core/plugin/pluginmanager.py +++ b/renku/core/plugin/pluginmanager.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/provider.py b/renku/core/plugin/provider.py index 994012ebed..e6bf770a34 100644 --- a/renku/core/plugin/provider.py +++ b/renku/core/plugin/provider.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/run.py b/renku/core/plugin/run.py index 6d06492ab7..4e971897b7 100644 --- a/renku/core/plugin/run.py +++ b/renku/core/plugin/run.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/session.py b/renku/core/plugin/session.py index 00feb558b7..a779afce6b 100644 --- a/renku/core/plugin/session.py +++ b/renku/core/plugin/session.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,7 +18,7 @@ import pluggy -from renku.domain_model.session import ISessionProvider +from renku.domain_model.session import IHibernatingSessionProvider, ISessionProvider hookspec = pluggy.HookspecMarker("renku") @@ -42,3 +41,9 @@ def get_supported_session_providers() -> List[ISessionProvider]: providers = pm.hook.session_provider() return sorted(providers, key=lambda p: p.priority) + + +def get_supported_hibernating_session_providers() -> List[IHibernatingSessionProvider]: + """Returns the currently available interactive session providers that support hibernation.""" + providers = get_supported_session_providers() + return [p for p in providers if isinstance(p, IHibernatingSessionProvider)] diff --git a/renku/core/plugin/workflow.py b/renku/core/plugin/workflow.py index a428c07c42..17aaaba44c 100644 --- a/renku/core/plugin/workflow.py +++ b/renku/core/plugin/workflow.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/plugin/workflow_file_parser.py b/renku/core/plugin/workflow_file_parser.py index 30130009d1..b5ed34dd9c 100644 --- a/renku/core/plugin/workflow_file_parser.py +++ b/renku/core/plugin/workflow_file_parser.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/project.py b/renku/core/project.py index c852a31ecf..d0d8257857 100644 --- a/renku/core/project.py +++ b/renku/core/project.py @@ -15,15 +15,21 @@ # limitations under the License. """Project business logic.""" +import os +import shutil from typing import Dict, List, Optional, Union, cast from pydantic import validate_arguments from renku.command.command_builder import inject from renku.command.view_model.project import ProjectViewModel +from renku.core import errors +from renku.core.image import ImageObjectRequest from renku.core.interface.project_gateway import IProjectGateway from renku.core.util.metadata import construct_creator +from renku.core.util.os import get_relative_path from renku.domain_model.constant import NO_VALUE, NoValueType +from renku.domain_model.dataset import ImageObjectRequestJson from renku.domain_model.project_context import project_context from renku.domain_model.provenance.agent import Person @@ -36,6 +42,7 @@ def edit_project( keywords: Optional[Union[List[str], NoValueType]], custom_metadata: Optional[Union[Dict, List[Dict], NoValueType]], custom_metadata_source: Optional[Union[str, NoValueType]], + image_request: Optional[Union[ImageObjectRequest, NoValueType]], project_gateway: IProjectGateway, ): """Edit dataset metadata. @@ -47,6 +54,7 @@ def edit_project( custom_metadata(Union[Optional[Dict, List[Dict]]): Custom JSON-LD metadata. custom_metadata_source(Optional[str]): Custom metadata source. project_gateway(IProjectGateway): Injected project gateway. + image_request(Optional[ImageObjectRequest]): Project's image. Returns: Tuple of fields that were updated and dictionary of warnings. @@ -56,6 +64,11 @@ def edit_project( "description": description, "keywords": keywords, "custom_metadata": custom_metadata, + "image": ( + image_request + if image_request is NO_VALUE or image_request is None + else ImageObjectRequestJson().dump(image_request) + ), } no_email_warnings: Optional[Union[Dict, str]] = None @@ -64,10 +77,16 @@ def edit_project( if creator is not NO_VALUE: parsed_creator, no_email_warnings = construct_creator(cast(Union[Dict, str], creator), ignore_email=True) + if image_request is None: + delete_project_image() + elif image_request is not NO_VALUE: + set_project_image(image_request=image_request) # type: ignore + updated = {k: v for k, v in possible_updates.items() if v is not NO_VALUE} if updated: project = project_gateway.get_project() + # NOTE: No need to pass ``image`` here since we already copied/deleted the file and updated the project project.update_metadata( creator=parsed_creator, description=description, @@ -87,3 +106,45 @@ def show_project() -> ProjectViewModel: Project view model. 
""" return ProjectViewModel.from_project(project_context.project) + + +def set_project_image(image_request: Optional[ImageObjectRequest]) -> None: + """Download and set a project's images. + + Args: + image_request(Optional[ImageObjectRequest]): The image to set. + """ + if image_request is None: + return + + # NOTE: Projects can have maximum one image + image_request.position = 0 + + image_object = image_request.to_image_object(owner_id=project_context.project.id) + + project_image = project_context.project_image_pathname + + # NOTE: Do nothing if the new path is the same as the old one + if project_image.resolve() != image_object.content_url: + # NOTE: Always delete the old image in case the image wasn't mirrored in the project + delete_project_image() + + if not image_object.is_remote: + project_image.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(image_object.content_url, project_context.project_image_pathname) + + image_object.content_url = get_relative_path(project_image, base=project_context.path) # type: ignore + + project_context.project.image = image_object + + +def delete_project_image() -> None: + """Delete project image in a project.""" + try: + os.remove(project_context.project_image_pathname) + except FileNotFoundError: + pass + except OSError as e: + raise errors.ImageError(f"Cannot delete project image '{project_context.project_image_pathname}': {e}") from e + else: + project_context.project.image = None diff --git a/renku/core/session/docker.py b/renku/core/session/docker.py index 15d009dc32..f7a930ef42 100644 --- a/renku/core/session/docker.py +++ b/renku/core/session/docker.py @@ -356,6 +356,9 @@ def session_start_helper(consider_disk_request: bool): environment["CHOWN_HOME"] = "yes" environment["CHOWN_HOME_OPTS"] = "-R" + if "force_build" in kwargs: + del kwargs["force_build"] + container = self.docker_client().containers.run( image_name, 'jupyter notebook --NotebookApp.ip="0.0.0.0"' diff --git a/renku/core/session/renkulab.py b/renku/core/session/renkulab.py index 32ed5a5364..f9c95d1393 100644 --- a/renku/core/session/renkulab.py +++ b/renku/core/session/renkulab.py @@ -34,13 +34,13 @@ from renku.core.util.jwt import is_token_expired from renku.core.util.ssh import SystemSSHConfig from renku.domain_model.project_context import project_context -from renku.domain_model.session import ISessionProvider, Session, SessionStopStatus +from renku.domain_model.session import IHibernatingSessionProvider, Session, SessionStopStatus if TYPE_CHECKING: from renku.core.dataset.providers.models import ProviderParameter -class RenkulabSessionProvider(ISessionProvider): +class RenkulabSessionProvider(IHibernatingSessionProvider): """A session provider that uses the notebook service API to launch sessions.""" DEFAULT_TIMEOUT_SECONDS = 300 @@ -118,7 +118,7 @@ def _wait_for_session_status( ) if res.status_code == 404 and status == "stopping": return - if res.status_code == 200 and status != "stopping": + if res.status_code in [200, 204] and status != "stopping": if res.json().get("status", {}).get("state") == status: return sleep(5) @@ -210,9 +210,9 @@ def _remote_head_hexsha(): return remote.head - def _send_renku_request(self, req_type: str, *args, **kwargs): - res = getattr(requests, req_type)(*args, **kwargs) - if res.status_code == 401: + def _send_renku_request(self, verb: str, *args, **kwargs): + response = getattr(requests, verb)(*args, **kwargs) + if response.status_code == 401: # NOTE: Check if logged in to KC but not the Renku UI token = 
read_renku_token(endpoint=self._renku_url()) if token and not is_token_expired(token): @@ -222,7 +222,7 @@ def _send_renku_request(self, req_type: str, *args, **kwargs): raise errors.AuthenticationError( "Please run the renku login command to authenticate with Renku or to refresh your expired credentials." ) - return res + return response @staticmethod def _project_name_from_full_project_name(project_name: str) -> str: @@ -262,7 +262,7 @@ def find_image(self, image_name: str, config: Optional[Dict[str, Any]]) -> bool: ) @hookimpl - def session_provider(self) -> ISessionProvider: + def session_provider(self) -> IHibernatingSessionProvider: """Supported session provider. Returns: @@ -511,3 +511,69 @@ def session_url(self, session_name: str) -> str: def force_build_image(self, **kwargs) -> bool: """Whether we should force build the image directly or check for an existing image first.""" return self._force_build + + def session_pause(self, project_name: str, session_name: Optional[str], **_) -> SessionStopStatus: + """Pause all sessions (for the given project) or a specific interactive session.""" + + def pause(session_name: str): + result = self._send_renku_request( + "patch", + f"{self._notebooks_url()}/servers/{session_name}", + headers=self._auth_header(), + json={"state": "hibernated"}, + ) + + self._wait_for_session_status(session_name, "hibernated") + + return result + + sessions = self.session_list(project_name=project_name) + n_sessions = len(sessions) + + if n_sessions == 0: + return SessionStopStatus.NO_ACTIVE_SESSION + + if session_name: + response = pause(session_name) + elif n_sessions == 1: + response = pause(sessions[0].name) + else: + return SessionStopStatus.NAME_NEEDED + + return SessionStopStatus.SUCCESSFUL if response.status_code == 204 else SessionStopStatus.FAILED + + def session_resume(self, project_name: str, session_name: Optional[str], **kwargs) -> bool: + """Resume a paused session. + + Args: + project_name(str): Renku project name. + session_name(Optional[str]): The unique id of the interactive session. 
+ """ + sessions = self.session_list(project_name="") + system_config = SystemSSHConfig() + name = self._project_name_from_full_project_name(project_name) + ssh_prefix = f"{system_config.renku_host}-{name}-" + + if not session_name: + if len(sessions) == 1: + session_name = sessions[0].name + else: + return False + else: + if session_name.startswith(ssh_prefix): + # NOTE: User passed in ssh connection name instead of session id by accident + session_name = session_name.replace(ssh_prefix, "", 1) + + if not any(s.name == session_name for s in sessions): + return False + + self._send_renku_request( + "patch", + f"{self._notebooks_url()}/servers/{session_name}", + headers=self._auth_header(), + json={"state": "running"}, + ) + + self._wait_for_session_status(session_name, "running") + + return True diff --git a/renku/core/session/session.py b/renku/core/session/session.py index ce1fa67f6c..4c62e9b047 100644 --- a/renku/core/session/session.py +++ b/renku/core/session/session.py @@ -25,12 +25,12 @@ from renku.core import errors from renku.core.config import get_value -from renku.core.plugin.session import get_supported_session_providers +from renku.core.plugin.session import get_supported_hibernating_session_providers, get_supported_session_providers from renku.core.session.utils import get_image_repository_host, get_renku_project_name from renku.core.util import communication from renku.core.util.os import safe_read_yaml from renku.core.util.ssh import SystemSSHConfig, generate_ssh_keys -from renku.domain_model.session import ISessionProvider, Session, SessionStopStatus +from renku.domain_model.session import IHibernatingSessionProvider, ISessionProvider, Session, SessionStopStatus def _safe_get_provider(provider: str) -> ISessionProvider: @@ -80,6 +80,22 @@ def search_session_providers(name: str) -> List[str]: return [p.name for p in get_supported_session_providers() if p.name.lower().startswith(name)] +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def search_hibernating_session_providers(name: str) -> List[str]: + """Get all session providers that support hibernation and their name starts with the given name. + + Args: + name(str): The name to search for. + + Returns: + All session providers whose name starts with ``name``. + """ + from renku.core.plugin.session import get_supported_hibernating_session_providers + + name = name.lower() + return [p.name for p in get_supported_hibernating_session_providers() if p.name.lower().startswith(name)] + + @validate_arguments(config=dict(arbitrary_types_allowed=True)) def session_list(*, provider: Optional[str] = None) -> SessionList: """List interactive sessions. @@ -142,6 +158,9 @@ def session_start( """ from renku.domain_model.project_context import project_context + if project_context.repository.head.detached: + raise errors.SessionStartError("Cannot start a session from a detached HEAD. Check out a branch first.") + # NOTE: The Docker client in Python requires the parameters below to be a list and will fail with a tuple. # Click will convert parameters with the flag "many" set to True to tuples. 
kwargs["security_opt"] = list(kwargs.get("security_opt", [])) @@ -223,10 +242,16 @@ def stop_sessions(session_provider: ISessionProvider) -> SessionStopStatus: return session_provider.session_stop( project_name=project_name, session_name=session_name, stop_all=stop_all ) - except errors.RenkulabSessionGetUrlError: + except errors.RenkulabSessionGetUrlError as e: if provider: raise - return SessionStopStatus.FAILED + communication.warn(f"Didn't stop any renkulab sessions: {e}") + return SessionStopStatus.SUCCESSFUL + except errors.DockerError as e: + if provider: + raise + communication.warn(f"Didn't stop any docker sessions: {e}") + return SessionStopStatus.SUCCESSFUL session_detail = "all sessions" if stop_all else f"session {session_name}" if session_name else "session" project_name = get_renku_project_name() @@ -358,3 +383,94 @@ def ssh_setup(existing_key: Optional[Path] = None, force: bool = False): "This command does not add any public SSH keys to your project. " "Keys have to be added manually or by using the 'renku session start' command with the '--ssh' flag." ) + + +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def session_pause(session_name: Optional[str], provider: Optional[str] = None, **kwargs): + """Pause an interactive session. + + Args: + session_name(Optional[str]): Name of the session. + provider(Optional[str]): Name of the session provider to use. + """ + + def pause(session_provider: IHibernatingSessionProvider) -> SessionStopStatus: + try: + return session_provider.session_pause(project_name=project_name, session_name=session_name) + except errors.RenkulabSessionGetUrlError: + if provider: + raise + return SessionStopStatus.FAILED + + project_name = get_renku_project_name() + + if provider: + session_provider = _safe_get_provider(provider) + if session_provider is None: + raise errors.ParameterError(f"Provider '{provider}' not found") + elif not isinstance(session_provider, IHibernatingSessionProvider): + raise errors.ParameterError(f"Provider '{provider}' doesn't support pausing sessions") + providers = [session_provider] + else: + providers = get_supported_hibernating_session_providers() + + session_message = f"session {session_name}" if session_name else "session" + statues = [] + warning_messages = [] + with communication.busy(msg=f"Waiting for {session_message} to pause..."): + for session_provider in sorted(providers, key=lambda p: p.priority): + try: + status = pause(session_provider) # type: ignore + except errors.RenkuException as e: + warning_messages.append(f"Cannot pause sessions in provider '{session_provider.name}': {e}") + else: + statues.append(status) + + # NOTE: The given session name was stopped; don't continue + if session_name and status == SessionStopStatus.SUCCESSFUL: + break + + if warning_messages: + for message in warning_messages: + communication.warn(message) + + if not statues: + return + elif all(s == SessionStopStatus.NO_ACTIVE_SESSION for s in statues): + raise errors.ParameterError("There are no running sessions.") + elif session_name and not any(s == SessionStopStatus.SUCCESSFUL for s in statues): + raise errors.ParameterError(f"Could not find '{session_name}' among the running sessions.") + elif not session_name and not any(s == SessionStopStatus.SUCCESSFUL for s in statues): + raise errors.ParameterError("Session name is missing") + + +@validate_arguments(config=dict(arbitrary_types_allowed=True)) +def session_resume(session_name: Optional[str], provider: Optional[str] = None, **kwargs): + """Resume a paused 
session. + + Args: + session_name(Optional[str]): Name of the session. + provider(Optional[str]): Name of the session provider to use. + """ + project_name = get_renku_project_name() + + if provider: + session_provider = _safe_get_provider(provider) + if session_provider is None: + raise errors.ParameterError(f"Provider '{provider}' not found") + elif not isinstance(session_provider, IHibernatingSessionProvider): + raise errors.ParameterError(f"Provider '{provider}' doesn't support pausing/resuming sessions") + providers = [session_provider] + else: + providers = get_supported_hibernating_session_providers() + + session_message = f"session {session_name}" if session_name else "session" + with communication.busy(msg=f"Waiting for {session_message} to resume..."): + for session_provider in providers: + if session_provider.session_resume(project_name, session_name, **kwargs): # type: ignore + return + + if session_name: + raise errors.ParameterError(f"Could not find '{session_name}' among the sessions.") + else: + raise errors.ParameterError("Session name is missing") diff --git a/renku/core/util/contexts.py b/renku/core/util/contexts.py index d9cdf8a7c3..9f3599c4cf 100644 --- a/renku/core/util/contexts.py +++ b/renku/core/util/contexts.py @@ -162,10 +162,14 @@ def with_project_metadata( custom_metadata=custom_metadata, ) - yield project - - if not read_only: + if read_only: + yield project + else: + # NOTE: Set project so that ``project_context`` can be used inside the code project_gateway.update_project(project) + + yield project + database_gateway.commit() diff --git a/renku/core/util/git.py b/renku/core/util/git.py index cd167454c8..348d87e9b6 100644 --- a/renku/core/util/git.py +++ b/renku/core/util/git.py @@ -29,6 +29,8 @@ from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Union, cast from uuid import uuid4 +import git + from renku.core import errors from renku.infrastructure.repository import DiffChangeType @@ -625,7 +627,7 @@ def clone_renku_repository( install_githooks=False, install_lfs=True, skip_smudge=True, - recursive=True, + recursive=False, progress=None, config: Optional[dict] = None, raise_git_except=False, @@ -644,7 +646,7 @@ def clone_renku_repository( install_githooks: Whether to install git hooks (Default value = False). install_lfs: Whether to install Git LFS (Default value = True). skip_smudge: Whether to pull files from Git LFS (Default value = True). - recursive: Whether to clone recursively (Default value = True). + recursive: Whether to clone recursively (Default value = False). progress: The GitProgress object (Default value = None). config(Optional[dict], optional): Set configuration for the project (Default value = None). raise_git_except: Whether to raise git exceptions (Default value = False). @@ -710,9 +712,9 @@ def clone_repository( install_githooks=True, install_lfs=True, skip_smudge=True, - recursive=True, + recursive=False, depth=None, - progress=None, + progress: Optional[git.RemoteProgress] = None, config: Optional[dict] = None, raise_git_except=False, checkout_revision=None, @@ -728,7 +730,7 @@ def clone_repository( install_githooks: Whether to install git hooks (Default value = True). install_lfs: Whether to install Git LFS (Default value = True). skip_smudge: Whether to pull files from Git LFS (Default value = True). - recursive: Whether to clone recursively (Default value = True). + recursive: Whether to clone recursively (Default value = False). depth: The clone depth, number of commits from HEAD (Default value = None). 
progress: The GitProgress object (Default value = None). config(Optional[dict], optional): Set configuration for the project (Default value = None). @@ -746,10 +748,8 @@ def clone_repository( path = Path(path) if path else Path(get_repository_name(url)) - def handle_git_exception(): - """Handle git exceptions.""" - if raise_git_except: - return + def error_from_progress(progress: Optional[git.RemoteProgress], url: str) -> errors.GitError: + """Format a Git command error into a more user-friendly format.""" message = f"Cannot clone repo from {url}" @@ -758,9 +758,9 @@ def handle_git_exception(): error = "".join([f"\n\t{line}" for line in lines if line.strip()]) message += f" - error message:\n {error}" - raise errors.GitError(message) + return errors.GitError(message) - def clean_directory(): + def clean_directory(clean: bool): if not clean or not path: return try: @@ -791,10 +791,10 @@ def check_and_reuse_existing_repository() -> Optional["Repository"]: pass else: # NOTE: not same remote, so don't reuse - clean_directory() + clean_directory(clean=clean) return None except errors.GitError: # NOTE: Not a git repository, remote not found, or checkout failed - clean_directory() + clean_directory(clean=clean) else: return repository @@ -825,15 +825,20 @@ def clone(branch, depth): repository = clone(branch=checkout_revision, depth=depth) except errors.GitCommandError: if not checkout_revision: - handle_git_exception() - raise + if raise_git_except: + raise + raise error_from_progress(progress, url) + + # NOTE: Delete the partially-cloned repository + clean_directory(clean=True) # NOTE: clone without branch set, in case checkout_revision was not a branch or a tag but a commit try: repository = clone(branch=None, depth=None) except errors.GitCommandError: - handle_git_exception() - raise + if raise_git_except: + raise + raise error_from_progress(progress, url) if checkout_revision is not None and not no_checkout: try: diff --git a/renku/core/util/requests.py b/renku/core/util/requests.py index 5bf802f854..629ba58f8b 100644 --- a/renku/core/util/requests.py +++ b/renku/core/util/requests.py @@ -78,6 +78,11 @@ def put(url, *, data=None, files=None, headers=None, params=None): return _request("put", url=url, data=data, files=files, headers=headers, params=params) +def patch(url, *, json=None, files=None, headers=None, params=None): + """Send a PATCH request.""" + return _request("patch", url=url, json=json, files=files, headers=headers, params=params) + + def _request(verb: str, url: str, *, allow_redirects=True, data=None, files=None, headers=None, json=None, params=None): try: with _retry() as session: diff --git a/renku/core/util/urls.py b/renku/core/util/urls.py index 9a491d58af..a5bacdb404 100644 --- a/renku/core/util/urls.py +++ b/renku/core/util/urls.py @@ -167,3 +167,9 @@ def check_url(url: str) -> Tuple[bool, bool]: is_git = is_remote and (u.path.lower().endswith(".git") or scheme in ("git+https", "git+ssh") or starts_with_git) return is_remote, is_git + + +def is_remote(uri: str) -> bool: + """Returns True if a given URI is remote.""" + is_remote, _ = check_url(uri) + return is_remote diff --git a/renku/core/workflow/__init__.py b/renku/core/workflow/__init__.py index 7313d99769..fd88019875 100644 --- a/renku/core/workflow/__init__.py +++ b/renku/core/workflow/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/activity.py b/renku/core/workflow/activity.py index 17f75cd3cc..efab91f71f 100644 --- a/renku/core/workflow/activity.py +++ b/renku/core/workflow/activity.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/converters/__init__.py b/renku/core/workflow/converters/__init__.py index 29cc4adaf5..f6d9536701 100644 --- a/renku/core/workflow/converters/__init__.py +++ b/renku/core/workflow/converters/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/converters/cwl.py b/renku/core/workflow/converters/cwl.py index 04da4b036a..227561b3d9 100644 --- a/renku/core/workflow/converters/cwl.py +++ b/renku/core/workflow/converters/cwl.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/converters/renku.py b/renku/core/workflow/converters/renku.py index 0440d05ad6..cc00340a12 100644 --- a/renku/core/workflow/converters/renku.py +++ b/renku/core/workflow/converters/renku.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/execute.py b/renku/core/workflow/execute.py index 517ce18e92..72749709e0 100644 --- a/renku/core/workflow/execute.py +++ b/renku/core/workflow/execute.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/model/__init__.py b/renku/core/workflow/model/__init__.py index 698857b0f7..5b0083f1d9 100644 --- a/renku/core/workflow/model/__init__.py +++ b/renku/core/workflow/model/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/model/concrete_execution_graph.py b/renku/core/workflow/model/concrete_execution_graph.py index 80a4f4aa47..6eb513c282 100644 --- a/renku/core/workflow/model/concrete_execution_graph.py +++ b/renku/core/workflow/model/concrete_execution_graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/model/workflow_file.py b/renku/core/workflow/model/workflow_file.py index 1016b37ee3..50ecb421d8 100644 --- a/renku/core/workflow/model/workflow_file.py +++ b/renku/core/workflow/model/workflow_file.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/parser/__init__.py b/renku/core/workflow/parser/__init__.py index 90b5c607ce..9eacc555eb 100644 --- a/renku/core/workflow/parser/__init__.py +++ b/renku/core/workflow/parser/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/parser/renku.py b/renku/core/workflow/parser/renku.py index dd6750ba17..3f5be8d5f0 100644 --- a/renku/core/workflow/parser/renku.py +++ b/renku/core/workflow/parser/renku.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/plan.py b/renku/core/workflow/plan.py index 5be92ecee3..26c3fde873 100644 --- a/renku/core/workflow/plan.py +++ b/renku/core/workflow/plan.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/plan_factory.py b/renku/core/workflow/plan_factory.py index 959a85d30d..0013f683cf 100644 --- a/renku/core/workflow/plan_factory.py +++ b/renku/core/workflow/plan_factory.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/providers/__init__.py b/renku/core/workflow/providers/__init__.py index bba8c3d007..06fab013bd 100644 --- a/renku/core/workflow/providers/__init__.py +++ b/renku/core/workflow/providers/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/providers/cwltool.py b/renku/core/workflow/providers/cwltool.py index c3c0ba0bde..07e70aa3b5 100644 --- a/renku/core/workflow/providers/cwltool.py +++ b/renku/core/workflow/providers/cwltool.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/providers/local.py b/renku/core/workflow/providers/local.py index edbb50485e..3232a5120d 100644 --- a/renku/core/workflow/providers/local.py +++ b/renku/core/workflow/providers/local.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/providers/toil.py b/renku/core/workflow/providers/toil.py index fe40fedd59..7b9d947db7 100644 --- a/renku/core/workflow/providers/toil.py +++ b/renku/core/workflow/providers/toil.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/run.py b/renku/core/workflow/run.py index 1b806f7bd1..43f41525a5 100644 --- a/renku/core/workflow/run.py +++ b/renku/core/workflow/run.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/types.py b/renku/core/workflow/types.py index 10c4c593d4..406e426d66 100644 --- a/renku/core/workflow/types.py +++ b/renku/core/workflow/types.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/update.py b/renku/core/workflow/update.py index 090084dc7e..5f356be79b 100644 --- a/renku/core/workflow/update.py +++ b/renku/core/workflow/update.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2018-2022- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/value_resolution.py b/renku/core/workflow/value_resolution.py index 6a2025a402..84e007576f 100644 --- a/renku/core/workflow/value_resolution.py +++ b/renku/core/workflow/value_resolution.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/core/workflow/workflow_file.py b/renku/core/workflow/workflow_file.py index 82d1517e7c..e76173613d 100644 --- a/renku/core/workflow/workflow_file.py +++ b/renku/core/workflow/workflow_file.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/data/shacl_shape.json b/renku/data/shacl_shape.json index f52cc3f4fb..fb2f78c9d5 100644 --- a/renku/data/shacl_shape.json +++ b/renku/data/shacl_shape.json @@ -159,6 +159,14 @@ } ] }, + { + "path": "schema:image", + "sh:class": { + "@id": "schema:ImageObject" + }, + "sh:pattern": "http(s)?://[^/]+/projects/.+/images/0", + "maxCount": 1 + }, { "nodeKind": "sh:Literal", "path": "renku:templateSource", diff --git a/renku/domain_model/dataset.py b/renku/domain_model/dataset.py index 8d234241ff..005cc58cba 100644 --- a/renku/domain_model/dataset.py +++ b/renku/domain_model/dataset.py @@ -32,8 +32,9 @@ from renku.core.util.git import get_entity_from_revision from renku.core.util.metadata import is_linked_file from renku.core.util.os import get_absolute_path -from renku.core.util.urls import get_path, get_slug +from renku.core.util.urls import get_slug from renku.domain_model.constant import NO_VALUE, NON_EXISTING_ENTITY_CHECKSUM +from renku.domain_model.image import ImageObject from renku.domain_model.project_context import project_context from renku.infrastructure.immutable import Immutable, Slots from renku.infrastructure.persistent import Persistent @@ -44,27 +45,28 @@ from renku.domain_model.provenance.annotation import Annotation -def is_dataset_name_valid(name: str) -> bool: - """Check if name is a valid slug.""" - return name is not None and name == get_slug(name, lowercase=False) +def is_dataset_slug_valid(slug: Optional[str]) -> bool: + """Check if a given slug is valid.""" + # NOTE: Empty string, ``""``, isn't a valid slug. + return slug is not None and slug != "" and slug == get_slug(slug, lowercase=False) -def generate_default_name(title: str, version: Optional[str] = None) -> str: - """Get dataset name.""" +def generate_default_slug(name: str, version: Optional[str] = None) -> str: + """Get dataset slug.""" max_length = 24 - # For compatibility with older versions use title as name if it is valid; otherwise, use encoded title - if is_dataset_name_valid(title): - return title + # For compatibility with older versions use name as slug if it is valid; otherwise, use encoded name + if is_dataset_slug_valid(name): + return name - slug = get_slug(title) - name = slug[:max_length] + slug = get_slug(name) + slug = slug[:max_length] if version: max_version_length = 10 version_slug = get_slug(version)[:max_version_length] - name = f"{name[:-(len(version_slug) + 1)]}_{version_slug}" + slug = f"{slug[:-(len(version_slug) + 1)]}_{version_slug}" - return get_slug(name) + return get_slug(slug) class Url: @@ -173,30 +175,6 @@ def generate_id(name: str) -> str: return f"/languages/{name}" -class ImageObject(Slots): - """Represents a schema.org `ImageObject`.""" - - __slots__ = ("content_url", "id", "position") - - id: str - content_url: str - position: int - - def __init__(self, *, content_url: str, id: str, position: int): - id = get_path(id) - super().__init__(content_url=content_url, position=position, id=id) - - @staticmethod - def generate_id(dataset_id: str, position: int) -> str: - """Generate @id field.""" - return f"{dataset_id}/images/{position}" - - @property - def is_absolute(self): - """Whether content_url is an absolute or relative url.""" - return bool(urlparse(self.content_url).netloc) - - class RemoteEntity(Slots): """Reference to an Entity in a remote repository.""" @@ -389,9 +367,9 @@ def __init__( datadir: Optional[Path] = None, dataset_files: Optional[List[DatasetFile]] = None, date_created:
Optional[datetime] = None, + date_modified: Optional[datetime] = None, date_published: Optional[datetime] = None, date_removed: Optional[datetime] = None, - date_modified: Optional[datetime] = None, derived_from: Optional[Url] = None, description: Optional[str] = None, id: Optional[str] = None, @@ -404,15 +382,25 @@ def __init__( name: Optional[str] = None, project_id: Optional[str] = None, same_as: Optional[Url] = None, + slug: Optional[str] = None, storage: Optional[str] = None, title: Optional[str] = None, version: Optional[str] = None, ): - if not name: - assert title, "Either 'name' or 'title' must be set." - name = generate_default_name(title, version) - - self._validate_name(name) + if not slug: + if title: # NOTE: Old metadata which only has name/title + slug, name, title = name, title, None + elif not name: + raise errors.ParameterError("Either 'slug', 'name' or 'title' must be set.", show_prefix=False) + + # NOTE: At this point we have new metadata with slug/name + slug = slug or generate_default_slug(name, version) + elif title: + # NOTE: When both slug and title are set, copy title to name. This happens when transitioning from the old + # metadata to the new one. + name, title = title, None + + self._validate_slug(slug) self._validate_creator(creators) # if `date_published` is set, we are probably dealing with an imported dataset so `date_created` is not needed @@ -426,7 +414,10 @@ def __init__( self.identifier = identifier or uuid4().hex self.id = id or Dataset.generate_id(identifier=self.identifier) - self.name: str = name + + self.name: Optional[str] = name + self.slug: str = slug + self.title: Optional[str] = None self.creators: List["Person"] = creators or [] # `dataset_files` includes existing files and those that have been removed in the previous version @@ -445,33 +436,41 @@ def __init__( self.project_id: Optional[str] = project_id self.same_as: Optional[Url] = same_as self.storage: Optional[str] = storage - self.title: Optional[str] = title self.version: Optional[str] = version self.annotations: List["Annotation"] = annotations or [] if datadir: self.datadir: Optional[str] = str(datadir) - self.correct_linked_files() + self._correct_linked_files() def __setstate__(self, state): super().__setstate__(state) - self.correct_linked_files() + self._adjust_slug_and_name() + self._correct_linked_files() - def correct_linked_files(self): + def _correct_linked_files(self): """Fix linked dataset files.""" for file in self.dataset_files: file.correct_linked_attribute() + def _adjust_slug_and_name(self): + """Replace name/title with slug/name if needed.""" + slug = getattr(self, "slug", None) + if not slug: # NOTE: Dataset doesn't have new metadata since slug isn't set + self.slug, self.name, self.title = self.name, self.title, None # type: ignore + else: + assert self.title is None, f"Invalid slug: '{slug}', name: '{self.name}', and title: '{self.title}' values" + @staticmethod def generate_id(identifier: str) -> str: """Generate an identifier for Dataset.""" return f"/datasets/{identifier}" @staticmethod - def _validate_name(name): - if not is_dataset_name_valid(name): - raise errors.ParameterError(f"Invalid dataset name: '{name}'") + def _validate_slug(slug: Optional[str]): + if not is_dataset_slug_valid(slug): + raise errors.ParameterError(f"Invalid dataset slug: '{slug}'") @staticmethod def _validate_creator(creators): @@ -507,10 +506,10 @@ def get_datadir(self) -> Path: if self.datadir: return Path(self.datadir) - return Path(os.path.join(project_context.datadir, self.name)) 
+ return Path(os.path.join(project_context.datadir, self.slug)) def __repr__(self) -> str: - return f"<Dataset {self.identifier} {self.name}>" + return f"<Dataset {self.identifier} {self.slug}>" def is_derivation(self) -> bool: """Return if a dataset has correct derived_from.""" @@ -538,7 +537,7 @@ def replace_identifier(self, identifier: Optional[str] = None): `initial_identifier`. """ assert self.derived_from is None, ( - f"Replacing identifier of dataset '{self.name}:{self.identifier}' " + f"Replacing identifier of dataset '{self.slug}:{self.identifier}' " f"that is derived from {self.derived_from.url_id}" ) @@ -563,8 +562,8 @@ def derive_from( ): """Make `self` a derivative of `dataset` and update related fields.""" assert dataset is not None, "Cannot derive from None" - assert self is not dataset, f"Cannot derive from the same dataset '{self.name}:{self.identifier}'" - assert not identifier or self.id != identifier, f"Cannot derive from the same id '{self.name}:{identifier}'" + assert self is not dataset, f"Cannot derive from the same dataset '{self.slug}:{self.identifier}'" + assert not identifier or self.id != identifier, f"Cannot derive from the same id '{self.slug}:{identifier}'" self._assign_new_identifier(identifier) # NOTE: Setting `initial_identifier` is required for migration of broken projects @@ -639,8 +638,8 @@ def update_metadata_from(self, other: "Dataset", exclude=None): "in_language", "keywords", "license", + "name", "same_as", - "title", "version", ] for name in updatable_fields: @@ -662,7 +661,7 @@ def update_metadata_from(self, other: "Dataset", exclude=None): def update_metadata(self, **kwargs): """Updates metadata.""" - editable_attributes = ["creators", "description", "keywords", "title"] + editable_attributes = ["creators", "description", "keywords", "name"] for name, value in kwargs.items(): if name not in editable_attributes: raise errors.ParameterError(f"Cannot edit field: '{name}'") @@ -736,11 +735,11 @@ class AnnotationJson(marshmallow.Schema): class DatasetDetailsJson(marshmallow.Schema): """Serialize a dataset to a response object.""" - name = marshmallow.fields.String(required=True) + slug = marshmallow.fields.String(required=True) version = marshmallow.fields.String(allow_none=True) created_at = marshmallow.fields.String(allow_none=True, attribute="date_created") - title = marshmallow.fields.String() + name = marshmallow.fields.String() creators = marshmallow.fields.List(marshmallow.fields.Nested(DatasetCreatorsJson)) description = marshmallow.fields.String() keywords = marshmallow.fields.List(marshmallow.fields.String()) @@ -773,7 +772,7 @@ class DatasetFileDetailsJson(marshmallow.Schema): is_lfs = marshmallow.fields.Boolean() dataset_id = marshmallow.fields.String() - dataset_name = marshmallow.fields.String() + dataset_slug = marshmallow.fields.String() creators = marshmallow.fields.List(marshmallow.fields.Nested(DatasetCreatorsJson)) @@ -790,8 +789,8 @@ class ImageObjectRequestJson(marshmallow.Schema): file_id = marshmallow.fields.String() content_url = marshmallow.fields.String() - position = marshmallow.fields.Integer() - mirror_locally = marshmallow.fields.Bool(dump_default=False) + position = marshmallow.fields.Integer(load_default=0) + mirror_locally = marshmallow.fields.Bool(load_default=False) def get_file_path_in_dataset(dataset: Dataset, dataset_file: DatasetFile) -> Path: diff --git a/renku/domain_model/image.py b/renku/domain_model/image.py new file mode 100644 index 0000000000..87b0483113 --- /dev/null +++ b/renku/domain_model/image.py @@ -0,0 +1,52 @@ +# Copyright Swiss Data Science Center (SDSC).
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Image model.""" + +from pathlib import Path +from typing import Union +from urllib.parse import urlparse + +from renku.core.util.urls import get_path, is_remote +from renku.infrastructure.immutable import Slots + + +class ImageObject(Slots): + """Represents a schema.org ``ImageObject``.""" + + __slots__ = ("content_url", "id", "position") + + id: str + content_url: str + position: int + + def __init__(self, *, content_url: Union[str, Path], id: str, position: int): + id = get_path(id) + super().__init__(content_url=str(content_url), position=position, id=id) + + @staticmethod + def generate_id(owner_id: str, position: int) -> str: + """Generate @id field.""" + return f"{owner_id}/images/{position}" + + @property + def is_absolute(self): + """Whether content_url is an absolute or relative url.""" + return bool(urlparse(self.content_url).netloc) + + @property + def is_remote(self) -> bool: + """Return True if the URI isn't on the local filesystem.""" + return is_remote(self.content_url) diff --git a/renku/domain_model/project.py b/renku/domain_model/project.py index 397fadda22..52b3ddd697 100644 --- a/renku/domain_model/project.py +++ b/renku/domain_model/project.py @@ -27,6 +27,7 @@ from renku.core.util.git import get_git_user from renku.core.util.os import normalize_to_ascii from renku.domain_model.constant import NO_VALUE +from renku.domain_model.image import ImageObject from renku.domain_model.provenance.agent import Person from renku.domain_model.provenance.annotation import Annotation from renku.version import __minimum_project_version__ @@ -53,6 +54,7 @@ class Project(persistent.Persistent): """Represent a project.""" keywords: List[str] = list() + image: Optional[ImageObject] = None # NOTE: the minimum version of renku to needed to work with a project # This should be bumped on metadata version changes and when we do not forward-compatible on-the-fly migrations @@ -71,6 +73,7 @@ def __init__( template_metadata: Optional[ProjectTemplateMetadata] = None, version: Optional[str] = None, keywords: Optional[List[str]] = None, + image: Optional[ImageObject] = None, ): from renku.core.migration.migrate import SUPPORTED_PROJECT_VERSION @@ -91,6 +94,7 @@ def __init__( self.id: str = id self.version: str = version self.keywords = keywords or [] + self.image = image self.template_metadata: ProjectTemplateMetadata = template_metadata or ProjectTemplateMetadata() @@ -107,6 +111,7 @@ def from_project_context( keywords: Optional[List[str]] = None, custom_metadata: Optional[Dict] = None, creator: Optional[Person] = None, + image: Optional[ImageObject] = None, ) -> "Project": """Create an instance from a path. @@ -119,6 +124,7 @@ def from_project_context( custom_metadata(Optional[Dict]): Custom JSON-LD metadata (when creating a new project) (Default value = None). creator(Optional[Person]): The project creator. 
+ image(Optional[ImageObject]): Project's image/avatar. """ creator = creator or get_git_user(repository=project_context.repository) @@ -141,7 +147,13 @@ def from_project_context( id = cls.generate_id(namespace=namespace, name=name) return cls( - creator=creator, id=id, name=name, description=description, keywords=keywords, annotations=annotations + annotations=annotations, + creator=creator, + description=description, + id=id, + image=image, + keywords=keywords, + name=name, ) @staticmethod diff --git a/renku/domain_model/project_context.py b/renku/domain_model/project_context.py index c545b40752..bc862b7409 100644 --- a/renku/domain_model/project_context.py +++ b/renku/domain_model/project_context.py @@ -31,6 +31,7 @@ DATASET_IMAGES, DEFAULT_DATA_DIR, DOCKERFILE, + IMAGES, LOCK_SUFFIX, POINTERS, RENKU_HOME, @@ -92,6 +93,11 @@ def dataset_images_path(self) -> Path: """Return a ``Path`` instance of Renku dataset metadata folder.""" return self.path / RENKU_HOME / DATASET_IMAGES + @property + def project_image_pathname(self) -> Path: + """Return the path to the project's image file.""" + return self.path / RENKU_HOME / IMAGES / "project" / "0.png" + @property def dockerfile_path(self) -> Path: """Path to the Dockerfile.""" diff --git a/renku/domain_model/session.py b/renku/domain_model/session.py index 1c2cf5d899..4e35da6789 100644 --- a/renku/domain_model/session.py +++ b/renku/domain_model/session.py @@ -30,7 +30,7 @@ class SessionStopStatus(Enum): - """Status code returned when stopping sessions.""" + """Status code returned when stopping/pausing sessions.""" NO_ACTIVE_SESSION = auto() SUCCESSFUL = auto() @@ -61,6 +61,11 @@ def __init__( self.provider = provider self.ssh_enabled = ssh_enabled + @property + def name(self) -> str: + """Return session name which is the same as its id.""" + return self.id + class ISessionProvider(metaclass=ABCMeta): """Abstract class for an interactive session provider.""" @@ -210,3 +215,28 @@ def pre_start_checks(self, **kwargs): def force_build_image(self, **kwargs) -> bool: """Whether we should force build the image directly or check for an existing image first.""" return False + + +class IHibernatingSessionProvider(ISessionProvider): + """Abstract class for an interactive session provider that supports hibernation.""" + + @abstractmethod + def session_pause(self, project_name: str, session_name: Optional[str], **kwargs) -> SessionStopStatus: + """Pause all or a given interactive session. + + Args: + project_name(str): Project's name. + session_name(str, optional): The unique id of the interactive session. + + Returns: + SessionStopStatus: The status of running and paused sessions + """ + + @abstractmethod + def session_resume(self, project_name: str, session_name: Optional[str], **kwargs) -> bool: + """Resume a paused session. + + Args: + project_name(str): Renku project name. + session_name(Optional[str]): The unique id of the interactive session. + """ diff --git a/renku/domain_model/sort.py b/renku/domain_model/sort.py deleted file mode 100644 index 85203083ac..0000000000 --- a/renku/domain_model/sort.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright Swiss Data Science Center (SDSC). A partnership between -# École Polytechnique Fédérale de Lausanne (EPFL) and -# Eidgenössische Technische Hochschule Zürich (ETHZ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Process Git repository.""" - -from collections import deque -from typing import Any, Deque - -GRAY, BLACK = 0, 1 - - -def topological(nodes): - """Return nodes in a topological order.""" - order: Deque[Any] - order, enter, state = deque(), set(nodes), {} - - def dfs(node): - """Visit nodes in depth-first order.""" - state[node] = GRAY - for parent in nodes.get(node, ()): - color = state.get(parent, None) - if color == GRAY: - raise ValueError("cycle") - if color == BLACK: - continue - enter.discard(parent) - dfs(parent) - order.appendleft(node) - state[node] = BLACK - - while enter: - dfs(enter.pop()) - - return order diff --git a/renku/domain_model/workflow/plan.py b/renku/domain_model/workflow/plan.py index a115b5be5f..95b179f200 100644 --- a/renku/domain_model/workflow/plan.py +++ b/renku/domain_model/workflow/plan.py @@ -374,7 +374,7 @@ def set_parameters_from_strings(self, params_strings: List[str]) -> None: def copy(self): """Create a copy of this plan. - Required where a plan is used several times in a workflow but we need to set different values on them. + Required where a plan is used several times in a workflow, but we need to set different values on them. """ return copy.deepcopy(self) diff --git a/renku/infrastructure/__init__.py b/renku/infrastructure/__init__.py index 3a57dd5bfc..00505c22d1 100644 --- a/renku/infrastructure/__init__.py +++ b/renku/infrastructure/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/database.py b/renku/infrastructure/database.py index 10128e7f02..e24ca20c11 100644 --- a/renku/infrastructure/database.py +++ b/renku/infrastructure/database.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -583,6 +582,10 @@ def new_ghost(self, oid, object): class Index(persistent.Persistent): """Database index.""" + # NOTE: If this field isn't None, we use it as the index-attribute instead of ``_attribute``. This is used to avoid + # creating a migration when the index-attribute changes. + _v_main_attribute: Optional[str] = None + def __init__(self, *, name: str, object_type, attribute: Optional[str], key_type=None): """Create an index where keys are extracted using ``attribute`` from an object or a key. 
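The ``_v_`` prefix above follows the ZODB convention for volatile attributes: they are never serialized with the object, which is why overriding the index-attribute this way needs no metadata migration, and why older renku versions reading the same database simply ignore it. A minimal sketch of the pattern, using a simplified stand-in rather than the real ``Index`` class:

.. code-block:: python

    from typing import Any, Dict, Optional


    class SimpleIndex:
        """Toy stand-in for ``Index``: entries are keyed by an object attribute."""

        # Volatile override: when set, it wins over the persisted ``_attribute``.
        _v_main_attribute: Optional[str] = None

        def __init__(self, attribute: str):
            self._attribute = attribute  # persisted with the project metadata
            self._entries: Dict[str, Any] = {}

        def add(self, object: Any) -> None:
            # Prefer the in-memory override so old databases keep working
            attribute = self._v_main_attribute or self._attribute
            self._entries[getattr(object, attribute)] = object


    class Record:
        def __init__(self, name: str, slug: str):
            self.name, self.slug = name, slug


    index = SimpleIndex(attribute="name")  # how old project databases were created
    index._v_main_attribute = "slug"       # what this version of renku keys by
    index.add(Record(name="My Dataset", slug="my-dataset"))
    assert "my-dataset" in index._entries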
@@ -641,6 +644,9 @@ def __setstate__(self, data): def __iter__(self): return self._entries.__iter__() + def __repr__(self) -> str: + return f"<Index {self.name}>" + @property def name(self) -> str: """Return Index's name.""" @@ -748,9 +754,11 @@ def _verify_and_get_key( else: assert key_object is None, f"Index '{self.name}' does not accept 'key_object'" - if self._attribute: + attribute = self._v_main_attribute or self._attribute + + if attribute: key_object = key_object or object - correct_key = get_attribute(key_object, self._attribute) + correct_key = get_attribute(key_object, attribute) if key is not None: if verify: assert key == correct_key, f"Incorrect key for index '{self.name}': '{key}' != '{correct_key}'" diff --git a/renku/infrastructure/gateway/__init__.py b/renku/infrastructure/gateway/__init__.py index 0b546d85f7..0f5eb580f3 100644 --- a/renku/infrastructure/gateway/__init__.py +++ b/renku/infrastructure/gateway/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/gateway/activity_gateway.py b/renku/infrastructure/gateway/activity_gateway.py index 1c71deece0..7e8d970890 100644 --- a/renku/infrastructure/gateway/activity_gateway.py +++ b/renku/infrastructure/gateway/activity_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/gateway/database_gateway.py b/renku/infrastructure/gateway/database_gateway.py index e6c3f82de4..e20dc36866 100644 --- a/renku/infrastructure/gateway/database_gateway.py +++ b/renku/infrastructure/gateway/database_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/gateway/dataset_gateway.py b/renku/infrastructure/gateway/dataset_gateway.py index 35ed139ba3..f2435fd0e5 100644 --- a/renku/infrastructure/gateway/dataset_gateway.py +++ b/renku/infrastructure/gateway/dataset_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ).
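Stepping back to the ``Dataset`` model changes earlier in this diff: the whole slug/name/title transition reduces to one backfill rule, applied both in ``__init__`` and in ``__setstate__``. A self-contained sketch of that rule, written as a hypothetical standalone helper mirroring ``_adjust_slug_and_name``:

.. code-block:: python

    from typing import Optional, Tuple


    def adjust_slug_and_name(
        slug: Optional[str], name: Optional[str], title: Optional[str]
    ) -> Tuple[Optional[str], Optional[str], Optional[str]]:
        """Old metadata's (name, title) becomes new metadata's (slug, name)."""
        if not slug:  # old metadata: ``name`` was the machine-readable identifier
            return name, title, None
        assert title is None, "new metadata must not carry a title"
        return slug, name, None


    # Old-style dataset is remapped: name becomes slug, title becomes name
    assert adjust_slug_and_name(None, "my-dataset", "My Dataset") == ("my-dataset", "My Dataset", None)
    # New-style dataset is left unchanged
    assert adjust_slug_and_name("my-dataset", "My Dataset", None) == ("my-dataset", "My Dataset", None)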
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -29,15 +28,19 @@ class DatasetGateway(IDatasetGateway): """Gateway for dataset database operations.""" + def __init__(self): + # NOTE: Set ``slug`` as the index-attribute for ``datasets`` index in this version of renku + project_context.database["datasets"]._v_main_attribute = "slug" + def get_by_id(self, id: str) -> Dataset: """Get a dataset by id.""" dataset = project_context.database.get_by_id(id) assert isinstance(dataset, Dataset) return dataset - def get_by_name(self, name: str) -> Optional[Dataset]: - """Get a dataset by id.""" - return project_context.database["datasets"].get(name) + def get_by_slug(self, slug: str) -> Optional[Dataset]: + """Get a dataset by slug.""" + return project_context.database["datasets"].get(slug) def get_all_active_datasets(self) -> List[Dataset]: """Return all datasets.""" @@ -49,15 +52,15 @@ def get_provenance_tails(self) -> List[Dataset]: def get_all_tags(self, dataset: Dataset) -> List[DatasetTag]: """Return the list of all tags for a dataset.""" - return list(project_context.database["datasets-tags"].get(dataset.name, [])) + return list(project_context.database["datasets-tags"].get(dataset.slug, [])) @deal.pre(lambda _: _.tag.date_created is None or _.tag.date_created >= project_context.project.date_created) def add_tag(self, dataset: Dataset, tag: DatasetTag): """Add a tag from a dataset.""" - tags: PersistentList = project_context.database["datasets-tags"].get(dataset.name) + tags: PersistentList = project_context.database["datasets-tags"].get(dataset.slug) if not tags: tags = PersistentList() - project_context.database["datasets-tags"].add(tags, key=dataset.name) + project_context.database["datasets-tags"].add(tags, key=dataset.slug) assert tag.dataset_id.value == dataset.id, f"Tag has wrong dataset id: {tag.dataset_id.value} != {dataset.id}" @@ -65,7 +68,7 @@ def add_tag(self, dataset: Dataset, tag: DatasetTag): def remove_tag(self, dataset: Dataset, tag: DatasetTag): """Remove a tag from a dataset.""" - tags: PersistentList = project_context.database["datasets-tags"].get(dataset.name) + tags: PersistentList = project_context.database["datasets-tags"].get(dataset.slug) for t in tags: if t.name == tag.name: tags.remove(t) @@ -81,8 +84,8 @@ def add_or_remove(self, dataset: Dataset) -> None: database = project_context.database if dataset.date_removed: - database["datasets"].pop(dataset.name, None) - database["datasets-tags"].pop(dataset.name, None) + database["datasets"].pop(dataset.slug, None) + database["datasets-tags"].pop(dataset.slug, None) else: database["datasets"].add(dataset) diff --git a/renku/infrastructure/gateway/plan_gateway.py b/renku/infrastructure/gateway/plan_gateway.py index 1b1c2e316c..a0ba248504 100644 --- a/renku/infrastructure/gateway/plan_gateway.py +++ b/renku/infrastructure/gateway/plan_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
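With the ``datasets`` index re-keyed by ``_v_main_attribute``, lookups go through the new slug-based accessor. A short usage sketch, assuming an initialized project context (the slug value is illustrative):

.. code-block:: python

    from renku.infrastructure.gateway.dataset_gateway import DatasetGateway

    gateway = DatasetGateway()  # __init__ re-keys the "datasets" index by slug

    dataset = gateway.get_by_slug("my-dataset")  # replaces get_by_name()
    if dataset is not None:
        print(dataset.name)  # free-form, human-readable name (formerly "title")
        for tag in gateway.get_all_tags(dataset):  # tags are keyed by slug too
            print(tag.name)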
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/gateway/project_gateway.py b/renku/infrastructure/gateway/project_gateway.py index f3d56465bf..22a1545709 100644 --- a/renku/infrastructure/gateway/project_gateway.py +++ b/renku/infrastructure/gateway/project_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/git_merger.py b/renku/infrastructure/git_merger.py index 69ac6b9904..da0f3b6e38 100644 --- a/renku/infrastructure/git_merger.py +++ b/renku/infrastructure/git_merger.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,6 +17,7 @@ import os import shutil +import traceback from json import JSONDecodeError from pathlib import Path from tempfile import mkdtemp @@ -117,6 +117,12 @@ def _setup_worktrees(self, repository): ) ) except Exception: + exc_str = traceback.format_exc(limit=None, chain=True) + if "No such file or directory" in exc_str and "Unable to create" in exc_str: + communication.error( + "Error when trying to sparse checkout worktree. This is likely due to using an old version of " + "git. Please try with a newer version." + ) # NOTE: cleanup worktree try: repository.remove_worktree(worktree_path) diff --git a/renku/infrastructure/immutable.py b/renku/infrastructure/immutable.py index 92c8ece3c2..94f3b8fd13 100644 --- a/renku/infrastructure/immutable.py +++ b/renku/infrastructure/immutable.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/persistent.py b/renku/infrastructure/persistent.py index 42951330a6..2785fde43b 100644 --- a/renku/infrastructure/persistent.py +++ b/renku/infrastructure/persistent.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
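The worktree error handling added to ``GitMerger`` above relies on matching substrings in the formatted traceback rather than on a specific exception type. The pattern in isolation looks roughly like this; ``create_worktree`` is a hypothetical stand-in for the real setup code, and ``print`` stands in for the ``communication`` module:

.. code-block:: python

    import traceback


    def setup_worktree(repository, worktree_path):
        try:
            repository.create_worktree(worktree_path)  # hypothetical helper
        except Exception:
            # format_exc(chain=True) renders the whole exception chain as text,
            # so the two substrings may come from different exceptions in it.
            exc_str = traceback.format_exc(limit=None, chain=True)
            if "No such file or directory" in exc_str and "Unable to create" in exc_str:
                print(
                    "Error when trying to sparse checkout worktree. This is likely "
                    "due to using an old version of git. Please try with a newer version."
                )
            raise  # the real code additionally removes the half-created worktree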
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/repository.py b/renku/infrastructure/repository.py index 28afbdaaeb..4fb9500630 100644 --- a/renku/infrastructure/repository.py +++ b/renku/infrastructure/repository.py @@ -30,7 +30,6 @@ from pathlib import Path from typing import ( Any, - Callable, Dict, Generator, List, @@ -1001,20 +1000,24 @@ def clone_from( branch: Optional[str] = None, recursive: bool = False, depth: Optional[int] = None, - progress: Optional[Callable] = None, + progress: Optional[git.RemoteProgress] = None, no_checkout: bool = False, env: Optional[dict] = None, clone_options: Optional[List[str]] = None, ) -> "Repository": - """Clone a remote repository and create an instance.""" + """Clone a remote repository and create an instance. + + Since this is just a thin wrapper around GitPython, note that the ``branch`` parameter + accepts either a branch name or a tag, but it will not work with a commit SHA. + """ try: repository = git.Repo.clone_from( url=url, to_path=path, - branch=branch, + branch=branch, # NOTE: GitPython will accept a tag or branch here but not a SHA recursive=recursive, depth=depth, - progress=progress, + progress=progress, # type: ignore[arg-type] no_checkout=no_checkout, env=env, multi_options=clone_options, @@ -1283,10 +1286,13 @@ class DiffChangeType(Enum): """Type of change in a ``Diff``.""" ADDED = "A" + COPIED = "C" DELETED = "D" - RENAMED = "R" MODIFIED = "M" + RENAMED = "R" TYPE_CHANGED = "T" + UNMERGED = "U" + UNKNOWN = "X" class Diff(NamedTuple): @@ -1554,6 +1560,11 @@ def reference(self) -> Optional[Reference]: except (git.GitError, TypeError): return None + @property + def detached(self) -> bool: + """True if the reference is to a commit and not a branch.""" + return self._reference.is_detached + class RemoteReference(Reference): """A git remote reference.""" @@ -1681,8 +1692,11 @@ def set_url(self, url: str): _run_git_command(self._repository, "remote", "set-url", self.name, url) @property - def head(self) -> str: + def head(self) -> Optional[str]: """The head commit of the remote.""" + if self._repository.head.is_detached: + return None + self._remote.fetch() return _run_git_command(self._repository, "rev-parse", f"{self._remote.name}/{self._repository.active_branch}") diff --git a/renku/infrastructure/storage/__init__.py b/renku/infrastructure/storage/__init__.py index 938f74f43a..d6a9b83a39 100644 --- a/renku/infrastructure/storage/__init__.py +++ b/renku/infrastructure/storage/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/storage/factory.py b/renku/infrastructure/storage/factory.py index d558270774..436010c53c 100644 --- a/renku/infrastructure/storage/factory.py +++ b/renku/infrastructure/storage/factory.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ).
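A usage sketch of the clarified ``clone_from`` contract, assuming it is exposed as a classmethod as its ``-> "Repository"`` signature suggests (URL, path, and tag are illustrative):

.. code-block:: python

    from renku.infrastructure.repository import Repository

    # A branch name or a tag both work for ``branch``; a bare commit SHA does
    # not, because GitPython forwards the value to ``git clone --branch``.
    repository = Repository.clone_from(
        url="https://github.com/SwissDataScienceCenter/renku-python.git",
        path="/tmp/renku-clone",
        branch="v2.9.0",  # tag; a branch name like "master" works the same way
        depth=1,
    )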
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/infrastructure/storage/rclone.py b/renku/infrastructure/storage/rclone.py index a0be2cffa4..22409fc5fc 100644 --- a/renku/infrastructure/storage/rclone.py +++ b/renku/infrastructure/storage/rclone.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/__init__.py b/renku/ui/__init__.py index 701f6e0b32..1f749428d0 100644 --- a/renku/ui/__init__.py +++ b/renku/ui/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/__init__.py b/renku/ui/api/__init__.py index 54be6858ef..a67dab0c39 100644 --- a/renku/ui/api/__init__.py +++ b/renku/ui/api/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/graph/__init__.py b/renku/ui/api/graph/__init__.py index df40793e59..114771c30e 100644 --- a/renku/ui/api/graph/__init__.py +++ b/renku/ui/api/graph/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/graph/rdf.py b/renku/ui/api/graph/rdf.py index eaab3b81f7..d141a528bd 100644 --- a/renku/ui/api/graph/rdf.py +++ b/renku/ui/api/graph/rdf.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/models/__init__.py b/renku/ui/api/models/__init__.py index 99cb206e4d..be5a45c61f 100644 --- a/renku/ui/api/models/__init__.py +++ b/renku/ui/api/models/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/models/activity.py b/renku/ui/api/models/activity.py index 3442f5fedb..45b2a3d32f 100644 --- a/renku/ui/api/models/activity.py +++ b/renku/ui/api/models/activity.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/models/dataset.py b/renku/ui/api/models/dataset.py index 9963d68af4..09f46c2709 100644 --- a/renku/ui/api/models/dataset.py +++ b/renku/ui/api/models/dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,7 +26,7 @@ datasets = Dataset.list() -You can then access metadata of a dataset like ``name``, ``title``, +You can then access metadata of a dataset like ``name``, ``slug``, ``keywords``, etc. To get the list of files inside a dataset use ``files`` property: @@ -62,7 +61,7 @@ class Dataset: "keywords", "license", "name", - "title", + "slug", "url", "version", ] diff --git a/renku/ui/api/models/parameter.py b/renku/ui/api/models/parameter.py index acc887d336..643b54ad4e 100644 --- a/renku/ui/api/models/parameter.py +++ b/renku/ui/api/models/parameter.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/models/plan.py b/renku/ui/api/models/plan.py index 587e530ae8..c629d0ffbe 100644 --- a/renku/ui/api/models/plan.py +++ b/renku/ui/api/models/plan.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/models/project.py b/renku/ui/api/models/project.py index 2d5bd5cc9a..c7f0bcdb1f 100644 --- a/renku/ui/api/models/project.py +++ b/renku/ui/api/models/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/api/util.py b/renku/ui/api/util.py index f344ffc654..7d85573870 100644 --- a/renku/ui/api/util.py +++ b/renku/ui/api/util.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/__init__.py b/renku/ui/cli/__init__.py index 3a3ad44ab8..3effa698bd 100644 --- a/renku/ui/cli/__init__.py +++ b/renku/ui/cli/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/__main__.py b/renku/ui/cli/__main__.py index 957e0fdecd..106783278b 100644 --- a/renku/ui/cli/__main__.py +++ b/renku/ui/cli/__main__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/clone.py b/renku/ui/cli/clone.py index 6330d34cd2..9e09efe294 100644 --- a/renku/ui/cli/clone.py +++ b/renku/ui/cli/clone.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/config.py b/renku/ui/cli/config.py index c8459a8102..2b644521cb 100644 --- a/renku/ui/cli/config.py +++ b/renku/ui/cli/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/dataset.py b/renku/ui/cli/dataset.py index 956592eb1b..4bd1be3b45 100644 --- a/renku/ui/cli/dataset.py +++ b/renku/ui/cli/dataset.py @@ -58,8 +58,8 @@ .. code-block:: console - $ renku dataset ls --columns id,name,date_created,creators - ID NAME CREATED CREATORS + $ renku dataset ls --columns id,slug,date_created,creators + ID SLUG CREATED CREATORS -------- ------------- ------------------- --------- 0ad1cb9a some-dataset 2020-03-19 16:39:46 sam 9436e36c my-dataset 2020-02-28 16:48:09 sam @@ -74,7 +74,7 @@ .. 
code-block:: console $ renku dataset show some-dataset - Name: some-dataset + Slug: some-dataset Created: 2020-12-09 13:52:06.640778+00:00 Creator(s): John Doe [SDSC] Keywords: Dataset, Data @@ -82,7 +82,7 @@ [ {...} ] - Title: Some Dataset + Name: Some Dataset Description: Just some dataset @@ -242,7 +242,7 @@ datasets' metadata accordingly. You can automatically add new files from the dataset's data directory by using the ``--check-data-directory`` flag. -You can limit the scope of updated files by specifying dataset names, using +You can limit the scope of updated files by specifying dataset slugs, using ``--include`` and ``--exclude`` to filter based on file names, or using ``--creators`` to filter based on creators. For example, the following command updates only CSV files from ``my-dataset``: @@ -316,7 +316,7 @@ .. code-block:: console $ renku dataset import \ - https://renkulab.io/projects///datasets/ + https://renkulab.io/projects///datasets/ or @@ -386,7 +386,7 @@ To export to a Dataverse provider you must pass Dataverse server's URL and the name of the parent dataverse where the dataset will be exported to. -Server's URL is stored in your Renku setting and you don't need to pass it +Server's URL is stored in your Renku setting, and you don't need to pass it every time. To export a dataset to OLOS you must pass the OLOS server's base URL and @@ -411,7 +411,7 @@ This also creates a copy of dataset's metadata at the given version and puts it in ``/METADATA.yml``. If a destination path is not given to this command, it creates a directory in project's data directory using dataset's -name and version: ``/-``. Export fails if the +slug and version: ``/-``. Export fails if the destination directory is not empty. .. note:: See our `dataset versioning tutorial @@ -423,7 +423,7 @@ .. code-block:: console $ renku dataset ls-files - DATASET NAME ADDED PATH LFS + DATASET SLUG ADDED PATH LFS ------------------- ------------------- ----------------------------- ---- my-dataset 2020-02-28 16:48:09 data/my-dataset/add-me * my-dataset 2020-02-28 16:49:02 data/my-dataset/weather/file1 * @@ -441,8 +441,8 @@ .. code-block:: console - $ renku dataset ls-files --columns name,creators, path - DATASET NAME CREATORS PATH + $ renku dataset ls-files --columns slug,creators, path + DATASET SLUG CREATORS PATH ------------------- --------- ----------------------------- my-dataset sam data/my-dataset/add-me my-dataset sam data/my-dataset/weather/file1 @@ -460,7 +460,7 @@ .. 
code-block:: console $ renku dataset ls-files --include "file*" --exclude "file3" - DATASET NAME ADDED PATH LFS + DATASET SLUG ADDED PATH LFS ------------------- ------------------- ----------------------------- ---- my-dataset 2020-02-28 16:49:02 data/my-dataset/weather/file1 * my-dataset 2020-02-28 16:49:02 data/my-dataset/weather/file2 * @@ -517,6 +517,7 @@ from renku.command.format.dataset_files import DATASET_FILES_COLUMNS, DATASET_FILES_FORMATS from renku.command.format.dataset_tags import DATASET_TAGS_FORMATS from renku.command.format.datasets import DATASETS_COLUMNS, DATASETS_FORMATS +from renku.core import errors from renku.domain_model.constant import NO_VALUE, NoValueType from renku.ui.cli.utils.click import shell_complete_datasets @@ -534,7 +535,7 @@ def dataset(): "-c", "--columns", type=click.STRING, - default="id,name,title,version,datadir,storage", + default="id,slug,name,version,datadir,storage", metavar="", help="Comma-separated list of column to display: {}.".format(", ".join(DATASETS_COLUMNS.keys())), show_default=True, @@ -552,8 +553,9 @@ def list_dataset(format, columns): @dataset.command() -@click.argument("name") -@click.option("-t", "--title", default=None, type=click.STRING, help="Title of the dataset.") +@click.argument("slug") +@click.option("-t", "--title", default=None, hidden=True, type=click.STRING, help="Name of the dataset.") +@click.option("-n", "--name", default=None, type=click.STRING, help="Name of the dataset.") @click.option("-d", "--description", default=None, type=click.STRING, help="Dataset's description.") @click.option( "-c", @@ -576,9 +578,9 @@ def list_dataset(format, columns): "--datadir", default=None, type=click.Path(), - help="Dataset's data directory (defaults to 'data/').", + help="Dataset's data directory (defaults to 'data/').", ) -def create(name, title, description, creators, metadata, keyword, storage, datadir): +def create(slug, title, name, description, creators, metadata, keyword, storage, datadir): """Create an empty dataset in the current repo.""" from renku.command.dataset import create_dataset_command from renku.core.util.metadata import construct_creators @@ -589,6 +591,12 @@ def create(name, title, description, creators, metadata, keyword, storage, datad custom_metadata = None + if title: + if name: + raise errors.ParameterError("Cannot pass both 'title' and 'name'", show_prefix=False) + communicator.warn("The '-t/--title' flags are deprecated. 
Use '-n/--name' instead.") + name = title + if metadata: custom_metadata = json.loads(Path(metadata).read_text()) @@ -600,8 +608,8 @@ def create(name, title, description, creators, metadata, keyword, storage, datad .with_communicator(communicator) .build() .execute( + slug=slug, name=name, - title=title, description=description, creators=creators, keywords=keyword, @@ -613,13 +621,14 @@ def create(name, title, description, creators, metadata, keyword, storage, datad new_dataset = result.output - click.echo(f'Use the name "{new_dataset.name}" to refer to this dataset.') + click.echo(f'Use the slug "{new_dataset.slug}" to refer to this dataset.') click.secho("OK", fg=color.GREEN) @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) -@click.option("-t", "--title", default=NO_VALUE, type=click.UNPROCESSED, help="Title of the dataset.") +@click.argument("slug", shell_complete=shell_complete_datasets) +@click.option("-t", "--title", default=NO_VALUE, hidden=True, type=click.UNPROCESSED, help="Name of the dataset.") +@click.option("-n", "--name", default=NO_VALUE, type=click.UNPROCESSED, help="Name of the dataset.") @click.option("-d", "--description", default=NO_VALUE, type=click.UNPROCESSED, help="Dataset's description.") @click.option( "-c", @@ -660,12 +669,20 @@ def create(name, title, description, creators, metadata, keyword, storage, datad type=click.Choice(["keywords", "k", "images", "i", "metadata", "m"]), help="Remove keywords from dataset.", ) -def edit(name, title, description, creators, metadata, metadata_source, keywords, unset): +def edit(slug, title, name, description, creators, metadata, metadata_source, keywords, unset): """Edit dataset metadata.""" from renku.command.dataset import edit_dataset_command from renku.core.util.metadata import construct_creators from renku.ui.cli.utils.callback import ClickCallback + communicator = ClickCallback() + + if title != NO_VALUE: + if name != NO_VALUE: + raise errors.ParameterError("Cannot pass both 'title' and 'name'", show_prefix=False) + communicator.warn("The '-t/--title' flags are deprecated. Use '-n/--name' instead.") + name = title + images: Union[None, NoValueType] = NO_VALUE if list(creators) == [NO_VALUE]: @@ -710,8 +727,8 @@ def edit(name, title, description, creators, metadata, metadata_source, keywords edit_dataset_command() .build() .execute( + slug=slug, name=name, - title=title, description=description, creators=creators, keywords=keywords, @@ -725,28 +742,26 @@ def edit(name, title, description, creators, metadata, metadata_source, keywords click.echo( "Nothing to update. 
" "Check available fields with `renku dataset edit --help`\n\n" - 'Hint: `renku dataset edit --title "new title"`' + 'Hint: `renku dataset edit --name "new name"`' ) else: click.echo("Successfully updated: {}.".format(", ".join(updated.keys()))) if no_email_warnings: - click.echo( - ClickCallback.WARNING + "No email or wrong format for: " + ", ".join(cast(List[str], no_email_warnings)) - ) + communicator.warn("No email or wrong format for: " + ", ".join(cast(List[str], no_email_warnings))) @dataset.command("show") @click.option("-t", "--tag", default=None, type=click.STRING, help="Tag for which to show dataset metadata.") -@click.argument("name", shell_complete=shell_complete_datasets) -def show(tag, name): +@click.argument("slug", shell_complete=shell_complete_datasets) +def show(tag, slug): """Show metadata of a dataset.""" from renku.command.dataset import show_dataset_command from renku.ui.cli.utils.terminal import print_markdown - result = show_dataset_command().build().execute(name=name, tag=tag) + result = show_dataset_command().build().execute(slug=slug, tag=tag) ds = result.output - click.echo(click.style("Name: ", bold=True, fg=color.MAGENTA) + click.style(ds["name"], bold=True)) + click.echo(click.style("Slug: ", bold=True, fg=color.MAGENTA) + click.style(ds["slug"], bold=True)) click.echo(click.style("Created: ", bold=True, fg=color.MAGENTA) + (ds.get("created_at", "") or "")) click.echo(click.style("Data Directory: ", bold=True, fg=color.MAGENTA) + str(ds.get("data_directory", "") or "")) @@ -768,13 +783,13 @@ def show(tag, name): if ds["annotations"]: click.echo(json.dumps(ds.get("annotations", ""), indent=2)) - click.echo(click.style("Title: ", bold=True, fg=color.MAGENTA) + click.style(ds.get("title", ""), bold=True)) + click.echo(click.style("Name: ", bold=True, fg=color.MAGENTA) + click.style(ds.get("name", ""), bold=True)) click.echo(click.style("Description: ", bold=True, fg=color.MAGENTA)) print_markdown(ds.get("description") or "") -def add_provider_options(*param_decls, **attrs): +def add_provider_options(*_, **__): """Sets dataset export provider option groups on the dataset add command.""" from renku.core.dataset.providers.factory import ProviderFactory from renku.ui.cli.utils.click import create_options @@ -784,7 +799,7 @@ def add_provider_options(*param_decls, **attrs): @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.argument("urls", type=click.Path(), nargs=-1) @click.option("-f", "--force", is_flag=True, help="Allow adding otherwise ignored files.") @click.option("-o", "--overwrite", is_flag=True, help="Overwrite existing files.") @@ -795,10 +810,10 @@ def add_provider_options(*param_decls, **attrs): "--datadir", default=None, type=click.Path(), - help="Dataset's data directory (defaults to 'data/').", + help="Dataset's data directory (defaults to 'data/').", ) @add_provider_options() -def add(name, urls, force, overwrite, create, destination, datadir, **kwargs): +def add(slug, urls, force, overwrite, create, destination, datadir, **kwargs): """Add data to a dataset.""" from renku.command.dataset import add_to_dataset_command from renku.ui.cli.utils.callback import ClickCallback @@ -810,7 +825,7 @@ def add(name, urls, force, overwrite, create, destination, datadir, **kwargs): .build() .execute( urls=list(urls), - dataset_name=name, + dataset_slug=slug, force=force, overwrite=overwrite, create=create, @@ -822,13 +837,13 @@ def add(name, urls, force, 
overwrite, create, destination, datadir, **kwargs): dataset = result.output if dataset.storage: - communicator.info(f"To download files from the remote storage use 'renku dataset pull {dataset.name}'") + communicator.info(f"To download files from the remote storage use 'renku dataset pull {dataset.slug}'") click.secho("OK", fg=color.GREEN) @dataset.command("ls-files") -@click.argument("names", nargs=-1, shell_complete=shell_complete_datasets) +@click.argument("slugs", nargs=-1, shell_complete=shell_complete_datasets) @click.option("-t", "--tag", default=None, type=click.STRING, help="Tag for which to show dataset files.") @click.option( "--creators", @@ -846,12 +861,12 @@ def add(name, urls, force, overwrite, create, destination, datadir, **kwargs): "-c", "--columns", type=click.STRING, - default="dataset_name,path,size,added,lfs", + default="dataset_slug,path,size,added,lfs", metavar="", help="Comma-separated list of column to display: {}.".format(", ".join(DATASET_FILES_COLUMNS.keys())), show_default=True, ) -def ls_files(names, tag, creators, include, exclude, format, columns): +def ls_files(slugs, tag, creators, include, exclude, format, columns): """List files in dataset.""" from renku.command.dataset import list_files_command @@ -862,18 +877,18 @@ def ls_files(names, tag, creators, include, exclude, format, columns): list_files_command() .lock_dataset() .build() - .execute(datasets=names, tag=tag, creators=creators, include=include, exclude=exclude) + .execute(datasets=slugs, tag=tag, creators=creators, include=include, exclude=exclude) ) click.echo(DATASET_FILES_FORMATS[format](result.output, columns=columns)) @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.option("-I", "--include", multiple=True, help="Include files matching given pattern.") @click.option("-X", "--exclude", multiple=True, help="Exclude files matching given pattern.") @click.option("-y", "--yes", is_flag=True, help="Confirm unlinking of all files.") -def unlink(name, include, exclude, yes): +def unlink(slug, include, exclude, yes): """Remove matching files from a dataset.""" from renku.command.dataset import file_unlink_command from renku.core import errors @@ -888,59 +903,59 @@ def unlink(name, include, exclude, yes): communicator = ClickCallback() file_unlink_command().with_communicator(communicator).build().execute( - name=name, include=include, exclude=exclude, yes=yes + slug=slug, include=include, exclude=exclude, yes=yes ) click.secho("OK", fg=color.GREEN) @dataset.command("rm") -@click.argument("name") -def remove(name): +@click.argument("slug") +def remove(slug): """Delete a dataset.""" from renku.command.dataset import remove_dataset_command - remove_dataset_command().build().execute(name) + remove_dataset_command().build().execute(slug) click.secho("OK", fg=color.GREEN) @dataset.command("tag") -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.argument("tag") @click.option("-d", "--description", default="", help="A description for this tag") @click.option("-f", "--force", is_flag=True, help="Allow overwriting existing tags.") -def tag(name, tag, description, force): +def tag(slug, tag, description, force): """Create a tag for a dataset.""" from renku.command.dataset import add_dataset_tag_command - add_dataset_tag_command().build().execute(dataset_name=name, tag=tag, description=description, force=force) + 
add_dataset_tag_command().build().execute(dataset_slug=slug, tag=tag, description=description, force=force) click.secho("OK", fg=color.GREEN) @dataset.command("rm-tags") -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.argument("tags", nargs=-1) -def remove_tags(name, tags): +def remove_tags(slug, tags): """Remove tags from a dataset.""" from renku.command.dataset import remove_dataset_tags_command - remove_dataset_tags_command().build().execute(dataset_name=name, tags=tags) + remove_dataset_tags_command().build().execute(dataset_slug=slug, tags=tags) click.secho("OK", fg=color.GREEN) @dataset.command("ls-tags") -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.option( "--format", type=click.Choice(list(DATASET_TAGS_FORMATS.keys())), default="tabular", help="Choose an output format." ) -def ls_tags(name, format): +def ls_tags(slug, format): """List all tags of a dataset.""" from renku.command.dataset import list_tags_command - result = list_tags_command().lock_dataset().build().execute(dataset_name=name, format=format) + result = list_tags_command().lock_dataset().build().execute(dataset_slug=slug, format=format) click.echo(result.output) -def export_provider_argument(*param_decls, **attrs): +def export_provider_argument(*_, **__): """Sets dataset export provider argument on the dataset export command.""" def wrapper(f): @@ -956,7 +971,7 @@ def get_providers_names(): return wrapper -def export_provider_options(*param_decls, **attrs): +def export_provider_options(*_, **__): """Sets dataset export provider option groups on the dataset export command.""" from renku.core.dataset.providers.factory import ProviderFactory from renku.ui.cli.utils.click import create_options @@ -966,11 +981,11 @@ def export_provider_options(*param_decls, **attrs): @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @export_provider_argument() @click.option("-t", "--tag", help="Dataset tag to export") @export_provider_options() -def export(name, provider, tag, **kwargs): +def export(slug, provider, tag, **kwargs): """Export data to 3rd party provider.""" from renku.command.dataset import export_dataset_command from renku.core import errors @@ -979,7 +994,7 @@ def export(name, provider, tag, **kwargs): try: communicator = ClickCallback() export_dataset_command().lock_dataset().with_communicator(communicator).build().execute( - name=name, provider_name=provider, tag=tag, **kwargs + slug=slug, provider_name=provider, tag=tag, **kwargs ) except (ValueError, errors.InvalidAccessToken, errors.DatasetNotFound, errors.RequestError) as e: raise click.BadParameter(str(e)) @@ -987,7 +1002,7 @@ def export(name, provider, tag, **kwargs): click.secho("OK", fg=color.GREEN) -def import_provider_options(*param_decls, **attrs): +def import_provider_options(*_, **__): """Sets dataset import provider option groups on the dataset import command.""" from renku.core.dataset.providers.factory import ProviderFactory from renku.ui.cli.utils.click import create_options @@ -998,17 +1013,18 @@ def import_provider_options(*param_decls, **attrs): @dataset.command("import") @click.argument("uri") -@click.option("--short-name", "--name", "name", default=None, help="A convenient name for dataset.") +@click.option("--short-name", "--name", "name", hidden=True, default=None, help="A 
slug for dataset.") +@click.option("-s", "--slug", "slug", default=None, help="A slug for dataset.") @click.option("-x", "--extract", is_flag=True, help="Extract files before importing to dataset.") @click.option("-y", "--yes", is_flag=True, help="Bypass download confirmation.") @click.option( "--datadir", default=None, type=click.Path(), - help="Dataset's data directory (defaults to 'data/').", + help="Dataset's data directory (defaults to 'data/').", ) @import_provider_options() -def import_(uri, name, extract, yes, datadir, **kwargs): +def import_(uri, slug, extract, yes, datadir, name, **kwargs): """Import data from a 3rd party provider or another renku project. Supported providers: [Dataverse, Renku, Zenodo] @@ -1017,8 +1033,15 @@ def import_(uri, name, extract, yes, datadir, **kwargs): from renku.ui.cli.utils.callback import ClickCallback communicator = ClickCallback() + + if name: + if slug: + raise errors.ParameterError("Cannot pass both 'slug' and 'name'", show_prefix=False) + communicator.warn("The '--short-name/--name' flags are deprecated. Use '-s/--slug' instead.") + slug = name + import_dataset_command().with_communicator(communicator).build().execute( - uri=uri, name=name, extract=extract, yes=yes, datadir=datadir, **kwargs + uri=uri, slug=slug, extract=extract, yes=yes, datadir=datadir, **kwargs ) click.secho(" " * 79 + "\r", nl=False) @@ -1027,7 +1050,7 @@ def import_(uri, name, extract, yes, datadir, **kwargs): @dataset.command() @click.pass_context -@click.argument("names", nargs=-1, shell_complete=shell_complete_datasets) +@click.argument("slugs", nargs=-1, shell_complete=shell_complete_datasets) @click.option( "--creators", help="Filter files which where authored by specific creators. Multiple creators are specified by comma.", @@ -1052,7 +1075,7 @@ def import_(uri, name, extract, yes, datadir, **kwargs): ) def update( ctx, - names, + slugs, creators, include, exclude, @@ -1079,11 +1102,11 @@ def update( elif external: communicator.warn("'-e/--external' argument is deprecated") - if not update_all and not names and not include and not exclude and not dry_run: - raise errors.ParameterError("Either NAMES, -a/--all, -n/--dry-run, or --include/--exclude should be specified") + if not update_all and not slugs and not include and not exclude and not dry_run: + raise errors.ParameterError("Either SLUGS, -a/--all, -n/--dry-run, or --include/--exclude should be specified") - if names and update_all: - raise errors.ParameterError("Cannot pass dataset names with -a/--all") + if slugs and update_all: + raise errors.ParameterError("Cannot pass dataset slugs with -a/--all") elif (include or exclude) and update_all: raise errors.ParameterError("Cannot pass --include/--exclude with -a/--all") @@ -1092,7 +1115,7 @@ def update( .with_communicator(communicator) .build() .execute( - names=list(names), + slugs=list(slugs), creators=creators, include=include, exclude=exclude, @@ -1113,7 +1136,7 @@ def update( def get_dataset_files(records): from renku.command.format.tabulate import tabulate - columns = {"path": ("path", None), "dataset": ("dataset.name", "dataset"), "external": ("external", None)} + columns = {"path": ("path", None), "dataset": ("dataset.slug", "dataset"), "external": ("external", None)} return tabulate(collection=records, columns="path,dataset,external", columns_mapping=columns) if not datasets and not dataset_files: @@ -1122,13 +1145,13 @@ def get_dataset_files(records): return if datasets: - ds_names = sorted([d.name for d in datasets]) + ds_slugs = sorted([d.slug for d 
in datasets]) if plain: - ds_names = [f"d {n}" for n in ds_names] - click.echo(os.linesep.join(ds_names) + os.linesep) + ds_slugs = [f"d {n}" for n in ds_slugs] + click.echo(os.linesep.join(ds_slugs) + os.linesep) else: - names = "\n\t".join(ds_names) - click.echo(f"The following imported datasets will be updated:\n\t{names}\n") + slugs = "\n\t".join(ds_slugs) + click.echo(f"The following imported datasets will be updated:\n\t{slugs}\n") if not dataset_files: return @@ -1136,7 +1159,7 @@ def get_dataset_files(records): files = [f for f in dataset_files if not f.deleted] if files: if plain: - files = [f"f {f.path} {f.dataset.name}" for f in files] + files = [f"f {f.path} {f.dataset.slug}" for f in files] click.echo(os.linesep.join(files) + os.linesep) else: files = get_dataset_files(files) @@ -1145,7 +1168,7 @@ def get_dataset_files(records): deleted_files = [f for f in dataset_files if f.deleted] if deleted_files: if plain: - files = [f"r {f.path} {f.dataset.name}" for f in deleted_files] + files = [f"r {f.path} {f.dataset.slug}" for f in deleted_files] click.echo(os.linesep.join(files)) else: files = get_dataset_files(deleted_files) @@ -1156,7 +1179,7 @@ def get_dataset_files(records): @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.option( "-l", "--location", @@ -1164,17 +1187,17 @@ def get_dataset_files(records): type=click.Path(exists=False, file_okay=False, writable=True), help="A directory to copy data to, instead of the dataset's data directory.", ) -def pull(name, location): +def pull(slug, location): """Pull data from a cloud storage.""" from renku.command.dataset import pull_cloud_storage_command from renku.ui.cli.utils.callback import ClickCallback communicator = ClickCallback() - pull_cloud_storage_command().with_communicator(communicator).build().execute(name=name, location=location) + pull_cloud_storage_command().with_communicator(communicator).build().execute(slug=slug, location=location) @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) +@click.argument("slug", shell_complete=shell_complete_datasets) @click.option( "-e", "--existing", @@ -1184,7 +1207,7 @@ def pull(name, location): ) @click.option("-u", "--unmount", is_flag=True, help="Unmount dataset's backend storage.") @click.option("-y", "--yes", is_flag=True, help="No prompt when removing non-empty dataset's data directory.") -def mount(name, existing, unmount, yes): +def mount(slug, existing, unmount, yes): """Mount a cloud storage in the dataset's data directory.""" from renku.command.dataset import mount_cloud_storage_command from renku.ui.cli.utils.callback import ClickCallback @@ -1192,16 +1215,16 @@ def mount(name, existing, unmount, yes): command = mount_cloud_storage_command(unmount=unmount).with_communicator(ClickCallback()).build() if unmount: - command.execute(name=name) + command.execute(slug=slug) else: - command.execute(name=name, existing=existing, yes=yes) + command.execute(slug=slug, existing=existing, yes=yes) @dataset.command() -@click.argument("name", shell_complete=shell_complete_datasets) -def unmount(name): +@click.argument("slug", shell_complete=shell_complete_datasets) +def unmount(slug): """Unmount a backend storage in the dataset's data directory.""" from renku.command.dataset import unmount_cloud_storage_command from renku.ui.cli.utils.callback import ClickCallback - 
unmount_cloud_storage_command().with_communicator(ClickCallback()).build().execute(name=name) + unmount_cloud_storage_command().with_communicator(ClickCallback()).build().execute(slug=slug) diff --git a/renku/ui/cli/doctor.py b/renku/ui/cli/doctor.py index e522ddbb56..6aad9b1d74 100644 --- a/renku/ui/cli/doctor.py +++ b/renku/ui/cli/doctor.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/exception_handler.py b/renku/ui/cli/exception_handler.py index 7f65b4adae..54181a949d 100644 --- a/renku/ui/cli/exception_handler.py +++ b/renku/ui/cli/exception_handler.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/gc.py b/renku/ui/cli/gc.py index 83b7859203..a56e307015 100644 --- a/renku/ui/cli/gc.py +++ b/renku/ui/cli/gc.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/githooks.py b/renku/ui/cli/githooks.py index 1902151395..73bc869252 100644 --- a/renku/ui/cli/githooks.py +++ b/renku/ui/cli/githooks.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/graph.py b/renku/ui/cli/graph.py index e751db3b84..947f616629 100644 --- a/renku/ui/cli/graph.py +++ b/renku/ui/cli/graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/init.py b/renku/ui/cli/init.py index 01d8ffcf6c..dd2f06ecee 100644 --- a/renku/ui/cli/init.py +++ b/renku/ui/cli/init.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -231,6 +230,7 @@ def resolve_data_directory(data_dir, path): type=click.Path(writable=True, file_okay=False), help="Data directory within the project", ) +@click.option("-i", "--image", default=None, type=str, help="Path or URL to project's avatar/image.") @click.option("-t", "--template-id", help="Provide the id of the template to use.") @click.option("-s", "--template-source", help="Provide the templates repository url or path.") @click.option( @@ -268,6 +268,7 @@ def init( path, name, description, + image, keyword, template_id, template_source, @@ -282,6 +283,8 @@ def init( ): """Initialize a project in PATH. Default is the current path.""" from renku.command.init import init_project_command + from renku.core.constant import FILESYSTEM_ROOT + from renku.core.image import ImageObjectRequest from renku.core.util.git import check_global_git_user_is_configured from renku.ui.cli.utils.callback import ClickCallback @@ -300,6 +303,8 @@ def init( if metadata: custom_metadata = json.loads(Path(metadata).read_text()) + image_request = ImageObjectRequest(content_url=image, safe_image_paths=[FILESYSTEM_ROOT]) if image else None + communicator = ClickCallback() init_project_command().with_communicator(communicator).build().execute( external_storage_requested=external_storage_requested, @@ -307,6 +312,7 @@ def init( name=name, description=description, keywords=keyword, + image_request=image_request, template_id=template_id, template_source=template_source, template_ref=template_ref, diff --git a/renku/ui/cli/log.py b/renku/ui/cli/log.py index 1d8552bcc4..7077797d8f 100644 --- a/renku/ui/cli/log.py +++ b/renku/ui/cli/log.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -54,7 +53,7 @@ Dataset testset Date: 2022-02-03T11:26:55+01:00 Changes: created - Title set to: testset + Name set to: testset Creators modified: + John Doe @@ -148,8 +147,8 @@ def _print_dataset_log(log_entry: DatasetLogViewModel) -> str: if log_entry.details.source: results.append(style_key("Source: ") + log_entry.details.source) - if log_entry.details.title_changed: - results.append(style_key("Title set to: ") + log_entry.details.title_changed) + if log_entry.details.name_changed: + results.append(style_key("Name set to: ") + log_entry.details.name_changed) if log_entry.details.description_changed: results.append(style_key("Description set to: ") + log_entry.details.description_changed) diff --git a/renku/ui/cli/login.py b/renku/ui/cli/login.py index 39a335a8c4..4ce5965faf 100644 --- a/renku/ui/cli/login.py +++ b/renku/ui/cli/login.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/mergetool.py b/renku/ui/cli/mergetool.py index a59a8ae771..feddcef5a5 100644 --- a/renku/ui/cli/mergetool.py +++ b/renku/ui/cli/mergetool.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/migrate.py b/renku/ui/cli/migrate.py index a24bc1f9d4..f94a2f4cbe 100644 --- a/renku/ui/cli/migrate.py +++ b/renku/ui/cli/migrate.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/move.py b/renku/ui/cli/move.py index 3b2f61e71e..81f3526fb4 100644 --- a/renku/ui/cli/move.py +++ b/renku/ui/cli/move.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/project.py b/renku/ui/cli/project.py index 695f9e6447..ec9faf1d1d 100644 --- a/renku/ui/cli/project.py +++ b/renku/ui/cli/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -33,11 +32,12 @@ import json from pathlib import Path +from typing import Optional, Union import click import renku.ui.cli.utils.color as color -from renku.domain_model.constant import NO_VALUE +from renku.domain_model.constant import NO_VALUE, NoValueType from renku.ui.cli.utils.callback import ClickCallback @@ -59,6 +59,7 @@ def project(): type=click.UNPROCESSED, help="Creator's name, email, and affiliation. 
Accepted format is 'Forename Surname [affiliation]'.", ) +@click.option("-i", "--image", default=NO_VALUE, type=click.UNPROCESSED, help="Path or URL to project's avatar/image.") @click.option( "-m", "--metadata", @@ -71,8 +72,8 @@ def project(): "--unset", default=[], multiple=True, - type=click.Choice(["keywords", "k", "metadata", "m"]), - help="Remove keywords from dataset.", + type=click.Choice(["keywords", "k", "metadata", "m", "description", "d", "image", "i"]), + help="Remove keywords, metadata, description, or image from project.", ) @click.option( "--metadata-source", @@ -80,9 +81,11 @@ def project(): default=NO_VALUE, help="Set the source field in the metadata when editing it if not provided, then the default is 'renku'.", ) -def edit(description, keywords, creators, metadata, unset, metadata_source): +def edit(description, keywords, creators, image, metadata, unset, metadata_source): """Edit project metadata.""" from renku.command.project import edit_project_command + from renku.core.constant import FILESYSTEM_ROOT + from renku.core.image import ImageObjectRequest if list(creators) == [NO_VALUE]: creators = NO_VALUE @@ -100,6 +103,20 @@ def edit(description, keywords, creators, metadata, unset, metadata_source): raise click.UsageError("Cant use '--metadata' together with unsetting metadata") metadata = None + if "d" in unset or "description" in unset: + if description is not NO_VALUE: + raise click.UsageError("Can't use '--description' together with unsetting description") + description = None + + if "i" in unset or "image" in unset: + if image is not NO_VALUE: + raise click.UsageError("Can't use '--image' together with unsetting image") + image_request: Optional[Union[ImageObjectRequest, NoValueType]] = None + elif image is not NO_VALUE: + image_request = ImageObjectRequest(content_url=image, safe_image_paths=[FILESYSTEM_ROOT]) + else: + image_request = NO_VALUE + if metadata_source is not NO_VALUE and metadata is NO_VALUE: raise click.UsageError("The '--metadata-source' option can only be used with the '--metadata' flag") @@ -122,6 +139,7 @@ def edit(description, keywords, creators, metadata, unset, metadata_source): description=description, creator=creators, keywords=keywords, + image_request=image_request, custom_metadata=custom_metadata, custom_metadata_source=metadata_source, ) diff --git a/renku/ui/cli/remove.py b/renku/ui/cli/remove.py index 5be872fa0d..530d4e7d3e 100644 --- a/renku/ui/cli/remove.py +++ b/renku/ui/cli/remove.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/rerun.py b/renku/ui/cli/rerun.py index 0504a11627..7dea31fdb8 100644 --- a/renku/ui/cli/rerun.py +++ b/renku/ui/cli/rerun.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ).
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/rollback.py b/renku/ui/cli/rollback.py index 0e3e0f4142..28edc9d220 100644 --- a/renku/ui/cli/rollback.py +++ b/renku/ui/cli/rollback.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/run.py b/renku/ui/cli/run.py index e36c4d4ee5..3b1b755b9b 100644 --- a/renku/ui/cli/run.py +++ b/renku/ui/cli/run.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/save.py b/renku/ui/cli/save.py index 01803fefeb..8b48f6abd2 100644 --- a/renku/ui/cli/save.py +++ b/renku/ui/cli/save.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/service.py b/renku/ui/cli/service.py index 3a41301bff..22948f51f2 100644 --- a/renku/ui/cli/service.py +++ b/renku/ui/cli/service.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -51,6 +50,8 @@ def run_api(addr="0.0.0.0", port=8080, timeout=600): "gunicorn", "renku.ui.service.entrypoint:app", loading_opt, + "-c", + "gunicorn.conf.py", "-b", f"{addr}:{port}", "--timeout", diff --git a/renku/ui/cli/session.py b/renku/ui/cli/session.py index ba40aa8f7a..35c2f9bc43 100644 --- a/renku/ui/cli/session.py +++ b/renku/ui/cli/session.py @@ -117,12 +117,12 @@ connect to it This will create SSH keys for you and setup SSH configuration for connecting to the renku deployment. -You can then use the SSH connection name (``ssh renkulab.io-myproject-sessionid`` in the example) +You can then use the SSH connection name (``ssh renkulab.io-myproject-session-id`` in the example) to connect to the session or in tools such as VSCode. .. note:: - If you need to recreate the generated SSH keys or you want to use existing keys instead, + If you need to recreate the generated SSH keys, or you want to use existing keys instead, you can use the ``renku session ssh-setup`` command to perform this step manually. See the help of the command for more details. @@ -137,6 +137,8 @@ ~~~~~~~~~~~~~~~~~~~~~~~~ The ``session`` command can be used to also list, stop and open active sessions. 
+If the provider supports session hibernation, this command allows pausing and resuming +sessions as well. In order to see active sessions (from any provider) run the following command: .. code-block:: console @@ -162,6 +164,24 @@ The command ``renku session stop --all`` will stop all active sessions regardless of the provider. +If a provider supports session hibernation (e.g. the ``renkulab`` provider), you can pause a session using +its ``ID``: + +.. code-block:: console + + $ renku session pause renku-test-e4fe76cc + +A paused session can later be resumed: + +.. code-block:: console + + $ renku session resume renku-test-e4fe76cc + +.. note:: + + Session ``ID`` doesn't need to be passed to the above commands if there is only one interactive session available. + + .. cheatsheet:: :group: Managing Interactive Sessions :command: $ renku session start --provider renkulab @@ -180,6 +200,18 @@ :description: Open a browser tab and connect to a running session. :target: rp +.. cheatsheet:: + :group: Managing Interactive Sessions + :command: $ renku session pause + :description: Pause the specified session. + :target: rp + +.. cheatsheet:: + :group: Managing Interactive Sessions + :command: $ renku session resume + :description: Resume the specified paused session. + :target: rp + .. cheatsheet:: :group: Managing Interactive Sessions :command: $ renku session stop @@ -195,8 +227,15 @@ from renku.command.util import WARNING from renku.core import errors from renku.ui.cli.utils.callback import ClickCallback -from renku.ui.cli.utils.click import shell_complete_session_providers, shell_complete_sessions -from renku.ui.cli.utils.plugins import get_supported_session_providers_names +from renku.ui.cli.utils.click import ( + shell_complete_hibernating_session_providers, + shell_complete_session_providers, + shell_complete_sessions, +) +from renku.ui.cli.utils.plugins import ( + get_supported_hibernating_session_providers_names, + get_supported_session_providers_names, +) @click.group() @@ -215,15 +254,6 @@ def session(): default=None, help="Backend to use for listing interactive sessions.", ) -@click.option( - "config", - "-c", - "--config", - hidden=True, - type=click.Path(exists=True, dir_okay=False), - metavar="", - help="YAML file containing configuration for the provider.", -) @click.option( "--columns", type=click.STRING, @@ -235,7 +265,7 @@ def session(): @click.option( "--format", type=click.Choice(list(SESSION_FORMATS.keys())), default="log", help="Choose an output format."
) -def list_sessions(provider, config, columns, format): +def list_sessions(provider, columns, format): """List interactive sessions.""" from renku.command.session import session_list_command @@ -251,7 +281,7 @@ def list_sessions(provider, config, columns, format): click.echo(WARNING + message) -def session_start_provider_options(*param_decls, **attrs): +def session_start_provider_options(*_, **__): """Sets session provider options groups on the session start command.""" from renku.core.plugin.session import get_supported_session_providers from renku.ui.cli.utils.click import create_options @@ -260,7 +290,7 @@ def session_start_provider_options(*param_decls, **attrs): return create_options(providers=providers, parameter_function="get_start_parameters") -@session.command("start") +@session.command @click.option( "provider", "-p", @@ -309,7 +339,7 @@ def start(provider, config, image, cpu, disk, gpu, memory, **kwargs): ) -@session.command("stop") +@session.command @click.argument("session_name", metavar="", required=False, default=None, shell_complete=shell_complete_sessions) @click.option( "provider", @@ -340,7 +370,7 @@ def stop(session_name, stop_all, provider): click.echo("Interactive session has been successfully stopped.") -def session_open_provider_options(*param_decls, **attrs): +def session_open_provider_options(*_, **__): """Sets session provider option groups on the session open command.""" from renku.core.plugin.session import get_supported_session_providers from renku.ui.cli.utils.click import create_options @@ -349,7 +379,7 @@ def session_open_provider_options(*param_decls, **attrs): return create_options(providers=providers, parameter_function="get_open_parameters") -@session.command("open") +@session.command @click.argument("session_name", metavar="", required=False, default=None, shell_complete=shell_complete_sessions) @click.option( "provider", @@ -388,3 +418,51 @@ def ssh_setup(existing_key, force): communicator = ClickCallback() ssh_setup_command().with_communicator(communicator).build().execute(existing_key=existing_key, force=force) + + +@session.command +@click.argument("session_name", metavar="", required=False, default=None, shell_complete=shell_complete_sessions) +@click.option( + "provider", + "-p", + "--provider", + type=click.Choice(Proxy(get_supported_hibernating_session_providers_names)), + default=None, + shell_complete=shell_complete_hibernating_session_providers, + help="Session provider to use.", +) +def pause(session_name, provider): + """Pause an interactive session.""" + from renku.command.session import session_pause_command + + communicator = ClickCallback() + session_pause_command().with_communicator(communicator).build().execute( + session_name=session_name, provider=provider + ) + + session_message = f"session '{session_name}'" if session_name else "session" + click.echo(f"Interactive {session_message} has been paused.") + + +@session.command +@click.argument("session_name", metavar="", required=False, default=None, shell_complete=shell_complete_sessions) +@click.option( + "provider", + "-p", + "--provider", + type=click.Choice(Proxy(get_supported_hibernating_session_providers_names)), + default=None, + shell_complete=shell_complete_hibernating_session_providers, + help="Session provider to use.", +) +def resume(session_name, provider): + """Resume a paused session.""" + from renku.command.session import session_resume_command + + communicator = ClickCallback() + session_resume_command().with_communicator(communicator).build().execute( + 
session_name=session_name, provider=provider + ) + + session_message = f"session '{session_name}'" if session_name else "session" + click.echo(f"Interactive {session_message} has been resumed.") diff --git a/renku/ui/cli/status.py b/renku/ui/cli/status.py index d15654909c..73b440d87f 100644 --- a/renku/ui/cli/status.py +++ b/renku/ui/cli/status.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/storage.py b/renku/ui/cli/storage.py index ce5f8a14e2..00ebf40482 100644 --- a/renku/ui/cli/storage.py +++ b/renku/ui/cli/storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/template.py b/renku/ui/cli/template.py index 0fdcdde8a3..d0851d7097 100644 --- a/renku/ui/cli/template.py +++ b/renku/ui/cli/template.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/update.py b/renku/ui/cli/update.py index e7e4120b2a..f1b1ec4675 100644 --- a/renku/ui/cli/update.py +++ b/renku/ui/cli/update.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/cli/utils/click.py b/renku/ui/cli/utils/click.py index 7f3e61e056..c4e1ea7c4a 100644 --- a/renku/ui/cli/utils/click.py +++ b/renku/ui/cli/utils/click.py @@ -71,6 +71,18 @@ def shell_complete_session_providers(ctx, param, incomplete) -> List[str]: return result.output +def shell_complete_hibernating_session_providers(ctx, param, incomplete) -> List[str]: + """Shell completion for session providers names that support hibernation.""" + from renku.command.session import search_hibernating_session_providers_command + + try: + result = search_hibernating_session_providers_command().build().execute(name=incomplete) + except Exception: + return [] + else: + return result.output + + class CaseInsensitiveChoice(click.Choice): """Case-insensitive click choice. 
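For context, the completion helper added above follows Click's standard ``shell_complete`` protocol. Below is a minimal, self-contained sketch of that pattern (illustrative only, not part of this diff: the provider list and the command are stand-ins for the real plugin hooks):

    import click

    def hibernating_provider_names():
        # Stand-in for renku.core.plugin.session.get_supported_hibernating_session_providers();
        # the real helper queries registered session-provider plugins.
        return ["renkulab"]

    def complete_providers(ctx, param, incomplete):
        # Click calls this with the partially typed word and expects matching candidates back.
        return [name for name in hibernating_provider_names() if name.startswith(incomplete)]

    @click.command()
    @click.option("-p", "--provider", shell_complete=complete_providers, help="Session provider to use.")
    def pause(provider):
        click.echo(f"Would pause a session via provider {provider!r}")

Note that ``shell_complete_hibernating_session_providers`` above wraps its lookup in a ``try``/``except`` and returns an empty list on failure, so a plugin error degrades to "no completions" instead of breaking the user's shell.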
diff --git a/renku/ui/cli/utils/plugins.py b/renku/ui/cli/utils/plugins.py index 1997d66075..45f4aec379 100644 --- a/renku/ui/cli/utils/plugins.py +++ b/renku/ui/cli/utils/plugins.py @@ -35,3 +35,10 @@ def get_supported_session_providers_names(): from renku.core.plugin.session import get_supported_session_providers return [p.name for p in get_supported_session_providers()] + + +def get_supported_hibernating_session_providers_names(): + """Return names of session providers that support hibernation.""" + from renku.core.plugin.session import get_supported_hibernating_session_providers + + return [p.name for p in get_supported_hibernating_session_providers()] diff --git a/renku/ui/service/cache/base.py b/renku/ui/service/cache/base.py index 9b0594b171..ec8f80ff05 100644 --- a/renku/ui/service/cache/base.py +++ b/renku/ui/service/cache/base.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/config.py b/renku/ui/service/cache/config.py index b4182d245b..ec37e9233c 100644 --- a/renku/ui/service/cache/config.py +++ b/renku/ui/service/cache/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/files.py b/renku/ui/service/cache/files.py index 3f4a6844cf..5029020540 100644 --- a/renku/ui/service/cache/files.py +++ b/renku/ui/service/cache/files.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/jobs.py b/renku/ui/service/cache/jobs.py index d0bb0b5aa0..0574aba937 100644 --- a/renku/ui/service/cache/jobs.py +++ b/renku/ui/service/cache/jobs.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/models/file.py b/renku/ui/service/cache/models/file.py index 2e4de3a88a..819dcdba49 100644 --- a/renku/ui/service/cache/models/file.py +++ b/renku/ui/service/cache/models/file.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/models/job.py b/renku/ui/service/cache/models/job.py index 38135980a4..59bc3ef3db 100644 --- a/renku/ui/service/cache/models/job.py +++ b/renku/ui/service/cache/models/job.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/models/project.py b/renku/ui/service/cache/models/project.py index 488fd2d0fd..5724c197cb 100644 --- a/renku/ui/service/cache/models/project.py +++ b/renku/ui/service/cache/models/project.py @@ -26,11 +26,11 @@ from renku.ui.service.cache.base import BaseCache from renku.ui.service.config import CACHE_PROJECTS_PATH -from renku.ui.service.utils import normalize_git_url MAX_CONCURRENT_PROJECT_REQUESTS = 10 LOCK_TIMEOUT = 15 NO_BRANCH_FOLDER = "__default_branch__" +DETACHED_HEAD_FOLDER_PREFIX = "__detached_head_" class Project(Model): @@ -55,14 +55,20 @@ class Project(Model): description = TextField() owner = TextField() initialized = BooleanField() + commit_sha = TextField() @property def abs_path(self) -> Path: """Full path of cached project.""" - branch = self.branch + folder_name = self.branch if not self.branch: - branch = NO_BRANCH_FOLDER - return CACHE_PROJECTS_PATH / self.user_id / self.owner / normalize_git_url(self.slug) / branch + if self.commit_sha: + # NOTE: Detached head state + folder_name = f"{DETACHED_HEAD_FOLDER_PREFIX}{self.commit_sha}" + else: + # NOTE: We are on the default branch (i.e. main) + folder_name = NO_BRANCH_FOLDER + return CACHE_PROJECTS_PATH / self.user_id / self.owner / self.slug / folder_name def read_lock(self, timeout: Optional[float] = None): """Shared read lock on the project.""" diff --git a/renku/ui/service/cache/models/user.py b/renku/ui/service/cache/models/user.py index fe10ebbb13..461d9add82 100644 --- a/renku/ui/service/cache/models/user.py +++ b/renku/ui/service/cache/models/user.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/projects.py b/renku/ui/service/cache/projects.py index c48e4e4691..e95d2bce88 100644 --- a/renku/ui/service/cache/projects.py +++ b/renku/ui/service/cache/projects.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/serializers/__init__.py b/renku/ui/service/cache/serializers/__init__.py index d69828c92a..7560361ff5 100644 --- a/renku/ui/service/cache/serializers/__init__.py +++ b/renku/ui/service/cache/serializers/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/serializers/file.py b/renku/ui/service/cache/serializers/file.py index 431942b714..f65b632402 100644 --- a/renku/ui/service/cache/serializers/file.py +++ b/renku/ui/service/cache/serializers/file.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/serializers/job.py b/renku/ui/service/cache/serializers/job.py index e614947300..1c968f73bd 100644 --- a/renku/ui/service/cache/serializers/job.py +++ b/renku/ui/service/cache/serializers/job.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/serializers/project.py b/renku/ui/service/cache/serializers/project.py index 04f5e451cc..d9e484ebc6 100644 --- a/renku/ui/service/cache/serializers/project.py +++ b/renku/ui/service/cache/serializers/project.py @@ -17,7 +17,7 @@ import uuid from datetime import datetime -from marshmallow import fields, post_load +from marshmallow import ValidationError, fields, post_load, validates_schema from renku.ui.service.cache.models.project import Project from renku.ui.service.serializers.common import AccessSchema, CreationSchema, MandatoryUserSchema @@ -39,11 +39,20 @@ class ProjectSchema(CreationSchema, AccessSchema, MandatoryUserSchema): description = fields.String(load_default=None) owner = fields.String(required=True) initialized = fields.Boolean(dump_default=False) + commit_sha = fields.String(required=False, load_default=None, dump_default=None) + branch = fields.String(required=False, load_default=None, dump_default=None) @post_load def make_project(self, data, **options): """Construct project object.""" - data["git_url"] = normalize_git_url(data["git_url"]) + if data.get("git_url"): + data["git_url"] = normalize_git_url(data["git_url"]) data["name"] = normalize_git_url(data["name"]) data["slug"] = normalize_git_url(data["slug"]) return Project(**data) + + @validates_schema + def ensure_only_commit_sha_or_branch(self, data, **kwargs): + """Check that a commit SHA and a branch are not both set.""" + if data.get("commit_sha") and data.get("branch"): + raise ValidationError("You cannot specify both a commit SHA and a branch; use one or the other") diff --git a/renku/ui/service/cache/serializers/user.py b/renku/ui/service/cache/serializers/user.py index bf7eac1078..c2a977ebc7 100644 --- a/renku/ui/service/cache/serializers/user.py +++ b/renku/ui/service/cache/serializers/user.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/cache/users.py b/renku/ui/service/cache/users.py index 96399dc894..254983084d 100644 --- a/renku/ui/service/cache/users.py +++ b/renku/ui/service/cache/users.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/config.py b/renku/ui/service/config.py index 694761848b..d66d8a4236 100644 --- a/renku/ui/service/config.py +++ b/renku/ui/service/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2022 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ).
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/__init__.py b/renku/ui/service/controllers/__init__.py index e539546b61..7bcffadcb6 100644 --- a/renku/ui/service/controllers/__init__.py +++ b/renku/ui/service/controllers/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/api/__init__.py b/renku/ui/service/controllers/api/__init__.py index c52391f912..4d4479b32b 100644 --- a/renku/ui/service/controllers/api/__init__.py +++ b/renku/ui/service/controllers/api/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/api/abstract.py b/renku/ui/service/controllers/api/abstract.py index ca197516e4..26e5f72ffa 100644 --- a/renku/ui/service/controllers/api/abstract.py +++ b/renku/ui/service/controllers/api/abstract.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/api/mixins.py b/renku/ui/service/controllers/api/mixins.py index e985681786..3f0bf43317 100644 --- a/renku/ui/service/controllers/api/mixins.py +++ b/renku/ui/service/controllers/api/mixins.py @@ -188,6 +188,7 @@ def local(self): self.request_data.get("branch"), self.user, self.clone_depth is not None, + self.request_data.get("commit_sha"), ) self.context["project_id"] = project.project_id diff --git a/renku/ui/service/controllers/cache_files_delete_chunks.py b/renku/ui/service/controllers/cache_files_delete_chunks.py index 7f9765d76a..266178b559 100644 --- a/renku/ui/service/controllers/cache_files_delete_chunks.py +++ b/renku/ui/service/controllers/cache_files_delete_chunks.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/cache_files_upload.py b/renku/ui/service/controllers/cache_files_upload.py index 08a2ae0947..77e511218f 100644 --- a/renku/ui/service/controllers/cache_files_upload.py +++ b/renku/ui/service/controllers/cache_files_upload.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/cache_list_uploaded.py b/renku/ui/service/controllers/cache_list_uploaded.py index 4a17546e39..3f7658b83c 100644 --- a/renku/ui/service/controllers/cache_list_uploaded.py +++ b/renku/ui/service/controllers/cache_list_uploaded.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/cache_migrate_project.py b/renku/ui/service/controllers/cache_migrate_project.py index 65e2da7616..7c2032f717 100644 --- a/renku/ui/service/controllers/cache_migrate_project.py +++ b/renku/ui/service/controllers/cache_migrate_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/cache_migrations_check.py b/renku/ui/service/controllers/cache_migrations_check.py index 7c5a666c59..2327bf13d6 100644 --- a/renku/ui/service/controllers/cache_migrations_check.py +++ b/renku/ui/service/controllers/cache_migrations_check.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/config_set.py b/renku/ui/service/controllers/config_set.py index 4da6620eae..b19bd5f9fa 100644 --- a/renku/ui/service/controllers/config_set.py +++ b/renku/ui/service/controllers/config_set.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/config_show.py b/renku/ui/service/controllers/config_show.py index e139a041d9..2168af3d44 100644 --- a/renku/ui/service/controllers/config_show.py +++ b/renku/ui/service/controllers/config_show.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/datasets_add_file.py b/renku/ui/service/controllers/datasets_add_file.py index bdd371d079..1d270d232c 100644 --- a/renku/ui/service/controllers/datasets_add_file.py +++ b/renku/ui/service/controllers/datasets_add_file.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -40,8 +39,8 @@ class DatasetsAddFileCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets add controller.""" - self.ctx = DatasetsAddFileCtrl.REQUEST_SERIALIZER.load(request_data) - self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset add {self.ctx['name']}" + self.ctx = self.REQUEST_SERIALIZER.load(request_data) + self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset add {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -77,10 +76,12 @@ def prepare_paths(self): self.ctx["project_id"], self.ctx["create_dataset"], commit_message, - self.ctx["name"], + self.ctx["slug"], _file["file_url"], job_timeout=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)), result_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)), + ttl=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)), + failure_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)), ) enqueued_paths.append(_file["file_url"]) @@ -111,7 +112,7 @@ def renku_op(self): if local_paths: add_to_dataset_command().with_commit_message(self.ctx["commit_message"]).build().execute( - dataset_name=self.ctx["name"], + dataset_slug=self.ctx["slug"], urls=local_paths, create=self.ctx["create_dataset"], force=self.ctx["force"], @@ -133,4 +134,4 @@ def to_response(self): **{"local_paths": local_paths, "enqueued_paths": enqueued_paths, "remote_branch": remote_branch}, } - return result_response(DatasetsAddFileCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_create.py b/renku/ui/service/controllers/datasets_create.py index aaa47a5cb1..d3e9a7c92c 100644 --- a/renku/ui/service/controllers/datasets_create.py +++ b/renku/ui/service/controllers/datasets_create.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,7 +15,7 @@ # limitations under the License. """Renku service datasets create controller.""" from renku.command.dataset import create_dataset_command -from renku.core.dataset.request_model import ImageRequestModel +from renku.core.image import ImageObjectRequest from renku.core.util.metadata import construct_creators from renku.ui.service.cache.models.job import Job from renku.ui.service.config import CACHE_UPLOADS_PATH, MESSAGE_PREFIX @@ -35,8 +34,8 @@ class DatasetsCreateCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets create controller.""" - self.ctx = DatasetsCreateCtrl.REQUEST_SERIALIZER.load(request_data) - self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset create {self.ctx['name']}" + self.ctx = self.REQUEST_SERIALIZER.load(request_data) + self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset create {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project) @@ -53,7 +52,7 @@ def renku_op(self): set_url_for_uploaded_images(images=images, cache=self.cache, user=self.user) images = [ - ImageRequestModel( + ImageObjectRequest( content_url=img["content_url"], position=img["position"], mirror_locally=img.get("mirror_locally", False), @@ -71,8 +70,8 @@ def renku_op(self): .with_commit_message(self.ctx["commit_message"]) .build() .execute( - name=self.ctx["name"], - title=self.ctx.get("title"), + slug=self.ctx["slug"], + name=self.ctx.get("name"), creators=creators, description=self.ctx.get("description"), keywords=self.ctx.get("keywords"), @@ -92,4 +91,4 @@ def to_response(self): op_result = self.ctx op_result["remote_branch"] = remote_branch - return result_response(DatasetsCreateCtrl.RESPONSE_SERIALIZER, op_result) + return result_response(self.RESPONSE_SERIALIZER, op_result) diff --git a/renku/ui/service/controllers/datasets_edit.py b/renku/ui/service/controllers/datasets_edit.py index ed50999b7b..355c1efe36 100644 --- a/renku/ui/service/controllers/datasets_edit.py +++ b/renku/ui/service/controllers/datasets_edit.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,7 +17,7 @@ from typing import Dict, List, Union, cast from renku.command.dataset import edit_dataset_command -from renku.core.dataset.request_model import ImageRequestModel +from renku.core.image import ImageObjectRequest from renku.core.util.metadata import construct_creators from renku.domain_model.constant import NO_VALUE, NoValueType from renku.domain_model.provenance.agent import Person @@ -39,8 +38,8 @@ class DatasetsEditCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets edit list controller.""" - self.ctx = cast(Dict, DatasetsEditCtrl.REQUEST_SERIALIZER.load(request_data)) - self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset edit {self.ctx['name']}" + self.ctx = cast(Dict, self.REQUEST_SERIALIZER.load(request_data)) + self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset edit {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -61,7 +60,7 @@ def renku_op(self): set_url_for_uploaded_images(images=images, cache=self.cache, user=self.user) images = [ - ImageRequestModel( + ImageObjectRequest( content_url=img["content_url"], position=img["position"], mirror_locally=img.get("mirror_locally", False), @@ -78,10 +77,10 @@ def renku_op(self): else: creators = NO_VALUE - if "title" in self.ctx: - title = self.ctx.get("title") + if "name" in self.ctx: + name = self.ctx.get("name") else: - title = NO_VALUE + name = NO_VALUE if "description" in self.ctx: description = self.ctx.get("description") @@ -111,8 +110,8 @@ def renku_op(self): .with_commit_message(self.ctx["commit_message"]) .build() .execute( - self.ctx["name"], - title=title, + self.ctx["slug"], + name=name, description=description, creators=creators, keywords=keywords, @@ -141,6 +140,7 @@ def to_response(self): "edited": edited, "warnings": warnings, "remote_branch": remote_branch, + "git_url": self.ctx["git_url"], } - return result_response(DatasetsEditCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_files_list.py b/renku/ui/service/controllers/datasets_files_list.py index 823b3b706c..37806ea3cd 100644 --- a/renku/ui/service/controllers/datasets_files_list.py +++ b/renku/ui/service/controllers/datasets_files_list.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +30,7 @@ class DatasetsFilesListCtrl(ServiceCtrl, RenkuOperationMixin): def __init__(self, cache, user_data, request_data): """Construct a datasets files list controller.""" - self.ctx = DatasetsFilesListCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) super().__init__(cache, user_data, request_data) @@ -42,10 +41,10 @@ def context(self): def renku_op(self): """Renku operation for the controller.""" - result = list_files_command().build().execute(datasets=[self.ctx["name"]]) + result = list_files_command().build().execute(datasets=[self.ctx["slug"]]) return result.output def to_response(self): """Execute controller flow and serialize to service response.""" self.ctx["files"] = self.execute_op() - return result_response(DatasetsFilesListCtrl.RESPONSE_SERIALIZER, self.ctx) + return result_response(self.RESPONSE_SERIALIZER, self.ctx) diff --git a/renku/ui/service/controllers/datasets_import.py b/renku/ui/service/controllers/datasets_import.py index 7c7938db4e..96107368b9 100644 --- a/renku/ui/service/controllers/datasets_import.py +++ b/renku/ui/service/controllers/datasets_import.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,7 +34,7 @@ class DatasetsImportCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets import controller.""" - self.ctx = DatasetsImportCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset import of {self.ctx['dataset_uri']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -61,11 +60,13 @@ def renku_op(self): job.job_id, self.ctx["project_id"], self.ctx["dataset_uri"], - name=self.ctx.get("name"), + slug=self.ctx.get("slug"), extract=self.ctx.get("extract", False), tag=self.ctx.get("tag", None), job_timeout=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)), result_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)), + ttl=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)), + failure_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)), commit_message=self.ctx["commit_message"], data_directory=self.ctx.get("data_directory"), ) @@ -74,4 +75,4 @@ def renku_op(self): def to_response(self): """Execute controller flow and serialize to service response.""" - return result_response(DatasetsImportCtrl.RESPONSE_SERIALIZER, self.execute_op()) + return result_response(self.RESPONSE_SERIALIZER, self.execute_op()) diff --git a/renku/ui/service/controllers/datasets_list.py b/renku/ui/service/controllers/datasets_list.py index 636d25a7d4..6e6a239806 100644 --- a/renku/ui/service/controllers/datasets_list.py +++ b/renku/ui/service/controllers/datasets_list.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -31,7 +30,7 @@ class DatasetsListCtrl(ServiceCtrl, RenkuOperationMixin): def __init__(self, cache, user_data, request_data): """Construct a datasets list controller.""" - self.ctx = DatasetsListCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) super().__init__(cache, user_data, request_data) @property @@ -47,4 +46,4 @@ def renku_op(self): def to_response(self): """Execute controller flow and serialize to service response.""" self.ctx["datasets"] = self.execute_op() - return result_response(DatasetsListCtrl.RESPONSE_SERIALIZER, self.ctx) + return result_response(self.RESPONSE_SERIALIZER, self.ctx) diff --git a/renku/ui/service/controllers/datasets_remove.py b/renku/ui/service/controllers/datasets_remove.py index 009c71d097..b67ac08d4f 100644 --- a/renku/ui/service/controllers/datasets_remove.py +++ b/renku/ui/service/controllers/datasets_remove.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -32,8 +31,8 @@ class DatasetsRemoveCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets remove controller.""" - self.ctx = DatasetsRemoveCtrl.REQUEST_SERIALIZER.load(request_data) - self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset remove {self.ctx['name']}" + self.ctx = self.REQUEST_SERIALIZER.load(request_data) + self.ctx["commit_message"] = f"{MESSAGE_PREFIX} dataset remove {self.ctx['slug']}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -45,7 +44,7 @@ def context(self): def renku_op(self): """Renku operation for the controller.""" result = ( - remove_dataset_command().with_commit_message(self.ctx["commit_message"]).build().execute(self.ctx["name"]) + remove_dataset_command().with_commit_message(self.ctx["commit_message"]).build().execute(self.ctx["slug"]) ) return result.output @@ -59,4 +58,4 @@ def to_response(self): response = self.ctx response["remote_branch"] = remote_branch - return result_response(DatasetsRemoveCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/datasets_unlink.py b/renku/ui/service/controllers/datasets_unlink.py index 4e92378530..23ffb6deb3 100644 --- a/renku/ui/service/controllers/datasets_unlink.py +++ b/renku/ui/service/controllers/datasets_unlink.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -32,7 +31,7 @@ class DatasetsUnlinkCtrl(ServiceCtrl, RenkuOpSyncMixin): def __init__(self, cache, user_data, request_data, migrate_project=False): """Construct a datasets unlink list controller.""" - self.ctx = DatasetsUnlinkCtrl.REQUEST_SERIALIZER.load(request_data) + self.ctx = self.REQUEST_SERIALIZER.load(request_data) self.include = self.ctx.get("include_filter") self.exclude = self.ctx.get("exclude_filter") @@ -43,7 +42,7 @@ def __init__(self, cache, user_data, request_data, migrate_project=False): filters = f"-X {self.exclude}" else: filters = f"-I {self.include}" - self.ctx["commit_message"] = f"{MESSAGE_PREFIX} unlink dataset {self.ctx['name']} {filters}" + self.ctx["commit_message"] = f"{MESSAGE_PREFIX} unlink dataset {self.ctx['slug']} {filters}" super().__init__(cache, user_data, request_data, migrate_project=migrate_project) @@ -59,7 +58,7 @@ def renku_op(self): .with_commit_message(self.ctx["commit_message"]) .build() .execute( - name=self.ctx["name"], + slug=self.ctx["slug"], include=self.ctx.get("include_filters"), exclude=self.ctx.get("exclude_filters"), yes=True, @@ -78,6 +77,7 @@ def to_response(self): response = { "unlinked": [record.entity.path for record in op_result], "remote_branch": remote_branch, + "git_url": self.ctx["git_url"], } - return result_response(DatasetsUnlinkCtrl.RESPONSE_SERIALIZER, response) + return result_response(self.RESPONSE_SERIALIZER, response) diff --git a/renku/ui/service/controllers/graph_export.py b/renku/ui/service/controllers/graph_export.py index da1b3297a9..6517919820 100644 --- a/renku/ui/service/controllers/graph_export.py +++ b/renku/ui/service/controllers/graph_export.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/project_edit.py b/renku/ui/service/controllers/project_edit.py index a07a14e9b7..aaa627bf47 100644 --- a/renku/ui/service/controllers/project_edit.py +++ b/renku/ui/service/controllers/project_edit.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,13 +14,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
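The `datasets_import` hunk above now forwards `ttl` and `failure_ttl` alongside the existing `job_timeout` and `result_ttl` when the import job is enqueued. The keyword names match the standard RQ enqueue options, so here is a minimal sketch of their semantics in plain RQ — queue name and job path are placeholders, not the service's real ones, and a running Redis is assumed:

```python
import os

from redis import Redis
from rq import Queue

queue = Queue("datasets.jobs", connection=Redis())  # hypothetical queue name

job = queue.enqueue(
    "jobs.import_dataset",  # hypothetical dotted path to the worker function
    # maximum run time once a worker has picked the job up
    job_timeout=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)),
    # how long a successful result is kept in Redis
    result_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)),
    # how long the job may wait in the queue before being dropped unstarted
    ttl=int(os.getenv("WORKER_DATASET_JOBS_TIMEOUT", 1800)),
    # how long a failed job is kept around for inspection
    failure_ttl=int(os.getenv("WORKER_DATASET_JOBS_RESULT_TTL", 500)),
)
```

Setting all four puts queue residency, run time, and retention under the same two environment variables instead of leaving `ttl` and `failure_ttl` at RQ's defaults.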
"""Renku service project edit controller.""" -from typing import Dict, cast + +from typing import Dict, Optional, Union, cast from renku.command.project import edit_project_command -from renku.domain_model.constant import NO_VALUE +from renku.core.image import ImageObjectRequest +from renku.domain_model.constant import NO_VALUE, NoValueType from renku.ui.service.cache.models.job import Job +from renku.ui.service.config import CACHE_UPLOADS_PATH from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOpSyncMixin +from renku.ui.service.controllers.utils.datasets import set_url_for_uploaded_images from renku.ui.service.serializers.project import ProjectEditRequest, ProjectEditResponseRPC from renku.ui.service.views import result_response @@ -76,6 +79,24 @@ def renku_op(self): else: keywords = NO_VALUE + if "image" not in self.ctx: + image_request: Optional[Union[ImageObjectRequest, NoValueType]] = NO_VALUE + elif self.ctx["image"] is None: + image_request = None + else: + image: Dict = self.ctx.get("image") # type: ignore + + user_cache_dir = CACHE_UPLOADS_PATH / self.user.user_id + + set_url_for_uploaded_images(images=[image], cache=self.cache, user=self.user) + + image_request = ImageObjectRequest( + content_url=image["content_url"], + position=0, + mirror_locally=image.get("mirror_locally", False), + safe_image_paths=[user_cache_dir], + ) + result = ( edit_project_command() .with_commit_message(self.ctx["commit_message"]) @@ -86,6 +107,7 @@ def renku_op(self): custom_metadata=custom_metadata, custom_metadata_source=custom_metadata_source, keywords=keywords, + image_request=image_request, ) ) diff --git a/renku/ui/service/controllers/project_show.py b/renku/ui/service/controllers/project_show.py index c2f17d9ae5..56e83b0c5d 100644 --- a/renku/ui/service/controllers/project_show.py +++ b/renku/ui/service/controllers/project_show.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/templates_create_project.py b/renku/ui/service/controllers/templates_create_project.py index b3dc30e753..d01220d7a7 100644 --- a/renku/ui/service/controllers/templates_create_project.py +++ b/renku/ui/service/controllers/templates_create_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,13 +22,15 @@ from renku.command.init import create_from_template_local_command from renku.core import errors +from renku.core.image import ImageObjectRequest from renku.core.template.template import fetch_templates_source from renku.core.util.contexts import renku_project_context from renku.domain_model.template import Template from renku.infrastructure.repository import Repository -from renku.ui.service.config import MESSAGE_PREFIX +from renku.ui.service.config import CACHE_UPLOADS_PATH, MESSAGE_PREFIX from renku.ui.service.controllers.api.abstract import ServiceCtrl from renku.ui.service.controllers.api.mixins import RenkuOperationMixin +from renku.ui.service.controllers.utils.datasets import set_url_for_uploaded_images from renku.ui.service.errors import UserProjectCreationError, UserTemplateInvalidError from renku.ui.service.serializers.templates import ProjectTemplateRequest, ProjectTemplateResponseRPC from renku.ui.service.utils import new_repo_push @@ -64,8 +65,8 @@ def default_metadata(self): """Default metadata for project creation.""" metadata = { - "__template_source__": self.ctx["git_url"], - "__template_ref__": self.ctx["branch"], + "__template_source__": self.ctx["template_git_url"], + "__template_ref__": self.ctx["ref"], "__template_id__": self.ctx["identifier"], "__namespace__": self.ctx["project_namespace"], "__repository__": self.ctx["project_repository"], @@ -102,6 +103,7 @@ def setup_new_project(self): "owner": self.ctx["project_namespace"], "token": self.ctx["token"], "initialized": True, + "image": self.ctx["image"], } project = self.cache.make_project(self.user, new_project_data) @@ -115,7 +117,7 @@ def setup_new_project(self): def setup_template(self): """Reads template manifest.""" - templates_source = fetch_templates_source(source=self.ctx["git_url"], reference=self.ctx["branch"]) + templates_source = fetch_templates_source(source=self.ctx["template_git_url"], reference=self.ctx["ref"]) identifier = self.ctx["identifier"] try: self.template = templates_source.get_template(id=identifier, reference=None) @@ -150,6 +152,18 @@ def new_project(self): new_project = self.setup_new_project() new_project_path = new_project.abs_path + image = self.ctx.get("image") + if image: + user_cache_dir = CACHE_UPLOADS_PATH / self.user.user_id + set_url_for_uploaded_images(images=[image], cache=self.cache, user=self.user) + + image = ImageObjectRequest( + content_url=image["content_url"], + position=0, + mirror_locally=image.get("mirror_locally", False), + safe_image_paths=[user_cache_dir], + ) + with renku_project_context(new_project_path): create_from_template_local_command().build().execute( self.template.path, @@ -167,6 +181,7 @@ def new_project(self): description=self.ctx["project_description"], data_dir=self.ctx.get("data_directory"), ssh_supported=self.template.ssh_supported, + image_request=image, ) self.new_project_push(new_project_path) diff --git a/renku/ui/service/controllers/templates_read_manifest.py b/renku/ui/service/controllers/templates_read_manifest.py index d46d4e2e32..b3e374e4e7 100644 --- a/renku/ui/service/controllers/templates_read_manifest.py +++ b/renku/ui/service/controllers/templates_read_manifest.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -50,7 +49,7 @@ def template_manifest(self): """Reads template manifest.""" from PIL import Image - templates_source = fetch_templates_source(source=self.ctx["git_url"], reference=self.ctx["branch"]) + templates_source = fetch_templates_source(source=self.ctx["template_git_url"], reference=self.ctx["ref"]) manifest = templates_source.manifest.get_raw_content() # NOTE: convert icons to base64 diff --git a/renku/ui/service/controllers/utils/__init__.py b/renku/ui/service/controllers/utils/__init__.py index 1dfca2b7b5..15e5e19d97 100644 --- a/renku/ui/service/controllers/utils/__init__.py +++ b/renku/ui/service/controllers/utils/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/utils/datasets.py b/renku/ui/service/controllers/utils/datasets.py index 5c7250fc72..ef4cbda623 100644 --- a/renku/ui/service/controllers/utils/datasets.py +++ b/renku/ui/service/controllers/utils/datasets.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/v1/datasets_add_file.py b/renku/ui/service/controllers/v1/datasets_add_file.py new file mode 100644 index 0000000000..5b5d6ae938 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_add_file.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
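In the `project_edit` hunk above, the controller distinguishes a missing `image` key (keep the current image) from an explicit `null` (remove it) and from a payload (replace it). A toy restatement of that tri-state convention, using a stand-in sentinel instead of renku's `NO_VALUE` constant:

```python
NO_VALUE = object()  # stand-in for renku.domain_model.constant.NO_VALUE

def resolve_image(ctx: dict):
    """Map the request payload onto the edit command's image argument."""
    if "image" not in ctx:
        return NO_VALUE  # field absent: leave the project image untouched
    if ctx["image"] is None:
        return None      # explicit null: remove the current image
    # the real controller builds an ImageObjectRequest here; a dict stands in
    return {"content_url": ctx["image"]["content_url"]}

assert resolve_image({}) is NO_VALUE
assert resolve_image({"image": None}) is None
assert resolve_image({"image": {"content_url": "logo.png"}})["content_url"] == "logo.png"
```

The sentinel is needed because `None` is already taken as the "delete" signal, so absence has to be encoded by a third value.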
+"""Renku service datasets add controller.""" + +from renku.ui.service.controllers.datasets_add_file import DatasetsAddFileCtrl +from renku.ui.service.serializers.v1.datasets import DatasetAddRequest_2_1, DatasetAddResponseRPC_2_1 + + +class DatasetsAddFileCtrl_2_1(DatasetsAddFileCtrl): + """Controller for datasets add endpoint.""" + + REQUEST_SERIALIZER = DatasetAddRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetAddResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_create.py b/renku/ui/service/controllers/v1/datasets_create.py new file mode 100644 index 0000000000..9701fda2f2 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_create.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets create controller.""" + +from renku.ui.service.controllers.datasets_create import DatasetsCreateCtrl +from renku.ui.service.serializers.v1.datasets import DatasetCreateRequest_2_1, DatasetCreateResponseRPC_2_1 + + +class DatasetsCreateCtrl_2_1(DatasetsCreateCtrl): + """Controller for datasets create endpoint.""" + + REQUEST_SERIALIZER = DatasetCreateRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetCreateResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_edit.py b/renku/ui/service/controllers/v1/datasets_edit.py new file mode 100644 index 0000000000..6835a4081a --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_edit.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets edit controller.""" + +from renku.ui.service.controllers.datasets_edit import DatasetsEditCtrl +from renku.ui.service.serializers.datasets import DatasetEditResponseRPC +from renku.ui.service.serializers.v1.datasets import DatasetEditRequest_2_1 + + +class DatasetsEditCtrl_2_1(DatasetsEditCtrl): + """Controller for datasets edit endpoint.""" + + REQUEST_SERIALIZER = DatasetEditRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetEditResponseRPC() diff --git a/renku/ui/service/controllers/v1/datasets_files_list.py b/renku/ui/service/controllers/v1/datasets_files_list.py new file mode 100644 index 0000000000..f09ad202f1 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_files_list.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets files controller.""" + +from renku.ui.service.controllers.datasets_files_list import DatasetsFilesListCtrl +from renku.ui.service.serializers.v1.datasets import DatasetFilesListRequest_2_1, DatasetFilesListResponseRPC_2_1 + + +class DatasetsFilesListCtrl_2_1(DatasetsFilesListCtrl): + """Controller for datasets files list endpoint.""" + + REQUEST_SERIALIZER = DatasetFilesListRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetFilesListResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_import.py b/renku/ui/service/controllers/v1/datasets_import.py new file mode 100644 index 0000000000..f3c14c1667 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_import.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets import controller.""" + +from renku.ui.service.controllers.datasets_import import DatasetsImportCtrl +from renku.ui.service.serializers.datasets import DatasetImportResponseRPC +from renku.ui.service.serializers.v1.datasets import DatasetImportRequest_2_1 + + +class DatasetsImportCtrl_2_1(DatasetsImportCtrl): + """Controller for datasets import endpoint.""" + + REQUEST_SERIALIZER = DatasetImportRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetImportResponseRPC() diff --git a/renku/ui/service/controllers/v1/datasets_list.py b/renku/ui/service/controllers/v1/datasets_list.py new file mode 100644 index 0000000000..49efc3391f --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_list.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets list controller.""" + +from renku.ui.service.controllers.datasets_list import DatasetsListCtrl +from renku.ui.service.serializers.datasets import DatasetListRequest +from renku.ui.service.serializers.v1.datasets import DatasetListResponseRPC_2_1 + + +class DatasetsListCtrl_2_1(DatasetsListCtrl): + """Controller for datasets list endpoint.""" + + REQUEST_SERIALIZER = DatasetListRequest() + RESPONSE_SERIALIZER = DatasetListResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_remove.py b/renku/ui/service/controllers/v1/datasets_remove.py new file mode 100644 index 0000000000..ffa7c1b4d4 --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_remove.py @@ -0,0 +1,26 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service datasets remove controller.""" + +from renku.ui.service.controllers.datasets_remove import DatasetsRemoveCtrl +from renku.ui.service.serializers.v1.datasets import DatasetRemoveRequest_2_1, DatasetRemoveResponseRPC_2_1 + + +class DatasetsRemoveCtrl_2_1(DatasetsRemoveCtrl): + """Controller for datasets remove endpoint.""" + + REQUEST_SERIALIZER = DatasetRemoveRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetRemoveResponseRPC_2_1() # type: ignore diff --git a/renku/ui/service/controllers/v1/datasets_unlink.py b/renku/ui/service/controllers/v1/datasets_unlink.py new file mode 100644 index 0000000000..7ed3ad878c --- /dev/null +++ b/renku/ui/service/controllers/v1/datasets_unlink.py @@ -0,0 +1,27 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets unlink controller.""" + +from renku.ui.service.controllers.datasets_unlink import DatasetsUnlinkCtrl +from renku.ui.service.serializers.datasets import DatasetUnlinkResponseRPC +from renku.ui.service.serializers.v1.datasets import DatasetUnlinkRequest_2_1 + + +class DatasetsUnlinkCtrl_2_1(DatasetsUnlinkCtrl): + """Controller for datasets unlink endpoint.""" + + REQUEST_SERIALIZER = DatasetUnlinkRequest_2_1() # type: ignore + RESPONSE_SERIALIZER = DatasetUnlinkResponseRPC() diff --git a/renku/ui/service/controllers/v1/templates.py b/renku/ui/service/controllers/v1/templates.py new file mode 100644 index 0000000000..cf0cb50380 --- /dev/null +++ b/renku/ui/service/controllers/v1/templates.py @@ -0,0 +1,32 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Renku service templates controller.""" + +from renku.ui.service.controllers.templates_create_project import TemplatesCreateProjectCtrl +from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl +from renku.ui.service.serializers.v1.templates import ManifestTemplatesRequest_v2_2, ProjectTemplateRequest_v2_2 + + +class TemplatesCreateProjectCtrl_v2_2(TemplatesCreateProjectCtrl): + """V2.2 create project controller.""" + + REQUEST_SERIALIZER = ProjectTemplateRequest_v2_2() # type: ignore + + +class TemplatesReadManifestCtrl_v2_2(TemplatesReadManifestCtrl): + """V2.2 read manifest controller.""" + + REQUEST_SERIALIZER = ManifestTemplatesRequest_v2_2() # type: ignore diff --git a/renku/ui/service/controllers/version.py b/renku/ui/service/controllers/version.py index 3b1b7b688d..f33e437ed7 100644 --- a/renku/ui/service/controllers/version.py +++ b/renku/ui/service/controllers/version.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/versions_list.py b/renku/ui/service/controllers/versions_list.py index 5027fb8d7b..f71f78cca7 100644 --- a/renku/ui/service/controllers/versions_list.py +++ b/renku/ui/service/controllers/versions_list.py @@ -1,7 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2022 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/workflow_plans_export.py b/renku/ui/service/controllers/workflow_plans_export.py index 106ee8862c..2cf88bd371 100644 --- a/renku/ui/service/controllers/workflow_plans_export.py +++ b/renku/ui/service/controllers/workflow_plans_export.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/workflow_plans_list.py b/renku/ui/service/controllers/workflow_plans_list.py index cab07d293b..c8f760eea5 100644 --- a/renku/ui/service/controllers/workflow_plans_list.py +++ b/renku/ui/service/controllers/workflow_plans_list.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/controllers/workflow_plans_show.py b/renku/ui/service/controllers/workflow_plans_show.py index 18e7033768..2d9433ca96 100644 --- a/renku/ui/service/controllers/workflow_plans_show.py +++ b/renku/ui/service/controllers/workflow_plans_show.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/entrypoint.py b/renku/ui/service/entrypoint.py index 1571c5ebda..5c67c94505 100644 --- a/renku/ui/service/entrypoint.py +++ b/renku/ui/service/entrypoint.py @@ -22,10 +22,12 @@ import sentry_sdk from flask import Flask, Response, jsonify, request, url_for from jwt import InvalidTokenError +from prometheus_flask_exporter.multiprocess import GunicornPrometheusMetrics from sentry_sdk.integrations.flask import FlaskIntegration from sentry_sdk.integrations.redis import RedisIntegration from sentry_sdk.integrations.rq import RqIntegration +from renku.core.util.util import is_test_session_running from renku.ui.service.cache import cache from renku.ui.service.config import CACHE_DIR, MAX_CONTENT_LENGTH, SENTRY_ENABLED, SENTRY_SAMPLERATE, SERVICE_PREFIX from renku.ui.service.errors import ( @@ -74,6 +76,9 @@ def create_app(custom_exceptions=True): app.config["cache"] = cache + if not is_test_session_running(): + GunicornPrometheusMetrics(app) + build_routes(app) @app.route(SERVICE_PREFIX) diff --git a/renku/ui/service/errors.py b/renku/ui/service/errors.py index 434a0f9b7a..961718bf9c 100644 --- a/renku/ui/service/errors.py +++ b/renku/ui/service/errors.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/gateways/__init__.py b/renku/ui/service/gateways/__init__.py index 015e51c789..bfa472aa9a 100644 --- a/renku/ui/service/gateways/__init__.py +++ b/renku/ui/service/gateways/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/gateways/repository_cache.py b/renku/ui/service/gateways/repository_cache.py index 58aedaf426..c228a73ba7 100644 --- a/renku/ui/service/gateways/repository_cache.py +++ b/renku/ui/service/gateways/repository_cache.py @@ -45,7 +45,13 @@ class LocalRepositoryCache(IRepositoryCache): """Cache for project repos stored on local disk.""" def get( - self, cache: ServiceCache, git_url: str, branch: Optional[str], user: User, shallow: bool = True + self, + cache: ServiceCache, + git_url: str, + branch: Optional[str], + user: User, + shallow: bool = True, + commit_sha: Optional[str] = None, ) -> Project: """Get a project from cache (clone if necessary).""" if git_url is None: @@ -58,12 +64,12 @@ def get( ) except ValueError: # project not found in DB - return self._clone_project(cache, git_url, branch, user, shallow) + return self._clone_project(cache, git_url, branch, user, shallow, commit_sha) if not project.abs_path.exists(): # cache folder doesn't exist anymore project.delete() - return self._clone_project(cache, git_url, branch, user, shallow) + return self._clone_project(cache, git_url, branch, user, shallow, commit_sha) if not shallow and project.is_shallow: self._unshallow_project(project, user) @@ -100,7 +106,13 @@ def _update_project_access_date(self, project: Project): project.save() def _clone_project( - self, cache: ServiceCache, git_url: str, branch: Optional[str], user: User, shallow: bool = True + self, + cache: ServiceCache, + git_url: str, + branch: Optional[str], + user: User, + shallow: bool = True, + commit_sha: Optional[str] = None, ) -> Project: """Clone a project to cache.""" git_url = normalize_git_url(git_url) @@ -124,6 +136,7 @@ def _clone_project( "branch": branch, "git_url": git_url, "user_id": user.user_id, + "commit_sha": commit_sha, } project = cache.make_project(user, project_data, persist=False) @@ -139,6 +152,7 @@ def _clone_project( (Project.user_id == user.user_id) & (Project.git_url == git_url) & (Project.branch == branch) + & (Project.commit_sha == commit_sha) & (Project.project_id != project.project_id) ) except ValueError: @@ -170,7 +184,7 @@ def _clone_project( "user.email": user.email, "pull.rebase": False, }, - checkout_revision=project.branch, + checkout_revision=commit_sha or project.branch, ) ).output project.save() @@ -186,6 +200,9 @@ def _clone_project( def _unshallow_project(self, project: Project, user: User): """Turn a shallow clone into a full clone.""" + if project.commit_sha is not None: + # NOTE: A project in a detached head state at a specific commit SHA does not make sense to be unshallowed + return try: with project.write_lock(), Repository(project.abs_path) as repository: try: @@ -208,6 +225,10 @@ def _maybe_update_cache(self, project: Project, user: User): if project.fetch_age < PROJECT_FETCH_TIME: return + if project.commit_sha is not None: + # NOTE: A project in a detached head state at a specific commit SHA cannot be updated + return + try: with project.write_lock(), Repository(project.abs_path) as repository: try: diff --git a/renku/ui/service/interfaces/__init__.py b/renku/ui/service/interfaces/__init__.py index 2a18f628f8..a45d5f6119 100644 --- a/renku/ui/service/interfaces/__init__.py +++ b/renku/ui/service/interfaces/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/jobs/__init__.py b/renku/ui/service/jobs/__init__.py index 88c78d7ba4..5ebb863196 100644 --- a/renku/ui/service/jobs/__init__.py +++ b/renku/ui/service/jobs/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/jobs/cleanup.py b/renku/ui/service/jobs/cleanup.py index 836bb711c4..ece12a6511 100644 --- a/renku/ui/service/jobs/cleanup.py +++ b/renku/ui/service/jobs/cleanup.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/jobs/constants.py b/renku/ui/service/jobs/constants.py index a431885304..9e5613e05d 100644 --- a/renku/ui/service/jobs/constants.py +++ b/renku/ui/service/jobs/constants.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/jobs/contexts.py b/renku/ui/service/jobs/contexts.py index 85cfa47ed0..ee9d34cb8d 100644 --- a/renku/ui/service/jobs/contexts.py +++ b/renku/ui/service/jobs/contexts.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/jobs/datasets.py b/renku/ui/service/jobs/datasets.py index 35b2e361fc..f1bc8f0176 100644 --- a/renku/ui/service/jobs/datasets.py +++ b/renku/ui/service/jobs/datasets.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -116,7 +115,7 @@ def dataset_add_remote_file(cache, user, user_job_id, project_id, create_dataset worker_log.debug(f"adding files {urls} to dataset {name}") command = add_to_dataset_command().with_commit_message(commit_message).build() - result = command.execute(dataset_name=name, urls=urls, create=create_dataset) + result = command.execute(dataset_slug=name, urls=urls, create=create_dataset) if result.error: raise result.error diff --git a/renku/ui/service/jobs/delayed_ctrl.py b/renku/ui/service/jobs/delayed_ctrl.py index 8787982047..b27a0206cf 100644 --- a/renku/ui/service/jobs/delayed_ctrl.py +++ b/renku/ui/service/jobs/delayed_ctrl.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/jobs/queues.py b/renku/ui/service/jobs/queues.py index 832879244f..57da70d29c 100644 --- a/renku/ui/service/jobs/queues.py +++ b/renku/ui/service/jobs/queues.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/logger.py b/renku/ui/service/logger.py index 903f4f8d1d..20b909e1c8 100644 --- a/renku/ui/service/logger.py +++ b/renku/ui/service/logger.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/__init__.py b/renku/ui/service/serializers/__init__.py index 7243e946ba..6911917d31 100644 --- a/renku/ui/service/serializers/__init__.py +++ b/renku/ui/service/serializers/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/cache.py b/renku/ui/service/serializers/cache.py index 62be4f0e7e..81cdb64fd9 100644 --- a/renku/ui/service/serializers/cache.py +++ b/renku/ui/service/serializers/cache.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -32,6 +31,7 @@ AsyncSchema, ErrorResponse, FileDetailsSchema, + GitUrlResponseMixin, RemoteRepositorySchema, RenkuSyncSchema, ) @@ -241,7 +241,7 @@ class ProjectMigrateRequest(AsyncSchema, RemoteRepositorySchema): skip_migrations = fields.Boolean(dump_default=False) -class ProjectMigrateResponse(RenkuSyncSchema): +class ProjectMigrateResponse(RenkuSyncSchema, GitUrlResponseMixin): """Response schema for project migrate.""" was_migrated = fields.Boolean() @@ -376,7 +376,7 @@ def get_obj_type(self, obj): return "error" -class ProjectMigrationCheckResponse(Schema): +class ProjectMigrationCheckResponse(GitUrlResponseMixin): """Response schema for project migration check.""" project_supported = fields.Boolean( diff --git a/renku/ui/service/serializers/common.py b/renku/ui/service/serializers/common.py index b406bb8a90..05ad54e549 100644 --- a/renku/ui/service/serializers/common.py +++ b/renku/ui/service/serializers/common.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -29,7 +28,7 @@ class RemoteRepositoryBaseSchema(Schema): """Schema for tracking a remote repository.""" - git_url = fields.String(metadata={"description": "Remote git repository url."}) + git_url = fields.String(required=True, metadata={"description": "Remote git repository url."}) @pre_load def normalize_url(self, data, **_): @@ -68,6 +67,12 @@ def set_branch_from_ref(self, data, **_): return data +class GitCommitSHA: + """Schema for a commit SHA.""" + + commit_sha = fields.String(load_default=None, metadata={"description": "Git commit SHA."}) + + class AsyncSchema(Schema): """Schema for adding a commit at the end of the operation.""" @@ -163,3 +168,9 @@ class ErrorResponse(Schema): userReference = fields.String() devReference = fields.String() sentry = fields.String() + + +class GitUrlResponseMixin(Schema): + """Response containing a git url.""" + + git_url = fields.String(required=True, metadata={"description": "Remote git repository url."}) diff --git a/renku/ui/service/serializers/config.py b/renku/ui/service/serializers/config.py index e5bfac5623..10a1d8711a 100644 --- a/renku/ui/service/serializers/config.py +++ b/renku/ui/service/serializers/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,11 +17,18 @@ from marshmallow import Schema, fields -from renku.ui.service.serializers.common import AsyncSchema, MigrateSchema, RemoteRepositorySchema, RenkuSyncSchema +from renku.ui.service.serializers.common import ( + AsyncSchema, + GitCommitSHA, + GitUrlResponseMixin, + MigrateSchema, + RemoteRepositorySchema, + RenkuSyncSchema, +) from renku.ui.service.serializers.rpc import JsonRPCResponse -class ConfigShowRequest(RemoteRepositorySchema): +class ConfigShowRequest(RemoteRepositorySchema, GitCommitSHA): """Request schema for config show.""" @@ -32,7 +38,7 @@ class ConfigShowSchema(Schema): config = fields.Dict(metadata={"description": "Dictionary of configuration items."}, required=True) -class ConfigShowResponse(ConfigShowSchema): +class ConfigShowResponse(ConfigShowSchema, GitUrlResponseMixin): """Response schema for project config show.""" default = fields.Dict(metadata={"description": "Dictionary of default configuration items."}, required=True) @@ -48,7 +54,7 @@ class ConfigSetRequest(AsyncSchema, ConfigShowSchema, MigrateSchema, RemoteRepos """Request schema for config set.""" -class ConfigSetResponse(ConfigShowSchema, RenkuSyncSchema): +class ConfigSetResponse(ConfigShowSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for project config set.""" default = fields.Dict(metadata={"description": "Dictionary of default configuration items."}) diff --git a/renku/ui/service/serializers/datasets.py b/renku/ui/service/serializers/datasets.py index a75569ae6a..512c45cadd 100644 --- a/renku/ui/service/serializers/datasets.py +++ b/renku/ui/service/serializers/datasets.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,9 +19,11 @@ from renku.domain_model.dataset import DatasetCreatorsJson as DatasetCreators from renku.domain_model.dataset import DatasetDetailsJson as DatasetDetails from renku.domain_model.dataset import ImageObjectJson as ImageObject -from renku.domain_model.dataset import ImageObjectRequestJson as ImageObjectRequest +from renku.domain_model.dataset import ImageObjectRequestJson from renku.ui.service.serializers.common import ( AsyncSchema, + GitCommitSHA, + GitUrlResponseMixin, JobDetailsResponse, MigrateSchema, RemoteRepositorySchema, @@ -31,16 +32,16 @@ from renku.ui.service.serializers.rpc import JsonRPCResponse -class DatasetNameSchema(Schema): - """Schema for dataset name.""" +class DatasetSlugSchema(Schema): + """Schema for dataset slug.""" - name = fields.String(metadata={"description": "Mandatory dataset name."}, required=True) + slug = fields.String(metadata={"description": "Mandatory dataset slug."}, required=True) class DatasetDetailsRequest(DatasetDetails): """Request schema with dataset image information.""" - images = fields.List(fields.Nested(ImageObjectRequest)) + images = fields.List(fields.Nested(ImageObjectRequestJson)) custom_metadata: fields.Field = fields.Dict() @@ -51,11 +52,11 @@ class DatasetCreateRequest(AsyncSchema, DatasetDetailsRequest, RemoteRepositoryS # NOTE: Override field in DatasetDetails data_directory = fields.String( # type: ignore load_default=None, - metadata={"description": "Base dataset data directory. '/' by default"}, + metadata={"description": "Base dataset data directory. '/' by default"}, ) -class DatasetCreateResponse(DatasetNameSchema, RenkuSyncSchema): +class DatasetCreateResponse(DatasetSlugSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for a dataset create view.""" @@ -65,11 +66,11 @@ class DatasetCreateResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetCreateResponse) -class DatasetRemoveRequest(AsyncSchema, DatasetNameSchema, RemoteRepositorySchema, MigrateSchema): +class DatasetRemoveRequest(AsyncSchema, DatasetSlugSchema, RemoteRepositorySchema, MigrateSchema): """Request schema for a dataset remove.""" -class DatasetRemoveResponse(DatasetNameSchema, RenkuSyncSchema): +class DatasetRemoveResponse(DatasetSlugSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for a dataset create view.""" @@ -88,7 +89,7 @@ class DatasetAddFile(Schema): job_id = fields.String() -class DatasetAddRequest(AsyncSchema, DatasetNameSchema, RemoteRepositorySchema, MigrateSchema): +class DatasetAddRequest(AsyncSchema, DatasetSlugSchema, RemoteRepositorySchema, MigrateSchema): """Request schema for a dataset add file view.""" files = fields.List(fields.Nested(DatasetAddFile), required=True) @@ -108,7 +109,7 @@ def check_files(self, data, **kwargs): return data -class DatasetAddResponse(DatasetNameSchema, RenkuSyncSchema): +class DatasetAddResponse(DatasetSlugSchema, RenkuSyncSchema, GitUrlResponseMixin): """Response schema for a dataset add file view.""" project_id = fields.String(required=True) @@ -121,7 +122,7 @@ class DatasetAddResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetAddResponse) -class DatasetListRequest(RemoteRepositorySchema): +class DatasetListRequest(RemoteRepositorySchema, GitCommitSHA): """Request schema for dataset list view.""" @@ -131,7 +132,7 @@ class DatasetDetailsResponse(DatasetDetails): images = fields.List(fields.Nested(ImageObject)) -class DatasetListResponse(Schema): +class 
DatasetListResponse(GitUrlResponseMixin): """Response schema for dataset list view.""" datasets = fields.List(fields.Nested(DatasetDetailsResponse), required=True) @@ -143,19 +144,20 @@ class DatasetListResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetListResponse) -class DatasetFilesListRequest(DatasetNameSchema, RemoteRepositorySchema): +class DatasetFilesListRequest(DatasetSlugSchema, RemoteRepositorySchema, GitCommitSHA): """Request schema for dataset files list view.""" -class DatasetFileDetails(DatasetNameSchema): +class DatasetFileDetails(Schema): """Serialize dataset files to a response object.""" + name = fields.String(metadata={"description": "Mandatory dataset file name."}, required=True) path = fields.String() created = fields.DateTime() added = fields.DateTime() -class DatasetFilesListResponse(DatasetNameSchema): +class DatasetFilesListResponse(DatasetSlugSchema, GitUrlResponseMixin): """Response schema for dataset files list view.""" files = fields.List(fields.Nested(DatasetFileDetails), required=True) @@ -171,12 +173,12 @@ class DatasetImportRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): """Dataset import request.""" dataset_uri = fields.String(required=True) - name = fields.String(metadata={"description": "Optional dataset name."}) + slug = fields.String(metadata={"description": "Optional dataset slug."}) extract = fields.Boolean() tag = fields.String(metadata={"description": "Dataset version to import."}) data_directory = fields.String( load_default=None, - metadata={"description": "Base dataset data directory. '/' by default"}, + metadata={"description": "Base dataset data directory. '/' by default"}, ) @@ -189,18 +191,18 @@ class DatasetImportResponseRPC(JsonRPCResponse): class DatasetEditRequest( AsyncSchema, DatasetDetailsRequest, - DatasetNameSchema, + DatasetSlugSchema, RemoteRepositorySchema, MigrateSchema, ): """Dataset edit metadata request.""" - title = fields.String(metadata={"description": "New title of the dataset"}) + name = fields.String(metadata={"description": "New name of the dataset"}) description = fields.String(metadata={"description": "New description of the dataset"}) creators = fields.List(fields.Nested(DatasetCreators), metadata={"description": "New creators of the dataset"}) keywords = fields.List(fields.String(), allow_none=True, metadata={"description": "New keywords for the dataset"}) images = fields.List( - fields.Nested(ImageObjectRequest), allow_none=True, metadata={"description": "New dataset images"} + fields.Nested(ImageObjectRequestJson), allow_none=True, metadata={"description": "New dataset images"} ) custom_metadata = fields.List( fields.Dict(), metadata={"description": "New custom metadata for the dataset"}, allow_none=True @@ -211,7 +213,7 @@ class DatasetEditRequest( ) -class DatasetEditResponse(RenkuSyncSchema): +class DatasetEditResponse(RenkuSyncSchema, GitUrlResponseMixin): """Dataset edit metadata response.""" edited = fields.Dict(required=True) @@ -224,7 +226,7 @@ class DatasetEditResponseRPC(JsonRPCResponse): result = fields.Nested(DatasetEditResponse) -class DatasetUnlinkRequest(AsyncSchema, DatasetNameSchema, RemoteRepositorySchema, MigrateSchema): +class DatasetUnlinkRequest(AsyncSchema, DatasetSlugSchema, RemoteRepositorySchema, MigrateSchema): """Dataset unlink file request.""" include_filters = fields.List(fields.String()) @@ -242,7 +244,7 @@ def check_filters(self, data, **kwargs): return data -class DatasetUnlinkResponse(RenkuSyncSchema): +class DatasetUnlinkResponse(RenkuSyncSchema, 
GitUrlResponseMixin): """Dataset unlink files response.""" unlinked = fields.List(fields.String()) diff --git a/renku/ui/service/serializers/graph.py b/renku/ui/service/serializers/graph.py index f7081e9be8..9fcb388bde 100644 --- a/renku/ui/service/serializers/graph.py +++ b/renku/ui/service/serializers/graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,11 +16,11 @@ """Renku graph serializers.""" from marshmallow import Schema, fields, validate -from renku.ui.service.serializers.common import AsyncSchema, MigrateSchema, RemoteRepositorySchema +from renku.ui.service.serializers.common import AsyncSchema, GitCommitSHA, MigrateSchema, RemoteRepositorySchema from renku.ui.service.serializers.rpc import JsonRPCResponse -class GraphExportRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): +class GraphExportRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema, GitCommitSHA): """Request schema for dataset list view.""" callback_url = fields.URL() diff --git a/renku/ui/service/serializers/headers.py b/renku/ui/service/serializers/headers.py index eaf67d263f..2eca365afd 100644 --- a/renku/ui/service/serializers/headers.py +++ b/renku/ui/service/serializers/headers.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/jobs.py b/renku/ui/service/serializers/jobs.py index 857e2b2a5f..344ed8f59c 100644 --- a/renku/ui/service/serializers/jobs.py +++ b/renku/ui/service/serializers/jobs.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/project.py b/renku/ui/service/serializers/project.py index e93fa1a41d..8405fdab5b 100644 --- a/renku/ui/service/serializers/project.py +++ b/renku/ui/service/serializers/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -19,8 +18,10 @@ from marshmallow.schema import Schema from renku.domain_model.dataset import DatasetCreatorsJson as DatasetCreators +from renku.domain_model.dataset import ImageObjectRequestJson from renku.ui.service.serializers.common import ( AsyncSchema, + GitCommitSHA, MigrateSchema, RemoteRepositoryBaseSchema, RemoteRepositorySchema, @@ -29,7 +30,7 @@ from renku.ui.service.serializers.rpc import JsonRPCResponse -class ProjectShowRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): +class ProjectShowRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema, GitCommitSHA): """Project show metadata request.""" @@ -78,6 +79,7 @@ class ProjectEditRequest(AsyncSchema, RemoteRepositorySchema, MigrateSchema): metadata={"description": "The source for the JSON-LD metadata"}, ) keywords = fields.List(fields.String(), allow_none=True, metadata={"description": "New keyword(s) for the project"}) + image = fields.Nested(ImageObjectRequestJson, allow_none=True, metadata={"description": "Image for the project"}) class ProjectEditResponse(RenkuSyncSchema): diff --git a/renku/ui/service/serializers/rpc.py b/renku/ui/service/serializers/rpc.py index c3882c9465..8d68d25c58 100644 --- a/renku/ui/service/serializers/rpc.py +++ b/renku/ui/service/serializers/rpc.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/templates.py b/renku/ui/service/serializers/templates.py index 783f5ca519..1154987ab3 100644 --- a/renku/ui/service/serializers/templates.py +++ b/renku/ui/service/serializers/templates.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,29 +17,51 @@ from urllib.parse import urlparse -from marshmallow import Schema, ValidationError, fields, post_load, pre_load -from yagup import GitURL +import yagup +from marshmallow import Schema, ValidationError, fields, post_load, pre_load, validates from yagup.exceptions import InvalidURL from renku.core.util.os import normalize_to_ascii +from renku.domain_model.dataset import ImageObjectRequestJson from renku.ui.service.config import TEMPLATE_CLONE_DEPTH_DEFAULT -from renku.ui.service.serializers.cache import ProjectCloneContext, RepositoryCloneRequest +from renku.ui.service.errors import UserRepoUrlInvalidError +from renku.ui.service.serializers.common import GitUrlResponseMixin from renku.ui.service.serializers.rpc import JsonRPCResponse from renku.ui.service.utils import normalize_git_url -class ManifestTemplatesRequest(RepositoryCloneRequest): +class ManifestTemplatesRequest(Schema): """Request schema for listing manifest templates.""" - url = fields.String(required=True) + template_git_url = fields.String(required=True, metadata={"description": "Template git repository url."}) depth = fields.Integer(load_default=TEMPLATE_CLONE_DEPTH_DEFAULT) + ref = fields.String(load_default=None, metadata={"description": "Remote git branch (or tag or commit SHA)."}) + + @pre_load + def set_fields(self, data, **_): + """Set `ref` field from `branch` if present and set template url.""" + if "branch" in data and not data.get("ref"): + # Backward compatibility: branch and ref were both used. Let's keep branch as the exposed field + # even if internally it gets converted to "ref" later. + data["ref"] = data["branch"] + del data["branch"] + if "url" in data and not data.get("template_git_url"): + # needed for tests that share a fixture + data["template_git_url"] = data["url"] - @pre_load() - def set_git_url(self, data, **kwargs): - """Set git_url field.""" - data["git_url"] = data["url"] return data + @validates("template_git_url") + def validate_template_git_url(self, value): + """Validates git url.""" + if value: + try: + yagup.parse(value) + except InvalidURL as e: + raise UserRepoUrlInvalidError(e, "Invalid `template_git_url`") + + return value + class TemplateParameterSchema(Schema): """Manifest template schema.""" @@ -49,9 +70,10 @@ class TemplateParameterSchema(Schema): value = fields.String(load_default="") -class ProjectTemplateRequest(ProjectCloneContext, ManifestTemplatesRequest): +class ProjectTemplateRequest(ManifestTemplatesRequest): """Request schema for listing manifest templates.""" + token = fields.String() identifier = fields.String(required=True, metadata={"description": "Indentifier of the template"}) initial_branch = fields.String( load_default=None, metadata={"description": "Name for the initial branch in the new project."} @@ -70,19 +92,27 @@ class ProjectTemplateRequest(ProjectCloneContext, ManifestTemplatesRequest): data_directory = fields.String( load_default=None, metadata={"description": "Base dataset data directory in project. 
Defaults to 'data/'"} ) + image = fields.Nested(ImageObjectRequestJson, load_default=None) + slug = fields.String() + fullname = fields.String() + email = fields.String() @post_load() def add_required_fields(self, data, **kwargs): """Add necessary fields.""" + if "template_git_url" in data and data["template_git_url"]: + data["template_git_url"] = normalize_git_url(data["template_git_url"]) project_name_stripped = normalize_to_ascii(data["project_name"]) project_name_stripped = normalize_git_url(project_name_stripped) if len(project_name_stripped) == 0: raise ValidationError("Project name contains only unsupported characters") new_project_url = f"{data['project_repository']}/{data['project_namespace']}/{project_name_stripped}" try: - _ = GitURL.parse(new_project_url) + _ = yagup.GitURL.parse(new_project_url) except InvalidURL as e: - raise ValidationError("`git_url` contains unsupported characters") from e + raise ValidationError( + "`project_repository`, `project_namespace` and `project_name` do not form a valid git url" + ) from e project_slug = f"{data['project_namespace']}/{project_name_stripped}" data["new_project_url"] = new_project_url @@ -125,7 +155,7 @@ class ManifestTemplatesResponseRPC(JsonRPCResponse): result = fields.Nested(ManifestTemplatesResponse) -class ProjectTemplateResponse(Schema): +class ProjectTemplateResponse(GitUrlResponseMixin): """Response schema for dataset list view.""" url = fields.String(required=True) diff --git a/renku/ui/service/serializers/v1/__init__.py b/renku/ui/service/serializers/v1/__init__.py index 1622eb42a0..2ce0386b41 100644 --- a/renku/ui/service/serializers/v1/__init__.py +++ b/renku/ui/service/serializers/v1/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/v1/cache.py b/renku/ui/service/serializers/v1/cache.py index f7202d1412..d2ad081dce 100644 --- a/renku/ui/service/serializers/v1/cache.py +++ b/renku/ui/service/serializers/v1/cache.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/v1/datasets.py b/renku/ui/service/serializers/v1/datasets.py new file mode 100644 index 0000000000..ddb9c2031a --- /dev/null +++ b/renku/ui/service/serializers/v1/datasets.py @@ -0,0 +1,232 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Renku service datasets serializers for API before 2.2. + +In versions before 2.2, ``renku:slug`` was referred to as ``name`` and +``schema:name`` was ``title``. From version 2.2 onward, these are called +``slug`` and ``name`` respectively. +""" + +from marshmallow import Schema, ValidationError, fields, post_load + +from renku.domain_model.dataset import AnnotationJson +from renku.domain_model.dataset import DatasetCreatorsJson +from renku.domain_model.dataset import DatasetCreatorsJson as DatasetCreators +from renku.domain_model.dataset import ImageObjectJson as ImageObject +from renku.domain_model.dataset import ImageObjectRequestJson as ImageObjectRequest +from renku.ui.service.serializers.common import AsyncSchema, MigrateSchema, RemoteRepositorySchema, RenkuSyncSchema +from renku.ui.service.serializers.datasets import DatasetAddFile, DatasetFileDetails +from renku.ui.service.serializers.rpc import JsonRPCResponse + + +class DatasetNameSchema_2_1(Schema): + """Schema for dataset name.""" + + name = fields.String(metadata={"description": "Mandatory dataset name."}, required=True, attribute="slug") + + +class DatasetDetails_2_1(DatasetNameSchema_2_1): + """Schema for dataset details.""" + + version = fields.String(allow_none=True) + created_at = fields.String(allow_none=True, attribute="date_created") + + title = fields.String(attribute="name") + creators = fields.List(fields.Nested(DatasetCreatorsJson)) + description = fields.String() + keywords = fields.List(fields.String()) + identifier = fields.String() + storage = fields.String() + + annotations = fields.List(fields.Nested(AnnotationJson)) + + data_directory = fields.Method("get_datadir") + + @staticmethod + def get_datadir(obj): + """Get data directory.""" + if isinstance(obj, dict): + return str(obj.get("datadir_path", obj.get("datadir", ""))) + if hasattr(obj, "datadir_path"): + return obj.datadir_path + + return str(obj.get_datadir()) + + +class DatasetDetailsRequest_2_1(DatasetDetails_2_1): + """Request schema with dataset image information.""" + + images = fields.List(fields.Nested(ImageObjectRequest)) + + custom_metadata: fields.Field = fields.Dict() + + +class DatasetCreateRequest_2_1(AsyncSchema, DatasetDetailsRequest_2_1, RemoteRepositorySchema, MigrateSchema): + """Request schema for a dataset create view.""" + + # NOTE: Override field in DatasetDetails + data_directory = fields.String( # type: ignore + load_default=None, + metadata={"description": "Base dataset data directory. 
'/' by default"}, + ) + + +class DatasetCreateResponse_2_1(DatasetNameSchema_2_1, RenkuSyncSchema): + """Response schema for a dataset create view.""" + + +class DatasetCreateResponseRPC_2_1(JsonRPCResponse): + """RPC response schema for dataset create view.""" + + result = fields.Nested(DatasetCreateResponse_2_1) + + +class DatasetRemoveRequest_2_1(AsyncSchema, DatasetNameSchema_2_1, RemoteRepositorySchema, MigrateSchema): + """Request schema for a dataset remove.""" + + +class DatasetRemoveResponse_2_1(DatasetNameSchema_2_1, RenkuSyncSchema): + """Response schema for a dataset create view.""" + + +class DatasetRemoveResponseRPC_2_1(JsonRPCResponse): + """RPC response schema for dataset create view.""" + + result = fields.Nested(DatasetRemoveResponse_2_1) + + +class DatasetAddRequest_2_1(AsyncSchema, DatasetNameSchema_2_1, RemoteRepositorySchema, MigrateSchema): + """Request schema for a dataset add file view.""" + + files = fields.List(fields.Nested(DatasetAddFile), required=True) + + create_dataset = fields.Boolean(load_default=False) + force = fields.Boolean(load_default=False) + + client_extras = fields.String() + + @post_load() + def check_files(self, data, **_): + """Check serialized file list.""" + for _file in data["files"]: + if "file_id" in _file and "file_path" in _file: + raise ValidationError("invalid reference found: use either `file_id` or `file_path`") + + return data + + +class DatasetAddResponse_2_1(DatasetNameSchema_2_1, RenkuSyncSchema): + """Response schema for a dataset add file view.""" + + project_id = fields.String(required=True) + files = fields.List(fields.Nested(DatasetAddFile), required=True) + + +class DatasetAddResponseRPC_2_1(JsonRPCResponse): + """RPC schema for a dataset add.""" + + result = fields.Nested(DatasetAddResponse_2_1) + + +class DatasetDetailsResponse_2_1(DatasetDetails_2_1): + """Request schema with dataset image information.""" + + images = fields.List(fields.Nested(ImageObject)) + + +class DatasetListResponse_2_1(Schema): + """Response schema for dataset list view.""" + + datasets = fields.List(fields.Nested(DatasetDetailsResponse_2_1), required=True) + + +class DatasetListResponseRPC_2_1(JsonRPCResponse): + """RPC response schema for dataset list view.""" + + result = fields.Nested(DatasetListResponse_2_1) + + +class DatasetFilesListRequest_2_1(DatasetNameSchema_2_1, RemoteRepositorySchema): + """Request schema for dataset files list view.""" + + +class DatasetFilesListResponse_2_1(DatasetNameSchema_2_1): + """Response schema for dataset files list view.""" + + files = fields.List(fields.Nested(DatasetFileDetails), required=True) + + +class DatasetFilesListResponseRPC_2_1(JsonRPCResponse): + """RPC schema for dataset files list view.""" + + result = fields.Nested(DatasetFilesListResponse_2_1) + + +class DatasetImportRequest_2_1(AsyncSchema, RemoteRepositorySchema, MigrateSchema): + """Dataset import request.""" + + dataset_uri = fields.String(required=True) + name = fields.String(metadata={"description": "Optional dataset name."}, attribute="slug") + extract = fields.Boolean() + tag = fields.String(metadata={"description": "Dataset version to import."}) + data_directory = fields.String( + load_default=None, + metadata={"description": "Base dataset data directory. 
'/' by default"}, + ) + + +class DatasetEditRequest_2_1( + AsyncSchema, + DatasetDetailsRequest_2_1, + DatasetNameSchema_2_1, + RemoteRepositorySchema, + MigrateSchema, +): + """Dataset edit metadata request.""" + + title = fields.String(metadata={"description": "New name of the dataset"}, attribute="name") + description = fields.String(metadata={"description": "New description of the dataset"}) + creators = fields.List(fields.Nested(DatasetCreators), metadata={"description": "New creators of the dataset"}) + keywords = fields.List(fields.String(), allow_none=True, metadata={"description": "New keywords for the dataset"}) + images = fields.List( + fields.Nested(ImageObjectRequest), allow_none=True, metadata={"description": "New dataset images"} + ) + custom_metadata = fields.List( + fields.Dict(), metadata={"description": "New custom metadata for the dataset"}, allow_none=True + ) + custom_metadata_source = fields.String( + allow_none=True, + metadata={"description": "Source for the custom metadata for the dataset"}, + ) + + +class DatasetUnlinkRequest_2_1(AsyncSchema, DatasetNameSchema_2_1, RemoteRepositorySchema, MigrateSchema): + """Dataset unlink file request.""" + + include_filters = fields.List(fields.String()) + exclude_filters = fields.List(fields.String()) + + @post_load() + def check_filters(self, data, **_): + """Check filters.""" + include_filter = data.get("include_filters") + exclude_filter = data.get("exclude_filters") + + if not include_filter and not exclude_filter: + raise ValidationError("one of the filters must be specified") + + return data diff --git a/renku/ui/service/serializers/v1/templates.py b/renku/ui/service/serializers/v1/templates.py index b6e4d77cf1..841d7ab92a 100644 --- a/renku/ui/service/serializers/v1/templates.py +++ b/renku/ui/service/serializers/v1/templates.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,9 +15,19 @@ # limitations under the License. 
"""Renku service template serializers.""" -from marshmallow import Schema, fields +from urllib.parse import urlparse + +from marshmallow import Schema, ValidationError, fields, post_load, pre_load +from yagup import GitURL +from yagup.exceptions import InvalidURL +from renku.core.util.os import normalize_to_ascii +from renku.domain_model.dataset import ImageObjectRequestJson +from renku.ui.service.config import TEMPLATE_CLONE_DEPTH_DEFAULT +from renku.ui.service.serializers.cache import ProjectCloneContext, RepositoryCloneRequest from renku.ui.service.serializers.rpc import JsonRPCResponse +from renku.ui.service.serializers.templates import TemplateParameterSchema +from renku.ui.service.utils import normalize_git_url class ManifestTemplateSchema_1_5(Schema): @@ -45,3 +54,67 @@ class ManifestTemplatesResponseRPC_1_5(JsonRPCResponse): """RPC schema for listing manifest templates.""" result = fields.Nested(ManifestTemplatesResponse_1_5) + + +class ManifestTemplatesRequest_v2_2(RepositoryCloneRequest): + """Request schema for listing manifest templates.""" + + url = fields.String(required=True) + depth = fields.Integer(load_default=TEMPLATE_CLONE_DEPTH_DEFAULT) + + @pre_load() + def set_git_url(self, data, **kwargs): + """Set git_url field.""" + if data.get("url"): + data["git_url"] = data.get("url") + data["template_git_url"] = data["git_url"] + return data + + +class ProjectTemplateRequest_v2_2(ProjectCloneContext, ManifestTemplatesRequest_v2_2): + """Request schema for listing manifest templates.""" + + identifier = fields.String(required=True, metadata={"description": "Indentifier of the template"}) + initial_branch = fields.String( + load_default=None, metadata={"description": "Name for the initial branch in the new project."} + ) + parameters = fields.List( + fields.Nested(TemplateParameterSchema), load_default=[], metadata={"description": "Template parameters"} + ) + project_name = fields.String(required=True, metadata={"description": "Project name"}) + project_namespace = fields.String(required=True, metadata={"description": "Project namespace"}) + project_repository = fields.String(required=True, metadata={"description": "Project remote repository"}) + project_description = fields.String(load_default=None, metadata={"description": "Project description"}) + project_keywords = fields.List(fields.String(), load_default=None, metadata={"description": "Project keywords"}) + project_custom_metadata = fields.Dict( + load_default=None, metadata={"description": "Project custom JSON-LD metadata"} + ) + data_directory = fields.String( + load_default=None, metadata={"description": "Base dataset data directory in project. 
Defaults to 'data/'"} + ) + image = fields.Nested(ImageObjectRequestJson, load_default=None) + + @post_load() + def add_required_fields(self, data, **kwargs): + """Add necessary fields.""" + data["template_git_url"] = data.get("git_url") + project_name_stripped = normalize_to_ascii(data["project_name"]) + project_name_stripped = normalize_git_url(project_name_stripped) + if len(project_name_stripped) == 0: + raise ValidationError("Project name contains only unsupported characters") + new_project_url = f"{data['project_repository']}/{data['project_namespace']}/{project_name_stripped}" + try: + _ = GitURL.parse(new_project_url) + except InvalidURL as e: + raise ValidationError("`git_url` contains unsupported characters") from e + + project_slug = f"{data['project_namespace']}/{project_name_stripped}" + data["new_project_url"] = new_project_url + data["project_name_stripped"] = project_name_stripped + data["project_slug"] = project_slug + + new_project_url_parsed = urlparse(new_project_url) + url = "oauth2:{}@{}".format(data["token"], new_project_url_parsed.netloc) + data["new_project_url_with_auth"] = new_project_url_parsed._replace(netloc=url).geturl() + + return data diff --git a/renku/ui/service/serializers/version.py b/renku/ui/service/serializers/version.py index 4ba8c8c787..49bcd97423 100644 --- a/renku/ui/service/serializers/version.py +++ b/renku/ui/service/serializers/version.py @@ -1,5 +1,5 @@ -# Copyright 2022 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/versions_list.py b/renku/ui/service/serializers/versions_list.py index 9fa27d2525..ccb023071f 100644 --- a/renku/ui/service/serializers/versions_list.py +++ b/renku/ui/service/serializers/versions_list.py @@ -1,5 +1,5 @@ -# Copyright 2022 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/serializers/workflows.py b/renku/ui/service/serializers/workflows.py index 65a2c0a8d3..e6fe36fb5a 100644 --- a/renku/ui/service/serializers/workflows.py +++ b/renku/ui/service/serializers/workflows.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,11 +22,11 @@ from renku.domain_model.dataset import DatasetCreatorsJson from renku.infrastructure.persistent import Persistent from renku.ui.cli.utils.plugins import get_supported_formats -from renku.ui.service.serializers.common import RemoteRepositorySchema +from renku.ui.service.serializers.common import GitCommitSHA, GitUrlResponseMixin, RemoteRepositorySchema from renku.ui.service.serializers.rpc import JsonRPCResponse -class WorkflowPlansListRequest(RemoteRepositorySchema): +class WorkflowPlansListRequest(RemoteRepositorySchema, GitCommitSHA): """Request schema for plan list view.""" @@ -74,7 +73,7 @@ class WorflowPlanEntryResponse(AbstractPlanResponse): children = fields.List(fields.String) -class WorkflowPlansListResponse(Schema): +class WorkflowPlansListResponse(GitUrlResponseMixin): """Response schema for plan list view.""" plans = fields.List(fields.Nested(WorflowPlanEntryResponse), required=True) @@ -86,7 +85,7 @@ class WorkflowPlansListResponseRPC(JsonRPCResponse): result = fields.Nested(WorkflowPlansListResponse) -class WorkflowPlansShowRequest(RemoteRepositorySchema): +class WorkflowPlansShowRequest(RemoteRepositorySchema, GitCommitSHA): """Request schema for plan show view.""" plan_id = fields.String(required=True) @@ -137,7 +136,7 @@ class ParameterSchema(ParameterBaseSchema): pass -class PlanDetailsResponse(AbstractPlanResponse): +class PlanDetailsResponse(AbstractPlanResponse, GitUrlResponseMixin): """Schema for Plan details.""" last_executed = fields.DateTime() @@ -191,7 +190,7 @@ class LinkSchema(Schema): sink_entries = fields.List(fields.Nested(ParameterTargetSchema), data_key="sinks") -class CompositePlanDetailsResponse(AbstractPlanResponse): +class CompositePlanDetailsResponse(AbstractPlanResponse, GitUrlResponseMixin): """Schema for Plan details.""" steps = fields.List(fields.Nested(PlanReferenceSchema), data_key="plans") @@ -223,7 +222,7 @@ class WorkflowPlansShowResponseRPC(JsonRPCResponse): ) -class WorkflowPlansExportRequest(RemoteRepositorySchema): +class WorkflowPlansExportRequest(RemoteRepositorySchema, GitCommitSHA): """Request schema for exporting a plan.""" plan_id = fields.String(required=True) diff --git a/renku/ui/service/utils/callback.py b/renku/ui/service/utils/callback.py index 5af07af52e..5b1f8ca103 100644 --- a/renku/ui/service/utils/callback.py +++ b/renku/ui/service/utils/callback.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/utils/json_encoder.py b/renku/ui/service/utils/json_encoder.py index 3c78b90ad8..29876322c9 100644 --- a/renku/ui/service/utils/json_encoder.py +++ b/renku/ui/service/utils/json_encoder.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/utils/squash.py b/renku/ui/service/utils/squash.py index 986989c90a..81cbff8ee7 100644 --- a/renku/ui/service/utils/squash.py +++ b/renku/ui/service/utils/squash.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/utils/timeout.py b/renku/ui/service/utils/timeout.py index 60e0b06d33..c25499ec7f 100644 --- a/renku/ui/service/utils/timeout.py +++ b/renku/ui/service/utils/timeout.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/__init__.py b/renku/ui/service/views/__init__.py index 2a11f1a13f..5e0017656c 100644 --- a/renku/ui/service/views/__init__.py +++ b/renku/ui/service/views/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/api_versions.py b/renku/ui/service/views/api_versions.py index e7511a3f8a..d4074bbbd9 100644 --- a/renku/ui/service/views/api_versions.py +++ b/renku/ui/service/views/api_versions.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -63,12 +62,20 @@ def add_url_rule( V1_4 = ApiVersion("1.4") V1_5 = ApiVersion("1.5") V2_0 = ApiVersion("2.0") -V2_1 = ApiVersion("2.1", is_base_version=True) +V2_1 = ApiVersion("2.1") +V2_2 = ApiVersion("2.2", is_base_version=True) -VERSIONS_FROM_V1_5 = [V1_5, V2_0, V2_1] +VERSIONS_FROM_V2_2 = [V2_2] +VERSIONS_FROM_V2_1 = [V2_1] + VERSIONS_FROM_V2_2 +VERSIONS_FROM_V2_0 = [V2_0] + VERSIONS_FROM_V2_1 +VERSIONS_FROM_V1_5 = [V1_5] + VERSIONS_FROM_V2_0 VERSIONS_FROM_V1_4 = [V1_4] + VERSIONS_FROM_V1_5 VERSIONS_FROM_V1_1 = [V1_1, V1_2, V1_3] + VERSIONS_FROM_V1_4 ALL_VERSIONS = [V1_0] + VERSIONS_FROM_V1_1 +VERSIONS_BEFORE_1_1 = [V1_0] +VERSIONS_BEFORE_2_0 = [V1_1, V1_2, V1_3, V1_4, V1_5] + VERSIONS_BEFORE_1_1 +VERSIONS_BEFORE_2_2 = [V2_0, V2_1] + VERSIONS_BEFORE_2_0 + MINIMUM_VERSION = V1_0 -MAXIMUM_VERSION = V2_1 +MAXIMUM_VERSION = V2_2 diff --git a/renku/ui/service/views/apispec.py b/renku/ui/service/views/apispec.py index 19f3f64cf3..a696d1cc35 100644 --- a/renku/ui/service/views/apispec.py +++ b/renku/ui/service/views/apispec.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/cache.py b/renku/ui/service/views/cache.py index 9803c25e4d..aa5df0bade 100644 --- a/renku/ui/service/views/cache.py +++ b/renku/ui/service/views/cache.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
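
The reworked lists in `api_versions.py` above are built purely by list concatenation: each `VERSIONS_FROM_X` collects X and everything newer, each `VERSIONS_BEFORE_X` everything older, so endpoints can register against a single name instead of enumerating versions. A toy check of that invariant, with plain strings standing in for the `ApiVersion` objects:

    # Stand-ins for the ApiVersion instances; the list algebra is what matters.
    V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 = "1.0", "1.1", "1.2", "1.3", "1.4", "1.5"
    V2_0, V2_1, V2_2 = "2.0", "2.1", "2.2"

    VERSIONS_FROM_V2_2 = [V2_2]
    VERSIONS_FROM_V2_1 = [V2_1] + VERSIONS_FROM_V2_2
    VERSIONS_FROM_V2_0 = [V2_0] + VERSIONS_FROM_V2_1
    VERSIONS_BEFORE_1_1 = [V1_0]
    VERSIONS_BEFORE_2_0 = [V1_1, V1_2, V1_3, V1_4, V1_5] + VERSIONS_BEFORE_1_1
    VERSIONS_BEFORE_2_2 = [V2_0, V2_1] + VERSIONS_BEFORE_2_0

    # "Before 2.2" and "from 2.2" partition the full version range with no overlap.
    all_versions = {V1_0, V1_1, V1_2, V1_3, V1_4, V1_5, V2_0, V2_1, V2_2}
    assert set(VERSIONS_BEFORE_2_2) | set(VERSIONS_FROM_V2_2) == all_versions
    assert not set(VERSIONS_BEFORE_2_2) & set(VERSIONS_FROM_V2_2)
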
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -26,14 +25,20 @@ from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider from renku.ui.service.gateways.repository_cache import LocalRepositoryCache from renku.ui.service.jobs.cleanup import cache_files_cleanup -from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, V2_1, VERSIONS_FROM_V1_1, VersionedBlueprint +from renku.ui.service.views.api_versions import ( + ALL_VERSIONS, + VERSIONS_FROM_V1_1, + VERSIONS_FROM_V2_0, + VERSIONS_FROM_V2_1, + VersionedBlueprint, +) from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, handle_migration_read_errors, handle_migration_write_errors, ) -from renku.ui.service.views.v1.cache import add_v1_specific_endpoints +from renku.ui.service.views.v1.cache import add_v1_specific_cache_endpoints CACHE_BLUEPRINT_TAG = "cache" cache_blueprint = VersionedBlueprint("cache", __name__, url_prefix=SERVICE_PREFIX) @@ -156,7 +161,7 @@ def migrate_project_view(user_data, cache): @cache_blueprint.route( - "/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=[V2_0, V2_1] + "/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_0 ) @handle_common_except @handle_migration_read_errors @@ -184,7 +189,7 @@ def migration_check_project_view(user_data, cache): return MigrationsCheckCtrl(cache, user_data, dict(request.args), GitlabAPIProvider()).to_response() -@cache_blueprint.route("/cache.cleanup", methods=["GET"], provide_automatic_options=False, versions=[V2_1]) +@cache_blueprint.route("/cache.cleanup", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_1) @handle_common_except @handle_migration_read_errors @requires_cache @@ -210,4 +215,4 @@ def cache_cleanup(user_data, cache): return jsonify({"result": "ok"}) -cache_blueprint = add_v1_specific_endpoints(cache_blueprint) +cache_blueprint = add_v1_specific_cache_endpoints(cache_blueprint) diff --git a/renku/ui/service/views/config.py b/renku/ui/service/views/config.py index 60c89e294b..85baed6a90 100644 --- a/renku/ui/service/views/config.py +++ b/renku/ui/service/views/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/datasets.py b/renku/ui/service/views/datasets.py index 39b6b9d6c3..3b2765dc82 100644 --- a/renku/ui/service/views/datasets.py +++ b/renku/ui/service/views/datasets.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -26,19 +25,22 @@ from renku.ui.service.controllers.datasets_list import DatasetsListCtrl from renku.ui.service.controllers.datasets_remove import DatasetsRemoveCtrl from renku.ui.service.controllers.datasets_unlink import DatasetsUnlinkCtrl -from renku.ui.service.views.api_versions import ALL_VERSIONS, VersionedBlueprint +from renku.ui.service.views.api_versions import VERSIONS_FROM_V2_2, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, handle_datasets_unlink_errors, handle_datasets_write_errors, ) +from renku.ui.service.views.v1.datasets import add_v1_specific_dataset_endpoints DATASET_BLUEPRINT_TAG = "datasets" dataset_blueprint = VersionedBlueprint(DATASET_BLUEPRINT_TAG, __name__, url_prefix=SERVICE_PREFIX) -@dataset_blueprint.route("/datasets.list", methods=["GET"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @requires_cache @optional_identity @@ -65,7 +67,7 @@ def list_datasets_view(user_data, cache): @dataset_blueprint.route( - "/datasets.files_list", methods=["GET"], provide_automatic_options=False, versions=ALL_VERSIONS + "/datasets.files_list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 ) @handle_common_except @requires_cache @@ -92,7 +94,9 @@ def list_dataset_files_view(user_data, cache): return DatasetsFilesListCtrl(cache, user_data, dict(request.args)).to_response() -@dataset_blueprint.route("/datasets.add", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.add", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_write_errors @accepts_json @@ -121,7 +125,9 @@ def add_file_to_dataset_view(user_data, cache): return DatasetsAddFileCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.create", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.create", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_write_errors @accepts_json @@ -150,7 +156,9 @@ def create_dataset_view(user_data, cache): return DatasetsCreateCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.remove", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.remove", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @accepts_json @requires_cache @@ -178,7 +186,9 @@ def remove_dataset_view(user_data, cache): return DatasetsRemoveCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.import", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.import", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @accepts_json @requires_cache @@ -206,7 +216,9 @@ def import_dataset_view(user_data, cache): return DatasetsImportCtrl(cache, user_data, 
dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.edit", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.edit", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_write_errors @accepts_json @@ -237,7 +249,9 @@ def edit_dataset_view(user_data, cache): return DatasetsEditCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -@dataset_blueprint.route("/datasets.unlink", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS) +@dataset_blueprint.route( + "/datasets.unlink", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 +) @handle_common_except @handle_datasets_unlink_errors @accepts_json @@ -264,3 +278,6 @@ def unlink_file_view(user_data, cache): - datasets """ return DatasetsUnlinkCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore + + +dataset_blueprint = add_v1_specific_dataset_endpoints(dataset_blueprint) diff --git a/renku/ui/service/views/decorators.py b/renku/ui/service/views/decorators.py index c2edec4c34..8aea47be6f 100644 --- a/renku/ui/service/views/decorators.py +++ b/renku/ui/service/views/decorators.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/error_handlers.py b/renku/ui/service/views/error_handlers.py index 0e828d6f0c..5cfab9e0bc 100644 --- a/renku/ui/service/views/error_handlers.py +++ b/renku/ui/service/views/error_handlers.py @@ -1,6 +1,5 @@ -# -# Copyright 2022-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -360,7 +359,7 @@ def decorated_function(*args, **kwargs): error_message = str(e) if "Duplicate dataset image" in error_message: raise UserDatasetsMultipleImagesError(e) - elif "couldn't be mirrored" in error_message: + elif "Cannot download image with url" in error_message: raise UserDatasetsUnreachableImageError(e) raise except ValidationError as e: diff --git a/renku/ui/service/views/graph.py b/renku/ui/service/views/graph.py index 20cf315097..4b49202c1b 100644 --- a/renku/ui/service/views/graph.py +++ b/renku/ui/service/views/graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/jobs.py b/renku/ui/service/views/jobs.py index 6529b2adb2..65ff86c05f 100644 --- a/renku/ui/service/views/jobs.py +++ b/renku/ui/service/views/jobs.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/project.py b/renku/ui/service/views/project.py index 3b70b30318..2221682486 100644 --- a/renku/ui/service/views/project.py +++ b/renku/ui/service/views/project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/templates.py b/renku/ui/service/views/templates.py index f515840abb..f6e746147c 100644 --- a/renku/ui/service/views/templates.py +++ b/renku/ui/service/views/templates.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,21 +19,21 @@ from renku.ui.service.config import SERVICE_PREFIX from renku.ui.service.controllers.templates_create_project import TemplatesCreateProjectCtrl from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl -from renku.ui.service.views.api_versions import ALL_VERSIONS, V2_0, V2_1, VersionedBlueprint +from renku.ui.service.views.api_versions import VERSIONS_FROM_V2_2, VersionedBlueprint from renku.ui.service.views.decorators import accepts_json, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, handle_templates_create_errors, handle_templates_read_errors, ) -from renku.ui.service.views.v1.templates import add_v1_specific_endpoints +from renku.ui.service.views.v1.templates import add_v1_specific_template_endpoints, add_v2_specific_template_endpoints TEMPLATES_BLUEPRINT_TAG = "templates" templates_blueprint = VersionedBlueprint(TEMPLATES_BLUEPRINT_TAG, __name__, url_prefix=SERVICE_PREFIX) @templates_blueprint.route( - "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=[V2_0, V2_1] + "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 ) @handle_common_except @handle_templates_read_errors @@ -74,7 +73,7 @@ def read_manifest_from_template(user_data, cache): @templates_blueprint.route( - "/templates.create_project", methods=["POST"], provide_automatic_options=False, versions=ALL_VERSIONS + "/templates.create_project", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_FROM_V2_2 ) @handle_common_except @handle_templates_create_errors @@ -104,4 +103,5 @@ def 
create_project_from_template(user_data, cache): return TemplatesCreateProjectCtrl(cache, user_data, dict(request.json)).to_response() # type: ignore -templates_blueprint = add_v1_specific_endpoints(templates_blueprint) +templates_blueprint = add_v1_specific_template_endpoints(templates_blueprint) +templates_blueprint = add_v2_specific_template_endpoints(templates_blueprint) diff --git a/renku/ui/service/views/v1/__init__.py b/renku/ui/service/views/v1/__init__.py index 8553cdf809..cccabad448 100644 --- a/renku/ui/service/views/v1/__init__.py +++ b/renku/ui/service/views/v1/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/v1/cache.py b/renku/ui/service/views/v1/cache.py index 78101db73a..f721d60772 100644 --- a/renku/ui/service/views/v1/cache.py +++ b/renku/ui/service/views/v1/cache.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -25,7 +24,7 @@ from renku.ui.service.gateways.gitlab_api_provider import GitlabAPIProvider from renku.ui.service.serializers.v1.cache import ProjectMigrateResponseRPC_1_0, ProjectMigrationCheckResponseRPC_1_5 from renku.ui.service.views import result_response -from renku.ui.service.views.api_versions import V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 +from renku.ui.service.views.api_versions import VERSIONS_BEFORE_1_1, VERSIONS_BEFORE_2_0 from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity from renku.ui.service.views.error_handlers import ( handle_common_except, @@ -118,15 +117,12 @@ def migration_check_project_view_1_5(user_data, cache): return result_response(ProjectMigrationCheckResponseRPC_1_5(), asdict(result)) -def add_v1_specific_endpoints(cache_blueprint): +def add_v1_specific_cache_endpoints(cache_blueprint): """Add v1 only endpoints to blueprint.""" - cache_blueprint.route("/cache.migrate", methods=["POST"], provide_automatic_options=False, versions=[V1_0])( - migrate_project_view_1_0 - ) cache_blueprint.route( - "/cache.migrations_check", - methods=["GET"], - provide_automatic_options=False, - versions=[V1_0, V1_1, V1_2, V1_3, V1_4, V1_5], + "/cache.migrate", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_1_1 + )(migrate_project_view_1_0) + cache_blueprint.route( + "/cache.migrations_check", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_0 )(migration_check_project_view_1_5) return cache_blueprint diff --git a/renku/ui/service/views/v1/datasets.py b/renku/ui/service/views/v1/datasets.py new file mode 100644 index 0000000000..214f8cbbd4 --- /dev/null +++ b/renku/ui/service/views/v1/datasets.py @@ -0,0 +1,285 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Renku service datasets view.""" +from flask import request + +from renku.ui.service.config import SERVICE_PREFIX +from renku.ui.service.controllers.v1.datasets_add_file import DatasetsAddFileCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_create import DatasetsCreateCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_edit import DatasetsEditCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_files_list import DatasetsFilesListCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_import import DatasetsImportCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_list import DatasetsListCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_remove import DatasetsRemoveCtrl_2_1 +from renku.ui.service.controllers.v1.datasets_unlink import DatasetsUnlinkCtrl_2_1 +from renku.ui.service.views.api_versions import VERSIONS_BEFORE_2_2, VersionedBlueprint +from renku.ui.service.views.decorators import accepts_json, optional_identity, requires_cache, requires_identity +from renku.ui.service.views.error_handlers import ( + handle_common_except, + handle_datasets_unlink_errors, + handle_datasets_write_errors, +) + +DATASET_BLUEPRINT_TAG = "datasets" +dataset_blueprint = VersionedBlueprint(DATASET_BLUEPRINT_TAG, __name__, url_prefix=SERVICE_PREFIX) + + +@handle_common_except +@requires_cache +@optional_identity +def list_datasets_view_2_1(user_data, cache): + """ + List all datasets in a project. + + --- + get: + description: List all datasets in a project. + parameters: + - in: query + schema: DatasetListRequest + responses: + 200: + description: Listing of all datasets in a project. + content: + application/json: + schema: DatasetListResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsListCtrl_2_1(cache, user_data, dict(request.args)).to_response() + + +@handle_common_except +@requires_cache +@optional_identity +def list_dataset_files_view_2_1(user_data, cache): + """ + List files in a dataset. + + --- + get: + description: List files in a dataset. + parameters: + - in: query + schema: DatasetFilesListRequest_2_1 + responses: + 200: + description: Listing of all files in a dataset. + content: + application/json: + schema: DatasetFilesListResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsFilesListCtrl_2_1(cache, user_data, dict(request.args)).to_response() + + +@handle_common_except +@handle_datasets_write_errors +@accepts_json +@requires_cache +@requires_identity +def add_file_to_dataset_view_2_1(user_data, cache): + """ + Add the uploaded file to a cloned repository. + + --- + post: + description: Add the uploaded file to a cloned repository. + requestBody: + content: + application/json: + schema: DatasetAddRequest_2_1 + responses: + 200: + description: Details of the added files. 
+ content: + application/json: + schema: DatasetAddResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsAddFileCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@handle_datasets_write_errors +@accepts_json +@requires_cache +@requires_identity +def create_dataset_view_2_1(user_data, cache): + """ + Create a new dataset in a project. + + --- + post: + description: Create a new dataset in a project. + requestBody: + content: + application/json: + schema: DatasetCreateRequest_2_1 + responses: + 200: + description: Properties of the created dataset. + content: + application/json: + schema: DatasetCreateResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsCreateCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@accepts_json +@requires_cache +@requires_identity +def remove_dataset_view_2_1(user_data, cache): + """ + Remove a dataset from a project. + + --- + post: + description: Remove a dataset from a project. + requestBody: + content: + application/json: + schema: DatasetRemoveRequest_2_1 + responses: + 200: + description: Details of the removed dataset. + content: + application/json: + schema: DatasetRemoveResponseRPC_2_1 + tags: + - datasets + """ + return DatasetsRemoveCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@accepts_json +@requires_cache +@requires_identity +def import_dataset_view_2_1(user_data, cache): + """ + Import a dataset view. + + --- + post: + description: Import a dataset into a project. + requestBody: + content: + application/json: + schema: DatasetImportRequest_2_1 + responses: + 200: + description: Details of the dispatched import dataset job. + content: + application/json: + schema: DatasetImportResponseRPC + tags: + - datasets + """ + return DatasetsImportCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@handle_datasets_write_errors +@accepts_json +@requires_cache +@requires_identity +def edit_dataset_view_2_1(user_data, cache): + """ + Edit dataset metadata view. + + Not passing a field leaves it unchanged. + + --- + post: + description: Edit dataset metadata. + requestBody: + content: + application/json: + schema: DatasetEditRequest_2_1 + responses: + 200: + description: Status of the requested dataset edits. + content: + application/json: + schema: DatasetEditResponseRPC + tags: + - datasets + """ + return DatasetsEditCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +@handle_common_except +@handle_datasets_unlink_errors +@accepts_json +@requires_cache +@requires_identity +def unlink_file_view_2_1(user_data, cache): + """ + Unlink a file from a dataset view. + + --- + post: + description: Unlink a file from a dataset. + requestBody: + content: + application/json: + schema: DatasetUnlinkRequest_2_1 + responses: + 200: + description: Details of the unlinked files. 
+ content: + application/json: + schema: DatasetUnlinkResponseRPC + tags: + - datasets + """ + return DatasetsUnlinkCtrl_2_1(cache, user_data, dict(request.json)).to_response() # type: ignore + + +def add_v1_specific_dataset_endpoints(dataset_blueprint): + """Add v1 only endpoints to blueprint.""" + dataset_blueprint.route( + "/datasets.list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(list_datasets_view_2_1) + dataset_blueprint.route( + "/datasets.files_list", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(list_dataset_files_view_2_1) + dataset_blueprint.route( + "/datasets.add", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(add_file_to_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.create", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(create_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.remove", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(remove_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.import", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(import_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.edit", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(edit_dataset_view_2_1) + dataset_blueprint.route( + "/datasets.unlink", methods=["POST"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_2 + )(unlink_file_view_2_1) + + return dataset_blueprint diff --git a/renku/ui/service/views/v1/templates.py b/renku/ui/service/views/v1/templates.py index 85d4380bb6..0ecf134944 100644 --- a/renku/ui/service/views/v1/templates.py +++ b/renku/ui/service/views/v1/templates.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,10 +17,15 @@ from flask import request from renku.ui.service.controllers.templates_read_manifest import TemplatesReadManifestCtrl +from renku.ui.service.controllers.v1.templates import TemplatesCreateProjectCtrl_v2_2, TemplatesReadManifestCtrl_v2_2 from renku.ui.service.serializers.v1.templates import ManifestTemplatesResponseRPC_1_5 -from renku.ui.service.views.api_versions import V1_0, V1_1, V1_2, V1_3, V1_4, V1_5 -from renku.ui.service.views.decorators import requires_cache, requires_identity -from renku.ui.service.views.error_handlers import handle_common_except, handle_templates_read_errors +from renku.ui.service.views.api_versions import V2_0, V2_1, V2_2, VERSIONS_BEFORE_2_0 +from renku.ui.service.views.decorators import accepts_json, requires_cache, requires_identity +from renku.ui.service.views.error_handlers import ( + handle_common_except, + handle_templates_create_errors, + handle_templates_read_errors, +) @handle_common_except @@ -64,12 +68,88 @@ def read_manifest_from_template_1_5(user_data, cache): return ctrl.to_response() -def add_v1_specific_endpoints(templates_blueprint): +@handle_common_except +@handle_templates_read_errors +@requires_cache +@requires_identity +def read_manifest_from_template_v2_2(user_data, cache): + """ + Read templates from the manifest file of a template repository. 
+
+ ---
+ get:
+ description: Read templates from the manifest file of a template repository.
+ parameters:
+ - in: query
+ name: url
+ required: true
+ schema:
+ type: string
+ - in: query
+ name: ref
+ schema:
+ type: string
+ - in: query
+ name: depth
+ schema:
+ type: string
+ responses:
+ 200:
+ description: Listing of templates in the repository.
+ content:
+ application/json:
+ schema: ManifestTemplatesResponseRPC
+ tags:
+ - templates
+ """
+ ctrl = TemplatesReadManifestCtrl_v2_2(cache, user_data, dict(request.args))
+ return ctrl.to_response()
+
+
+@handle_common_except
+@handle_templates_create_errors
+@accepts_json
+@requires_cache
+@requires_identity
+def create_project_from_template_v2_2(user_data, cache):
+ """
+ Create a new project using a remote template.
+
+ ---
+ post:
+ description: Create a new project using a remote template.
+ requestBody:
+ content:
+ application/json:
+ schema: ProjectTemplateRequest
+ responses:
+ 200:
+ description: Details of the created project.
+ content:
+ application/json:
+ schema: ProjectTemplateResponseRPC
+ tags:
+ - templates
+ """
+ ctrl = TemplatesCreateProjectCtrl_v2_2(cache, user_data, dict(request.json)) # type: ignore
+
+ return ctrl.to_response()
+
+
+def add_v1_specific_template_endpoints(templates_blueprint):
 """Add v1 only endpoints to blueprint."""
 templates_blueprint.route(
- "/templates.read_manifest",
- methods=["GET"],
- provide_automatic_options=False,
- versions=[V1_0, V1_1, V1_2, V1_3, V1_4, V1_5],
+ "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=VERSIONS_BEFORE_2_0
 )(read_manifest_from_template_1_5)
 return templates_blueprint
+
+
+def add_v2_specific_template_endpoints(templates_blueprint):
+ """Add old v2 endpoints to blueprint."""
+ templates_blueprint.route(
+ "/templates.read_manifest", methods=["GET"], provide_automatic_options=False, versions=[V2_0, V2_1, V2_2]
+ )(read_manifest_from_template_v2_2)
+ templates_blueprint.route(
+ "/templates.create_project", methods=["POST"], provide_automatic_options=False, versions=[V2_0, V2_1, V2_2]
+ )(create_project_from_template_v2_2)
+ return templates_blueprint
diff --git a/renku/ui/service/views/version.py b/renku/ui/service/views/version.py
index e667c24168..d0577b991b 100644
--- a/renku/ui/service/views/version.py
+++ b/renku/ui/service/views/version.py
@@ -1,5 +1,5 @@
-# Copyright 2020 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/renku/ui/service/views/versions_list.py b/renku/ui/service/views/versions_list.py
index f531891fbc..79bc646b94 100644
--- a/renku/ui/service/views/versions_list.py
+++ b/renku/ui/service/views/versions_list.py
@@ -1,5 +1,5 @@
-# Copyright 2022 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/views/workflow_plans.py b/renku/ui/service/views/workflow_plans.py index e43ee6104d..3147ed569a 100644 --- a/renku/ui/service/views/workflow_plans.py +++ b/renku/ui/service/views/workflow_plans.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/ui/service/worker.py b/renku/ui/service/worker.py index 5206bffca6..ee331d1211 100644 --- a/renku/ui/service/worker.py +++ b/renku/ui/service/worker.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/renku/version.py b/renku/version.py index 4a3f1379ac..fe38037948 100644 --- a/renku/version.py +++ b/renku/version.py @@ -25,7 +25,7 @@ __version__ = cast(str, version("renku")) __template_version__ = "0.7.1" -__minimum_project_version__ = "2.4.0" +__minimum_project_version__ = "2.8.0" def is_release(version: Optional[str] = None): diff --git a/run-tests.sh b/run-tests.sh index b691be4de8..34a0e36d80 100755 --- a/run-tests.sh +++ b/run-tests.sh @@ -1,8 +1,6 @@ #!/usr/bin/env sh -# -*- coding: utf-8 -*- -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/start-telepresence.sh b/start-telepresence.sh index 1738271d80..57ef830fe6 100755 --- a/start-telepresence.sh +++ b/start-telepresence.sh @@ -1,7 +1,6 @@ #!/bin/bash -# -# Copyright 2021 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/__init__.py b/tests/__init__.py index b865a30e91..558215f4c8 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/__init__.py b/tests/api/__init__.py index 59f3aebba3..673c1586cd 100644 --- a/tests/api/__init__.py +++ b/tests/api/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/test_activity.py b/tests/api/test_activity.py index 4f9a06cdcb..69ed5b49c3 100644 --- a/tests/api/test_activity.py +++ b/tests/api/test_activity.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/test_dataset.py b/tests/api/test_dataset.py index 6406f47cb3..dcb742130e 100644 --- a/tests/api/test_dataset.py +++ b/tests/api/test_dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -28,9 +27,9 @@ def test_list_datasets(project_with_datasets): with Project(): datasets = Dataset.list() - assert {"dataset-1", "dataset-2"} == {d.name for d in datasets} + assert {"dataset-1", "dataset-2"} == {d.slug for d in datasets} - dataset = next(d for d in Dataset.list() if d.name == "dataset-2") + dataset = next(d for d in Dataset.list() if d.slug == "dataset-2") assert {"P1", "P2"} == {c.name for c in dataset.creators} @@ -38,7 +37,7 @@ def test_list_datasets_outside_a_context(project_with_datasets): """Test listing datasets outside a project context.""" datasets = Dataset.list() - assert {"dataset-1", "dataset-2"} == {d.name for d in datasets} + assert {"dataset-1", "dataset-2"} == {d.slug for d in datasets} def test_list_datasets_outside_a_renku_project(directory_tree): @@ -58,7 +57,7 @@ def test_list_datasets_outside_a_renku_project(directory_tree): def test_list_dataset_files(project_with_datasets, dataset, files_paths): """Test listing datasets files.""" with Project() as project: - dataset = next(d for d in Dataset.list() if d.name == dataset) + dataset = next(d for d in Dataset.list() if d.slug == dataset) assert set(files_paths) == {f.path for f in dataset.files} assert {project.path / p for p in files_paths} == {d.full_path for d in dataset.files} diff --git a/tests/api/test_parameter.py b/tests/api/test_parameter.py index a4ce860791..34be2739a3 100644 --- a/tests/api/test_parameter.py +++ b/tests/api/test_parameter.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/test_plan.py b/tests/api/test_plan.py index 5ef221b361..6f54baff83 100644 --- a/tests/api/test_plan.py +++ b/tests/api/test_plan.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/test_project.py b/tests/api/test_project.py index 09545558e2..ed13d0bfd5 100644 --- a/tests/api/test_project.py +++ b/tests/api/test_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/api/test_rdfgraph.py b/tests/api/test_rdfgraph.py index fbf9eaf020..13d75982e6 100644 --- a/tests/api/test_rdfgraph.py +++ b/tests/api/test_rdfgraph.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_config.py b/tests/cli/test_config.py index be0f9dea06..ad77361e8c 100644 --- a/tests/cli/test_config.py +++ b/tests/cli/test_config.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_datasets.py b/tests/cli/test_datasets.py index b7aeb73ad3..e873447cc9 100644 --- a/tests/cli/test_datasets.py +++ b/tests/cli/test_datasets.py @@ -141,8 +141,8 @@ def test_dataset_show(runner, project, subdirectory, datadir_option, datadir): "dataset", "create", "my-dataset", - "--title", - "Long Title", + "--name", + "Long Name", "--description", "# t1\n## t2\nsome description here", "-c", @@ -164,11 +164,11 @@ def test_dataset_show(runner, project, subdirectory, datadir_option, datadir): result = runner.invoke(cli, ["dataset", "show", "my-dataset"]) assert 0 == result.exit_code, format_result_exception(result) assert "some description here" in result.output - assert "Long Title" in result.output + assert "Long Name" in result.output assert "keyword-1" in result.output assert "keyword-2" in result.output assert "Created: " in result.output - assert "Name: my-dataset" in result.output + assert "Slug: my-dataset" in result.output assert "John Doe " in result.output assert "some_unique_value" in result.output assert "https://schema.org/specialProperty" in result.output @@ -199,8 +199,8 @@ def test_dataset_show_tag(runner, project, subdirectory): "dataset", "create", "my-dataset", - "--title", - "Long Title", + "--name", + "Long Name", "--description", "description1", ], @@ -250,19 +250,19 @@ def test_dataset_show_tag(runner, project, subdirectory): assert "description3" not in result.output -def test_datasets_create_different_names(runner, project): - """Test creating datasets with same title but different name.""" - result = runner.invoke(cli, ["dataset", "create", "dataset-1", "--title", "title"]) +def test_datasets_create_different_slugs(runner, project): + """Test creating datasets with same name but different slugs.""" + result = runner.invoke(cli, ["dataset", "create", "dataset-1", "--name", "name"]) assert 0 == result.exit_code, format_result_exception(result) assert "OK" in result.output - result = runner.invoke(cli, ["dataset", "create", "dataset-2", "--title", "title"]) + result = runner.invoke(cli, ["dataset", "create", "dataset-2", "--name", "name"]) assert 0 == result.exit_code, format_result_exception(result) assert "OK" in result.output -def test_datasets_create_with_same_name(runner, project): - """Test creating datasets with same name.""" +def test_datasets_create_with_same_slug(runner, project): + """Test creating datasets with same slug.""" result = runner.invoke(cli, ["dataset", "create", "dataset"]) assert 0 == result.exit_code, format_result_exception(result) assert "OK" in result.output @@ -273,7 +273,7 @@ def test_datasets_create_with_same_name(runner, project): @pytest.mark.parametrize( - "name", + "slug", [ "any name /@#$!", "name longer than 24 characters", @@ -284,13 +284,13 @@ def test_datasets_create_with_same_name(runner, project): "name ends in.lock", ], ) -def test_datasets_invalid_name(runner, project, name): - """Test creating datasets with invalid name.""" - result = runner.invoke(cli, ["dataset", "create", name]) +def test_datasets_invalid_slug(runner, project, slug): + """Test creating datasets with invalid slug.""" + result = runner.invoke(cli, ["dataset", "create", slug]) assert 2 == result.exit_code - assert f"Dataset name '{name}' is not valid" in result.output - assert f"Hint: '{get_slug(name)}' is valid" in result.output + assert f"Dataset slug '{slug}' is not valid" in result.output + assert f"Hint: '{get_slug(slug)}' is valid" in result.output def 
test_datasets_create_dirty_exception_untracked(runner, project): @@ -442,15 +442,15 @@ def test_datasets_list_non_empty(output_format, runner, project, datadir_option, @pytest.mark.parametrize( "columns,headers,values", [ - ("title,short_name", ["TITLE", "NAME"], ["my-dataset", "Long Title"]), - ("title,name", ["TITLE", "NAME"], ["my-dataset", "Long Title"]), + ("title,short_name", ["NAME", "SLUG"], ["my-dataset", "Long Name"]), + ("name,slug", ["NAME", "SLUG"], ["my-dataset", "Long Name"]), ("creators", ["CREATORS"], ["John Doe"]), ], ) def test_datasets_list_with_columns(runner, project, columns, headers, values): """Test listing datasets with custom column name.""" result = runner.invoke( - cli, ["dataset", "create", "my-dataset", "--title", "Long Title", "-c", "John Doe "] + cli, ["dataset", "create", "my-dataset", "--name", "Long Name", "-c", "John Doe "] ) assert 0 == result.exit_code, format_result_exception(result) @@ -611,7 +611,7 @@ def test_multiple_file_to_dataset(tmpdir, runner, project): assert "OK" in result.output dataset = get_dataset_with_injection("dataset") - assert dataset.title == "dataset" + assert dataset.name == "dataset" paths = [] for i in range(3): @@ -760,7 +760,7 @@ def test_repository_file_to_dataset(runner, project, subdirectory): assert 0 == result.exit_code, format_result_exception(result) dataset = get_dataset_with_injection("dataset") - assert dataset.title == "dataset" + assert dataset.name == "dataset" assert dataset.find_file("data/dataset/a") is not None @@ -772,7 +772,7 @@ def test_relative_import_to_dataset(tmpdir, runner, project, subdirectory): assert "OK" in result.output dataset = get_dataset_with_injection("dataset") - assert dataset.title == "dataset" + assert dataset.name == "dataset" zero_data = tmpdir.join("zero.txt") zero_data.write("zero") @@ -858,7 +858,7 @@ def test_dataset_add_with_copy(tmpdir, runner, project): received_inodes = [] dataset = get_dataset_with_injection("my-dataset") - assert dataset.title == "my-dataset" + assert dataset.name == "my-dataset" for file in dataset.files: path = (project.path / file.entity.path).resolve() @@ -1010,12 +1010,12 @@ def test_datasets_ls_files_json(runner, project, tmpdir, large_file): assert file1["creators"] assert file1["size"] - assert file1["dataset_name"] + assert file1["dataset_slug"] assert file1["dataset_id"] assert file2["creators"] assert file2["size"] - assert file2["dataset_name"] + assert file2["dataset_slug"] assert file2["dataset_id"] @@ -1124,7 +1124,7 @@ def test_datasets_ls_files_correct_paths(runner, project, directory_tree): def test_datasets_ls_files_with_name(directory_tree, runner, project): """Test listing of data within dataset with include/exclude filters.""" # create a dataset - result = runner.invoke(cli, ["dataset", "create", "my-dataset", "--title", "Long Title"]) + result = runner.invoke(cli, ["dataset", "create", "my-dataset", "--name", "Long Name"]) assert 0 == result.exit_code, format_result_exception(result) # add data to dataset @@ -1301,7 +1301,7 @@ def test_dataset_edit(runner, project, dirty, subdirectory): result = runner.invoke( cli, - ["dataset", "create", "dataset", "-t", "original title", "-k", "keyword-1", "--metadata", str(metadata_path)], + ["dataset", "create", "dataset", "-n", "original name", "-k", "keyword-1", "--metadata", str(metadata_path)], ) assert 0 == result.exit_code, format_result_exception(result) @@ -1320,12 +1320,12 @@ def test_dataset_edit(runner, project, dirty, subdirectory): dataset = get_dataset_with_injection("dataset") 
assert " new description " == dataset.description - assert "original title" == dataset.title + assert "original name" == dataset.name assert {creator1, creator2}.issubset({c.full_identity for c in dataset.creators}) - result = runner.invoke(cli, ["dataset", "edit", "dataset", "-t", " new title "], catch_exceptions=False) + result = runner.invoke(cli, ["dataset", "edit", "dataset", "-n", " new name "], catch_exceptions=False) assert 0 == result.exit_code, format_result_exception(result) - assert "Successfully updated: title." in result.output + assert "Successfully updated: name." in result.output result = runner.invoke( cli, ["dataset", "edit", "dataset", "-k", "keyword-2", "-k", "keyword-3"], catch_exceptions=False @@ -1348,7 +1348,7 @@ def test_dataset_edit(runner, project, dirty, subdirectory): dataset = get_dataset_with_injection("dataset") assert " new description " == dataset.description - assert "new title" == dataset.title + assert "new name" == dataset.name assert {creator1, creator2}.issubset({c.full_identity for c in dataset.creators}) assert {"keyword-2", "keyword-3"} == set(dataset.keywords) assert 1 == len(dataset.annotations) @@ -1389,8 +1389,8 @@ def test_dataset_edit_metadata(runner, project, source, metadata): "dataset", "create", "dataset", - "-t", - "original title", + "-n", + "original name", "-k", "keyword-1", ] @@ -1442,8 +1442,8 @@ def test_dataset_edit_unset(runner, project, dirty, subdirectory): "dataset", "create", "dataset", - "-t", - "original title", + "-n", + "original name", "-c", "John Doe ", "-k", @@ -1470,7 +1470,7 @@ def test_dataset_edit_unset(runner, project, dirty, subdirectory): @pytest.mark.parametrize("dirty", [False, True]) def test_dataset_edit_no_change(runner, project, dirty): """Check metadata editing does not commit when there is no change.""" - result = runner.invoke(cli, ["dataset", "create", "dataset", "-t", "original title"]) + result = runner.invoke(cli, ["dataset", "create", "dataset", "-n", "original name"]) assert 0 == result.exit_code, format_result_exception(result) if dirty: @@ -1539,7 +1539,7 @@ def test_dataset_tag(tmpdir, runner, project, subdirectory): assert 0 == result.exit_code, format_result_exception(result) with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("my-dataset") + dataset = datasets_provenance.get_by_slug("my-dataset") all_tags = datasets_provenance.get_all_tags(dataset) assert {dataset.id} == {t.dataset_id.value for t in all_tags} @@ -2095,8 +2095,8 @@ def test_datasets_provenance_after_create(runner, project): "dataset", "create", "my-data", - "--title", - "Long Title", + "--name", + "Long Name", "--description", "some description here", "-c", @@ -2111,10 +2111,10 @@ def test_datasets_provenance_after_create(runner, project): assert 0 == runner.invoke(cli, args, catch_exceptions=False).exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("my-data") + dataset = datasets_provenance.get_by_slug("my-data") - assert "Long Title" == dataset.title - assert "my-data" == dataset.name + assert "Long Name" == dataset.name + assert "my-data" == dataset.slug assert "some description here" == dataset.description assert "John Doe" in [c.name for c in dataset.creators] assert "john.doe@mail.ch" in [c.email for c in dataset.creators] @@ -2134,7 +2134,7 @@ def test_datasets_provenance_after_create_when_adding(runner, project): assert 0 == runner.invoke(cli, ["dataset", "add", "--copy", "--create", 
"my-data", "README.md"]).exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("my-data") + dataset = datasets_provenance.get_by_slug("my-data") assert dataset.initial_identifier == dataset.identifier assert dataset.derived_from is None @@ -2152,7 +2152,7 @@ def test_datasets_provenance_after_edit(runner, project): dataset = get_dataset_with_injection("my-data") with get_datasets_provenance_with_injection() as datasets_provenance: - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") old_version = datasets_provenance.get_previous_version(current_version) assert_dataset_is_mutated(old=old_version, new=dataset) @@ -2172,7 +2172,7 @@ def test_datasets_provenance_after_add(runner, project, directory_tree): ) with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("my-data") + dataset = datasets_provenance.get_by_slug("my-data") path = os.path.join(DATA_DIR, "my-data", "file1") file = dataset.find_file(path) @@ -2197,7 +2197,7 @@ def test_datasets_provenance_after_multiple_adds(runner, project, directory_tree assert 1 == len(provenance) - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") old_version = datasets_provenance.get_by_id(current_version.derived_from.url_id) old_dataset_file_ids = {f.id for f in old_version.files} @@ -2221,7 +2221,7 @@ def test_datasets_provenance_after_add_with_overwrite(runner, project, directory assert 1 == len(provenance) - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") old_version = datasets_provenance.get_by_id(current_version.derived_from.url_id) old_dataset_file_ids = {f.id for f in old_version.files} @@ -2238,7 +2238,7 @@ def test_datasets_provenance_after_file_unlink(runner, project, directory_tree): dataset = get_dataset_with_injection("my-data") with get_datasets_provenance_with_injection() as datasets_provenance: - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") old_version = datasets_provenance.get_by_id(Dataset.generate_id(dataset.initial_identifier)) path = os.path.join(DATA_DIR, "my-data", directory_tree.name, "file1") @@ -2260,7 +2260,7 @@ def test_datasets_provenance_after_remove(runner, project, directory_tree): assert 0 == runner.invoke(cli, ["dataset", "rm", "my-data"]).exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") provenance = datasets_provenance.get_provenance_tails() assert current_version is None @@ -2283,7 +2283,7 @@ def test_datasets_provenance_after_adding_tag(runner, project): with get_datasets_provenance_with_injection() as datasets_provenance: provenance = datasets_provenance.get_provenance_tails() - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") assert 1 == len(provenance) assert current_version.identifier == current_version.initial_identifier @@ -2303,7 +2303,7 @@ def test_datasets_provenance_after_removing_tag(runner, project): with get_datasets_provenance_with_injection() as datasets_provenance: provenance = datasets_provenance.get_provenance_tails() - current_version = 
datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") assert 1 == len(provenance) assert current_version.identifier == current_version.initial_identifier @@ -2321,7 +2321,7 @@ def test_datasets_provenance_multiple(runner, project, directory_tree): assert 0 == runner.invoke(cli, ["dataset", "unlink", "my-data", "--include", "*/dir1/*"], input="y").exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - tail_dataset = datasets_provenance.get_by_name("my-data", immutable=True) + tail_dataset = datasets_provenance.get_by_slug("my-data", immutable=True) provenance = datasets_provenance.get_provenance_tails() # NOTE: We only keep the tail of provenance chain for each dataset in the provenance diff --git a/tests/cli/test_errors.py b/tests/cli/test_errors.py index 303e64a2de..a7c5d84886 100644 --- a/tests/cli/test_errors.py +++ b/tests/cli/test_errors.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2019- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_gc.py b/tests/cli/test_gc.py index 95a54e8ee6..2bbd1c5d49 100644 --- a/tests/cli/test_gc.py +++ b/tests/cli/test_gc.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_gitignore.py b/tests/cli/test_gitignore.py index c124594ae7..14590609ec 100644 --- a/tests/cli/test_gitignore.py +++ b/tests/cli/test_gitignore.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_graph.py b/tests/cli/test_graph.py index 3f02bb7676..45e5560f38 100644 --- a/tests/cli/test_graph.py +++ b/tests/cli/test_graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_indirect.py b/tests/cli/test_indirect.py index 447c407a3d..9a4e36d31c 100644 --- a/tests/cli/test_indirect.py +++ b/tests/cli/test_indirect.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_init.py b/tests/cli/test_init.py index 12538aeeee..fbc5295a75 100644 --- a/tests/cli/test_init.py +++ b/tests/cli/test_init.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -471,3 +470,45 @@ def test_init_with_description(isolated_runner, template): result = isolated_runner.invoke(cli, ["graph", "export", "--strict"]) assert 0 == result.exit_code, format_result_exception(result) assert "my project description" in result.output + + +@pytest.mark.parametrize( + "image", + [Path(__file__).parent / ".." / "data" / "renku.png", "https://en.wikipedia.org/static/images/icons/wikipedia.png"], +) +def test_init_with_image(isolated_runner, template, image): + """Test project initialization with image.""" + result = isolated_runner.invoke(cli, ["init", "--image", image, "new-project", "-t", template["id"]]) + + assert 0 == result.exit_code, format_result_exception(result) + + project = Database.from_path(Path("new-project") / ".renku" / "metadata").get("project") + + assert (Path("new-project") / ".renku" / "images" / "project" / "0.png").exists() + assert ".renku/images/project/0.png" == project.image.content_url + assert 0 == project.image.position + + +@pytest.mark.parametrize( + "image, error", + [ + ("non-existing.png", "Image with local path"), + ("/non-existing", "Image with local path"), + ("https://example.com/non-existing.png", "Cannot download image with url"), + ], +) +def test_init_with_non_existing_image(isolated_runner, template, image, error): + """Test project initialization fails when the image doesn't exist.""" + result = isolated_runner.invoke(cli, ["init", "--image", image, "new-project", "-t", template["id"]]) + + assert 1 == result.exit_code, format_result_exception(result) + assert error in result.output + + +@pytest.mark.parametrize("image", [Path(__file__).parent / ".." 
/ "data" / "workflow-file.yml", "/etc/passwd"]) +def test_init_with_non_image_files(isolated_runner, template, image): + """Test project initialization fails when the file isn't an image format.""" + result = isolated_runner.invoke(cli, ["init", "--image", image, "new-project", "-t", template["id"]]) + + assert 1 == result.exit_code, format_result_exception(result) + assert "isn't a valid image file" in result.output diff --git a/tests/cli/test_integration_datasets.py b/tests/cli/test_integration_datasets.py index c6974d81c9..1664cac698 100644 --- a/tests/cli/test_integration_datasets.py +++ b/tests/cli/test_integration_datasets.py @@ -58,7 +58,7 @@ [ { "doi": "10.5281/zenodo.2658634", - "name": "pyndl_naive_discr_v0.6.4", + "slug": "pyndl_naive_discr_v0.6.4", "creator": "Konstantin Sering, Marc Weitz, David-Elias Künstle, Lennart Schneider", "version": "v0.6.4", "keywords": { @@ -71,7 +71,7 @@ }, { "doi": "10.7910/DVN/F4NUMR", - "name": "replication_data_for_2.2", + "slug": "replication_data_for_2.2", "creator": "James Druckman, Martin Kifer, Michael Parkin", "version": "2", "keywords": {"Social Sciences"}, @@ -98,17 +98,17 @@ def test_dataset_import_real_doi(runner, project, doi, prefix, sleep_after): assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) assert "OK" in result.output + str(result.stderr_bytes) - result = runner.invoke(cli, ["dataset", "ls", "-c", "name,creators"]) + result = runner.invoke(cli, ["dataset", "ls", "-c", "slug,creators"]) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) - assert doi["name"] in result.output + assert doi["slug"] in result.output assert doi["creator"] in result.output - result = runner.invoke(cli, ["dataset", "ls-tags", doi["name"]]) + result = runner.invoke(cli, ["dataset", "ls-tags", doi["slug"]]) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) assert doi["version"] in result.output - dataset = get_dataset_with_injection(doi["name"]) + dataset = get_dataset_with_injection(doi["slug"]) assert doi["doi"] in dataset.same_as.url assert dataset.date_created is None assert dataset.date_published is not None @@ -151,7 +151,7 @@ def test_dataset_import_real_doi(runner, project, doi, prefix, sleep_after): @pytest.mark.vcr def test_dataset_import_real_param(doi, input, runner, project, sleep_after): """Test dataset import and check metadata parsing.""" - result = runner.invoke(cli, ["dataset", "import", "--name", "remote", doi], input=input) + result = runner.invoke(cli, ["dataset", "import", "--slug", "remote", doi], input=input) if "y" == input: assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) @@ -208,7 +208,7 @@ def test_dataset_import_real_doi_warnings(runner, project, sleep_after): result = runner.invoke(cli, ["dataset", "ls"]) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) - assert "pyndl_naive_discr_v1.1.1" in result.output + assert "pyndl_naive_discr_v1.1.2" in result.output @pytest.mark.parametrize( @@ -261,7 +261,7 @@ def test_dataset_import_real_http(runner, project, url, sleep_after): def test_dataset_import_and_extract(runner, project, sleep_after): """Test dataset import and extract files.""" url = "https://zenodo.org/record/2658634" - result = runner.invoke(cli, ["dataset", "import", "--extract", "--short-name", "remote", url], input="y") + result = runner.invoke(cli, ["dataset", "import", "--extract", "--slug", "remote", url], input="y") 
assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) dataset = get_dataset_with_injection("remote") @@ -276,11 +276,11 @@ def test_dataset_import_and_extract(runner, project, sleep_after): def test_dataset_import_different_names(runner, project, sleep_after): """Test can import same DOI under different names.""" doi = "10.5281/zenodo.2658634" - result = runner.invoke(cli, ["dataset", "import", "--short-name", "name-1", doi], input="y") + result = runner.invoke(cli, ["dataset", "import", "--slug", "name-1", doi], input="y") assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) assert "OK" in result.output + str(result.stderr_bytes) - result = runner.invoke(cli, ["dataset", "import", "--short-name", "name-2", doi], input="y") + result = runner.invoke(cli, ["dataset", "import", "--slug", "name-2", doi], input="y") assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) assert "OK" in result.output @@ -304,13 +304,13 @@ def test_dataset_import_ignore_uncompressed_files(runner, project, sleep_after): def test_dataset_reimport_removed_dataset(runner, project, sleep_after): """Test re-importing of deleted datasets works.""" doi = "10.5281/zenodo.2658634" - result = runner.invoke(cli, ["dataset", "import", doi, "--short-name", "my-dataset"], input="y") + result = runner.invoke(cli, ["dataset", "import", doi, "--slug", "my-dataset"], input="y") assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) result = runner.invoke(cli, ["dataset", "rm", "my-dataset"]) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) - result = runner.invoke(cli, ["dataset", "import", doi, "--short-name", "my-dataset"], input="y") + result = runner.invoke(cli, ["dataset", "import", doi, "--slug", "my-dataset"], input="y") assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) @@ -508,7 +508,7 @@ def test_dataset_import_renkulab_errors(runner, project, url, exit_code): [ ( "https://dev.renku.ch/projects/renku-test-projects/dataset-import/datasets/non-existing-dataset", - "Cannot fetch dataset with name 'non-existing-dataset'", + "Cannot fetch dataset with slug 'non-existing-dataset'", ), ( "https://dev.renku.ch/projects/invalid/project-path/datasets/860f6b5b46364c83b6a9b38ef198bcc0", @@ -986,7 +986,7 @@ def test_export_dataverse_no_dataverse_url(runner, project, dataverse_demo): @pytest.mark.vcr def test_export_imported_dataset_to_dataverse(runner, project, dataverse_demo, zenodo_sandbox): """Test exporting an imported Zenodo dataset to dataverse.""" - result = runner.invoke(cli, ["dataset", "import", "10.5281/zenodo.2658634", "--short-name", "my-data"], input="y") + result = runner.invoke(cli, ["dataset", "import", "10.5281/zenodo.2658634", "--slug", "my-data"], input="y") assert 0 == result.exit_code, format_result_exception(result) result = runner.invoke( @@ -1226,7 +1226,7 @@ def test_dataset_update(project, runner, params): def test_dataset_update_zenodo(project, runner, doi): """Test updating datasets from external providers.""" result = runner.invoke( - cli, ["dataset", "import", "--short-name", "imported_dataset", doi], input="y", catch_exceptions=False + cli, ["dataset", "import", "--slug", "imported_dataset", doi], input="y", catch_exceptions=False ) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) commit_sha_after_file1_delete = project.repository.head.commit.hexsha @@ 
-1265,7 +1265,7 @@ def test_dataset_update_dataverse(project, runner, doi, with_injection): Since dataverse does not have DOIs/IDs for each version, we need to fake the check. """ result = runner.invoke( - cli, ["dataset", "import", "--short-name", "imported_dataset", doi], input="y", catch_exceptions=False + cli, ["dataset", "import", "--slug", "imported_dataset", doi], input="y", catch_exceptions=False ) assert 0 == result.exit_code, format_result_exception(result) + str(result.stderr_bytes) @@ -1888,7 +1888,7 @@ def test_datasets_provenance_after_import(runner, project): assert 0 == runner.invoke(cli, ["dataset", "import", "-y", "--name", "my-data", "10.7910/DVN/F4NUMR"]).exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - assert datasets_provenance.get_by_name("my-data") is not None + assert datasets_provenance.get_by_slug("my-data") is not None @pytest.mark.integration @@ -1903,7 +1903,7 @@ def test_datasets_provenance_after_git_update(project, runner): assert 0 == runner.invoke(cli, ["dataset", "update", "--all"], catch_exceptions=False).exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") assert current_version.identifier != current_version.initial_identifier @@ -1919,7 +1919,7 @@ def test_datasets_provenance_after_external_provider_update(project, runner): assert 0 == runner.invoke(cli, ["dataset", "update", "my-data"]).exit_code with get_datasets_provenance_with_injection() as datasets_provenance: - current_version = datasets_provenance.get_by_name("my-data") + current_version = datasets_provenance.get_by_slug("my-data") assert current_version.identifier != current_version.initial_identifier @@ -1936,7 +1936,7 @@ def test_datasets_import_with_tag(project, runner): assert 0 == result.exit_code, format_result_exception(result) with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("parts") + dataset = datasets_provenance.get_by_slug("parts") dataset_path = project.path / "data" / "parts" assert "v1" == dataset.version @@ -1992,7 +1992,7 @@ def test_dataset_update_removes_deleted_files(project, runner, with_injection): project.repository.commit("metadata updated") with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("parts") + dataset = datasets_provenance.get_by_slug("parts") assert 4 == len(dataset.files) @@ -2000,7 +2000,7 @@ def test_dataset_update_removes_deleted_files(project, runner, with_injection): assert 0 == result.exit_code, format_result_exception(result) with get_datasets_provenance_with_injection() as datasets_provenance: - dataset = datasets_provenance.get_by_name("parts") + dataset = datasets_provenance.get_by_slug("parts") assert 2 == len(dataset.files) assert {"data/parts/part_categories.csv", "data/parts/parts.csv"} == {f.entity.path for f in dataset.files} @@ -2278,13 +2278,13 @@ def get_path(uri): mock_cloud_storage = mocker.patch("renku.infrastructure.storage.factory.StorageFactory.get_storage", autospec=True) instance_cloud_storage = mock_cloud_storage.return_value - dataset_name = "cloud-data" + dataset_slug = "cloud-data" instance_cloud_storage.get_hashes.return_value = [FileHash(uri=f, path=get_path(f), size=42, hash=f) for f in files] if storage: - result = create_cloud_storage_dataset(dataset_name, storage) + result = 
create_cloud_storage_dataset(dataset_slug, storage) assert result.exit_code == 0, format_result_exception(result) - result = runner.invoke(cli, ["dataset", "add", dataset_name, *args, *files], input="\n\nn\n") + result = runner.invoke(cli, ["dataset", "add", dataset_slug, *args, *files], input="\n\nn\n") assert result.exit_code == 0, format_result_exception(result) assert instance_cloud_storage.get_hashes.call_count == len(files) @@ -2359,14 +2359,14 @@ def test_invalid_cloud_storage_args( mock_cloud_storage_storage = mocker.patch( "renku.infrastructure.storage.factory.StorageFactory.get_storage", autospec=True ) - dataset_name = "test-cloud-dataset" + dataset_slug = "test-cloud-dataset" if "--create" not in cmd_args: instance_cloud_storage_storage = mock_cloud_storage_storage.return_value - res = create_cloud_storage_dataset(dataset_name, storage) + res = create_cloud_storage_dataset(dataset_slug, storage) assert res.exit_code == 0 instance_cloud_storage_storage.exists.assert_called_with(storage) - res = runner.invoke(cli, ["dataset", "add", dataset_name, *cmd_args]) + res = runner.invoke(cli, ["dataset", "add", dataset_slug, *cmd_args]) assert res.exit_code != 0 assert expected_error_msg in res.stderr diff --git a/tests/cli/test_isolation.py b/tests/cli/test_isolation.py index 3fb95993db..173f314257 100644 --- a/tests/cli/test_isolation.py +++ b/tests/cli/test_isolation.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_log.py b/tests/cli/test_log.py index 583952e0e6..7c90522278 100644 --- a/tests/cli/test_log.py +++ b/tests/cli/test_log.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -62,7 +61,7 @@ def test_dataset_log(runner, project): result = runner.invoke(cli, ["dataset", "add", "--copy", "test-set", "my_file"]) assert 0 == result.exit_code, format_result_exception(result) result = runner.invoke( - cli, ["dataset", "edit", "test-set", "-t", "new title", "-d", "new description", "-k", "a", "-k", "b"] + cli, ["dataset", "edit", "test-set", "-n", "new name", "-d", "new description", "-k", "a", "-k", "b"] ) assert 0 == result.exit_code, format_result_exception(result) result = runner.invoke(cli, ["dataset", "unlink", "test-set", "--include", "my_file"], input="y") @@ -80,7 +79,7 @@ def test_dataset_log(runner, project): assert "Files modified" in result.output assert "- data/test-set/my_file" in result.output assert "+ data/test-set/my_file" in result.output - assert "Title set to: new title" in result.output + assert "Name set to: new name" in result.output assert "Description set to: new description" in result.output assert "Keywords modified" in result.output assert "Creators modified" in result.output diff --git a/tests/cli/test_login.py b/tests/cli/test_login.py index 58149978bc..e6992c3cd4 100644 --- a/tests/cli/test_login.py +++ b/tests/cli/test_login.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_merge.py b/tests/cli/test_merge.py index 183d7c4bb2..a0af7c28f5 100644 --- a/tests/cli/test_merge.py +++ b/tests/cli/test_merge.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -142,7 +141,7 @@ def test_mergetool(runner, project, directory_tree, run_shell, with_injection): assert len(activities) == 7 assert len(plans) == 7 - shared_dataset = next(d for d in datasets if d.name == "shared-dataset") + shared_dataset = next(d for d in datasets if d.slug == "shared-dataset") assert "remote description" == shared_dataset.description diff --git a/tests/cli/test_migrate.py b/tests/cli/test_migrate.py index 3ba1158662..4defed20a5 100644 --- a/tests/cli/test_migrate.py +++ b/tests/cli/test_migrate.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -371,7 +370,7 @@ def test_commands_fail_on_old_repository(isolated_runner, old_repository_with_su ["doctor"], ["githooks", "install"], ["help"], - ["init", "-i", "1", "--force"], + ["init", "-t", "python-minimal", "--force"], ["storage", "check"], ], ) diff --git a/tests/cli/test_output_option.py b/tests/cli/test_output_option.py index b60f4fe432..82c5afdb9f 100644 --- a/tests/cli/test_output_option.py +++ b/tests/cli/test_output_option.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_project.py b/tests/cli/test_project.py index 25a46f6b99..e283ccfd52 100644 --- a/tests/cli/test_project.py +++ b/tests/cli/test_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,6 +16,7 @@ """Test ``project`` command.""" import json +from pathlib import Path import pytest @@ -68,17 +68,21 @@ def test_project_edit(runner, project, subdirectory, with_injection): "keyword1", "-k", "keyword2", + "--image", + Path(__file__).parent / ".." / "data" / "renku.png", ], ) assert 0 == result.exit_code, format_result_exception(result) - assert "Successfully updated: creator, description, keywords, custom_metadata." in result.output + assert "Successfully updated: creator, description, keywords, custom_metadata, image." in result.output assert "Warning: No email or wrong format for: Forename Surname" in result.output assert project.repository.is_dirty() commit_sha_after = project.repository.head.commit.hexsha assert commit_sha_before != commit_sha_after + assert (project.path / ".renku" / "images" / "project" / "0.png").exists() + with with_injection(): project_gateway = ProjectGateway() project = project_gateway.get_project() @@ -99,6 +103,9 @@ def test_project_edit(runner, project, subdirectory, with_injection): assert "Renku Version:" in result.output assert "Keywords:" in result.output + assert ".renku/images/project/0.png" == project.image.content_url + assert 0 == project.image.position + result = runner.invoke(cli, ["graph", "export", "--format", "json-ld", "--strict"]) assert 0 == result.exit_code, format_result_exception(result) @@ -148,27 +155,33 @@ def test_project_edit_unset(runner, project, subdirectory, with_injection): "keyword1", "-k", "keyword2", + "--image", + Path(__file__).parent / ".." / "data" / "renku.png", ], ) assert 0 == result.exit_code, format_result_exception(result) - assert "Successfully updated: creator, description, keywords, custom_metadata." in result.output + assert "Successfully updated: creator, description, keywords, custom_metadata, image." 
in result.output assert "Warning: No email or wrong format for: Forename Surname" in result.output commit_sha_before = project.repository.head.commit.hexsha + assert (project.path / ".renku" / "images" / "project" / "0.png").exists() + result = runner.invoke( cli, - ["project", "edit", "-u", "keywords", "-u", "metadata"], + ["project", "edit", "-u", "keywords", "-u", "metadata", "-u", "image"], ) assert 0 == result.exit_code, format_result_exception(result) - assert "Successfully updated: keywords, custom_metadata." in result.output + assert "Successfully updated: keywords, custom_metadata, image." in result.output assert project.repository.is_dirty() commit_sha_after = project.repository.head.commit.hexsha assert commit_sha_before != commit_sha_after + assert not (project.path / ".renku" / "images" / "project" / "0.png").exists() + with with_injection(): project_gateway = ProjectGateway() project = project_gateway.get_project() @@ -185,6 +198,8 @@ def test_project_edit_unset(runner, project, subdirectory, with_injection): assert "Renku Version:" in result.output assert "Keywords:" in result.output + assert project.image is None + result = runner.invoke(cli, ["graph", "export", "--format", "json-ld", "--strict"]) assert 0 == result.exit_code, format_result_exception(result) diff --git a/tests/cli/test_range_queries.py b/tests/cli/test_range_queries.py index fc62220a74..6e79dfd801 100644 --- a/tests/cli/test_range_queries.py +++ b/tests/cli/test_range_queries.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_remove.py b/tests/cli/test_remove.py index 55b9d94270..cfac23414c 100644 --- a/tests/cli/test_remove.py +++ b/tests/cli/test_remove.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_rerun.py b/tests/cli/test_rerun.py index 4b8b1e25ea..8bdd1fee79 100644 --- a/tests/cli/test_rerun.py +++ b/tests/cli/test_rerun.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_rollback.py b/tests/cli/test_rollback.py index 1effb780be..798eddf54d 100644 --- a/tests/cli/test_rollback.py +++ b/tests/cli/test_rollback.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_run.py b/tests/cli/test_run.py index 47ef233806..1241233f0e 100644 --- a/tests/cli/test_run.py +++ b/tests/cli/test_run.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_save.py b/tests/cli/test_save.py index c5b1e391ef..3bb57184a9 100644 --- a/tests/cli/test_save.py +++ b/tests/cli/test_save.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_service.py b/tests/cli/test_service.py index 555a887aac..359832eeaf 100644 --- a/tests/cli/test_service.py +++ b/tests/cli/test_service.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_session.py b/tests/cli/test_session.py index 6f2e15ed83..287cb5375c 100644 --- a/tests/cli/test_session.py +++ b/tests/cli/test_session.py @@ -175,3 +175,18 @@ def test_session_list_format(runner, project, dummy_session_provider, format, ou assert 0 == result.exit_code, format_result_exception(result) assert length == len(result.output.splitlines()) assert output in result.output + + +def test_session_detached_start(runner, project, dummy_session_provider): + """Test that starting a session fails in a repository with a detached HEAD.""" + # NOTE: Make a dummy commit so that the project has two commits + (project.repository.path / "README.md").write_text("changes") + project.repository.add(all=True) + project.repository.commit("dummy commit") + + project.repository.checkout("HEAD~") + + result = runner.invoke(cli, ["session", "start", "-p", "dummy"]) + + assert 1 == result.exit_code, format_result_exception(result) + assert "Cannot start a session from a detached HEAD" in result.output diff --git a/tests/cli/test_status.py b/tests/cli/test_status.py index bfd254b844..6958ea2cb3 100644 --- a/tests/cli/test_status.py +++ b/tests/cli/test_status.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ).
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_template.py b/tests/cli/test_template.py index 8030c7bb6a..89bbbe0c91 100644 --- a/tests/cli/test_template.py +++ b/tests/cli/test_template.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_update.py b/tests/cli/test_update.py index 9ff04ca095..7cd6950d3f 100644 --- a/tests/cli/test_update.py +++ b/tests/cli/test_update.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/cli/test_workflow_file.py b/tests/cli/test_workflow_file.py index 5c30643535..189e6d72ed 100644 --- a/tests/cli/test_workflow_file.py +++ b/tests/cli/test_workflow_file.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/__init__.py b/tests/core/__init__.py index a948e74708..4d36ab37b9 100644 --- a/tests/core/__init__.py +++ b/tests/core/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/__init__.py b/tests/core/commands/__init__.py index 715c4f229c..cf096a2af4 100644 --- a/tests/core/commands/__init__.py +++ b/tests/core/commands/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/providers/test_dataverse.py b/tests/core/commands/providers/test_dataverse.py index 39d6d6fd63..87ed4dfcfe 100644 --- a/tests/core/commands/providers/test_dataverse.py +++ b/tests/core/commands/providers/test_dataverse.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_cli.py b/tests/core/commands/test_cli.py index 76f9d98afe..4c869064ca 100644 --- a/tests/core/commands/test_cli.py +++ b/tests/core/commands/test_cli.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_dataset.py b/tests/core/commands/test_dataset.py index 60807e6ccd..da18d48711 100644 --- a/tests/core/commands/test_dataset.py +++ b/tests/core/commands/test_dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -42,7 +41,7 @@ from renku.core.util.contexts import chdir from renku.core.util.git import get_git_user from renku.core.util.urls import get_slug -from renku.domain_model.dataset import Dataset, Url, is_dataset_name_valid +from renku.domain_model.dataset import Dataset, Url from renku.domain_model.project_context import project_context from renku.domain_model.provenance.agent import Person from renku.infrastructure.gateway.dataset_gateway import DatasetGateway @@ -128,7 +127,7 @@ def test_creator_parse(): def test_creators_with_same_email(project_with_injection): """Test creators with different names and same email address.""" - with DatasetContext(name="dataset", create=True) as dataset: + with DatasetContext(slug="dataset", create=True) as dataset: dataset.creators = [Person(name="me", email="me@example.com"), Person(name="me2", email="me@example.com")] DatasetsProvenance().add_or_update(dataset) @@ -141,7 +140,7 @@ def test_creators_with_same_email(project_with_injection): def test_create_dataset_command_custom_message(project): """Test create dataset custom message.""" create_dataset_command().with_commit_message("my dataset").with_database(write=True).build().execute( - "ds1", title="", description="", creators=[] + "ds1", name="", description="", creators=[] ) last_commit = Repository(".").head.commit @@ -151,19 +150,19 @@ def test_create_dataset_command_custom_message(project): def test_list_datasets_default(project): """Test a default dataset listing.""" create_dataset_command().with_commit_message("my dataset").with_database(write=True).build().execute( - "ds1", title="", description="", creators=[] + "ds1", name="", description="", creators=[] ) datasets = list_datasets_command().with_database().build().execute().output assert isinstance(datasets, list) - assert "ds1" in [dataset.title for dataset in datasets] + assert "ds1" in [dataset.name for dataset in datasets] def test_list_files_default(project, tmpdir): """Test a default file listing.""" create_dataset_command().with_commit_message("my dataset").with_database(write=True).build().execute( - "ds1", title="", description="", creators=[] + "ds1", name="", description="", creators=[] ) data_file = tmpdir / Path("some-file") data_file.write_text("1,2,3", encoding="utf-8") @@ -180,7 
+179,7 @@ def test_unlink_default(directory_tree, project): with chdir(project.path): create_dataset_command().with_database(write=True).build().execute("dataset") add_to_dataset_command().with_database(write=True).build().execute( - dataset_name="dataset", urls=[str(directory_tree / "dir1")] + dataset_slug="dataset", urls=[str(directory_tree / "dir1")] ) with pytest.raises(ParameterError): @@ -255,12 +254,6 @@ def test_dataset_name_slug(name, slug): assert slug == get_slug(name) -def test_uppercase_dataset_name_is_valid(): - """Test dataset name can have uppercase characters.""" - assert is_dataset_name_valid("UPPER-CASE") - assert is_dataset_name_valid("Pascal-Case") - - @pytest.mark.integration def test_get_dataset_by_tag(with_injection, tmp_path): """Test getting datasets by a given tag.""" @@ -270,7 +263,7 @@ def test_get_dataset_by_tag(with_injection, tmp_path): with project_context.with_path(repository.path), with_injection(): dataset_gateway = DatasetGateway() - parts_dataset = dataset_gateway.get_by_name("parts") + parts_dataset = dataset_gateway.get_by_slug("parts") returned_datasets = get_dataset_by_tag(dataset=parts_dataset, tag="v1") selected_tag = next(tag for tag in dataset_gateway.get_all_tags(parts_dataset) if tag.name == "v1") diff --git a/tests/core/commands/test_doctor.py b/tests/core/commands/test_doctor.py index 5d1d4dfbff..5dd4c869ac 100644 --- a/tests/core/commands/test_doctor.py +++ b/tests/core/commands/test_doctor.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_graph.py b/tests/core/commands/test_graph.py index 65ca13a124..a3996659bc 100644 --- a/tests/core/commands/test_graph.py +++ b/tests/core/commands/test_graph.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -260,7 +259,7 @@ def test_graph_export_full(): MagicMock( spec=Dataset, id="/datasets/abcdefg12345", - name="my-dataset", + slug="my-dataset", dataset_files=[ DatasetFile( id="/dataset-files/abcdefg123456789", @@ -278,7 +277,7 @@ def test_graph_export_full(): ] dataset_gateway.get_by_id.return_value = Dataset( id="/datasets/0000000aaaaaaa", - name="my-dataset", + slug="my-dataset", date_created=datetime.fromisoformat("2022-07-12T16:29:14+02:00"), date_modified=datetime.fromisoformat("2022-07-12T16:29:14+02:00"), date_removed=None, @@ -336,7 +335,10 @@ def test_graph_export_full(): project_gateway = MagicMock(spec=IProjectGateway) project_gateway.get_project.return_value = MagicMock( - spec=Project, id="/projects/my-project", date_created=datetime.fromisoformat("2022-07-12T16:29:14+02:00") + spec=Project, + id="/projects/my-project", + date_created=datetime.fromisoformat("2022-07-12T16:29:14+02:00"), + image=None, ) result = get_graph_for_all_objects( @@ -444,6 +446,7 @@ def test_graph_export_full(): "https://swissdatasciencecenter.github.io/renku-ontology#originalIdentifier": [{"@value": "abcdefg"}], "http://schema.org/dateCreated": [{"@value": "2022-07-12T16:29:14+02:00"}], "http://schema.org/dateModified": [{"@value": "2022-07-12T16:29:14+02:00"}], + "https://swissdatasciencecenter.github.io/renku-ontology#slug": [{"@value": "my-dataset"}], }, { "@id": "/entities/1234567890/data/my-dataset", diff --git a/tests/core/commands/test_indirect.py b/tests/core/commands/test_indirect.py index 75c96a50cd..1bfde6283e 100644 --- a/tests/core/commands/test_indirect.py +++ b/tests/core/commands/test_indirect.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_log.py b/tests/core/commands/test_log.py index 37eef06291..57f48974ca 100644 --- a/tests/core/commands/test_log.py +++ b/tests/core/commands/test_log.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -103,8 +102,8 @@ def test_log_dataset_create_simple(mocker): new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" - new_dataset.title = None + new_dataset.slug = "ds" + new_dataset.name = None new_dataset.description = None new_dataset.derived_from = None new_dataset.same_as = None @@ -137,7 +136,7 @@ def test_log_dataset_create_simple(mocker): assert "ds" == entry.id assert "Dataset 'ds': created" == entry.description - assert entry.details.title_changed is None + assert entry.details.name_changed is None assert not entry.details.description_changed assert not entry.details.creators_added assert not entry.details.creators_removed @@ -156,10 +155,10 @@ def test_log_dataset_create_complex(mocker): new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" + new_dataset.slug = "ds" new_dataset.derived_from = None new_dataset.same_as = None - new_dataset.title = "new-title" + new_dataset.name = "new-name" new_dataset.description = "new-description" new_dataset.dataset_files = [] new_dataset.creators = [mocker.MagicMock(full_identity="John")] @@ -193,7 +192,7 @@ def test_log_dataset_create_complex(mocker): assert "ds" == entry.id assert "Dataset 'ds': created" == entry.description - assert "new-title" == entry.details.title_changed + assert "new-name" == entry.details.name_changed assert "new-description" == entry.details.description_changed assert ["John"] == entry.details.creators_added assert not entry.details.files_added @@ -214,10 +213,10 @@ def test_log_dataset_add_create(mocker): new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" + new_dataset.slug = "ds" new_dataset.derived_from = None new_dataset.same_as = None - new_dataset.title = "new-title" + new_dataset.name = "new-name" new_dataset.description = "new-description" new_dataset.dataset_files = [ mocker.MagicMock(date_removed=None, entity=mocker.MagicMock(path="file_a")), @@ -251,7 +250,7 @@ def test_log_dataset_add_create(mocker): assert "ds" == entry.id assert "Dataset 'ds': created, 2 file(s) added" == entry.description - assert "new-title" == entry.details.title_changed + assert "new-name" == entry.details.name_changed assert "new-description" == entry.details.description_changed assert not entry.details.creators_added assert {"file_b", "file_a"} == set(entry.details.files_added) @@ -271,9 +270,9 @@ def test_log_dataset_import(mocker): """Test getting dataset viewmodels for an imported dataset.""" new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" + new_dataset.slug = "ds" new_dataset.derived_from = None - new_dataset.title = "new-title" + new_dataset.name = "new-name" new_dataset.description = "new-description" new_dataset.same_as = mocker.MagicMock(value="http://renkulab.io/my/dataset") new_dataset.dataset_files = [ @@ -308,7 +307,7 @@ def test_log_dataset_import(mocker): assert "ds" == entry.id assert "Dataset 'ds': imported, 2 file(s) added" == entry.description - assert "new-title" == entry.details.title_changed + assert "new-name" == entry.details.name_changed assert "new-description" == entry.details.description_changed assert not entry.details.creators_added assert {"file_b", "file_a"} == set(entry.details.files_added) @@ -329,7 +328,7 @@ def test_log_dataset_deleted(mocker): """Test getting dataset viewmodels for deleted dataset.""" old_dataset = mocker.MagicMock() old_dataset.id = "old" - old_dataset.name = "ds" + old_dataset.slug = "ds" 
old_dataset.derived_from = None old_dataset.same_as = None old_dataset.dataset_files = [] @@ -337,8 +336,8 @@ new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" - new_dataset.title = None + new_dataset.slug = "ds" + new_dataset.name = None new_dataset.description = None new_dataset.derived_from = Url(url_id="old") new_dataset.same_as = None @@ -380,7 +379,7 @@ def _mock_get_by_id(id): assert "ds" == entry.id assert "Dataset 'ds': deleted" == entry.description - assert entry.details.title_changed is None + assert entry.details.name_changed is None assert not entry.details.description_changed assert not entry.details.files_added assert not entry.details.files_removed @@ -401,8 +400,8 @@ def test_log_dataset_files_removed(mocker): """Test getting dataset viewmodels for dataset with files removed.""" old_dataset = mocker.MagicMock() old_dataset.id = "old" - old_dataset.name = "ds" - old_dataset.title = None + old_dataset.slug = "ds" + old_dataset.name = None old_dataset.description = None old_dataset.derived_from = None old_dataset.same_as = None @@ -414,8 +413,8 @@ new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" - new_dataset.title = None + new_dataset.slug = "ds" + new_dataset.name = None new_dataset.description = None new_dataset.derived_from = Url(url_id="old") new_dataset.same_as = None @@ -458,7 +457,7 @@ def _mock_get_by_id(id): assert "ds" == entry.id assert "Dataset 'ds': 1 file(s) removed" == entry.description - assert entry.details.title_changed is None + assert entry.details.name_changed is None assert not entry.details.description_changed assert not entry.details.files_added assert ["file_b"] == entry.details.files_removed @@ -479,8 +478,8 @@ def test_log_dataset_metadata_modified(mocker): """Test getting dataset viewmodels for dataset with metadata modified.""" old_dataset = mocker.MagicMock() old_dataset.id = "old" - old_dataset.name = "ds" - old_dataset.title = "old-title" + old_dataset.slug = "ds" + old_dataset.name = "old-name" old_dataset.description = "old-description" old_dataset.dataset_files = [] old_dataset.creators = [mocker.MagicMock(full_identity="John")] @@ -493,8 +492,8 @@ new_dataset = mocker.MagicMock() new_dataset.id = "new" - new_dataset.name = "ds" - new_dataset.title = "new-title" + new_dataset.slug = "ds" + new_dataset.name = "new-name" new_dataset.description = "new-description" new_dataset.derived_from = Url(url_id="old") new_dataset.same_as = None @@ -540,7 +539,7 @@ def _mock_get_by_id(id): assert "ds" == entry.id assert "Dataset 'ds': metadata modified" == entry.description - assert "new-title" == entry.details.title_changed + assert "new-name" == entry.details.name_changed assert "new-description" == entry.details.description_changed assert not entry.details.files_added assert not entry.details.files_removed diff --git a/tests/core/commands/test_merge.py b/tests/core/commands/test_merge.py index 50213b0f70..84d1165e79 100644 --- a/tests/core/commands/test_merge.py +++ b/tests/core/commands/test_merge.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ).
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -43,9 +42,9 @@ def test_merge_dataset_tree_non_conflict_dataset(): def test_merge_dataset_tree_local_modified_dataset(mocker): """Test merging dataset tree with local modifications.""" - dataset = Dataset(id=Dataset.generate_id(uuid4().hex), name="dataset", title="First") + dataset = Dataset(id=Dataset.generate_id(uuid4().hex), slug="dataset", name="First") modified_dataset = dataset.copy() - modified_dataset.update_metadata(title="Modified title") + modified_dataset.update_metadata(name="Modified name") modified_dataset.derive_from(dataset, creator=None) local = BTree({"dataset": modified_dataset}) @@ -59,14 +58,14 @@ def test_merge_dataset_tree_local_modified_dataset(mocker): merged = merger.merge_btrees(local, remote) assert "dataset" in merged.keys() - assert "Modified title" == merged["dataset"].title + assert "Modified name" == merged["dataset"].name def test_merge_dataset_tree_remote_modified_dataset(mocker): """Test merging dataset tree with remote modifications.""" - dataset = Dataset(id=Dataset.generate_id(uuid4().hex), name="dataset", title="First") + dataset = Dataset(id=Dataset.generate_id(uuid4().hex), slug="dataset", name="First") modified_dataset = dataset.copy() - modified_dataset.update_metadata(title="Modified title") + modified_dataset.update_metadata(name="Modified name") modified_dataset.derive_from(dataset, creator=None) local = BTree({"dataset": dataset}) @@ -80,20 +79,20 @@ def test_merge_dataset_tree_remote_modified_dataset(mocker): merged = merger.merge_btrees(local, remote) assert "dataset" in merged.keys() - assert "Modified title" == merged["dataset"].title + assert "Modified name" == merged["dataset"].name def test_merge_dataset_tree_both_modified_dataset(mocker): """Test merging dataset tree with remote and local modifications.""" - dataset = Dataset(id=Dataset.generate_id(uuid4().hex), name="dataset", title="First") + dataset = Dataset(id=Dataset.generate_id(uuid4().hex), slug="dataset", name="First") modified_local_dataset = dataset.copy() - modified_local_dataset.update_metadata(title="Modified local title") + modified_local_dataset.update_metadata(name="Modified local name") modified_local_dataset.derive_from(dataset, creator=None) modified_remote_dataset = dataset.copy() - modified_remote_dataset.update_metadata(title="Modified remote title") + modified_remote_dataset.update_metadata(name="Modified remote name") modified_remote_dataset.derive_from(dataset, creator=None) - unrelated_dataset = Dataset(id=Dataset.generate_id(uuid4().hex), name="unrelated_dataset", title="unrelated") + unrelated_dataset = Dataset(id=Dataset.generate_id(uuid4().hex), slug="unrelated_dataset", name="unrelated") all_datasets = [dataset, modified_local_dataset, modified_remote_dataset, unrelated_dataset] @@ -109,16 +108,16 @@ def test_merge_dataset_tree_both_modified_dataset(mocker): mocker.patch("renku.infrastructure.git_merger.communication.prompt", mocker.MagicMock(return_value="l")) merged = merger.merge_btrees(local, remote) assert "dataset" in merged.keys() - assert "Modified local title" == merged["dataset"].title + assert "Modified local name" == merged["dataset"].name assert "unrelated_dataset" in merged.keys() - assert "unrelated" == merged["unrelated_dataset"].title + assert "unrelated" == merged["unrelated_dataset"].name mocker.patch("renku.infrastructure.git_merger.communication.prompt", mocker.MagicMock(return_value="r")) merged = merger.merge_btrees(local, remote) assert "dataset" in 
merged.keys() - assert "Modified remote title" == merged["dataset"].title + assert "Modified remote name" == merged["dataset"].name assert "unrelated_dataset" in merged.keys() - assert "unrelated" == merged["unrelated_dataset"].title + assert "unrelated" == merged["unrelated_dataset"].name def test_merge_plan_tree_non_conflict_plan(): diff --git a/tests/core/commands/test_plan_factory.py b/tests/core/commands/test_plan_factory.py index 18834ac6a4..94fc70ec9b 100644 --- a/tests/core/commands/test_plan_factory.py +++ b/tests/core/commands/test_plan_factory.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_renku.py b/tests/core/commands/test_renku.py index 0ac20a6ff5..fb899197e8 100644 --- a/tests/core/commands/test_renku.py +++ b/tests/core/commands/test_renku.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_serialization.py b/tests/core/commands/test_serialization.py index a3dc77c42a..a26b72b741 100644 --- a/tests/core/commands/test_serialization.py +++ b/tests/core/commands/test_serialization.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_status.py b/tests/core/commands/test_status.py index deb294a348..193d2d172d 100644 --- a/tests/core/commands/test_status.py +++ b/tests/core/commands/test_status.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/commands/test_storage.py b/tests/core/commands/test_storage.py index 33f68fa5b0..90075a7f39 100644 --- a/tests/core/commands/test_storage.py +++ b/tests/core/commands/test_storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/fixtures/__init__.py b/tests/core/fixtures/__init__.py index d667267b1e..29e90f982c 100644 --- a/tests/core/fixtures/__init__.py +++ b/tests/core/fixtures/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/fixtures/core_database.py b/tests/core/fixtures/core_database.py index 95036003a4..1df4d801c7 100644 --- a/tests/core/fixtures/core_database.py +++ b/tests/core/fixtures/core_database.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -78,7 +77,7 @@ def database() -> Iterator[Tuple["Database", DummyStorage]]: @pytest.fixture def with_injection(): - """Factory fixture for test injections manager.""" + """Factory fixture for test injection manager.""" from renku.command.command_builder.command import inject, remove_injector from renku.domain_model.project_context import project_context diff --git a/tests/core/fixtures/core_datasets.py b/tests/core/fixtures/core_datasets.py index 733dda1467..af4f1c586d 100644 --- a/tests/core/fixtures/core_datasets.py +++ b/tests/core/fixtures/core_datasets.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -54,7 +53,7 @@ def project_with_datasets(project, directory_tree, with_injection) -> Generator[ person_2 = Person.from_string("P2 ") with with_injection(): - create_dataset(name="dataset-1", keywords=["dataset", "1"], creators=[person_1]) + create_dataset(slug="dataset-1", keywords=["dataset", "1"], creators=[person_1]) dataset = add_to_dataset("dataset-2", urls=[str(p) for p in directory_tree.glob("*")], create=True, copy=True) dataset.keywords = ["dataset", "2"] diff --git a/tests/core/fixtures/core_models.py b/tests/core/fixtures/core_models.py index 9b9d3bd531..460549f609 100644 --- a/tests/core/fixtures/core_models.py +++ b/tests/core/fixtures/core_models.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/fixtures/core_plugins.py b/tests/core/fixtures/core_plugins.py index 30c282aa36..86e3ad314a 100644 --- a/tests/core/fixtures/core_plugins.py +++ b/tests/core/fixtures/core_plugins.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/fixtures/core_projects.py b/tests/core/fixtures/core_projects.py index 08ee4ca01f..2052f02279 100644 --- a/tests/core/fixtures/core_projects.py +++ b/tests/core/fixtures/core_projects.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/fixtures/core_serialization.py b/tests/core/fixtures/core_serialization.py index cc9fabf29c..1823bdc2cc 100644 --- a/tests/core/fixtures/core_serialization.py +++ b/tests/core/fixtures/core_serialization.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/fixtures/core_workflow.py b/tests/core/fixtures/core_workflow.py index d6025329ab..dda9195b52 100644 --- a/tests/core/fixtures/core_workflow.py +++ b/tests/core/fixtures/core_workflow.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/incubation/__init__.py b/tests/core/incubation/__init__.py index 285c754313..ddcb31d88f 100644 --- a/tests/core/incubation/__init__.py +++ b/tests/core/incubation/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/incubation/test_communication.py b/tests/core/incubation/test_communication.py index fc5355dcba..f6b00ea421 100644 --- a/tests/core/incubation/test_communication.py +++ b/tests/core/incubation/test_communication.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/management/test_repository.py b/tests/core/management/test_repository.py index 5f14c99ffd..2c6fcadbeb 100644 --- a/tests/core/management/test_repository.py +++ b/tests/core/management/test_repository.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -28,7 +27,7 @@ def test_latest_version(project, with_injection): """Test returning the latest version of `SoftwareAgent`.""" from renku import __version__ - create_dataset_command().build().execute("ds1", title="", description="", creators=[]) + create_dataset_command().build().execute("ds1", name="", description="", creators=[]) with project_context.with_path(project.path), with_injection(): assert __version__ == project_context.latest_agent @@ -38,7 +37,7 @@ def test_latest_version_user_commits(project, with_injection): """Test retrieval of `SoftwareAgent` with latest non-renku command.""" from renku import __version__ - create_dataset_command().build().execute("ds1", title="", description="", creators=[]) + create_dataset_command().build().execute("ds1", name="", description="", creators=[]) file = Path("my-file") file.write_text("123") diff --git a/tests/core/management/test_storage.py b/tests/core/management/test_storage.py index 95b92db85a..b4448df39c 100644 --- a/tests/core/management/test_storage.py +++ b/tests/core/management/test_storage.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/__init__.py b/tests/core/metadata/__init__.py index a713f0383c..1ae0168bc6 100644 --- a/tests/core/metadata/__init__.py +++ b/tests/core/metadata/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/test_activity_gateway.py b/tests/core/metadata/test_activity_gateway.py index 2ba2adc9bc..c64832a4fc 100644 --- a/tests/core/metadata/test_activity_gateway.py +++ b/tests/core/metadata/test_activity_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/test_database.py b/tests/core/metadata/test_database.py index 823f237a9a..7f1792e90b 100644 --- a/tests/core/metadata/test_database.py +++ b/tests/core/metadata/test_database.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/test_immutable.py b/tests/core/metadata/test_immutable.py index 81a4c26530..07bcef3312 100644 --- a/tests/core/metadata/test_immutable.py +++ b/tests/core/metadata/test_immutable.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/test_plan_gateway.py b/tests/core/metadata/test_plan_gateway.py index e73d2660d3..5236814653 100644 --- a/tests/core/metadata/test_plan_gateway.py +++ b/tests/core/metadata/test_plan_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/test_project_gateway.py b/tests/core/metadata/test_project_gateway.py index d0749645a0..518d5dc346 100644 --- a/tests/core/metadata/test_project_gateway.py +++ b/tests/core/metadata/test_project_gateway.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/metadata/test_repository.py b/tests/core/metadata/test_repository.py index 0bc6c88108..3ba5c0120a 100644 --- a/tests/core/metadata/test_repository.py +++ b/tests/core/metadata/test_repository.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -327,3 +326,23 @@ def test_ignored_paths(paths, ignored, project): from renku.domain_model.project_context import project_context assert project_context.repository.get_ignored_paths(*paths) == ignored + + +def test_remote(git_repository): + """Test getting the remote of a repository.""" + assert 1 == len(git_repository.remotes) + + remote = git_repository.remotes[0] + + assert "origin" == remote.name + assert "8853e0c1112e512c36db9cc76faff560b655e5d5" == remote.head + + +def test_remote_detached(git_repository): + """Test getting the remote of a repository with a detached HEAD.""" + git_repository.checkout("HEAD~") + + remote = git_repository.remotes[0] + + assert "origin" == remote.name + assert remote.head is None diff --git a/tests/core/models/__init__.py b/tests/core/models/__init__.py index 6693fb31a1..0a17342acc 100644 --- a/tests/core/models/__init__.py +++ b/tests/core/models/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/models/test_activity.py b/tests/core/models/test_activity.py index c921efb8f2..9c09e7da3f 100644 --- a/tests/core/models/test_activity.py +++ b/tests/core/models/test_activity.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/models/test_agents.py b/tests/core/models/test_agents.py index ccd53c5eb9..6a4279046c 100644 --- a/tests/core/models/test_agents.py +++ b/tests/core/models/test_agents.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/models/test_calamus.py b/tests/core/models/test_calamus.py index 458917ad4d..8978a3261f 100644 --- a/tests/core/models/test_calamus.py +++ b/tests/core/models/test_calamus.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/models/test_git.py b/tests/core/models/test_git.py index d9847e49d3..79a24c5dcb 100644 --- a/tests/core/models/test_git.py +++ b/tests/core/models/test_git.py @@ -1,6 +1,5 @@ -# -# Copyright 2018-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/models/test_shacl_schema.py b/tests/core/models/test_shacl_schema.py index 8be713dd81..f2274e76c8 100644 --- a/tests/core/models/test_shacl_schema.py +++ b/tests/core/models/test_shacl_schema.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/models/test_template.py b/tests/core/models/test_template.py index 981933fcd9..af558c44ee 100644 --- a/tests/core/models/test_template.py +++ b/tests/core/models/test_template.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/plugins/__init__.py b/tests/core/plugins/__init__.py index f4ae41d8c6..42c5c6c83b 100644 --- a/tests/core/plugins/__init__.py +++ b/tests/core/plugins/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/plugins/test_run.py b/tests/core/plugins/test_run.py index 993cc89a3d..827feced0f 100644 --- a/tests/core/plugins/test_run.py +++ b/tests/core/plugins/test_run.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/plugins/test_session.py b/tests/core/plugins/test_session.py index 22c4cbe27e..d7b1ebbc62 100644 --- a/tests/core/plugins/test_session.py +++ b/tests/core/plugins/test_session.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/plugins/test_workflow.py b/tests/core/plugins/test_workflow.py index d51b6c34b1..50334d5323 100644 --- a/tests/core/plugins/test_workflow.py +++ b/tests/core/plugins/test_workflow.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/test_activity.py b/tests/core/test_activity.py index 582847419a..576347253d 100644 --- a/tests/core/test_activity.py +++ b/tests/core/test_activity.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/test_dataset.py b/tests/core/test_dataset.py index d24ce521fd..01c308bd98 100644 --- a/tests/core/test_dataset.py +++ b/tests/core/test_dataset.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/test_plan.py b/tests/core/test_plan.py index 0d8023ad5a..4a06be5dec 100644 --- a/tests/core/test_plan.py +++ b/tests/core/test_plan.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/test_project_context.py b/tests/core/test_project_context.py index 58dca77f66..40d47e1803 100644 --- a/tests/core/test_project_context.py +++ b/tests/core/test_project_context.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/test_template.py b/tests/core/test_template.py index c1f1a5441b..8f31b99b92 100644 --- a/tests/core/test_template.py +++ b/tests/core/test_template.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/utils/__init__.py b/tests/core/utils/__init__.py index f83f44aeb8..e08ef804f0 100644 --- a/tests/core/utils/__init__.py +++ b/tests/core/utils/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/utils/test_git.py b/tests/core/utils/test_git.py index 12a5044a80..4444d16f39 100644 --- a/tests/core/utils/test_git.py +++ b/tests/core/utils/test_git.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/utils/test_os.py b/tests/core/utils/test_os.py index 3f54cb4e75..3bd3a29072 100644 --- a/tests/core/utils/test_os.py +++ b/tests/core/utils/test_os.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/utils/test_utils.py b/tests/core/utils/test_utils.py index 2bdddf57ef..e9ddc58b2e 100644 --- a/tests/core/utils/test_utils.py +++ b/tests/core/utils/test_utils.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/workflow/__init__.py b/tests/core/workflow/__init__.py index 0d59f19333..5581e31076 100644 --- a/tests/core/workflow/__init__.py +++ b/tests/core/workflow/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/core/workflow/test_workflow_file.py b/tests/core/workflow/test_workflow_file.py index ae680819c0..160e48543b 100644 --- a/tests/core/workflow/test_workflow_file.py +++ b/tests/core/workflow/test_workflow_file.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/data/renku.png b/tests/data/renku.png new file mode 100644 index 0000000000..b3518b1a18 Binary files /dev/null and b/tests/data/renku.png differ diff --git a/tests/domain_model/__init__.py b/tests/domain_model/__init__.py new file mode 100644 index 0000000000..75fc4d5270 --- /dev/null +++ b/tests/domain_model/__init__.py @@ -0,0 +1,16 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Domain models tests.""" diff --git a/tests/domain_model/test_dataset.py b/tests/domain_model/test_dataset.py new file mode 100644 index 0000000000..f4f95de59c --- /dev/null +++ b/tests/domain_model/test_dataset.py @@ -0,0 +1,116 @@ +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and +# Eidgenössische Technische Hochschule Zürich (ETHZ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Dataset domain model tests.""" + +import pytest + +from renku.core import errors +from renku.domain_model.dataset import Dataset, is_dataset_slug_valid + + +@pytest.mark.parametrize( + "slug", ["lowercase", "UPPERCASE", "PascalCase", "12345", "with-dashes", "with_underlines", "with.dots"] +) +def test_valid_dataset_slug(slug): + """Test several valid dataset slugs.""" + assert is_dataset_slug_valid(slug) + + +@pytest.mark.parametrize( + "invalid_slug", + [ + None, + "", # Empty string + "nön-ascii", + "with spaces", + "invalid:characters", + "_starts-with-underline", + "ends-with-underline_", + "-starts-with-dash", + "ends-with-dash-", + ".starts-with-dot", + "ends-with-dot.", + "double__underlines", + "double--dashes", + "double..dots", + "back-to-back-_non-alphanumeric", + "ends-with.lock", + ], +) +def test_invalid_dataset_slug(invalid_slug): + """Test several invalid dataset slugs.""" + assert not is_dataset_slug_valid(invalid_slug) + + +def test_load_existing_metadata_in_new_model(): + """ + Test that existing project metadata can be correctly loaded into the new dataset model that replaces name/title with + slug/name.
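+ The legacy "name" is loaded as the new "slug", the legacy "title" becomes the new "name", and "title" itself is left unset.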
+ """ + dataset = Dataset.__new__(Dataset) + + dataset.__setstate__({"name": "machine-friendly-slug", "title": "Human Readable Name", "dataset_files": []}) + + assert "machine-friendly-slug" == dataset.slug + assert "Human Readable Name" == dataset.name + assert dataset.title is None + + +@pytest.mark.parametrize("name, title", [("", "Human Readable Name"), ("Human Readable Name", "")]) +def test_create_dataset_from_existing_metadata(name, title): + """Test that datasets can be correctly created using existing projects metadata where slug has no value.""" + dataset = Dataset(name=name, title=title) + + assert "human_readable_name" == dataset.slug + assert "Human Readable Name" == dataset.name + assert dataset.title is None + + +@pytest.mark.parametrize("slug", ["", None]) +def test_slug_is_created_from_name_when_missing(slug): + """Test that when no slug is passed to the constructor, one is automatically created from the name.""" + dataset = Dataset(slug=slug, name="Human Readable Name") + + assert "human_readable_name" == dataset.slug + assert "Human Readable Name" == dataset.name + assert dataset.title is None + + +@pytest.mark.parametrize("name", ["", None]) +def test_slug_is_not_copied_to_name_when_name_is_missing(name): + """Test that dataset doesn't use slug as name when name isn't passed.""" + dataset = Dataset(slug="machine-friendly-slug", name=name) + + assert "machine-friendly-slug" == dataset.slug + assert name == dataset.name + assert dataset.title is None + + +@pytest.mark.parametrize("name", ["", None, "Human Readable Name"]) +def test_mixed_metadata_adjusts_name_and_title(name): + """Test that a mixed metadata where slug and title are both present, uses title as name.""" + dataset = Dataset(slug="machine-friendly-slug", name=name, title="Dataset Title") + + assert "machine-friendly-slug" == dataset.slug + assert "Dataset Title" == dataset.name + assert dataset.title is None + + +def test_creation_fails_when_slug_and_name_and_title_are_missing(): + """At least one of the slug, name, or title should have a value.""" + with pytest.raises(errors.ParameterError): + Dataset(slug=None, name=None, title=None) diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index c684eb03b3..3a351eaedd 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/common.py b/tests/fixtures/common.py index e581b60ecf..6a8c77f854 100644 --- a/tests/fixtures/common.py +++ b/tests/fixtures/common.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/communication.py b/tests/fixtures/communication.py index cc799c9a58..fc0637a6bb 100644 --- a/tests/fixtures/communication.py +++ b/tests/fixtures/communication.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/config.py b/tests/fixtures/config.py index a6e4d10fb3..6b73d6b4f5 100644 --- a/tests/fixtures/config.py +++ b/tests/fixtures/config.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/domain_models.py b/tests/fixtures/domain_models.py index eb8e8bbd26..f9932b41b9 100644 --- a/tests/fixtures/domain_models.py +++ b/tests/fixtures/domain_models.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -27,12 +26,12 @@ def empty_dataset_model(): """Dataset without files.""" from renku.domain_model.dataset import Dataset - def _create_dataset(name="my-dataset", identifier="5e77e63037614ea89309e21befe91dbb"): + def _create_dataset(slug="my-dataset", identifier="5e77e63037614ea89309e21befe91dbb"): return Dataset( id=Dataset.generate_id(identifier), identifier=identifier, initial_identifier=identifier, - name=name, + slug=slug, date_created=datetime.fromisoformat("2022-07-12T16:29:14+02:00"), ) @@ -45,10 +44,10 @@ def dataset_model(): from renku.domain_model.dataset import Dataset, DatasetFile from renku.domain_model.entity import Entity - def _create_dataset(name="my-dataset", num_files=2, identifier="14249f1571fb4a2786ddeb7f706b9833"): + def _create_dataset(slug="my-dataset", num_files=2, identifier="14249f1571fb4a2786ddeb7f706b9833"): files = [] for i in range(num_files): - path = f"data/{name}/{i}" + path = f"data/{slug}/{i}" files.append( DatasetFile( id=DatasetFile.generate_id(), @@ -60,7 +59,7 @@ def _create_dataset(name="my-dataset", num_files=2, identifier="14249f1571fb4a27 id=Dataset.generate_id(identifier), identifier=identifier, initial_identifier=identifier, - name=name, + slug=slug, dataset_files=files, date_created=datetime.fromisoformat("2022-07-12T16:29:14+02:00"), date_modified=datetime.fromisoformat("2022-07-12T16:29:14+02:00"), @@ -73,9 +72,9 @@ def _create_dataset(name="my-dataset", num_files=2, identifier="14249f1571fb4a27 def derived_dataset_model(empty_dataset_model, dataset_model): """Dataset with modification.""" - def _create_dataset(name="my-dataset", identifier="5d8e5f72ef21441291cbf19db5a417ce"): - source_dataset = empty_dataset_model(name) - new_dataset = dataset_model(name) + def 
_create_dataset(slug="my-dataset", identifier="5d8e5f72ef21441291cbf19db5a417ce"): + source_dataset = empty_dataset_model(slug) + new_dataset = dataset_model(slug) new_dataset.derive_from(source_dataset, identifier=identifier) return new_dataset diff --git a/tests/fixtures/repository.py b/tests/fixtures/repository.py index 24ec3870c2..e24364e145 100644 --- a/tests/fixtures/repository.py +++ b/tests/fixtures/repository.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/runners.py b/tests/fixtures/runners.py index a849986b7e..0c8428b2b5 100644 --- a/tests/fixtures/runners.py +++ b/tests/fixtures/runners.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/session.py b/tests/fixtures/session.py index 9180a348e3..1646831adb 100644 --- a/tests/fixtures/session.py +++ b/tests/fixtures/session.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/fixtures/templates.py b/tests/fixtures/templates.py index d65e88a5a8..45b594d022 100644 --- a/tests/fixtures/templates.py +++ b/tests/fixtures/templates.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -59,7 +58,7 @@ def template(template_metadata): "url": "https://github.com/SwissDataScienceCenter/renku-project-template", "id": "python-minimal", "index": 1, - "branch": "master", + "ref": "master", # TODO: Add template parameters here once parameters are added to the template. 
"metadata": {}, "default_metadata": template_metadata, @@ -84,7 +83,7 @@ def project_init(template): "init_custom": [ "init", "--template-ref", - template["branch"], + template["ref"], "--template-id", "python-minimal", data["test_project"], @@ -92,7 +91,7 @@ def project_init(template): "init_custom_template": ( "https://gitlab.dev.renku.ch/renku-python-integration-tests/core-it-template-variable-test-project" ), - "remote": ["--template-source", template["url"], "--template-ref", template["branch"]], + "remote": ["--template-source", template["url"], "--template-ref", template["ref"]], "id": ["--template-id", template["id"]], "force": ["--force"], "parameters": ["--parameter", "p1=v1", "--parameter", "p2=v2"], diff --git a/tests/service/__init__.py b/tests/service/__init__.py index 4f7fd8ff44..bf42d3b70d 100644 --- a/tests/service/__init__.py +++ b/tests/service/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/cache/test_cache.py b/tests/service/cache/test_cache.py index cfacf15579..34bb84b455 100644 --- a/tests/service/cache/test_cache.py +++ b/tests/service/cache/test_cache.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 -Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/cache/test_project_management_cache.py b/tests/service/cache/test_project_management_cache.py new file mode 100644 index 0000000000..c5502b88fe --- /dev/null +++ b/tests/service/cache/test_project_management_cache.py @@ -0,0 +1,41 @@ +import pytest +from marshmallow.exceptions import ValidationError + +from renku.ui.service.cache.projects import ProjectManagementCache, User + + +@pytest.mark.parametrize( + "commit_sha,branch,exception", + [ + (None, None, None), + ("commit_sha", None, None), + (None, "branch", None), + (None, "master", None), + ("commit_sha", "master", ValidationError), + ], +) +def test_make_project(mock_redis, commit_sha, branch, exception): + cache = ProjectManagementCache() + user = User(user_id="user_id") + project_data = { + "slug": "slug", + "name": "name", + "owner": "owner", + "branch": branch, + "commit_sha": commit_sha, + } + if exception is not None: + with pytest.raises(exception): + cache.make_project(user=user, project_data=project_data, persist=True) + else: + cache.make_project(user=user, project_data=project_data, persist=True) + projects = list(cache.get_projects(user)) + assert len(projects) == 1 + if commit_sha is None: + assert projects[0].commit_sha == "" + else: + assert projects[0].commit_sha == commit_sha + if branch is None: + assert projects[0].branch == "" + else: + assert projects[0].branch == branch diff --git a/tests/service/cache/test_project_model.py b/tests/service/cache/test_project_model.py new file mode 100644 index 0000000000..07ff557efa --- /dev/null +++ b/tests/service/cache/test_project_model.py @@ -0,0 +1,17 @@ +import pytest + +from renku.ui.service.cache.models.project import DETACHED_HEAD_FOLDER_PREFIX, NO_BRANCH_FOLDER, Project + + +@pytest.mark.parametrize( + "commit_sha,branch,expected_folder", + [ + (None, None, NO_BRANCH_FOLDER), + ("commit_sha", None, f"{DETACHED_HEAD_FOLDER_PREFIX}commit_sha"), + (None, "branch", "branch"), + (None, "master", "master"), + ], +) +def test_project_model_path(commit_sha, branch, expected_folder): + project = Project(name="name", slug="slug", commit_sha=commit_sha, user_id="user_id", owner="owner", branch=branch) + assert project.abs_path.stem == expected_folder diff --git a/tests/service/controllers/test_templates_create_project.py b/tests/service/controllers/test_templates_create_project.py index e38320ee33..6238ec74ac 100644 --- a/tests/service/controllers/test_templates_create_project.py +++ b/tests/service/controllers/test_templates_create_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 -Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -43,28 +42,22 @@ def test_template_create_project_ctrl(ctrl_init, svc_client_templates_creation, # Ctrl state. 
expected_context = { - "timestamp", - "owner", "project_namespace", "token", "email", "project_repository", - "url", "identifier", "parameters", "project_name", - "name", - "slug", "project_description", "new_project_url", "fullname", "project_slug", - "git_url", + "template_git_url", "project_name_stripped", "depth", - "branch", + "ref", "new_project_url_with_auth", - "url_with_auth", } assert expected_context.issubset(set(ctrl.context.keys())) @@ -83,7 +76,7 @@ def test_template_create_project_ctrl(ctrl_init, svc_client_templates_creation, expected_metadata.add("__renku_version__") assert expected_metadata == set(received_metadata.keys()) assert payload["url"] == received_metadata["__template_source__"] - assert payload["branch"] == received_metadata["__template_ref__"] + assert payload["ref"] == received_metadata["__template_ref__"] assert payload["identifier"] == received_metadata["__template_id__"] assert payload["project_namespace"] == received_metadata["__namespace__"] assert payload["project_repository"] == received_metadata["__repository__"] diff --git a/tests/service/controllers/test_templates_read_manifest.py b/tests/service/controllers/test_templates_read_manifest.py index 2a4d3d3d8c..1322a986c9 100644 --- a/tests/service/controllers/test_templates_read_manifest.py +++ b/tests/service/controllers/test_templates_read_manifest.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 -Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/controllers/utils/test_project_clone.py b/tests/service/controllers/utils/test_project_clone.py index 55596113ad..63bc94046e 100644 --- a/tests/service/controllers/utils/test_project_clone.py +++ b/tests/service/controllers/utils/test_project_clone.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 -Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/controllers/utils/test_remote_project.py b/tests/service/controllers/utils/test_remote_project.py index 490eee1bbd..6e9c27339d 100644 --- a/tests/service/controllers/utils/test_remote_project.py +++ b/tests/service/controllers/utils/test_remote_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 -Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/__init__.py b/tests/service/fixtures/__init__.py index e98dc3b05a..4c13142c77 100644 --- a/tests/service/fixtures/__init__.py +++ b/tests/service/fixtures/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). 
A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_cache.py b/tests/service/fixtures/service_cache.py index 71f7493d93..50a4c5f0cd 100644 --- a/tests/service/fixtures/service_cache.py +++ b/tests/service/fixtures/service_cache.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_client.py b/tests/service/fixtures/service_client.py index ff38b98db9..879e72a1be 100644 --- a/tests/service/fixtures/service_client.py +++ b/tests/service/fixtures/service_client.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_controllers.py b/tests/service/fixtures/service_controllers.py index 14eef25529..a702fe8d88 100644 --- a/tests/service/fixtures/service_controllers.py +++ b/tests/service/fixtures/service_controllers.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_data.py b/tests/service/fixtures/service_data.py index 0ae53f9d64..c8a7815e94 100644 --- a/tests/service/fixtures/service_data.py +++ b/tests/service/fixtures/service_data.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_endpoints.py b/tests/service/fixtures/service_endpoints.py index e4f229856d..374c2f8cac 100644 --- a/tests/service/fixtures/service_endpoints.py +++ b/tests/service/fixtures/service_endpoints.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -100,7 +99,7 @@ def unlink_file_setup(svc_client_with_repo): unlink_payload = { "git_url": url_components.href, - "name": response.json["result"]["name"], + "slug": response.json["result"]["slug"], "include_filters": [response.json["result"]["files"][0]["file_path"]], } diff --git a/tests/service/fixtures/service_integration.py b/tests/service/fixtures/service_integration.py index d0157276e1..d75bb7f11f 100644 --- a/tests/service/fixtures/service_integration.py +++ b/tests/service/fixtures/service_integration.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_jobs.py b/tests/service/fixtures/service_jobs.py index 564693f2bd..5165748149 100644 --- a/tests/service/fixtures/service_jobs.py +++ b/tests/service/fixtures/service_jobs.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/fixtures/service_projects.py b/tests/service/fixtures/service_projects.py index 5d274cc2f9..085ca6c82a 100644 --- a/tests/service/fixtures/service_projects.py +++ b/tests/service/fixtures/service_projects.py @@ -1,6 +1,5 @@ -# -# Copyright 2021 Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/jobs/test_config.py b/tests/service/jobs/test_config.py index 7119a09b26..0bc77be8c5 100644 --- a/tests/service/jobs/test_config.py +++ b/tests/service/jobs/test_config.py @@ -1,6 +1,5 @@ -# -# Copyright 2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -43,7 +42,7 @@ def test_delay_config_set(svc_client_cache, it_remote_repo_url, view_user_data): updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"config", "remote_branch"} == set(updated_job.ctrl_result["result"].keys()) + assert {"config", "remote_branch", "git_url"} == set(updated_job.ctrl_result["result"].keys()) @pytest.mark.service diff --git a/tests/service/jobs/test_datasets.py b/tests/service/jobs/test_datasets.py index 8e51457f17..00b865016f 100644 --- a/tests/service/jobs/test_datasets.py +++ b/tests/service/jobs/test_datasets.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -258,14 +257,14 @@ def test_dataset_add_remote_file(url, svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, "create_dataset": True, "files": [{"file_url": url}], } response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"files", "name", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"files", "slug", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) dest = make_project_path( user, @@ -280,7 +279,7 @@ def test_dataset_add_remote_file(url, svc_client_with_repo): job_id = response.json["result"]["files"][0]["job_id"] commit_message = "service: dataset add remote file" - dataset_add_remote_file(user, job_id, project_id, True, commit_message, payload["name"], url) + dataset_add_remote_file(user, job_id, project_id, True, commit_message, payload["slug"], url) new_commit = Repository(dest).head.commit @@ -307,7 +306,7 @@ def test_delay_add_file_job(svc_client_cache, it_remote_repo_url_temp_branch, vi { "git_url": it_remote_repo_url, "branch": branch, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, "migrate_project": True, @@ -325,7 +324,7 @@ def test_delay_add_file_job(svc_client_cache, it_remote_repo_url_temp_branch, vi updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"remote_branch", "project_id", "files", "name"} == updated_job.ctrl_result["result"].keys() + assert {"remote_branch", "project_id", "files", "slug", "git_url"} == updated_job.ctrl_result["result"].keys() @pytest.mark.service @@ -351,7 +350,7 @@ def test_delay_add_file_job_failure(svc_client_cache, it_remote_repo_url_temp_br { "git_url": it_remote_repo_url, "branch": branch, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, "migrate_project": False, @@ -382,7 +381,7 @@ def test_delay_create_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran { "git_url": it_remote_repo_url, "branch": branch, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. 
"is_delayed": True, "migrate_project": True, @@ -403,7 +402,7 @@ def test_delay_create_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"name", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"slug", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() @pytest.mark.service @@ -419,7 +418,7 @@ def test_delay_create_dataset_failure(svc_client_cache, it_remote_repo_url_temp_ { "git_url": it_remote_repo_url, "branch": branch, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, } @@ -455,7 +454,7 @@ def test_delay_remove_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran request_payload = { "git_url": it_remote_repo_url, "branch": branch, - "name": "mydata", + "slug": "mydata", "migrate_project": True, } @@ -470,7 +469,7 @@ def test_delay_remove_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran updated_job = delayed_ctrl_job(context, view_user_data, delete_job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"name", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"slug", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() @pytest.mark.service @@ -484,12 +483,12 @@ def test_delay_remove_dataset_job_failure(svc_client_cache, it_remote_repo_url_t it_remote_repo_url, ref = it_remote_repo_url_temp_branch _, _, cache = svc_client_cache user = cache.ensure_user(view_user_data) - dataset_name = uuid.uuid4().hex + dataset_slug = uuid.uuid4().hex request_payload = { "git_url": it_remote_repo_url, "branch": ref, - "name": dataset_name, + "slug": dataset_slug, } context = DatasetRemoveRequest().load(request_payload) @@ -517,8 +516,8 @@ def test_delay_edit_dataset_job(svc_client_cache, it_remote_repo_url_temp_branch { "git_url": it_remote_repo_url, "branch": branch, - "name": "mydata", - "title": f"new title => {uuid.uuid4().hex}", + "slug": "mydata", + "name": f"new name => {uuid.uuid4().hex}", # NOTE: We test with this only to check that recursive invocation is being prevented. "is_delayed": True, "migrate_project": True, @@ -539,8 +538,8 @@ def test_delay_edit_dataset_job(svc_client_cache, it_remote_repo_url_temp_branch updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"warnings", "remote_branch", "edited"} == updated_job.ctrl_result["result"].keys() - assert {"title"} == updated_job.ctrl_result["result"]["edited"].keys() + assert {"warnings", "remote_branch", "edited", "git_url"} == updated_job.ctrl_result["result"].keys() + assert {"name"} == updated_job.ctrl_result["result"]["edited"].keys() @pytest.mark.service @@ -556,8 +555,8 @@ def test_delay_edit_dataset_job_failure(svc_client_cache, it_remote_repo_url_tem { "git_url": it_remote_repo_url, "branch": branch, - "name": "mydata", - "title": f"new title => {uuid.uuid4().hex}", + "slug": "mydata", + "name": f"new name => {uuid.uuid4().hex}", "migrate_project": False, } ) @@ -589,7 +588,7 @@ def test_delay_unlink_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran { "git_url": it_remote_repo_url, "branch": branch, - "name": "ds1", + "slug": "ds1", "include_filters": ["data1"], # NOTE: We test with this only to check that recursive invocation is being prevented. 
"is_delayed": True, @@ -611,7 +610,7 @@ def test_delay_unlink_dataset_job(svc_client_cache, it_remote_repo_url_temp_bran updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"unlinked", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"unlinked", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() assert ["data/data1"] == updated_job.ctrl_result["result"]["unlinked"] @@ -625,7 +624,7 @@ def test_delay_unlink_dataset_job_failure(svc_client_cache, it_remote_repo_url_t it_remote_repo_url, branch = it_remote_repo_url_temp_branch context = DatasetUnlinkRequest().load( - {"git_url": it_remote_repo_url, "branch": branch, "name": "ds1", "include_filters": ["data1"]} + {"git_url": it_remote_repo_url, "branch": branch, "slug": "ds1", "include_filters": ["data1"]} ) _, _, cache = svc_client_cache @@ -655,7 +654,7 @@ def test_unlink_dataset_sync(svc_client_cache, it_remote_repo_url_temp_branch, v { "git_url": it_remote_repo_url, "branch": branch, - "name": "ds1", + "slug": "ds1", "include_filters": ["data1"], "migrate_project": True, } @@ -675,7 +674,7 @@ def test_unlink_dataset_sync(svc_client_cache, it_remote_repo_url_temp_branch, v updated_job = delayed_ctrl_job(context, view_user_data, job.job_id, renku_module, renku_ctrl) assert updated_job - assert {"unlinked", "remote_branch"} == updated_job.ctrl_result["result"].keys() + assert {"unlinked", "remote_branch", "git_url"} == updated_job.ctrl_result["result"].keys() assert ["data/data1"] == updated_job.ctrl_result["result"]["unlinked"] diff --git a/tests/service/jobs/test_jobs.py b/tests/service/jobs/test_jobs.py index 9be09f9695..b3618e1fac 100644 --- a/tests/service/jobs/test_jobs.py +++ b/tests/service/jobs/test_jobs.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/jobs/test_project.py b/tests/service/jobs/test_project.py index dad1d59fae..5e94bc24f4 100644 --- a/tests/service/jobs/test_project.py +++ b/tests/service/jobs/test_project.py @@ -1,6 +1,5 @@ -# -# Copyright 2020-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/serializers/test_headers.py b/tests/service/serializers/test_headers.py index b0e3f8aeb1..3b38120971 100644 --- a/tests/service/serializers/test_headers.py +++ b/tests/service/serializers/test_headers.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/views/__init__.py b/tests/service/views/__init__.py index a0f2960530..66dc96951f 100644 --- a/tests/service/views/__init__.py +++ b/tests/service/views/__init__.py @@ -1,6 +1,5 @@ -# -# Copyright 2017-2023- Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/views/test_api_versions_views.py b/tests/service/views/test_api_versions_views.py index 51560cf2f5..13abf756c4 100644 --- a/tests/service/views/test_api_versions_views.py +++ b/tests/service/views/test_api_versions_views.py @@ -1,6 +1,5 @@ -# -# Copyright 2022-2023 -Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/views/test_cache_views.py b/tests/service/views/test_cache_views.py index 033ac9b767..ec947522e9 100644 --- a/tests/service/views/test_cache_views.py +++ b/tests/service/views/test_cache_views.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -933,7 +932,7 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ transaction_id=project_context.transaction_id, commit_message="Create dataset", ): - with DatasetContext(name="my_dataset", create=True, commit_database=True) as dataset: + with DatasetContext(slug="my_dataset", create=True, commit_database=True) as dataset: dataset.creators = [Person(name="me", email="me@example.com", id="me_id")] remote_repo_checkout.push() @@ -945,19 +944,19 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ assert response assert 200 == response.status_code - assert {"datasets"} == set(response.json["result"].keys()), response.json + assert {"datasets", "git_url"} == set(response.json["result"].keys()), response.json assert 1 == len(response.json["result"]["datasets"]) payload = { "git_url": remote_url, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers) assert response assert 200 == response.status_code - assert {"name", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch"} == set(response.json["result"].keys()) remote_repo_checkout.pull() @@ -965,8 +964,8 @@ def test_cache_gets_synchronized(local_remote_repository, directory_tree, quick_ datasets = DatasetGateway().get_all_active_datasets() assert 2 == len(datasets) - assert any(d.name == "my_dataset" for d in datasets) - assert any(d.name == payload["name"] for d in datasets) + assert any(d.slug == "my_dataset" for d in datasets) + assert any(d.slug == payload["slug"] for d in datasets) @pytest.mark.service diff --git a/tests/service/views/test_config_views.py b/tests/service/views/test_config_views.py index 150b8e2d38..f28324d4dc 100644 --- a/tests/service/views/test_config_views.py +++ b/tests/service/views/test_config_views.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2020 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/service/views/test_dataset_views.py b/tests/service/views/test_dataset_views.py index 16f409ed23..1d1e302169 100644 --- a/tests/service/views/test_dataset_views.py +++ b/tests/service/views/test_dataset_views.py @@ -1,6 +1,5 @@ -# -# Copyright 2019-2023 - Swiss Data Science Center (SDSC) -# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and +# Copyright Swiss Data Science Center (SDSC). A partnership between +# École Polytechnique Fédérale de Lausanne (EPFL) and # Eidgenössische Technische Hochschule Zürich (ETHZ). 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -75,14 +74,14 @@ def test_create_dataset_view(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] @pytest.mark.service @@ -92,13 +91,13 @@ def test_create_dataset_view_with_datadir(svc_client_with_repo): """Create a new dataset successfully.""" svc_client, headers, project_id, url_components = svc_client_with_repo - payload = {"git_url": url_components.href, "name": uuid.uuid4().hex, "data_directory": "my-folder/"} + payload = {"git_url": url_components.href, "slug": uuid.uuid4().hex, "data_directory": "my-folder/"} response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -106,7 +105,7 @@ def test_create_dataset_view_with_datadir(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) assert ds["data_directory"] == "my-folder" @@ -119,12 +118,12 @@ def test_remote_create_dataset_view(svc_client_cache, it_remote_repo_url): payload = { "git_url": it_remote_repo_url, - "name": f"{uuid.uuid4().hex}", + "slug": f"{uuid.uuid4().hex}", } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) @pytest.mark.service @@ -136,7 +135,7 @@ def test_delay_create_dataset_view(svc_client_cache, it_remote_repo_url): payload = { "git_url": it_remote_repo_url, - "name": f"{uuid.uuid4().hex}", + "slug": f"{uuid.uuid4().hex}", "is_delayed": True, } @@ -154,7 +153,7 @@ def test_create_dataset_wrong_ref_view(svc_client_with_repo): payload = { "git_url": "http://doesnotexistanywhere994455/a/b.git", - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) @@ -170,21 +169,21 @@ def test_remove_dataset_view(svc_client_with_repo): svc_client, headers, project_id, url_components = svc_client_with_repo payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) response = svc_client.post("/datasets.remove", data=json.dumps(payload), headers=headers) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", 
"git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] - # NOTE: Ensure that dataset does not exists in this project anymore! + # NOTE: Ensure that dataset does not exist in this project anymore! response = svc_client.get("/datasets.list", query_string={"git_url": url_components.href}, headers=headers) assert_rpc_response(response) - datasets = [ds["name"] for ds in response.json["result"]["datasets"]] - assert payload["name"] not in datasets + datasets = [ds["slug"] for ds in response.json["result"]["datasets"]] + assert payload["slug"] not in datasets @pytest.mark.integration @@ -194,7 +193,7 @@ def test_remote_remove_view(svc_client, it_remote_repo_url, identity_headers): """Test creating a delayed remove.""" response = svc_client.post( "/datasets.remove", - data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, name="mydata")), + data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, slug="mydata")), headers=identity_headers, ) @@ -212,8 +211,8 @@ def test_create_dataset_with_metadata(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "keywords": ["keyword1", "keyword2"], @@ -222,8 +221,8 @@ def test_create_dataset_with_metadata(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -231,9 +230,9 @@ def test_create_dataset_with_metadata(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) - assert payload["title"] == ds["title"] + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) assert payload["name"] == ds["name"] + assert payload["slug"] == ds["slug"] assert payload["description"] == ds["description"] assert payload["creators"] == ds["creators"] assert payload["keywords"] == ds["keywords"] @@ -248,8 +247,8 @@ def test_create_dataset_with_images(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "images": [ @@ -264,8 +263,8 @@ def test_create_dataset_with_images(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "images": [ @@ -277,8 +276,8 @@ def test_create_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) 
- assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -286,10 +285,10 @@ def test_create_dataset_with_images(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) - assert payload["title"] == ds["title"] assert payload["name"] == ds["name"] + assert payload["slug"] == ds["slug"] assert payload["description"] == ds["description"] assert payload["creators"] == ds["creators"] assert len(ds["images"]) == 2 @@ -310,8 +309,8 @@ def test_create_dataset_with_custom_metadata(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "custom_metadata": { @@ -325,8 +324,8 @@ def test_create_dataset_with_custom_metadata(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -334,10 +333,10 @@ def test_create_dataset_with_custom_metadata(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) - assert payload["title"] == ds["title"] assert payload["name"] == ds["name"] + assert payload["slug"] == ds["slug"] assert payload["description"] == ds["description"] assert payload["creators"] == ds["creators"] assert payload["custom_metadata"] == ds["annotations"][0]["body"] @@ -356,8 +355,8 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "images": [{"content_url": "https://renkulab.io/api/doesnt_exist.png", "position": 1, "mirror_locally": True}], @@ -369,8 +368,8 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "images": [{"content_url": img_url, "position": 1, "mirror_locally": True}], @@ -378,8 +377,8 @@ def 
test_create_dataset_with_image_download(svc_client_with_repo, img_url): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -387,7 +386,7 @@ def test_create_dataset_with_image_download(svc_client_with_repo, img_url): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) assert len(ds["images"]) == 1 img1 = next(img for img in ds["images"] if img["position"] == 1) @@ -407,8 +406,8 @@ def test_create_dataset_with_uploaded_images(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "images": [{"file_id": file_id1, "position": 1}, {"file_id": file_id2, "position": 2}], @@ -416,8 +415,8 @@ def test_create_dataset_with_uploaded_images(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -425,10 +424,10 @@ def test_create_dataset_with_uploaded_images(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) - assert payload["title"] == ds["title"] assert payload["name"] == ds["name"] + assert payload["slug"] == ds["slug"] assert payload["description"] == ds["description"] assert payload["creators"] == ds["creators"] assert len(ds["images"]) == 2 @@ -450,8 +449,8 @@ def test_create_dataset_invalid_creator(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, - "title": "my little dataset", + "slug": uuid.uuid4().hex, + "name": "my little dataset", "creators": [{"name": None, "email": "name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", } @@ -472,7 +471,7 @@ def test_create_dataset_view_dataset_exists(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": "mydataset", + "slug": "mydataset", } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) @@ -491,7 +490,7 @@ def test_create_dataset_view_unknown_param(svc_client_with_repo): svc_client, headers, project_id, url_components = svc_client_with_repo unknown_field = "remote_name" - payload = {"git_url": url_components.href, "name": "mydata", unknown_field: "origin"} + 
payload = {"git_url": url_components.href, "slug": "mydata", unknown_field: "origin"} response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response, "error") @@ -508,7 +507,7 @@ def test_create_dataset_with_no_identity(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": "mydata", + "slug": "mydata", "remote_name": "origin", } @@ -528,7 +527,7 @@ def test_add_file_view_with_no_identity(svc_client_with_repo): svc_client, headers, project_id, url_components = svc_client_with_repo payload = { "git_url": url_components.href, - "name": "mydata", + "slug": "mydata", "remote_name": "origin", } @@ -551,7 +550,7 @@ def test_add_file_view(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, "create_dataset": True, "files": [{"file_id": file_id}], } @@ -559,7 +558,7 @@ def test_add_file_view(svc_client_with_repo): response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "project_id", "files", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "project_id", "files", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert 1 == len(response.json["result"]["files"]) assert file_id == response.json["result"]["files"][0]["file_id"] @@ -572,7 +571,7 @@ def test_remote_add_view(svc_client, it_remote_repo_url, identity_headers): response = svc_client.post( "/datasets.add", data=json.dumps( - dict(git_url=it_remote_repo_url, is_delayed=True, name="mydata", files=[{"file_path": "somefile.txt"}]) + dict(git_url=it_remote_repo_url, is_delayed=True, slug="mydata", files=[{"file_path": "somefile.txt"}]) ), headers=identity_headers, ) @@ -593,7 +592,7 @@ def test_add_file_failure(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, "create_dataset": True, "files": [{"file_id": file_id}, {"file_path": "my problem right here"}], } @@ -616,7 +615,7 @@ def test_list_datasets_view(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"datasets"} == set(response.json["result"].keys()) + assert {"datasets", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["datasets"]) assert { "version", @@ -624,8 +623,8 @@ def test_list_datasets_view(svc_client_with_repo): "identifier", "images", "created_at", + "slug", "name", - "title", "creators", "keywords", "annotations", @@ -671,7 +670,7 @@ def test_list_datasets_view_remote(svc_client_with_repo, it_remote_repo_url): response = svc_client.get("/datasets.list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"datasets"} == set(response.json["result"].keys()) + assert {"datasets", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["datasets"]) assert { "version", @@ -679,8 +678,8 @@ def test_list_datasets_view_remote(svc_client_with_repo, it_remote_repo_url): "identifier", "images", "created_at", + "slug", "name", - "title", "creators", "keywords", "annotations", @@ -712,13 +711,13 @@ def test_list_dataset_files_anonymous(svc_client_with_repo, it_remote_repo_url): """Check listing of existing dataset files.""" svc_client, _, _, _ = svc_client_with_repo - params = {"git_url": it_remote_repo_url, "name": "ds1"} + params = {"git_url": it_remote_repo_url, 
"slug": "ds1"} response = svc_client.get("/datasets.files_list", query_string=params, headers={}) assert_rpc_response(response, "error") assert UserRepoNoAccessError.code == response.json["error"]["code"] - params = {"git_url": "https://gitlab.dev.renku.ch/renku-python-integration-tests/no-renku", "name": "mydata"} + params = {"git_url": "https://gitlab.dev.renku.ch/renku-python-integration-tests/no-renku", "slug": "mydata"} response = svc_client.get("/datasets.files_list", query_string=params, headers={}) assert_rpc_response(response, "error") @@ -734,14 +733,14 @@ def test_list_datasets_files_remote(svc_client_with_repo, it_remote_repo_url): """Check listing of existing dataset files.""" svc_client, headers, _, _ = svc_client_with_repo - params = dict(git_url=it_remote_repo_url, name="ds1") + params = dict(git_url=it_remote_repo_url, slug="ds1") response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"files", "name"} == set(response.json["result"].keys()) + assert {"files", "slug", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["files"]) - assert "ds1" == response.json["result"]["name"] + assert "ds1" == response.json["result"]["slug"] @pytest.mark.integration @@ -751,7 +750,7 @@ def test_remote_create_view(svc_client, it_remote_repo_url, identity_headers): """Test creating a delayed dataset create.""" response = svc_client.post( "/datasets.create", - data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, name=uuid.uuid4().hex)), + data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, slug=uuid.uuid4().hex)), headers=identity_headers, ) @@ -769,13 +768,13 @@ def test_create_and_list_datasets_view(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params_list = { "git_url": url_components.href, @@ -783,15 +782,15 @@ def test_create_and_list_datasets_view(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) - assert {"datasets"} == set(response.json["result"].keys()) + assert {"datasets", "git_url"} == set(response.json["result"].keys()) assert 0 != len(response.json["result"]["datasets"]) assert { "creators", - "name", + "slug", "identifier", "images", "version", - "title", + "name", "description", "created_at", "keywords", @@ -800,7 +799,7 @@ def test_create_and_list_datasets_view(svc_client_with_repo): "data_directory", } == set(response.json["result"]["datasets"][0].keys()) - assert payload["name"] in [ds["name"] for ds in response.json["result"]["datasets"]] + assert payload["slug"] in [ds["slug"] for ds in response.json["result"]["datasets"]] @pytest.mark.service @@ -815,24 +814,24 @@ def test_list_dataset_files(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": "mydata", + "slug": "mydata", "files": [{"file_id": file_id}], } response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "files", 
"project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert file_id == response.json["result"]["files"][0]["file_id"] params = { "git_url": url_components.href, - "name": "mydata", + "slug": "mydata", } response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"name", "files"} == set(response.json["result"].keys()) - assert params["name"] == response.json["result"]["name"] + assert {"slug", "files", "git_url"} == set(response.json["result"].keys()) + assert params["slug"] == response.json["result"]["slug"] assert file_name in [file["name"] for file in response.json["result"]["files"]] assert {"name", "path", "added"} == response.json["result"]["files"][0].keys() @@ -866,32 +865,32 @@ def test_add_with_unpacked_archive(datapack_zip, svc_client_with_repo): file_ = mm["file2"] payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } headers["Content-Type"] = content_type response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] - payload = {"git_url": url_components.href, "name": payload["name"], "files": [{"file_id": file_["file_id"]}]} + payload = {"git_url": url_components.href, "slug": payload["slug"], "files": [{"file_id": file_["file_id"]}]} response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "files", "project_id", "remote_branch"} == set(response.json["result"].keys()) + assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys()) assert file_["file_id"] == response.json["result"]["files"][0]["file_id"] params = { "git_url": url_components.href, - "name": payload["name"], + "slug": payload["slug"], } response = svc_client.get("/datasets.files_list", query_string=params, headers=headers) assert_rpc_response(response) - assert {"name", "files"} == set(response.json["result"].keys()) - assert params["name"] == response.json["result"]["name"] + assert {"slug", "files", "git_url"} == set(response.json["result"].keys()) + assert params["slug"] == response.json["result"]["slug"] assert file_["file_name"] in [file["name"] for file in response.json["result"]["files"]] @@ -926,34 +925,34 @@ def test_add_with_unpacked_archive_all(datapack_zip, svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } headers["Content-Type"] = content_type response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] payload = { "git_url": url_components.href, - "name": payload["name"], + "slug": payload["slug"], "files": files, } response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers) 
     assert_rpc_response(response)
-    assert {"name", "files", "project_id", "remote_branch"} == set(response.json["result"].keys())
+    assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys())
     assert files == response.json["result"]["files"]

     params = {
         "git_url": url_components.href,
-        "name": payload["name"],
+        "slug": payload["slug"],
     }

     response = svc_client.get("/datasets.files_list", query_string=params, headers=headers)

     assert_rpc_response(response)
-    assert {"name", "files"} == set(response.json["result"].keys())
-    assert params["name"] == response.json["result"]["name"]
+    assert {"slug", "files", "git_url"} == set(response.json["result"].keys())
+    assert params["slug"] == response.json["result"]["slug"]
     assert file_["file_name"] in [file["name"] for file in response.json["result"]["files"]]
@@ -965,23 +964,23 @@ def test_add_existing_file(svc_client_with_repo):
     svc_client, headers, project_id, url_components = svc_client_with_repo
     payload = {
         "git_url": url_components.href,
-        "name": uuid.uuid4().hex,
+        "slug": uuid.uuid4().hex,
     }

     response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"name", "remote_branch"} == set(response.json["result"].keys())
-    assert payload["name"] == response.json["result"]["name"]
+    assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys())
+    assert payload["slug"] == response.json["result"]["slug"]

     files = [{"file_path": "README.md"}]
     payload = {
         "git_url": url_components.href,
-        "name": payload["name"],
+        "slug": payload["slug"],
         "files": files,
     }

     response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"name", "files", "project_id", "remote_branch"} == set(response.json["result"].keys())
+    assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys())
     assert files == response.json["result"]["files"]
@@ -1084,7 +1083,7 @@ def test_dataset_add_remote(url, svc_client_cache, project_metadata):
     response = client.post("/datasets.add", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"files", "name", "project_id", "remote_branch"} == set(response.json["result"])
+    assert {"files", "slug", "project_id", "remote_branch", "git_url"} == set(response.json["result"])

     job_id = response.json["result"]["files"][0]["job_id"]
     user_job = cache.get_job(user, job_id)
@@ -1120,7 +1119,7 @@ def test_dataset_add_multiple_remote(svc_client_cache, project_metadata):
     response = client.post("/datasets.add", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"files", "name", "project_id", "remote_branch"} == set(response.json["result"])
+    assert {"files", "slug", "project_id", "remote_branch", "git_url"} == set(response.json["result"])

     for file in response.json["result"]["files"]:
         job_id = file["job_id"]
@@ -1149,7 +1148,7 @@ def test_add_remote_and_local_file(svc_client_with_repo):
     response = svc_client.post("/datasets.add", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"name", "files", "project_id", "remote_branch"} == set(response.json["result"].keys())
+    assert {"slug", "files", "project_id", "remote_branch", "git_url"} == set(response.json["result"].keys())
     for pair in zip(response.json["result"]["files"], payload["files"]):
         if "job_id" in pair[0]:
             assert pair[0].pop("job_id")
@@ -1190,18 +1189,18 @@ def test_add_remote_and_local_file(svc_client_with_repo):
 def test_edit_datasets_view(svc_client_with_repo, custom_metadata, custom_metadata_source):
     """Test editing dataset metadata."""
     svc_client, headers, project_id, url_components = svc_client_with_repo
-    name = uuid.uuid4().hex
+    slug = uuid.uuid4().hex

     payload = {
         "git_url": url_components.href,
-        "name": name,
+        "slug": slug,
     }

     response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"name", "remote_branch"} == set(response.json["result"].keys())
-    assert payload["name"] == response.json["result"]["name"]
+    assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys())
+    assert payload["slug"] == response.json["result"]["slug"]

     params_list = {
         "git_url": url_components.href,
@@ -1212,8 +1211,8 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metada

     edit_payload = {
         "git_url": url_components.href,
-        "name": name,
-        "title": "my new title",
+        "slug": slug,
+        "name": "my new name",
         "keywords": ["keyword1"],
         "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}],
         "custom_metadata": custom_metadata,
@@ -1222,9 +1221,9 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metada
         edit_payload["custom_metadata_source"] = custom_metadata_source
     response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers)

     assert_rpc_response(response)
-    assert {"warnings", "edited", "remote_branch"} == set(response.json["result"])
+    assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"])
     assert {
-        "title": "my new title",
+        "name": "my new name",
         "keywords": ["keyword1"],
         "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}],
         "custom_metadata": custom_metadata,
@@ -1237,13 +1236,13 @@ def test_edit_datasets_view(svc_client_with_repo, custom_metada
 def test_edit_datasets_view_without_modification(svc_client_with_repo):
     """Test editing dataset metadata."""
     svc_client, headers, project_id, url_components = svc_client_with_repo
-    name = uuid.uuid4().hex
+    slug = uuid.uuid4().hex

     payload = {
         "git_url": url_components.href,
-        "name": name,
+        "slug": slug,
         "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}],
-        "title": "my-title",
+        "name": "my-name",
         "description": "my description",
         "keywords": ["keywords"],
     }
@@ -1251,8 +1250,8 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo):

     response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers)

     assert_rpc_response(response)
-    assert {"name", "remote_branch"} == set(response.json["result"].keys())
-    assert payload["name"] == response.json["result"]["name"]
+    assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys())
+    assert payload["slug"] == response.json["result"]["slug"]

     params_list = {
         "git_url": url_components.href,
@@ -1263,12 +1262,12 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo):

     assert_rpc_response(response)
     edit_payload = {
         "git_url": url_components.href,
-        "name": name,
+        "slug": slug,
     }
     response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers)

     assert_rpc_response(response)
-    assert {"warnings", "edited", "remote_branch"} == set(response.json["result"])
+    assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"])
     assert {} == response.json["result"]["edited"]
response.json["result"]["edited"] params_list = { @@ -1278,9 +1277,9 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) - assert payload["title"] == ds["title"] + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) assert payload["name"] == ds["name"] + assert payload["slug"] == ds["slug"] assert payload["description"] == ds["description"] assert payload["creators"] == ds["creators"] assert payload["keywords"] == ds["keywords"] @@ -1292,13 +1291,13 @@ def test_edit_datasets_view_without_modification(svc_client_with_repo): def test_edit_datasets_view_unset_values(svc_client_with_repo): """Test editing dataset metadata.""" svc_client, headers, project_id, url_components = svc_client_with_repo - name = uuid.uuid4().hex + slug = uuid.uuid4().hex payload = { "git_url": url_components.href, - "name": name, + "slug": slug, "creators": [{"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"}], - "title": "my-title", + "name": "my-name", "description": "my description", "keywords": ["keywords"], "images": [ @@ -1309,8 +1308,8 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"git_url", "slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params_list = { "git_url": url_components.href, @@ -1321,7 +1320,7 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): assert_rpc_response(response) edit_payload = { "git_url": url_components.href, - "name": name, + "slug": slug, "keywords": None, "images": None, "custom_metadata": None, @@ -1329,7 +1328,7 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "git_url", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert { "keywords": [], "custom_metadata": None, @@ -1345,8 +1344,8 @@ def test_edit_datasets_view_unset_values(svc_client_with_repo): response = svc_client.get("/datasets.list", query_string=params_list, headers=headers) assert_rpc_response(response) - ds = next(ds for ds in response.json["result"]["datasets"] if ds["name"] == payload["name"]) - assert edit_payload["name"] == ds["name"] + ds = next(ds for ds in response.json["result"]["datasets"] if ds["slug"] == payload["slug"]) + assert edit_payload["slug"] == ds["slug"] assert 0 == len(ds["keywords"]) assert 0 == len(ds["annotations"]) assert 0 == len(ds["images"]) @@ -1359,12 +1358,12 @@ def test_edit_dataset_with_images(svc_client_with_repo): """Edit images of a dataset.""" svc_client, headers, project_id, url_components = svc_client_with_repo - name = uuid.uuid4().hex + slug = uuid.uuid4().hex payload = { "git_url": url_components.href, - "name": name, - "title": "my little dataset", + "slug": slug, + "name": "my little dataset", "creators": [{"name": "name123", "email": 
"name123@ethz.ch", "affiliation": "ethz"}], "description": "my little description", "images": [ @@ -1376,8 +1375,8 @@ def test_edit_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) assert_rpc_response(response) - assert {"name", "remote_branch"} == set(response.json["result"].keys()) - assert payload["name"] == response.json["result"]["name"] + assert {"slug", "remote_branch", "git_url"} == set(response.json["result"].keys()) + assert payload["slug"] == response.json["result"]["slug"] params = { "git_url": url_components.href, @@ -1390,7 +1389,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit reordering and add edit_payload = { "git_url": url_components.href, - "name": name, + "slug": slug, "images": [ {"content_url": "data/renku_logo.png", "position": 1}, {"content_url": "https://example.com/image1.jpg", "position": 2}, @@ -1401,7 +1400,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert {"images"} == response.json["result"]["edited"].keys() images = response.json["result"]["edited"]["images"] @@ -1418,7 +1417,7 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit with duplicate position edit_payload = { "git_url": url_components.href, - "name": name, + "slug": slug, "images": [ {"content_url": "data/renku_logo.png", "position": 1}, {"content_url": "https://example.com/image1.jpg", "position": 2}, @@ -1433,26 +1432,26 @@ def test_edit_dataset_with_images(svc_client_with_repo): # NOTE: test edit remove images edit_payload = { "git_url": url_components.href, - "name": name, + "slug": slug, "images": [], } response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert {"images"} == response.json["result"]["edited"].keys() assert 0 == len(response.json["result"]["edited"]["images"]) # NOTE: test edit no change edit_payload = { "git_url": url_components.href, - "name": name, + "slug": slug, "images": [], } response = svc_client.post("/datasets.edit", data=json.dumps(edit_payload), headers=headers) assert_rpc_response(response) - assert {"warnings", "edited", "remote_branch"} == set(response.json["result"]) + assert {"warnings", "edited", "remote_branch", "git_url"} == set(response.json["result"]) assert 0 == len(response.json["result"]["edited"].keys()) @@ -1463,7 +1462,7 @@ def test_remote_edit_view(svc_client, it_remote_repo_url, identity_headers): """Test creating a delayed edit.""" response = svc_client.post( "/datasets.edit", - data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, name="mydata")), + data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, slug="mydata")), headers=identity_headers, ) @@ -1482,7 +1481,7 @@ def test_protected_branch(svc_client_with_repo): payload = { "git_url": url_components.href, - "name": uuid.uuid4().hex, + "slug": uuid.uuid4().hex, } response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers) @@ -1500,7 +1499,7 @@ def 
     response = svc_client.post("/datasets.unlink", data=json.dumps(unlink_payload), headers=headers)

     assert_rpc_response(response)
-    assert {"unlinked", "remote_branch"} == set(response.json["result"].keys())
+    assert {"unlinked", "remote_branch", "git_url"} == set(response.json["result"].keys())
     assert any(p.endswith("README.md") for p in response.json["result"]["unlinked"])
@@ -1511,7 +1510,7 @@ def test_remote_unlink_view(svc_client, it_remote_repo_url, identity_headers):
     """Test creating a delayed unlink."""
     response = svc_client.post(
         "/datasets.unlink",
-        data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, name="mydata", include_filters=["data1"])),
+        data=json.dumps(dict(git_url=it_remote_repo_url, is_delayed=True, slug="mydata", include_filters=["data1"])),
         headers=identity_headers,
     )
diff --git a/tests/service/views/test_exceptions.py b/tests/service/views/test_exceptions.py
index e6f96822da..c19da7c7a2 100644
--- a/tests/service/views/test_exceptions.py
+++ b/tests/service/views/test_exceptions.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2020-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -106,7 +105,7 @@ def test_migration_required_flag(svc_client_setup):

     payload = {
         "git_url": url_components.href,
-        "name": uuid.uuid4().hex,
+        "slug": uuid.uuid4().hex,
     }

     response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=headers)
@@ -155,7 +154,7 @@ def test_project_uninitialized(svc_client, it_non_renku_repo_url, identity_heade
     assert "error" in response.json
     assert response.json["error"]["code"] == 1110

-    payload["name"] = uuid.uuid4().hex
+    payload["slug"] = uuid.uuid4().hex

     response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers)
@@ -175,7 +174,7 @@ def test_project_no_commits(svc_client, it_no_commit_repo_url, identity_headers)
     assert "error" in response.json
     assert response.json["error"]["code"] == 1110

-    payload["name"] = uuid.uuid4().hex
+    payload["slug"] = uuid.uuid4().hex

     response = svc_client.post("/datasets.create", data=json.dumps(payload), headers=identity_headers)
     assert_rpc_response(response, "error")
@@ -184,7 +183,7 @@
 @pytest.mark.service
 @pytest.mark.integration
-# @retry_failed
+@retry_failed
 @pytest.mark.parametrize(
     "git_url",
     [
diff --git a/tests/service/views/test_graph.py b/tests/service/views/test_graph.py
index 7e8d215523..218f5e8a1b 100644
--- a/tests/service/views/test_graph.py
+++ b/tests/service/views/test_graph.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2020 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/service/views/test_graph_views.py b/tests/service/views/test_graph_views.py
index a95b179ccc..7e3e362c9c 100644
--- a/tests/service/views/test_graph_views.py
+++ b/tests/service/views/test_graph_views.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2020 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/service/views/test_jobs_views.py b/tests/service/views/test_jobs_views.py
index 5e31ca8d4d..c1c235cf18 100644
--- a/tests/service/views/test_jobs_views.py
+++ b/tests/service/views/test_jobs_views.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/service/views/test_project_views.py b/tests/service/views/test_project_views.py
index 5c256220ee..afb0317056 100644
--- a/tests/service/views/test_project_views.py
+++ b/tests/service/views/test_project_views.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -92,6 +91,7 @@ def test_edit_project_view(svc_client_with_repo, custom_metadata, custom_metadat
         "description": "my new title",
         "creator": {"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"},
         "custom_metadata": custom_metadata,
+        "image": {"content_url": "https://en.wikipedia.org/static/images/icons/wikipedia.png"},
     }
     if custom_metadata_source is not None:
         edit_payload["custom_metadata_source"] = custom_metadata_source
@@ -103,6 +103,11 @@ def test_edit_project_view(svc_client_with_repo, custom_metadat
         "description": "my new title",
         "creator": {"name": "name123", "email": "name123@ethz.ch", "affiliation": "ethz"},
         "custom_metadata": custom_metadata,
+        "image": {
+            "content_url": "https://en.wikipedia.org/static/images/icons/wikipedia.png",
+            "mirror_locally": False,
+            "position": 0,
+        },
     } == response.json["result"]["edited"]

     edit_payload = {
@@ -112,7 +117,7 @@ def test_edit_project_view(svc_client_with_repo, custom_metadat

     assert_rpc_response(response)
     assert {"warning", "edited", "remote_branch"} == set(response.json["result"])
-    assert 0 == len(response.json["result"]["edited"])
+    assert 0 == len(response.json["result"]["edited"]), response.json["result"]["edited"]


 @pytest.mark.service
@@ -135,10 +140,11 @@ def test_edit_project_view_unset(svc_client_with_repo):
                 "https://schema.org/property2": "test",
             }
         ],
+        "image": {"content_url": "https://en.wikipedia.org/static/images/icons/wikipedia.png"},
     }
-    response = svc_client.post("/project.edit", data=json.dumps(edit_payload), headers=headers)
+    svc_client.post("/project.edit", data=json.dumps(edit_payload), headers=headers)

-    edit_payload = {"git_url": url_components.href, "custom_metadata": None, "keywords": None}
+    edit_payload = {"git_url": url_components.href, "custom_metadata": None, "keywords": None, "image": None}

     response = svc_client.post("/project.edit", data=json.dumps(edit_payload), headers=headers)

     assert_rpc_response(response)
@@ -146,6 +152,7 @@ def test_edit_project_view_unset(svc_client_with_repo):
     assert {
         "keywords": None,
         "custom_metadata": None,
+        "image": None,
     } == response.json[
         "result"
     ]["edited"]
diff --git a/tests/service/views/test_templates_views.py b/tests/service/views/test_templates_views.py
index 3e26292fc5..4b28eae15f 100644
--- a/tests/service/views/test_templates_views.py
+++ b/tests/service/views/test_templates_views.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2020-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -75,7 +74,7 @@ def test_compare_manifests(svc_client_with_templates):
     assert {"result"} == set(response.json.keys())
     assert response.json["result"]["templates"]

-    templates_source = fetch_templates_source(source=template_params["url"], reference=template_params["branch"])
+    templates_source = fetch_templates_source(source=template_params["url"], reference=template_params["ref"])
     manifest_file = templates_source.path / TEMPLATE_MANIFEST
     manifest = TemplatesManifest.from_path(manifest_file).get_raw_content()
@@ -131,6 +130,10 @@ def test_create_project_from_template(svc_client_templates_creation, with_inject
     svc_client, headers, payload, rm_remote = svc_client_templates_creation
     payload["data_directory"] = "my-folder/"
+    payload["image"] = {
+        "content_url": "https://en.wikipedia.org/static/images/icons/wikipedia.png",
+        "mirror_locally": True,
+    }

     response = svc_client.post("/templates.create_project", data=json.dumps(payload), headers=headers)
@@ -163,6 +166,8 @@ def test_create_project_from_template(svc_client_templates_creation, with_inject
         assert old_metadata_path.exists()
         assert "'http://schema.org/schemaVersion': '9'" in old_metadata_path.read_text()

+    assert (project_path / ".renku" / "images" / "project" / "0.png").exists()
+
     # NOTE: successfully re-use old name after cleanup
     assert rm_remote() is True
     sleep(1)  # NOTE: sleep to make sure remote isn't locked
@@ -207,7 +212,7 @@ def test_create_project_from_template_failures(svc_client_templates_creation):
     assert 200 == response.status_code
     assert {"error"} == set(response.json.keys())
     assert UserProjectCreationError.code == response.json["error"]["code"], response.json
-    assert "git_url" in response.json["error"]["devMessage"]
+    assert "`project_repository`, `project_namespace`" in response.json["error"]["devMessage"]

     # NOTE: missing fields -- unlikely to happen. If that is the case, we should determine if it's a user error or not
     payload_missing_field = deepcopy(payload)
diff --git a/tests/service/views/test_utils.py b/tests/service/views/test_utils.py
index 7f5da665b3..ec3c4ad34b 100644
--- a/tests/service/views/test_utils.py
+++ b/tests/service/views/test_utils.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,7 +35,7 @@ def test_error_response(svc_client):

 def test_result_response(svc_client):
     """Test result response utility."""
-    ctx = {"datasets": [{"name": "my-dataset"}]}
+    ctx = {"datasets": [{"slug": "my-dataset"}]}
     response = result_response(DatasetListResponseRPC(), ctx).json

     assert response
@@ -46,7 +45,7 @@ def test_result_response(svc_client):
     expected = ctx["datasets"][0]
     received = response["result"]["datasets"][0]

-    assert expected["name"] == received["name"]
+    assert expected["slug"] == received["slug"]


 def test_result_response_with_none(svc_client):
diff --git a/tests/service/views/test_version_views.py b/tests/service/views/test_version_views.py
index ce3866665d..5d8b0c9769 100644
--- a/tests/service/views/test_version_views.py
+++ b/tests/service/views/test_version_views.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2020-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/service/views/test_workflow_plan_views.py b/tests/service/views/test_workflow_plan_views.py
index ee6cf90209..1233a82b38 100644
--- a/tests/service/views/test_workflow_plan_views.py
+++ b/tests/service/views/test_workflow_plan_views.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -38,7 +37,7 @@ def test_list_workflow_plans_view(svc_client_with_repo):

     response = svc_client.get("/workflow_plans.list", query_string=params, headers=headers)

     assert_rpc_response(response)
-    assert {"plans"} == set(response.json["result"].keys())
+    assert {"plans", "git_url"} == set(response.json["result"].keys())
     assert 0 != len(response.json["result"]["plans"])
     assert {
         "children",
diff --git a/tests/service/views/v1_0/test_cache_views_1_0.py b/tests/service/views/v1_0/test_cache_views_1_0.py
index 996c75253d..24d0976a2b 100644
--- a/tests/service/views/v1_0/test_cache_views_1_0.py
+++ b/tests/service/views/v1_0/test_cache_views_1_0.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2019-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tests/utils.py b/tests/utils.py
index 5541a91451..890bdf4add 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,6 +1,5 @@
-#
-# Copyright 2020-2023 - Swiss Data Science Center (SDSC)
-# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
+# Copyright Swiss Data Science Center (SDSC). A partnership between
+# École Polytechnique Fédérale de Lausanne (EPFL) and
 # Eidgenössische Technische Hochschule Zürich (ETHZ).
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -74,7 +73,7 @@ def make_dataset_add_payload(git_url, urls, name=None):

     return {
         "git_url": git_url,
-        "name": name or uuid.uuid4().hex,
+        "slug": name or uuid.uuid4().hex,
         "create_dataset": True,
         "force": False,
         "files": files,
@@ -156,7 +155,7 @@ def load_dataset(name: str) -> Optional["Dataset"]:

     datasets_provenance = DatasetsProvenance()

-    return datasets_provenance.get_by_name(name)
+    return datasets_provenance.get_by_slug(name)


 def get_test_bindings() -> Tuple[Dict, Dict[Type, Callable[[], Any]]]:
@@ -213,7 +212,7 @@ def with_dataset(
     """Yield an editable metadata object for a dataset."""
     from renku.core.dataset.datasets_provenance import DatasetsProvenance

-    dataset = DatasetsProvenance().get_by_name(name=name, strict=True, immutable=True)
+    dataset = DatasetsProvenance().get_by_slug(slug=name, strict=True, immutable=True)

     if not dataset:
         return None