diff --git a/.github/ghci.yaml b/.github/ghci.yaml
deleted file mode 100644
index 6259009b5..000000000
--- a/.github/ghci.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-# yaml-language-server: $schema=https://geoservices-int.camptocamp.com/github/schema.json
-
-changelog:
- create-release: false
diff --git a/.github/publish.yaml b/.github/publish.yaml
new file mode 100644
index 000000000..68fe3f423
--- /dev/null
+++ b/.github/publish.yaml
@@ -0,0 +1,8 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/camptocamp/tag-publish/0.7.1/tag_publish/schema.json
+
+pypi:
+ versions:
+ - version_tag
+ - version_branch
+ packages:
+ - {}
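If a project only wants to publish tagged releases and skip per-branch dev builds, it should be enough to drop `version_branch` from the list above; a minimal sketch of that variant, assuming the rest of the tag-publish schema stays as shown:

```yaml
# Hedged sketch: publish the package to PyPI only for Git tags.
pypi:
  versions:
    - version_tag
  packages:
    - {}
```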
diff --git a/.github/spell-ignore-words.txt b/.github/spell-ignore-words.txt
index 05a726cef..ca45bcc25 100644
--- a/.github/spell-ignore-words.txt
+++ b/.github/spell-ignore-words.txt
@@ -2,7 +2,6 @@ dpkg
repology.org
pypi
Snyk
-c2cciutils-publish
fixup
codespell
pipenv
diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index eb8a5ed3e..764c11e5a 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -87,7 +87,7 @@ jobs:
run: make build
- name: Publish
- run: c2cciutils-publish
+ run: tag-publish
if: |
env.HAS_SECRETS == 'HAS_SECRETS'
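The OIDC helper removed from c2cciutils (`c2cciutils/lib/oidc.py`, deleted later in this diff) required the workflow to expose a GitHub OIDC token for PyPI trusted publishing; if `tag-publish` authenticates the same way, the publish job still needs that permission. A minimal sketch, with the job name `main` as a hypothetical placeholder:

```yaml
# Hedged sketch: let the job request a GitHub OIDC token, which trusted
# publishing exchanges for a short-lived PyPI upload token.
jobs:
  main:
    permissions:
      id-token: write
```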
diff --git a/.nvmrc b/.nvmrc
deleted file mode 100644
index 209e3ef4b..000000000
--- a/.nvmrc
+++ /dev/null
@@ -1 +0,0 @@
-20
diff --git a/Dockerfile b/Dockerfile
index 44b23b669..bac0cc1da 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,7 +9,7 @@ RUN --mount=type=cache,target=/var/lib/apt/lists \
&& apt-get upgrade --yes \
&& apt-get install --yes --no-install-recommends apt-utils \
&& DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install --yes tzdata \
- && apt-get install --yes --no-install-recommends binutils python3-pip python3-venv \
+ && apt-get install --yes --no-install-recommends binutils git python3-pip python3-venv \
&& python3 -m venv /venv
ENV PATH=/venv/bin:$PATH
@@ -27,7 +27,7 @@ RUN --mount=type=cache,target=/root/.cache \
# Do the conversion
COPY poetry.lock pyproject.toml ./
ENV POETRY_DYNAMIC_VERSIONING_BYPASS=0.0.0
-RUN poetry export --extras=checks --extras=publish --extras=audit --extras=version --output=requirements.txt \
+RUN poetry export --output=requirements.txt \
&& poetry export --with=dev --output=requirements-dev.txt
# Base, the biggest thing is to install the Python packages
@@ -49,25 +49,12 @@ FROM base AS run
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
-COPY .nvmrc /tmp
-RUN --mount=type=cache,target=/var/lib/apt/lists --mount=type=cache,target=/var/cache \
- apt-get update \
- && apt-get --assume-yes upgrade \
- && apt-get install --assume-yes --no-install-recommends apt-transport-https gnupg curl \
- && NODE_MAJOR="$(cat /tmp/.nvmrc)" \
- && echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_MAJOR}.x nodistro main" > /etc/apt/sources.list.d/nodesource.list \
- && curl --silent https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor --output=/etc/apt/keyrings/nodesource.gpg \
- && apt-get update \
- && apt-get install --assume-yes --no-install-recommends "nodejs=${NODE_MAJOR}.*" libmagic1 git python3-dev libpq-dev gcc python-is-python3
-
RUN python3 -m compileall -q -- *
COPY . ./
ARG VERSION=dev
RUN --mount=type=cache,target=/root/.cache \
- --mount=type=cache,target=/root/.npm \
- cd c2cciutils && npm install && cd - \
- && POETRY_DYNAMIC_VERSIONING_BYPASS=${VERSION} python3 -m pip install --disable-pip-version-check --no-deps --editable=. \
+ POETRY_DYNAMIC_VERSIONING_BYPASS=${VERSION} python3 -m pip install --disable-pip-version-check --no-deps --editable=. \
&& python3 -m pip freeze > /requirements.txt \
&& python3 -m compileall -q /app/c2cciutils
diff --git a/README.md b/README.md
index 9b5f47c92..41e5ba619 100644
--- a/README.md
+++ b/README.md
@@ -46,7 +46,6 @@ C2cciutils make easier to have those workflows in a project:
- `auto-review.yaml`: Auto review the Renovate pull requests
- `backport.yaml`: Trigger the backports (work with labels)
-- `clean.yaml`: Clean the Docker images related to a deleted feature branch
- `main.yaml`: Main workflow especially with the c2cciutils-checks command
All the provided commands used in the workflow:
@@ -54,8 +53,6 @@ All the provided commands used in the workflow:
- `c2cciutils`: some generic tools.
- `c2cciutils-version`: Create a new version of the project.
- `c2cciutils-env`: Print some environment information.
-- `c2cciutils-publish`: Publish the project.
-- `c2cciutils-clean`: Delete Docker images on Docker Hub after the corresponding branch has been deleted.
## Utilities
@@ -102,7 +99,6 @@ In the CI we need to have the following secrets::
## Use locally, in the projects that use c2cciutils
Install it: `python3 -m pip install --user --requirement ci/requirements.txt`
-Dry run publish: `GITHUB_REF=... c2cciutils-publish --dry-run ...`
## Configuration
@@ -159,183 +155,6 @@ Select a formatter:
- Configure Default Formatter...
- Select the formatter
-## Publishing
-
-### To pypi
-
-The config is like this:
-
-```yaml
-versions:
- # List of kinds of versions you want to publish, that can be:
- # rebuild (specified with --type),
- # version_tag, version_branch, feature_branch, feature_tag (for pull request)
-```
-
-If we have a `setup.py` file, we will be in legacy mode:
-When publishing, the version computed from the arguments or `GITHUB_REF` is put in the environment variable `VERSION`, so you should use it in `setup.py`, for example:
-
-```python
-VERSION = os.environ.get("VERSION", "1.0.0")
-```
-
-Also we consider that we use `poetry` with [poetry-dynamic-versioning](https://pypi.org/project/poetry-dynamic-versioning/) to manage the version, and [poetry-plugin-tweak-dependencies-version](https://pypi.org/project/poetry-plugin-tweak-dependencies-version/) to manage the dependencies versions.
-
-Example of configuration:
-
-```toml
-[tool.poetry-dynamic-versioning]
-enable = true
-vcs = "git"
-pattern = "^(?P\\d+(\\.\\d+)*)"
-format-jinja = """
-{%- if env.get("VERSION_TYPE") == "version_branch" -%}
-{{serialize_pep440(bump_version(base, 1 if env.get("IS_MASTER") == "TRUE" else 2), dev=distance)}}
-{%- elif distance == 0 -%}
-{{serialize_pep440(base)}}
-{%- else -%}
-{{serialize_pep440(bump_version(base), dev=distance)}}
-{%- endif -%}
-"""
-
-```
-
-Note that we can access the environment variables `VERSION`, `VERSION_TYPE` and `IS_MASTER`.
-
-Then by default:
-
-- Tag with `1.2.3` => release `1.2.3`
-- Commit on a feature branch just does a validation
-- Commit on `master` branch after the tag 1.3.0 => release `1.4.0.dev1`
-- Commit on `1.3` branch after the tag 1.3.0 => release `1.3.1.dev1`
-
-#### Authentication
-
-If the file `~/.pypirc` exists, we consider that we are already logged in; otherwise
-we will log in to the `pypi` server with OpenID Connect (OIDC).
-
-The OIDC login is recommended because it doesn't need any additional secrets,
-but it needs some configuration of the package on pypi,
-see the [GitHub Documentation](https://docs.github.com/en/actions/security-for-github-actions/security-hardening-your-deployments/configuring-openid-connect-in-pypi#adding-the-identity-provider-to-pypi).
-
-#### Integration of the package directly in a Docker image
-
-To make it work in the `Dockerfile`, you should have in the `poetry` stage:
-
-```Dockerfile
-ENV POETRY_DYNAMIC_VERSIONING_BYPASS=dev
-RUN poetry export --extras=checks --extras=publish --output=requirements.txt \
- && poetry export --with=dev --output=requirements-dev.txt
-```
-
-And in the `run` stage
-
-```Dockerfile
-ARG VERSION=dev
-RUN --mount=type=cache,target=/root/.cache \
- POETRY_DYNAMIC_VERSIONING_BYPASS=${VERSION} python3 -m pip install --disable-pip-version-check --no-deps --editable=.
-```
-
-And in the `Makefile`:
-
-```Makefile
-VERSION = $(strip $(shell poetry version --short))
-
-.PHONY: build
-build: ## Build the Docker images
- docker build --build-arg=VERSION=$(VERSION) --tag=$(GITHUB_REPOSITORY) .
-```
-
-### To Docker registry
-
-The config is like this:
-
-```yaml
-latest: True
-images:
- - # The base name of the image we want to publish
- name:
-repository:
- :
- # The fqdn name of the server if not Docker hub
- server:
- # List of kinds of versions you want to publish, that can be: rebuild (specified using --type),
- # version_tag, version_branch, feature_branch, feature_tag (for pull request)
- version:
- # List of tags we want to publish interpreted with `format(version=version)`
- # e.g. if you use `{version}-lite` when you publish the version `1.2.3` the source tag
- # (that should be built by the application build) is `latest-lite`, and it will be published
- # with the tag `1.2.3-lite`.
- tags:
- # If your images are published by different jobs you can separate them in different groups
- # and publish them with `c2cciutils-publish --group=`
- group:
-```
-
-By default, the version in the last line of the `SECURITY.md` file will be published (`docker`) with the tag
-`latest`. Set `latest` to `False` to disable it.
-
-With `c2cciutils-clean`, the Docker Hub images published for a `feature_branch` are removed when the branch is deleted.
-
-## Download applications
-
-In case some executables or applications from GitHub releases or any other URLs are required on the CI host
-and are not handled by any dependency manager, we provide a set of tools to install them and manage upgrades
-through Renovate.
-
-Create an application file (e.g. `applications.yaml`) with:
-
-```yaml
-# yaml-language-server: $schema=https://raw.githubusercontent.com/camptocamp/c2cciutils/master/c2cciutils/schema-applications.json
-
-# Application from GitHub release
-/:
- get-file-name:
- to-file-name:
- finish-command: # The command you want to run after the file is downloaded
- - - chmod # To be executable (usually required)
- - +x
- -
- - - # Print the version of the application
- - --version
-# Application from GitHub release in a tar file (or tar.gz)
-/:
- get-file-name:
- type: tar
- tar-file-name:
- to-file-name:
- finish-command: [...] # The command you want to run after the file is downloaded
-# Application from an URL
-:
- url-pattern:
- to-file-name:
- finish-command: [...] # The command you want to run after the file is downloaded
-```
-
-In the attributes `url-pattern`, `get-file-name` you can use the following variables:
-
-- `{version}`: The version of the application present in the version file.
-- `{version_quote}`: The URL encoded version.
-- `{short_version}`: The version without the `v` prefix.
-
-The `applications-versions.yaml` file is a map of applications and their versions.
-
-Add in your Renovate configuration:
-
-```json5
- regexManagers: [
- {
- fileMatch: ['^applications-versions.yaml$'],
- matchStrings: [
- '(?<depName>[^\\s]+): (?<currentValue>[^\\s]+) # (?<datasource>[^\\s]+)',
- ],
- },
- ],
-```
-
-Now you need to call `c2cciutils-download-applications --applications-file=applications.yaml --versions-file=applications-versions.yaml`
-to install the required applications on the CI host before using them (an already installed application is reinstalled only when needed).
-
## Use Renovate to trigger a new build instead of the legacy rebuild
Run the command `c2cciutils-docker-versions-gen camptocamp/image[:tag]` to generate a file that is a kind of package lock of the Debian packages in the file `ci/dpkg-versions.yaml`.
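The removed `c2cciutils/lib/docker.py` (deleted later in this diff) read `ci/dpkg-versions.yaml` as a mapping of image name to `<distribution>_<release>/<package>` keys with their pinned versions; a hedged sketch of that shape, with purely illustrative image, package and version values:

```yaml
# Hedged sketch of ci/dpkg-versions.yaml; all entries below are hypothetical.
'camptocamp/image:tag':
  ubuntu_24_04/openssl: 3.0.13-0ubuntu3
  ubuntu_24_04/adduser: 3.137ubuntu1
```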
diff --git a/c2cciutils/__init__.py b/c2cciutils/__init__.py
index bf2565953..0361e1f0d 100644
--- a/c2cciutils/__init__.py
+++ b/c2cciutils/__init__.py
@@ -2,14 +2,12 @@
c2cciutils shared utils function.
"""
-import glob
import json
import os.path
import re
import subprocess # nosec
import sys
-from re import Match, Pattern
-from typing import Any, Optional, TypedDict, cast
+from typing import Any, Optional, cast
import requests
import ruamel.yaml
@@ -37,25 +35,6 @@ def get_repository() -> str:
return "camptocamp/project"
-def merge(default_config: Any, config: Any) -> Any:
- """
- Deep merge the dictionaries (on dictionaries only, not on arrays).
-
- Arguments:
- default_config: The default config that will be applied
- config: The base config, will be modified
- """
- if not isinstance(default_config, dict) or not isinstance(config, dict):
- return config
-
- for key in default_config.keys():
- if key not in config:
- config[key] = default_config[key]
- else:
- merge(default_config[key], config[key])
- return config
-
-
def get_master_branch(repo: list[str]) -> tuple[str, bool]:
"""Get the name of the master branch."""
master_branch = "master"
@@ -82,52 +61,6 @@ def get_config() -> c2cciutils.configuration.Configuration:
yaml_ = ruamel.yaml.YAML()
config = yaml_.load(open_file)
- repository = get_repository()
- repo = repository.split("/")
- master_branch, _ = get_master_branch(repo)
-
- merge(
- {
- "version": {
- "tag_to_version_re": [
- {"from": r"([0-9]+.[0-9]+.[0-9]+)", "to": r"\1"},
- ],
- "branch_to_version_re": [
- {"from": r"([0-9]+.[0-9]+)", "to": r"\1"},
- {"from": master_branch, "to": master_branch},
- ],
- }
- },
- config,
- )
-
- has_docker_files = bool(
- subprocess.run(
- ["git", "ls-files", "*/Dockerfile*", "Dockerfile*"], stdout=subprocess.PIPE, check=True
- ).stdout
- )
- has_python_package = bool(
- subprocess.run(
- ["git", "ls-files", "setup.py", "*/setup.py"], stdout=subprocess.PIPE, check=True
- ).stdout
- ) or bool(
- subprocess.run(
- ["git", "ls-files", "pyproject.toml", "*/pyproject.toml"], stdout=subprocess.PIPE, check=True
- ).stdout
- )
-
- publish_config = merge(c2cciutils.configuration.PUBLISH_DEFAULT, {})
- publish_config["pypi"]["packages"] = [{"path": "."}] if has_python_package else []
- publish_config["docker"]["images"] = [{"name": get_repository()}] if has_docker_files else []
- publish_config["helm"]["folders"] = [
- os.path.dirname(f) for f in glob.glob("./**/Chart.yaml", recursive=True)
- ]
-
- default_config = {
- "publish": publish_config,
- }
- merge(default_config, config)
-
return config
@@ -174,98 +107,6 @@ def error(
print(f"[{error_type}] {result}")
-VersionTransform = TypedDict(
- "VersionTransform",
- {
- # The from regular expression
- "from": Pattern[str],
- # The expand regular expression: https://docs.python.org/3/library/re.html#re.Match.expand
- "to": str,
- },
- total=False,
-)
-
-
-def compile_re(config: c2cciutils.configuration.VersionTransform, prefix: str = "") -> list[VersionTransform]:
- """
- Compile the from as a regular expression of a dictionary of the config list.
-
- to be used with convert and match
-
- Arguments:
- config: The transform config
- prefix: The version prefix
-
- Return the compiled transform config.
- """
- result = []
- for conf in config:
- new_conf = cast(VersionTransform, dict(conf))
-
- from_re = conf.get("from", r"(.*)")
- if from_re[0] == "^":
- from_re = from_re[1:]
- if from_re[-1] != "$":
- from_re += "$"
- from_re = f"^{re.escape(prefix)}{from_re}"
-
- new_conf["from"] = re.compile(from_re)
- result.append(new_conf)
- return result
-
-
-def match(
- value: str, config: list[VersionTransform]
-) -> tuple[Optional[Match[str]], Optional[VersionTransform], str]:
- """
- Get the matched version.
-
- Arguments:
- value: That we want to match with
- config: The result of `compile`
-
- Returns the re match object, the matched config and the value as a tuple
- On no match it returns None, value
- """
- for conf in config:
- matched = conf["from"].match(value)
- if matched is not None:
- return matched, conf, value
- return None, None, value
-
-
-def does_match(value: str, config: list[VersionTransform]) -> bool:
- """
- Check if the version match with the config patterns.
-
- Arguments:
- value: That we want to match with
- config: The result of `compile`
-
- Returns True it it does match else False
- """
- matched, _, _ = match(value, config)
- return matched is not None
-
-
-def get_value(matched: Optional[Match[str]], config: Optional[VersionTransform], value: str) -> str:
- """
- Get the final value.
-
- `match`, `config` and `value` are the result of `match`.
-
- The `config` should have a `to` key with an expand template.
-
- Arguments:
- matched: The matched object to a regular expression
- config: The result of `compile`
- value: The default value on returned no match
-
- Return the value
- """
- return matched.expand(config.get("to", r"\1")) if matched is not None and config is not None else value
-
-
def print_versions(config: c2cciutils.configuration.PrintVersions) -> bool:
"""
Print some tools version.
@@ -319,17 +160,6 @@ def gopass(key: str, default: Optional[str] = None) -> Optional[str]:
raise
-def gopass_put(secret: str, key: str) -> None:
- """
- Put an entry in gopass.
-
- Arguments:
- secret: The secret value
- key: The key
- """
- subprocess.check_output(["gopass", "insert", "--force", key], input=secret.encode())
-
-
def add_authorization_header(headers: dict[str, str]) -> dict[str, str]:
"""
Add the Authorization header needed to be authenticated on GitHub.
@@ -406,20 +236,3 @@ def graphql(query_file: str, variables: dict[str, Any], default: Any = None) ->
if "data" not in json_response:
raise RuntimeError(f"GraphQL no data: {json.dumps(json_response, indent=2)}")
return cast(dict[str, Any], json_response["data"])
-
-
-def snyk_exec() -> tuple[str, dict[str, str]]:
- """Get the Snyk cli executable path."""
- if not os.path.exists(os.path.join(os.path.dirname(__file__), "node_modules")):
- subprocess.run(["npm", "install"], cwd=os.path.dirname(__file__), check=True) # nosec
-
- env = {**os.environ}
- env["FORCE_COLOR"] = "true"
- if "SNYK_TOKEN" not in env:
- token = gopass("gs/ci/snyk/token")
- if token is not None:
- env["SNYK_TOKEN"] = token
- if "SNYK_ORG" in env:
- subprocess.run(["snyk", "config", "set", f"org={env['SNYK_ORG']}"], check=True, env=env)
-
- return os.path.join(os.path.dirname(os.path.abspath(__file__)), "node_modules/snyk/bin/snyk"), env
diff --git a/c2cciutils/applications-versions.yaml b/c2cciutils/applications-versions.yaml
index 95e7b47af..f9fdcdd29 100644
--- a/c2cciutils/applications-versions.yaml
+++ b/c2cciutils/applications-versions.yaml
@@ -1,4 +1,3 @@
# https://docs.renovatebot.com/modules/datasource/#github-releases-datasource
k3d-io/k3d: v5.7.4 # github-releases
postgresql: 16.1.0 # helm - https://charts.bitnami.com/bitnami
-helm/chart-releaser: v1.6.1 # github-releases
diff --git a/c2cciutils/configuration.py b/c2cciutils/configuration.py
index c8102f522..0c502cbfa 100644
--- a/c2cciutils/configuration.py
+++ b/c2cciutils/configuration.py
@@ -2,7 +2,7 @@
Automatically generated file from a JSON schema.
"""
-from typing import Any, Literal, TypedDict, Union
+from typing import Any, TypedDict
class Configuration(TypedDict, total=False):
@@ -19,32 +19,6 @@ class Configuration(TypedDict, total=False):
The print versions configuration
"""
- publish: "Publish"
- """
- Publish.
-
- The publishing configurations
-
- default:
- docker:
- images:
- helm:
- folders:
- versions:
- - version_tag
- pypi:
- packages:
- versions:
- - version_tag
- """
-
- version: "Version"
- """
- Version.
-
- The version configurations
- """
-
k8s: "K8SConfiguration"
"""
K8s configuration.
@@ -53,37 +27,11 @@ class Configuration(TypedDict, total=False):
{}
"""
- dpkg: "Dpkg"
- """
- dpkg.
-
- The configuration use t manage the dpkg packages
- """
-
DB_CONFIGURATION_DEFAULT: dict[str, Any] = {}
""" Default value of the field path 'K8s configuration db' """
-DISPATCH_CONFIG_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'Publish Docker config dispatch oneof0' """
-
-
-DOCKER_DISPATCH_EVENT_TYPE_DEFAULT = "image-update"
-""" Default value of the field path 'dispatch config event-type' """
-
-
-DOCKER_DISPATCH_REPOSITORY_DEFAULT = "camptocamp/argocd-gs-gmf-apps"
-""" Default value of the field path 'dispatch config repository' """
-
-
-DOCKER_REPOSITORY_DEFAULT = {
- "github": {"server": "ghcr.io", "versions": ["version_tag", "version_branch", "rebuild"]},
- "dockerhub": {},
-}
-""" Default value of the field path 'Publish Docker config repository' """
-
-
# | DB configuration.
# |
# | Database configuration
@@ -107,54 +55,6 @@ class Configuration(TypedDict, total=False):
)
-# | dispatch config.
-# |
-# | Send a dispatch event to an other repository
-# |
-# | default:
-# | {}
-DispatchConfig = TypedDict(
- "DispatchConfig",
- {
- # | Docker dispatch repository.
- # |
- # | The repository name to be triggered
- # |
- # | default: camptocamp/argocd-gs-gmf-apps
- "repository": str,
- # | Docker dispatch event type.
- # |
- # | The event type to be triggered
- # |
- # | default: image-update
- "event-type": str,
- },
- total=False,
-)
-
-
-class Dpkg(TypedDict, total=False):
- """
- dpkg.
-
- The configuration use t manage the dpkg packages
- """
-
- packages_mapping: dict[str, str]
- """
- dpkg packages mapping.
-
- The mapping of source package found in the image to package present in repology.org
- """
-
- ignored_packages: list[str]
- """
- dpkg ignored packages.
-
- The list of packages that should be ignored
- """
-
-
# | K3d configuration.
# |
# | default:
@@ -240,62 +140,6 @@ class K8SConfiguration(TypedDict, total=False):
""" Default value of the field path 'Print versions versions' """
-PUBLISH_DEFAULT = {
- "pypi": {"versions": ["version_tag"], "packages": ""},
- "docker": {"images": ""},
- "helm": {"versions": ["version_tag"], "folders": ""},
-}
-""" Default value of the field path 'configuration publish' """
-
-
-PUBLISH_DOCKER_IMAGE_GROUP_DEFAULT = "default"
-""" Default value of the field path 'Publish Docker image group' """
-
-
-PUBLISH_DOCKER_IMAGE_TAGS_DEFAULT = ["{version}"]
-""" Default value of the field path 'Publish Docker image tags' """
-
-
-PUBLISH_DOCKER_LATEST_DEFAULT = True
-""" Default value of the field path 'Publish Docker config latest' """
-
-
-PUBLISH_DOCKER_REPOSITORY_VERSIONS_DEFAULT = ["version_tag", "version_branch", "rebuild", "feature_branch"]
-""" Default value of the field path 'Publish Docker repository versions' """
-
-
-PUBLISH_DOCKER_SNYK_MONITOR_ARGS_DEFAULT = ["--app-vulns"]
-""" Default value of the field path 'Publish Docker config snyk monitor_args' """
-
-
-PUBLISH_DOCKER_SNYK_TEST_ARGS_DEFAULT = ["--app-vulns", "--severity-threshold=critical"]
-""" Default value of the field path 'Publish Docker config snyk test_args' """
-
-
-PUBLISH_GOOGLE_CALENDAR_CONFIG_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'Publish Google calendar oneof0' """
-
-
-PUBLISH_GOOGLE_CALENDAR_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'publish_google_calendar' """
-
-
-PUBLISH_GOOGLE_CALENDAR_ON_DEFAULT = ["version_branch", "version_tag", "rebuild"]
-""" Default value of the field path 'Publish Google calendar config on' """
-
-
-PUBLISH_PIP_PACKAGE_GROUP_DEFAULT = "default"
-""" Default value of the field path 'publish pypi package group' """
-
-
-PUBLISH_PYPI_CONFIG_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'publish pypi oneof0' """
-
-
-PUBLISH_PYPI_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'publish_pypi' """
-
-
class PrintVersions(TypedDict, total=False):
"""
Print versions.
@@ -353,355 +197,6 @@ class PrintVersions(TypedDict, total=False):
"""
-class Publish(TypedDict, total=False):
- """
- Publish.
-
- The publishing configurations
-
- default:
- docker:
- images:
- helm:
- folders:
- versions:
- - version_tag
- pypi:
- packages:
- versions:
- - version_tag
- """
-
- docker: "PublishDocker"
- """
- Publish Docker.
-
- The configuration used to publish on Docker
-
- Aggregation type: oneOf
- Subtype: "PublishDockerConfig"
- """
-
- pypi: "PublishPypi"
- """
- publish pypi.
-
- Configuration to publish on pypi
-
- default:
- {}
-
- Aggregation type: oneOf
- Subtype: "PublishPypiConfig"
- """
-
- helm: "PublishHelm"
- """
- publish helm.
-
- Configuration to publish Helm charts on GitHub release
-
- Aggregation type: oneOf
- Subtype: "PublishHelmConfig"
- """
-
- google_calendar: "PublishGoogleCalendar"
- """
- Publish Google calendar.
-
- The configuration to publish on Google Calendar
-
- default:
- {}
-
- Aggregation type: oneOf
- Subtype: "PublishGoogleCalendarConfig"
- """
-
-
-PublishDocker = Union["PublishDockerConfig", Literal[False]]
-"""
-Publish Docker.
-
-The configuration used to publish on Docker
-
-Aggregation type: oneOf
-Subtype: "PublishDockerConfig"
-"""
-
-
-class PublishDockerConfig(TypedDict, total=False):
- """
- Publish Docker config.
-
- The configuration used to publish on Docker
- """
-
- latest: bool
- """
- Publish Docker latest.
-
- Publish the latest version on tag latest
-
- default: True
- """
-
- images: list["PublishDockerImage"]
- """ List of images to be published """
-
- repository: dict[str, "PublishDockerRepository"]
- """
- Docker repository.
-
- The repository where we should publish the images
-
- default:
- dockerhub: {}
- github:
- server: ghcr.io
- versions:
- - version_tag
- - version_branch
- - rebuild
- """
-
- dispatch: Union["DispatchConfig", "_PublishDockerConfigDispatchOneof1"]
- """
- Send a dispatch event to an other repository
-
- default:
- {}
-
- Aggregation type: oneOf
- Subtype: "DispatchConfig"
- """
-
- snyk: "_PublishDockerConfigSnyk"
- """ Checks the published images with Snyk """
-
-
-class PublishDockerImage(TypedDict, total=False):
- """Publish Docker image."""
-
- group: str
- """
- Publish Docker image group.
-
- The image is in the group, should be used with the --group option of c2cciutils-publish script
-
- default: default
- """
-
- name: str
- """ The image name """
-
- tags: list[str]
- """
- publish docker image tags.
-
- The tag name, will be formatted with the version=, the image with version=latest should be present when we call the c2cciutils-publish script
-
- default:
- - '{version}'
- """
-
-
-class PublishDockerRepository(TypedDict, total=False):
- """Publish Docker repository."""
-
- server: str
- """ The server URL """
-
- versions: list[str]
- """
- Publish Docker repository versions.
-
- The kind or version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script
-
- default:
- - version_tag
- - version_branch
- - rebuild
- - feature_branch
- """
-
-
-PublishGoogleCalendar = Union["PublishGoogleCalendarConfig", "_PublishGoogleCalendarOneof1"]
-"""
-Publish Google calendar.
-
-The configuration to publish on Google Calendar
-
-default:
- {}
-
-Aggregation type: oneOf
-Subtype: "PublishGoogleCalendarConfig"
-"""
-
-
-class PublishGoogleCalendarConfig(TypedDict, total=False):
- """
- Publish Google calendar config.
-
- The configuration to publish on Google Calendar
-
- default:
- {}
- """
-
- on: list[str]
- """
- Publish Google calendar on.
-
- default:
- - version_branch
- - version_tag
- - rebuild
- """
-
-
-PublishHelm = Union["PublishHelmConfig", Literal[False]]
-"""
-publish helm.
-
-Configuration to publish Helm charts on GitHub release
-
-Aggregation type: oneOf
-Subtype: "PublishHelmConfig"
-"""
-
-
-class PublishHelmConfig(TypedDict, total=False):
- """
- publish helm config.
-
- Configuration to publish on Helm charts on GitHub release
- """
-
- folders: list[str]
- """ The folders that will be published """
-
- versions: list[str]
- """ The kind or version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script """
-
-
-PublishPypi = Union["PublishPypiConfig", "_PublishPypiOneof1"]
-"""
-publish pypi.
-
-Configuration to publish on pypi
-
-default:
- {}
-
-Aggregation type: oneOf
-Subtype: "PublishPypiConfig"
-"""
-
-
-class PublishPypiConfig(TypedDict, total=False):
- """
- publish pypi config.
-
- Configuration to publish on pypi
-
- default:
- {}
- """
-
- packages: list["PublishPypiPackage"]
- """ The configuration of packages that will be published """
-
- versions: list[str]
- """ The kind or version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script """
-
-
-class PublishPypiPackage(TypedDict, total=False):
- """
- publish pypi package.
-
- The configuration of package that will be published
- """
-
- group: str
- """
- Publish pip package group.
-
- The image is in the group, should be used with the --group option of c2cciutils-publish script
-
- default: default
- """
-
- path: str
- """ The path of the pypi package """
-
- build_command: list[str]
- """ The command used to do the build """
-
-
-class Version(TypedDict, total=False):
- """
- Version.
-
- The version configurations
- """
-
- branch_to_version_re: "VersionTransform"
- """
- Version transform.
-
- A version transformer definition
- """
-
- tag_to_version_re: "VersionTransform"
- """
- Version transform.
-
- A version transformer definition
- """
-
-
-VersionTransform = list["_VersionTransformItem"]
-"""
-Version transform.
-
-A version transformer definition
-"""
-
-
-_PUBLISH_DOCKER_CONFIG_DISPATCH_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'Publish Docker config dispatch' """
-
-
-_PUBLISH_DOCKER_CONFIG_DISPATCH_ONEOF1_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'Publish Docker config dispatch oneof1' """
-
-
-_PUBLISH_DOCKER_SNYK_MONITOR_ARGS_ONEOF0_DEFAULT = ["--app-vulns"]
-""" Default value of the field path 'Publish Docker Snyk monitor args oneof0' """
-
-
-_PUBLISH_DOCKER_SNYK_MONITOR_ARGS_ONEOF1_DEFAULT = ["--app-vulns"]
-""" Default value of the field path 'Publish Docker Snyk monitor args oneof1' """
-
-
-_PUBLISH_DOCKER_SNYK_TEST_ARGS_ONEOF0_DEFAULT = ["--app-vulns", "--severity-threshold=critical"]
-""" Default value of the field path 'Publish Docker Snyk test args oneof0' """
-
-
-_PUBLISH_DOCKER_SNYK_TEST_ARGS_ONEOF1_DEFAULT = ["--app-vulns", "--severity-threshold=critical"]
-""" Default value of the field path 'Publish Docker Snyk test args oneof1' """
-
-
-_PUBLISH_GOOGLE_CALENDAR_ONEOF1_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'Publish Google calendar oneof1' """
-
-
-_PUBLISH_PYPI_ONEOF1_DEFAULT: dict[str, Any] = {}
-""" Default value of the field path 'publish pypi oneof1' """
-
-
class _PrintVersionsVersionsItem(TypedDict, total=False):
cmd: list[str]
""" The command that should be used """
@@ -711,95 +206,3 @@ class _PrintVersionsVersionsItem(TypedDict, total=False):
prefix: str
""" Prefix added when we print the version """
-
-
-_PublishDockerConfigDispatchOneof1 = Literal[False]
-"""
-default:
- {}
-"""
-
-
-class _PublishDockerConfigSnyk(TypedDict, total=False):
- """Checks the published images with Snyk"""
-
- monitor_args: Union["_PublishDockerSnykMonitorArgsOneof0", "_PublishDockerSnykMonitorArgsOneof1"]
- """
- Publish Docker Snyk monitor args.
-
- The arguments to pass to the Snyk container monitor command
-
- default:
- - --app-vulns
-
- Aggregation type: oneOf
- """
-
- test_args: Union["_PublishDockerSnykTestArgsOneof0", "_PublishDockerSnykTestArgsOneof1"]
- """
- Publish Docker Snyk test args.
-
- The arguments to pass to the Snyk container test command
-
- default:
- - --app-vulns
- - --severity-threshold=critical
-
- Aggregation type: oneOf
- """
-
-
-_PublishDockerSnykMonitorArgsOneof0 = list[str]
-"""
-default:
- - --app-vulns
-"""
-
-
-_PublishDockerSnykMonitorArgsOneof1 = Literal[False]
-"""
-default:
- - --app-vulns
-"""
-
-
-_PublishDockerSnykTestArgsOneof0 = list[str]
-"""
-default:
- - --app-vulns
- - --severity-threshold=critical
-"""
-
-
-_PublishDockerSnykTestArgsOneof1 = Literal[False]
-"""
-default:
- - --app-vulns
- - --severity-threshold=critical
-"""
-
-
-_PublishGoogleCalendarOneof1 = Literal[False]
-"""
-default:
- {}
-"""
-
-
-_PublishPypiOneof1 = Literal[False]
-"""
-default:
- {}
-"""
-
-
-_VersionTransformItem = TypedDict(
- "_VersionTransformItem",
- {
- # | The from regular expression
- "from": str,
- # | The expand regular expression: https://docs.python.org/3/library/re.html#re.Match.expand
- "to": str,
- },
- total=False,
-)
diff --git a/c2cciutils/lib/docker.py b/c2cciutils/lib/docker.py
deleted file mode 100644
index ca0c92b90..000000000
--- a/c2cciutils/lib/docker.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-Some utility functions for Docker images.
-"""
-
-import os
-import subprocess # nosec: B404
-from typing import Optional, cast
-
-import yaml
-from debian_inspector.version import Version
-
-import c2cciutils.configuration
-
-
-def get_dpkg_packages_versions(
- image: str,
- default_distribution: Optional[str] = None,
- default_release: Optional[str] = None,
-) -> tuple[bool, dict[str, Version]]:
- """
- Get the versions of the dpkg packages installed in the image.
-
- `get_dpkg_packages_versions("org/image:tag")` will return something like:
- (true, {"debian_11/api": "2.2.0", ...})
-
- Where `debian_11` corresponds on last path element for 'Debian 11'
- from https://repology.org/repositories/statistics
- """
- dpkg_configuration = c2cciutils.get_config().get("dpkg", {})
-
- os_release = {}
- try:
- os_release_process = subprocess.run(
- ["docker", "run", "--rm", "--entrypoint=", image, "cat", "/etc/os-release"],
- stdout=subprocess.PIPE,
- check=True,
- )
- os_release = dict([e.split("=") for e in os_release_process.stdout.decode().split("\n") if e])
- except subprocess.CalledProcessError:
- print("Info: /etc/os-release not found in the image")
-
- lsb_release = {}
- try:
- lsb_release_process = subprocess.run(
- ["docker", "run", "--rm", "--entrypoint=", image, "cat", "/etc/lsb-release"],
- stdout=subprocess.PIPE,
- check=True,
- )
- lsb_release = dict([e.split("=") for e in lsb_release_process.stdout.decode().split("\n") if e])
- except subprocess.CalledProcessError:
- print("Info: /etc/lsb-release not found in the image")
-
- distribution = os_release.get("ID", lsb_release.get("DISTRIB_ID", default_distribution))
- release = os_release.get("VERSION_ID", lsb_release.get("DISTRIB_RELEASE", default_release))
- if distribution is None:
- print("Could not get the distribution of the image, you should provide a default distribution")
- return False, {}
- if release is None:
- print("Could not get the release of the image, you should provide a default release")
- return False, {}
-
- distribution_final = distribution.strip('"').lower()
- release_final = release.strip('"').replace(".", "_")
- prefix = f"{distribution_final}_{release_final}/"
- print(f"Found distribution '{distribution_final}', release '{release_final}'.")
-
- if distribution_final == "ubuntu" and release_final == "18_04":
- print("Warning: Ubuntu 18.04 is not supported")
- return False, {}
-
- package_version: dict[str, Version] = {}
- packages_status_process = subprocess.run(
- ["docker", "run", "--rm", "--entrypoint=", image, "dpkg", "--status"],
- stdout=subprocess.PIPE,
- check=True,
- )
- packages_status_1 = packages_status_process.stdout.decode().split("\n")
- packages_status_2 = [e.split(": ", maxsplit=1) for e in packages_status_1]
- packages_status = [e for e in packages_status_2 if len(e) == 2]
- package = None
- version = None
- for name, value in packages_status:
- if name == "Package":
- if package is not None:
- if version is None:
- print(f"Error: Missing version for package {package}")
- else:
- if package not in dpkg_configuration.get("ignored_packages", []):
- package = dpkg_configuration.get("packages_mapping", {}).get(package, package)
- if package in package_version and version != package_version[package]:
- print(
- f"The package {package} has different version ({package_version[package]} != {version})"
- )
- if package not in ("base-files",):
- package_version[package] = version
- package = value
- version = None
- if name == "Version" and version is None:
- version = Version.from_string(value)
-
- return True, {f"{prefix}{k}": v for k, v in package_version.items()}
-
-
-def get_versions_config() -> tuple[dict[str, dict[str, str]], bool]:
- """
- Get the versions from the config file.
- """
- if os.path.exists("ci/dpkg-versions.yaml"):
- with open("ci/dpkg-versions.yaml", encoding="utf-8") as versions_file:
- return (
- cast(dict[str, dict[str, str]], yaml.load(versions_file.read(), Loader=yaml.SafeLoader)),
- True,
- )
- return {}, False
-
-
-def check_versions(
- versions_config: dict[str, str],
- image: str,
- default_distribution: Optional[str] = None,
- default_release: Optional[str] = None,
-) -> bool:
- """
- Check if the versions are correct.
-
- The versions of packages in the image should be present in the config file.
- The versions of packages in the image shouldn't be older than the versions of the config file.
- """
- result, versions_image = get_dpkg_packages_versions(image, default_distribution, default_release)
- if not result:
- return False
-
- success = True
- for package, version in versions_image.items():
- if package not in versions_config:
- print(f"Package {package} with version {version} is not in the config file for the image {image}")
- success = False
- elif Version.from_string(versions_config[package]) > version:
- print(
- f"Package {package} is older than the config file for the image {image}: "
- f"{versions_config[package]} > {version}."
- )
- success = False
-
- return success
diff --git a/c2cciutils/lib/oidc.py b/c2cciutils/lib/oidc.py
deleted file mode 100755
index 82abd5303..000000000
--- a/c2cciutils/lib/oidc.py
+++ /dev/null
@@ -1,186 +0,0 @@
-"""
-Manage OpenID Connect (OIDC) token exchange for external services.
-
-Inspired by
-https://github.com/pypa/gh-action-pypi-publish/blob/unstable/v1/oidc-exchange.py
-"""
-
-import base64
-import json
-import os
-import sys
-from typing import NoReturn
-
-import id as oidc_id
-import requests
-
-
-class _OidcError(Exception):
- pass
-
-
-def _fatal(message: str) -> NoReturn:
- # HACK: GitHub Actions' annotations don't work across multiple lines naively;
- # translating `\n` into `%0A` (i.e., HTML percent-encoding) is known to work.
- # See: https://github.com/actions/toolkit/issues/193
- message = message.replace("\n", "%0A")
- print(f"::error::Trusted publishing exchange failure: {message}", file=sys.stderr)
- raise _OidcError(message)
-
-
-def _debug(message: str) -> None:
- print(f"::debug::{message.title()}", file=sys.stderr)
-
-
-def _render_claims(token: str) -> str:
- _, payload, _ = token.split(".", 2)
-
- # urlsafe_b64decode needs padding; JWT payloads don't contain any.
- payload += "=" * (4 - (len(payload) % 4))
- claims = json.loads(base64.urlsafe_b64decode(payload))
-
- return f"""
-The claims rendered below are **for debugging purposes only**. You should **not**
-use them to configure a trusted publisher unless they already match your expectations.
-
-If a claim is not present in the claim set, then it is rendered as `MISSING`.
-
-* `sub`: `{claims.get('sub', 'MISSING')}`
-* `repository`: `{claims.get('repository', 'MISSING')}`
-* `repository_owner`: `{claims.get('repository_owner', 'MISSING')}`
-* `repository_owner_id`: `{claims.get('repository_owner_id', 'MISSING')}`
-* `job_workflow_ref`: `{claims.get('job_workflow_ref', 'MISSING')}`
-* `ref`: `{claims.get('ref')}`
-
-See https://docs.pypi.org/trusted-publishers/troubleshooting/ for more help.
-"""
-
-
-def _get_token(hostname: str) -> str:
- # Indices are expected to support `https://{hostname}/_/oidc/audience`,
- # which tells OIDC exchange clients which audience to use.
- audience_resp = requests.get(f"https://{hostname}/_/oidc/audience", timeout=5)
- audience_resp.raise_for_status()
-
- _debug(f"selected trusted publishing exchange endpoint: https://{hostname}/_/oidc/mint-token")
-
- try:
- oidc_token = oidc_id.detect_credential(audience=audience_resp.json()["audience"])
- except oidc_id.IdentityError as identity_error:
- _fatal(
- f"""
-OpenID Connect token retrieval failed: {identity_error}
-
-This generally indicates a workflow configuration error, such as insufficient
-permissions. Make sure that your workflow has `id-token: write` configured
-at the job level, e.g.:
-
-```yaml
-permissions:
- id-token: write
-```
-
-Learn more at https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings.
-"""
- )
-
- # Now we can do the actual token exchange.
- mint_token_resp = requests.post(
- f"https://{hostname}/_/oidc/mint-token",
- json={"token": oidc_token},
- timeout=5,
- )
-
- try:
- mint_token_payload = mint_token_resp.json()
- except requests.JSONDecodeError:
- # Token exchange failure normally produces a JSON error response, but
- # we might have hit a server error instead.
- _fatal(
- f"""
-Token request failed: the index produced an unexpected
-{mint_token_resp.status_code} response.
-
-This strongly suggests a server configuration or downtime issue; wait
-a few minutes and try again.
-
-You can monitor PyPI's status here: https://status.python.org/
-""" # noqa: E702
- )
-
- # On failure, the JSON response includes the list of errors that
- # occurred during minting.
- if not mint_token_resp.ok:
- reasons = "\n".join(
- f'* `{error["code"]}`: {error["description"]}'
- for error in mint_token_payload["errors"] # noqa: W604
- )
-
- rendered_claims = _render_claims(oidc_token)
-
- _fatal(
- f"""
-Token request failed: the server refused the request for the following reasons:
-
-{reasons}
-
-This generally indicates a trusted publisher configuration error, but could
-also indicate an internal error on GitHub or PyPI's part.
-
-{rendered_claims}
-"""
- )
-
- pypi_token = mint_token_payload.get("token")
- if not isinstance(pypi_token, str):
- _fatal(
- """
-Token response error: the index gave us an invalid response.
-
-This strongly suggests a server configuration or downtime issue; wait
-a few minutes and try again.
-"""
- )
-
- # Mask the newly minted PyPI token, so that we don't accidentally leak it in logs.
- print(f"::add-mask::{pypi_token}")
-
- # This final print will be captured by the subshell in `twine-upload.sh`.
- return pypi_token
-
-
-def pypi_login() -> None:
- """
- Connect to PyPI using OpenID Connect and mint a token for the user.
-
- See Also
- - https://docs.github.com/en/actions/security-for-github-actions/security-hardening-your-deployments/about-security-hardening-with-openid-connect
- - https://docs.github.com/en/actions/security-for-github-actions/security-hardening-your-deployments/configuring-openid-connect-in-pypi
- """
- pypirc_filename = os.path.expanduser("~/.pypirc")
-
- if os.path.exists(pypirc_filename):
- print(f"::info::{pypirc_filename} already exists; consider as already logged in.") # noqa: E702
- return
-
- if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
- print(
- """::error::Not available, you probably miss the permission `id-token: write`.
- ```
- permissions:
- id-token: write
- ```
- See also: https://docs.github.com/en/actions/security-for-github-actions/security-hardening-your-deployments/about-security-hardening-with-openid-connect"""
- )
- return
-
- try:
- token = _get_token("pypi.org")
- with open(pypirc_filename, "w", encoding="utf-8") as pypirc_file:
- pypirc_file.write("[pypi]\n")
- pypirc_file.write("repository: https://upload.pypi.org/legacy/\n")
- pypirc_file.write("username: __token__\n")
- pypirc_file.write(f"password: {token}\n")
- except _OidcError:
- # Already visible in logs; no need to re-raise.
- return
diff --git a/c2cciutils/package-lock.json b/c2cciutils/package-lock.json
deleted file mode 100644
index f1557bc45..000000000
--- a/c2cciutils/package-lock.json
+++ /dev/null
@@ -1,437 +0,0 @@
-{
- "name": "c2ccicheck",
- "version": "1.0.0",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "c2ccicheck",
- "version": "1.0.0",
- "dependencies": {
- "snyk": "1.1294.0"
- }
- },
- "node_modules/@sentry-internal/tracing": {
- "version": "7.119.2",
- "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.119.2.tgz",
- "integrity": "sha512-V2W+STWrafyGJhQv3ulMFXYDwWHiU6wHQAQBShsHVACiFaDrJ2kPRet38FKv4dMLlLlP2xN+ss2e5zv3tYlTiQ==",
- "license": "MIT",
- "dependencies": {
- "@sentry/core": "7.119.2",
- "@sentry/types": "7.119.2",
- "@sentry/utils": "7.119.2"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/@sentry/core": {
- "version": "7.119.2",
- "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.119.2.tgz",
- "integrity": "sha512-hQr3d2yWq/2lMvoyBPOwXw1IHqTrCjOsU1vYKhAa6w9vGbJZFGhKGGE2KEi/92c3gqGn+gW/PC7cV6waCTDuVA==",
- "license": "MIT",
- "dependencies": {
- "@sentry/types": "7.119.2",
- "@sentry/utils": "7.119.2"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/@sentry/integrations": {
- "version": "7.119.2",
- "resolved": "https://registry.npmjs.org/@sentry/integrations/-/integrations-7.119.2.tgz",
- "integrity": "sha512-dCuXKvbUE3gXVVa696SYMjlhSP6CxpMH/gl4Jk26naEB8Xjsn98z/hqEoXLg6Nab73rjR9c/9AdKqBbwVMHyrQ==",
- "license": "MIT",
- "dependencies": {
- "@sentry/core": "7.119.2",
- "@sentry/types": "7.119.2",
- "@sentry/utils": "7.119.2",
- "localforage": "^1.8.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/@sentry/node": {
- "version": "7.119.2",
- "resolved": "https://registry.npmjs.org/@sentry/node/-/node-7.119.2.tgz",
- "integrity": "sha512-TPNnqxh+Myooe4jTyRiXrzrM2SH08R4+nrmBls4T7lKp2E5R/3mDSe/YTn5rRcUt1k1hPx1NgO/taG0DoS5cXA==",
- "license": "MIT",
- "dependencies": {
- "@sentry-internal/tracing": "7.119.2",
- "@sentry/core": "7.119.2",
- "@sentry/integrations": "7.119.2",
- "@sentry/types": "7.119.2",
- "@sentry/utils": "7.119.2"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/@sentry/types": {
- "version": "7.119.2",
- "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.119.2.tgz",
- "integrity": "sha512-ydq1tWsdG7QW+yFaTp0gFaowMLNVikIqM70wxWNK+u98QzKnVY/3XTixxNLsUtnAB4Y+isAzFhrc6Vb5GFdFeg==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/@sentry/utils": {
- "version": "7.119.2",
- "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.119.2.tgz",
- "integrity": "sha512-TLdUCvcNgzKP0r9YD7tgCL1PEUp42TObISridsPJ5rhpVGQJvpr+Six0zIkfDUxerLYWZoK8QMm9KgFlPLNQzA==",
- "license": "MIT",
- "dependencies": {
- "@sentry/types": "7.119.2"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/boolean": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/boolean/-/boolean-3.2.0.tgz",
- "integrity": "sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw==",
- "deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.",
- "license": "MIT"
- },
- "node_modules/define-data-property": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
- "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
- "license": "MIT",
- "dependencies": {
- "es-define-property": "^1.0.0",
- "es-errors": "^1.3.0",
- "gopd": "^1.0.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/define-properties": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
- "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
- "license": "MIT",
- "dependencies": {
- "define-data-property": "^1.0.1",
- "has-property-descriptors": "^1.0.0",
- "object-keys": "^1.1.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/detect-node": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz",
- "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==",
- "license": "MIT"
- },
- "node_modules/es-define-property": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
- "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
- "license": "MIT",
- "dependencies": {
- "get-intrinsic": "^1.2.4"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-errors": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
- "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es6-error": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz",
- "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==",
- "license": "MIT"
- },
- "node_modules/escape-string-regexp": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
- "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
- "license": "MIT",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/function-bind": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
- "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/get-intrinsic": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
- "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
- "has-proto": "^1.0.1",
- "has-symbols": "^1.0.3",
- "hasown": "^2.0.0"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/global-agent": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/global-agent/-/global-agent-3.0.0.tgz",
- "integrity": "sha512-PT6XReJ+D07JvGoxQMkT6qji/jVNfX/h364XHZOWeRzy64sSFr+xJ5OX7LI3b4MPQzdL4H8Y8M0xzPpsVMwA8Q==",
- "license": "BSD-3-Clause",
- "dependencies": {
- "boolean": "^3.0.1",
- "es6-error": "^4.1.1",
- "matcher": "^3.0.0",
- "roarr": "^2.15.3",
- "semver": "^7.3.2",
- "serialize-error": "^7.0.1"
- },
- "engines": {
- "node": ">=10.0"
- }
- },
- "node_modules/globalthis": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz",
- "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==",
- "license": "MIT",
- "dependencies": {
- "define-properties": "^1.2.1",
- "gopd": "^1.0.1"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/gopd": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
- "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
- "license": "MIT",
- "dependencies": {
- "get-intrinsic": "^1.1.3"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-property-descriptors": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
- "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
- "license": "MIT",
- "dependencies": {
- "es-define-property": "^1.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-proto": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
- "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-symbols": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
- "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/hasown": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
- "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
- "license": "MIT",
- "dependencies": {
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/immediate": {
- "version": "3.0.6",
- "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
- "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==",
- "license": "MIT"
- },
- "node_modules/json-stringify-safe": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
- "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==",
- "license": "ISC"
- },
- "node_modules/lie": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/lie/-/lie-3.1.1.tgz",
- "integrity": "sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==",
- "license": "MIT",
- "dependencies": {
- "immediate": "~3.0.5"
- }
- },
- "node_modules/localforage": {
- "version": "1.10.0",
- "resolved": "https://registry.npmjs.org/localforage/-/localforage-1.10.0.tgz",
- "integrity": "sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==",
- "license": "Apache-2.0",
- "dependencies": {
- "lie": "3.1.1"
- }
- },
- "node_modules/matcher": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/matcher/-/matcher-3.0.0.tgz",
- "integrity": "sha512-OkeDaAZ/bQCxeFAozM55PKcKU0yJMPGifLwV4Qgjitu+5MoAfSQN4lsLJeXZ1b8w0x+/Emda6MZgXS1jvsapng==",
- "license": "MIT",
- "dependencies": {
- "escape-string-regexp": "^4.0.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/object-keys": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
- "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/roarr": {
- "version": "2.15.4",
- "resolved": "https://registry.npmjs.org/roarr/-/roarr-2.15.4.tgz",
- "integrity": "sha512-CHhPh+UNHD2GTXNYhPWLnU8ONHdI+5DI+4EYIAOaiD63rHeYlZvyh8P+in5999TTSFgUYuKUAjzRI4mdh/p+2A==",
- "license": "BSD-3-Clause",
- "dependencies": {
- "boolean": "^3.0.1",
- "detect-node": "^2.0.4",
- "globalthis": "^1.0.1",
- "json-stringify-safe": "^5.0.1",
- "semver-compare": "^1.0.0",
- "sprintf-js": "^1.1.2"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/semver": {
- "version": "7.6.3",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
- "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
- "license": "ISC",
- "bin": {
- "semver": "bin/semver.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/semver-compare": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz",
- "integrity": "sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==",
- "license": "MIT"
- },
- "node_modules/serialize-error": {
- "version": "7.0.1",
- "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-7.0.1.tgz",
- "integrity": "sha512-8I8TjW5KMOKsZQTvoxjuSIa7foAwPWGOts+6o7sgjz41/qMD9VQHEDxi6PBvK2l0MXUmqZyNpUK+T2tQaaElvw==",
- "license": "MIT",
- "dependencies": {
- "type-fest": "^0.13.1"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/snyk": {
- "version": "1.1294.0",
- "resolved": "https://registry.npmjs.org/snyk/-/snyk-1.1294.0.tgz",
- "integrity": "sha512-4RBj3Lfccz5+6L2Kw9bt7icF+ex3antwt9PkSl2oEulI7mgqvc8VUFLnezg8c6PY60IPM9DrSSmNjXBac10I3Q==",
- "hasInstallScript": true,
- "license": "Apache-2.0",
- "dependencies": {
- "@sentry/node": "^7.36.0",
- "global-agent": "^3.0.0"
- },
- "bin": {
- "snyk": "bin/snyk"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/sprintf-js": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz",
- "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==",
- "license": "BSD-3-Clause"
- },
- "node_modules/type-fest": {
- "version": "0.13.1",
- "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz",
- "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==",
- "license": "(MIT OR CC0-1.0)",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- }
- }
-}
diff --git a/c2cciutils/package.json b/c2cciutils/package.json
deleted file mode 100644
index 001faa272..000000000
--- a/c2cciutils/package.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "name": "c2ccicheck",
- "version": "1.0.0",
- "description": "",
- "author": "",
- "dependencies": {
- "snyk": "1.1294.0"
- }
-}
diff --git a/c2cciutils/publish.py b/c2cciutils/publish.py
deleted file mode 100644
index 3ae07c5e6..000000000
--- a/c2cciutils/publish.py
+++ /dev/null
@@ -1,464 +0,0 @@
-"""
-The publishing functions.
-"""
-
-import argparse
-import datetime
-import glob
-import os
-import pickle # nosec
-import re
-import subprocess # nosec
-import sys
-import tomllib
-import uuid
-from typing import Optional
-
-import ruamel.yaml
-from google.auth.transport.requests import Request
-from google.oauth2.credentials import Credentials
-from google_auth_oauthlib.flow import InstalledAppFlow
-from googleapiclient.discovery import build
-
-import c2cciutils.configuration
-
-
-class GoogleCalendar:
- """
- Interact with the Google Calendar API.
- """
-
- # pylint: disable=too-many-instance-attributes
- def __init__(self) -> None:
- """
- Initialize.
- """
- self.scopes = ["https://www.googleapis.com/auth/calendar"] # in fact it is better to hard-code this
- self.credentials_pickle_file = os.environ.get("TMP_CREDS_FILE", f"/tmp/{uuid.uuid4()}.pickle")
- self.credentials_json_file = os.environ.get(
- "GOOGLE_CREDS_JSON_FILE", "~/google-credentials-c2cibot.json"
- ) # used to refresh the refresh_token or to initialize the credentials the first time
- self.calendar_id = os.environ.get(
- "GOOGLE_CALENDAR_ID", c2cciutils.gopass("gs/ci/google_calendar/calendarId")
- )
- self.token = os.environ.get("GOOGLE_TOKEN", c2cciutils.gopass("gs/ci/google_calendar/token"))
- self.token_uri = os.environ.get(
- "GOOGLE_TOKEN_URI", c2cciutils.gopass("gs/ci/google_calendar/token_uri")
- )
- self.refresh_token = os.environ.get(
- "GOOGLE_REFRESH_TOKEN",
- c2cciutils.gopass("gs/ci/google_calendar/refresh_token"),
- )
- self.client_id = os.environ.get(
- "GOOGLE_CLIENT_ID", c2cciutils.gopass("gs/ci/google_calendar/client_id")
- )
- self.client_secret = os.environ.get(
- "GOOGLE_CLIENT_SECRET",
- c2cciutils.gopass("gs/ci/google_calendar/client_secret"),
- )
-
- self.creds: Credentials = self.init_calendar_service()
- self._update_creds()
- self.service = build("calendar", "v3", credentials=self.creds)
-
- def init_calendar_service(self) -> Credentials: # type: ignore
- """
- Initialize the calendar service.
- """
- # The file token pickle stores the user's access and refresh tokens, and is
- # created automatically when the authorization flow completes for the first
- # time.
- if os.path.exists(self.credentials_pickle_file):
- with open(self.credentials_pickle_file, "rb") as token:
- creds = pickle.load(token) # nosec
- # If there are no (valid) credentials available, let the user log in.
- if not creds or not creds.valid: # pylint: disable=possibly-used-before-assignment
- if creds and creds.expired and creds.refresh_token:
- creds.refresh(Request()) # type: ignore
- else:
- if self.token:
- creds = Credentials( # type: ignore
- self.token,
- refresh_token=self.refresh_token,
- token_uri=self.token_uri,
- client_id=self.client_id,
- client_secret=self.client_secret,
- scopes=self.scopes,
- )
- else:
- flow = InstalledAppFlow.from_client_secrets_file(self.credentials_json_file, self.scopes)
- creds = flow.run_local_server(port=0)
- self.refresh_token = creds
-
- # Save the credentials for the next run
- with open(self.credentials_pickle_file, "wb") as token:
- pickle.dump(creds, token)
-
- def _update_creds(self) -> None:
- """
- Update the credentials.
- """
- self.client_id = self.creds.client_id
- self.client_secret = self.creds.client_secret
- self.token = self.creds.token
- self.token_uri = self.creds.token_uri
- self.refresh_token = self.creds.refresh_token
-
- def print_all_calendars(self) -> None:
- """
- Print all calendar events.
- """
- # list all the calendars that the user has access to.
- # used to debug credentials
- print("Getting list of calendars")
- calendars_result = self.service.calendarList().list().execute()
-
- calendars = calendars_result.get("items", [])
-
- if not calendars:
- print("::error::No calendars found.")
- for calendar in calendars:
- summary = calendar["summary"]
- event_id = calendar["id"]
- primary = "Primary" if calendar.get("primary") else ""
- print(f"{summary}\t{event_id}\t{primary}")
-
- def print_latest_events(self, time_min: Optional[datetime.datetime] = None) -> None:
- """
- Print latest events.
-
- Arguments:
- time_min: The time to be considered.
- """
- now = datetime.datetime.utcnow()
- if not time_min:
- time_min = datetime.datetime.utcnow() - datetime.timedelta(days=30)
- events_result = (
- self.service.events()
- .list(
- calendarId=self.calendar_id,
- timeMin=time_min.isoformat() + "Z",
- timeMax=now.isoformat() + "Z",
- singleEvents=True,
- orderBy="startTime",
- )
- .execute()
- )
- events = events_result.get("items", [])
-
- if not events:
- print("::error::No upcoming events found.")
- for event in events:
- start = event["start"].get("dateTime", event["start"].get("date"))
- print(start, event["summary"])
-
- def create_event(
- self,
- summary: str = f"dummy/image:{datetime.datetime.now().isoformat()}",
- description: str = "description",
- ) -> None:
- """
- Create a calendar event.
-
- Arguments:
- summary: The event summary
- description: The event description
- """
- now = datetime.datetime.now()
- start = now.isoformat()
- end = (now + datetime.timedelta(minutes=15)).isoformat()
- body = {
- "summary": summary,
- "description": description,
- "start": {"dateTime": start, "timeZone": "Europe/Zurich"},
- "end": {"dateTime": end, "timeZone": "Europe/Zurich"},
- }
-
- event_result = self.service.events().insert(calendarId=self.calendar_id, body=body).execute()
- print(f"Created event with id: {event_result['id']}")
-
- def save_credentials_to_gopass(self) -> None:
- """
- Save the calendar credentials to gopass.
- """
- objects_to_save = {
- "gs/ci/google_calendar/calendarId": self.calendar_id,
- "gs/ci/google_calendar/token": self.token,
- "gs/ci/google_calendar/token_uri": self.token_uri,
- "gs/ci/google_calendar/refresh_token": self.refresh_token,
- "gs/ci/google_calendar/client_id": self.client_id,
- "gs/ci/google_calendar/client_secret": self.client_secret,
- }
- for key, secret in objects_to_save.items():
- assert secret is not None
- c2cciutils.gopass_put(secret, key)
-
- def __del__(self) -> None:
- """
- Delete the credentials file.
- """
- if os.path.exists(self.credentials_pickle_file):
- os.remove(self.credentials_pickle_file)
-
-
-def main_calendar() -> None:
- """
- Run the calendar main function.
- """
- parser = argparse.ArgumentParser(
- description="Interact with google API for the Docker publishing calendar"
- )
- parser.add_argument(
- "--refresh-gopass-credentials",
- action="store_true",
- help="Refresh the credentials in gopass using google API",
- )
- parser.add_argument(
- "--show-events-since",
- help="show the calendar events since a date in 'YYYY-mm-dd' format",
- type=lambda s: datetime.datetime.strptime(s, "%Y-%m-%d"),
- )
- parser.add_argument(
- "--create-test-event",
- action="store_true",
- help="Create a dummy event to check that the calendar settings are correct",
- )
- args = parser.parse_args()
-
- if args.show_events_since or args.refresh_gopass_credentials or args.create_test_event:
- google_calendar = GoogleCalendar()
- else:
- parser.print_help()
-
- if args.show_events_since:
- google_calendar.print_latest_events( # pylint: disable=possibly-used-before-assignment
- args.show_events_since
- )
-
- if args.refresh_gopass_credentials:
- google_calendar.save_credentials_to_gopass()
-
- if args.create_test_event:
- google_calendar.create_event()
-
-
-def pip(
- package: c2cciutils.configuration.PublishPypiPackage, version: str, version_type: str, publish: bool
-) -> bool:
- """
- Publish to pypi.
-
- Arguments:
- version: The version that will be published
- version_type: Describe the kind of release we do: rebuild (specified using --type), version_tag,
- version_branch, feature_branch, feature_tag (for pull request)
- publish: If False only check the package
- package: The package configuration
- """
- print(f"::group::{'Publishing' if publish else 'Checking'} '{package.get('path')}' to pypi")
- sys.stdout.flush()
- sys.stderr.flush()
-
- try:
- env = {}
- env["VERSION"] = version
- env["VERSION_TYPE"] = version_type
- full_repo = c2cciutils.get_repository()
- full_repo_split = full_repo.split("/")
- master_branch, _ = c2cciutils.get_master_branch(full_repo_split)
- is_master = master_branch == version
- env["IS_MASTER"] = "TRUE" if is_master else "FALSE"
-
- cwd = os.path.abspath(package.get("path", "."))
-
- dist = os.path.join(cwd, "dist")
- if not os.path.exists(dist):
- os.mkdir(dist)
- if os.path.exists(os.path.join(cwd, "setup.py")):
- cmd = ["python3", "./setup.py", "egg_info", "--no-date"]
- cmd += (
- ["--tag-build=dev" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")]
- if version_type in ("version_branch", "rebuild")
- else []
- )
- cmd.append("bdist_wheel")
- else:
- if not os.path.exists(dist):
- os.mkdir(dist)
- cmd = ["pip", "wheel", "--no-deps", "--wheel-dir=dist", "."]
- if os.path.exists(os.path.join(cwd, "pyproject.toml")):
- use_poetry = False
- if "build_command" not in package:
- with open(os.path.join(cwd, "pyproject.toml"), "rb") as project_file:
- pyproject = tomllib.load(project_file)
- re_splitter = re.compile(r"[<>=]+")
- for requirement in pyproject.get("build-system", {}).get("requires", []):
- requirement_split = re_splitter.split(requirement)
- if requirement_split[0] in ("poetry", "poetry-core"):
- use_poetry = True
- break
- subprocess.run(
- ["pip", "install", *pyproject.get("build-system", {}).get("requires", [])], check=True
- )
- if use_poetry:
- freeze = subprocess.run(["pip", "freeze"], check=True, stdout=subprocess.PIPE)
- for freeze_line in freeze.stdout.decode("utf-8").split("\n"):
- if freeze_line.startswith("poetry-") or freeze_line.startswith("poetry="):
- print(freeze_line)
- env_bash = " ".join([f"{key}={value}" for key, value in env.items()])
- print(f"Run in {cwd}: {env_bash} poetry build")
- sys.stdout.flush()
- sys.stderr.flush()
- subprocess.run(["poetry", "build"], cwd=cwd, env={**os.environ, **env}, check=True)
- cmd = []
- if cmd:
- cmd = package.get("build_command", cmd)
- subprocess.check_call(cmd, cwd=cwd, env=env)
- cmd = ["twine"]
- cmd += ["upload", "--verbose", "--disable-progress-bar"] if publish else ["check"]
- cmd += glob.glob(os.path.join(cwd, "dist/*.whl"))
- cmd += glob.glob(os.path.join(cwd, "dist/*.tar.gz"))
- subprocess.check_call(cmd)
- print("::endgroup::")
- except subprocess.CalledProcessError as exception:
- print(f"Error: {exception}")
- print("::endgroup::")
- print("::error::With error")
- return False
- return True
-
-
-def docker(
- config: c2cciutils.configuration.PublishDockerRepository,
- name: str,
- image_config: c2cciutils.configuration.PublishDockerImage,
- tag_src: str,
- dst_tags: list[str],
- images_full: list[str],
-) -> bool:
- """
- Publish to a Docker registry.
-
- config is like:
- server: # The server fqdn
-
- image_config is like:
- name: # The image name
-
- Arguments:
- config: The publishing config
- name: The repository name, just used to print messages
- image_config: The image config
- tag_src: The source tag (usually latest)
- dst_tags: Publish using the provided tags
- images_full: The list of published images (with tag), used to build the dispatch event
- """
- print(
- f"::group::Publishing {image_config['name']} to the server {name} using the tags {', '.join(dst_tags)}"
- )
- sys.stdout.flush()
- sys.stderr.flush()
-
- try:
- new_images_full = []
- if "server" in config:
- for tag in dst_tags:
- subprocess.run(
- [
- "docker",
- "tag",
- f"{image_config['name']}:{tag_src}",
- f"{config['server']}/{image_config['name']}:{tag}",
- ],
- check=True,
- )
- new_images_full.append(f"{config['server']}/{image_config['name']}:{tag}")
- else:
- for tag in dst_tags:
- if tag_src != tag:
- subprocess.run(
- [
- "docker",
- "tag",
- f"{image_config['name']}:{tag_src}",
- f"{image_config['name']}:{tag}",
- ],
- check=True,
- )
- new_images_full.append(f"{image_config['name']}:{tag}")
-
- for image in new_images_full:
- subprocess.run(["docker", "push", image], check=True)
- images_full += new_images_full
-
- print("::endgroup::")
- except subprocess.CalledProcessError as exception:
- print(f"Error: {exception}")
- print("::endgroup::")
- print("::error::With error")
- return False
- return True
-
-
-def helm(folder: str, version: str, owner: str, repo: str, commit_sha: str, token: str) -> bool:
- """
- Publish the Helm chart to a GitHub release.
-
- Arguments:
- folder: The folder to be published
- version: The version that will be published
- owner: The GitHub repository owner
- repo: The GitHub repository name
- commit_sha: The sha of the current commit
- token: The GitHub token
- """
- print(f"::group::Publishing Helm chart from '{folder}' to GitHub release")
- sys.stdout.flush()
- sys.stderr.flush()
-
- try:
- yaml_ = ruamel.yaml.YAML()
- with open(os.path.join(folder, "Chart.yaml"), encoding="utf-8") as open_file:
- chart = yaml_.load(open_file)
- chart["version"] = version
- with open(os.path.join(folder, "Chart.yaml"), "w", encoding="utf-8") as open_file:
- yaml_.dump(chart, open_file)
- for index, dependency in enumerate(chart.get("dependencies", [])):
- if dependency["repository"].startswith("https://"):
- subprocess.run(["helm", "repo", "add", str(index), dependency["repository"]], check=True)
-
- subprocess.run(["cr", "package", folder], check=True)
- subprocess.run(
- [
- "cr",
- "upload",
- f"--owner={owner}",
- f"--git-repo={repo}",
- f"--commit={commit_sha}",
- "--release-name-template={{ .Version }}",
- f"--token={token}",
- ],
- check=True,
- )
- if not os.path.exists(".cr-index"):
- os.mkdir(".cr-index")
- subprocess.run(
- [
- "cr",
- "index",
- f"--owner={owner}",
- f"--git-repo={repo}",
- f"--charts-repo=https://{owner}.github.io/{repo}",
- "--push",
- "--release-name-template={{ .Version }}",
- f"--token={token}",
- ],
- check=True,
- )
- print("::endgroup::")
- except subprocess.CalledProcessError as exception:
- print(f"Error: {exception}")
- print("::endgroup::")
- print("::error::With error")
- return False
- return True
diff --git a/c2cciutils/schema-applications.json b/c2cciutils/schema-applications.json
deleted file mode 100644
index 9cf04560a..000000000
--- a/c2cciutils/schema-applications.json
+++ /dev/null
@@ -1,50 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema",
- "$id": "https://raw.githubusercontent.com/camptocamp/tilecloud-chain/master/tilecloud_chain/schema.json",
- "title": "Applications configuration",
- "description": "All the applications configuration",
- "type": "object",
- "definitions": {},
- "additionalProperties": {
- "title": "Application configuration",
- "description": "An application configuration",
- "type": "object",
- "properties": {
- "url-pattern": {
- "title": "URL pattern",
- "description": "URL pattern, to be used for files that didn't come from GitHub release, available arguments: {version}",
- "type": "string"
- },
- "type": {
- "title": "The type of file",
- "description": "The type of file",
- "type": "string",
- "enum": ["tar"]
- },
- "get-file-name": {
- "title": "The filename to get",
- "description": "The name of the file to get in the GitHub release",
- "type": "string"
- },
- "to-file-name": {
- "title": "The created file name",
- "description": "The name of the final file we will create",
- "type": "string"
- },
- "tar-file-name": {
- "title": "The file name to get in the tar file",
- "type": "string"
- },
- "finish-commands": {
- "title": "The commands to run after the file creation",
- "type": "array",
- "items": {
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- }
- }
-}
diff --git a/c2cciutils/schema.json b/c2cciutils/schema.json
index a4bbe9ea2..939409b72 100644
--- a/c2cciutils/schema.json
+++ b/c2cciutils/schema.json
@@ -6,251 +6,6 @@
"description": "C2C CI utils configuration file",
"additionalProperties": false,
"definitions": {
- "publish_docker": {
- "title": "Publish Docker",
- "description": "The configuration used to publish on Docker",
- "oneOf": [
- {
- "title": "Publish Docker config",
- "description": "The configuration used to publish on Docker",
- "type": "object",
- "properties": {
- "latest": {
- "description": "Publish the latest version on tag latest",
- "title": "Publish Docker latest",
- "default": true,
- "type": "boolean"
- },
- "images": {
- "description": "List of images to be published",
- "type": "array",
- "items": {
- "title": "Publish Docker image",
- "type": "object",
- "properties": {
- "group": {
- "description": "The image is in the group, should be used with the --group option of c2cciutils-publish script",
- "title": "Publish Docker image group",
- "default": "default",
- "type": "string"
- },
- "name": {
- "description": "The image name",
- "type": "string"
- },
- "tags": {
- "description": "The tag name, will be formatted with the version=, the image with version=latest should be present when we call the c2cciutils-publish script",
- "title": "publish docker image tags",
- "type": "array",
- "default": ["{version}"],
- "items": {
- "type": "string"
- }
- }
- }
- }
- },
- "repository": {
- "title": "Docker repository",
- "description": "The repository where we should publish the images",
- "default": {
- "github": {
- "server": "ghcr.io",
- "versions": ["version_tag", "version_branch", "rebuild"]
- },
- "dockerhub": {}
- },
- "type": "object",
- "additionalProperties": {
- "title": "Publish Docker repository",
- "type": "object",
- "properties": {
- "server": {
- "description": "The server URL",
- "type": "string"
- },
- "versions": {
- "description": "The kind of version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script",
- "title": "Publish Docker repository versions",
- "type": "array",
- "default": ["version_tag", "version_branch", "rebuild", "feature_branch"],
- "items": {
- "type": "string"
- }
- }
- }
- }
- },
- "dispatch": {
- "description": "Send a dispatch event to another repository",
- "default": {},
- "oneOf": [
- {
- "type": "object",
- "title": "dispatch config",
- "description": "Send a dispatch event to another repository",
- "properties": {
- "repository": {
- "title": "Docker dispatch repository",
- "description": "The repository name to be triggered",
- "default": "camptocamp/argocd-gs-gmf-apps",
- "type": "string"
- },
- "event-type": {
- "title": "Docker dispatch event type",
- "description": "The event type to be triggered",
- "default": "image-update",
- "type": "string"
- }
- }
- },
- { "const": false }
- ]
- },
- "snyk": {
- "description": "Checks the published images with Snyk",
- "type": "object",
- "properties": {
- "monitor_args": {
- "description": "The arguments to pass to the Snyk container monitor command",
- "title": "Publish Docker Snyk monitor args",
- "default": ["--app-vulns"],
- "oneOf": [
- {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- { "const": false }
- ]
- },
- "test_args": {
- "description": "The arguments to pass to the Snyk container test command",
- "title": "Publish Docker Snyk test args",
- "default": ["--app-vulns", "--severity-threshold=critical"],
- "oneOf": [
- {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- { "const": false }
- ]
- }
- }
- }
- }
- },
- { "const": false }
- ]
- },
- "publish_google_calendar": {
- "title": "Publish Google calendar",
- "description": "The configuration to publish on Google Calendar",
- "default": {},
- "oneOf": [
- {
- "title": "Publish Google calendar config",
- "description": "The configuration to publish on Google Calendar",
- "type": "object",
- "properties": {
- "on": {
- "title": "Publish Google calendar on",
- "default": ["version_branch", "version_tag", "rebuild"],
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- { "const": false }
- ]
- },
- "publish_pypi": {
- "title": "publish pypi",
- "description": "Configuration to publish on pypi",
- "default": {},
- "oneOf": [
- {
- "title": "publish pypi config",
- "description": "Configuration to publish on pypi",
- "type": "object",
- "properties": {
- "packages": {
- "description": "The configuration of packages that will be published",
- "type": "array",
- "items": {
- "title": "publish pypi package",
- "description": "The configuration of package that will be published",
- "type": "object",
- "properties": {
- "group": {
- "description": "The package is in the group, should be used with the --group option of c2cciutils-publish script",
- "title": "Publish pip package group",
- "default": "default",
- "type": "string"
- },
- "path": {
- "description": "The path of the pypi package",
- "type": "string"
- },
- "build_command": {
- "description": "The command used to do the build",
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- }
- },
- "versions": {
- "description": "The kind of version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script",
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- {
- "const": false
- }
- ]
- },
- "publish_helm": {
- "title": "publish helm",
- "description": "Configuration to publish Helm charts on GitHub release",
- "oneOf": [
- {
- "title": "publish helm config",
- "description": "Configuration to publish Helm charts on GitHub release",
- "type": "object",
- "properties": {
- "folders": {
- "description": "The folders that will be published",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "versions": {
- "description": "The kind of version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script",
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
- },
- {
- "const": false
- }
- ]
- },
"print_versions": {
"title": "Print versions",
"description": "The print versions configuration",
@@ -290,58 +45,12 @@
}
}
}
- },
- "version_transform": {
- "title": "Version transform",
- "description": "A version transformer definition",
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "from": {
- "description": "The from regular expression",
- "type": "string"
- },
- "to": {
- "description": "The expand regular expression: https://docs.python.org/3/library/re.html#re.Match.expand",
- "type": "string"
- }
- }
- }
}
},
+
"properties": {
"print_versions": { "$ref": "#/definitions/print_versions" },
- "publish": {
- "title": "Publish",
- "description": "The publishing configurations",
- "default": {
- "pypi": { "versions": ["version_tag"], "packages": "" },
- "docker": {
- "images": ""
- },
- "helm": {
- "versions": ["version_tag"],
- "folders": ""
- }
- },
- "type": "object",
- "properties": {
- "docker": { "$ref": "#/definitions/publish_docker" },
- "pypi": { "$ref": "#/definitions/publish_pypi" },
- "helm": { "$ref": "#/definitions/publish_helm" },
- "google_calendar": { "$ref": "#/definitions/publish_google_calendar" }
- }
- },
- "version": {
- "title": "Version",
- "description": "The version configurations",
- "type": "object",
- "properties": {
- "branch_to_version_re": { "$ref": "#/definitions/version_transform" },
- "tag_to_version_re": { "$ref": "#/definitions/version_transform" }
- }
- },
+
"k8s": {
"title": "K8s configuration",
"default": {},
@@ -388,30 +97,6 @@
}
}
}
- },
- "dpkg": {
- "title": "dpkg",
- "description": "The configuration used to manage the dpkg packages",
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "packages_mapping": {
- "title": "dpkg packages mapping",
- "description": "The mapping of source package found in the image to package present in repology.org",
- "type": "object",
- "additionalProperties": {
- "type": "string"
- }
- },
- "ignored_packages": {
- "title": "dpkg ignored packages",
- "description": "The list of packages that should be ignored",
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- }
}
}
}
diff --git a/c2cciutils/scripts/clean.py b/c2cciutils/scripts/clean.py
deleted file mode 100644
index b90d089c1..000000000
--- a/c2cciutils/scripts/clean.py
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-The clean main function.
-"""
-
-import argparse
-import json
-import os
-import sys
-from typing import cast
-
-import requests
-import yaml
-
-import c2cciutils
-
-
-def clean(image: str, tag: str, token: str) -> None:
- """
- Delete an image from Docker hub.
-
- Arguments:
- image: The image name that should be deleted (/)
- tag: The tag that should be deleted
- token: The token used to be authenticated on Docker hub
- """
- print(f"Delete image '{image}:{tag}'.")
-
- response = requests.head(
- f"https://hub.docker.com/v2/repositories/{image}/tags/{tag}/",
- headers={"Authorization": "JWT " + token},
- timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30")),
- )
- if response.status_code == 404:
- return
- if not response.ok:
- print(f"Error checking image '{image}:{tag}' status.")
- print(response.text)
- sys.exit(1)
-
- response = requests.delete(
- f"https://hub.docker.com/v2/repositories/{image}/tags/{tag}/",
- headers={"Authorization": "JWT " + token},
- timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30")),
- )
- if not response.ok:
- print("::error::Error on deleting tag: " + tag)
- print(response.text)
- sys.exit(1)
-
-
-def main() -> None:
- """
- Run the main function.
- """
- parser = argparse.ArgumentParser(
- description=(
- "Clean the Docker images on Docker Hub for the branch we delete "
- "(obtained from the GitHub event information)."
- )
- )
- parser.parse_args()
-
- username = (
- os.environ["DOCKERHUB_USERNAME"]
- if "DOCKERHUB_USERNAME" in os.environ
- else c2cciutils.gopass("gs/ci/dockerhub/username")
- )
- password = (
- os.environ["DOCKERHUB_PASSWORD"]
- if "DOCKERHUB_PASSWORD" in os.environ
- else c2cciutils.gopass("gs/ci/dockerhub/password")
- )
- token = requests.post(
- "https://hub.docker.com/v2/users/login/",
- headers={"Content-Type": "application/json"},
- data=json.dumps(
- {
- "username": username,
- "password": password,
- }
- ),
- timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30")),
- ).json()["token"]
-
- with open(os.environ["GITHUB_EVENT_PATH"], encoding="utf-8") as event_file:
- event = json.loads(event_file.read())
- print(yaml.dump(event))
- if "pull_request" in event:
- ref = str(event["number"])
- else:
- ref = event["ref"]
-
- ref = ref.replace("/", "_")
-
- config = c2cciutils.get_config()
-
- docker_config = cast(
- c2cciutils.configuration.PublishDockerConfig,
- config.get("publish", {}).get("docker", {}) if config.get("publish", {}).get("docker", False) else {},
- )
- for image in docker_config.get("images", []):
- for tag in image.get("tags", []):
- clean(image["name"], tag.format(version=ref), token)
-
-
-if __name__ == "__main__":
- main()
diff --git a/c2cciutils/scripts/docker_versions_gen.py b/c2cciutils/scripts/docker_versions_gen.py
deleted file mode 100644
index 19239c039..000000000
--- a/c2cciutils/scripts/docker_versions_gen.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import argparse
-
-import yaml
-
-import c2cciutils.lib.docker
-
-
-def main() -> None:
- """Dump the actual versions of packages in image to file ci/dpkg-versions.yaml."""
- argparser = argparse.ArgumentParser(
- description="Dump the actual versions of packages in image to file ci/dpkg-versions.yaml."
- )
- argparser.add_argument("--distribution", help="The default distribution code to be used")
- argparser.add_argument("--release", help="The default release version to be used")
- argparser.add_argument("images", help="The image to check", nargs="+")
- args = argparser.parse_args()
-
- versions_config, _ = c2cciutils.lib.docker.get_versions_config()
- for image in args.images:
- _, versions_image = c2cciutils.lib.docker.get_dpkg_packages_versions(
- image,
- default_distribution=args.distribution,
- default_release=args.release,
- )
- versions_config[image] = {k: str(v) for k, v in versions_image.items()}
-
- with open("ci/dpkg-versions.yaml", "w", encoding="utf-8") as versions_file:
- versions_file.write("# See repository list: https://repology.org/repositories/statistics\n\n")
- versions_file.write(yaml.dump(versions_config, Dumper=yaml.SafeDumper, default_flow_style=False))
-
-
-if __name__ == "__main__":
- main()
diff --git a/c2cciutils/scripts/pin_pipenv.py b/c2cciutils/scripts/pin_pipenv.py
deleted file mode 100644
index a5e2d74f5..000000000
--- a/c2cciutils/scripts/pin_pipenv.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python3
-
-
-import argparse
-import configparser
-import json
-
-
-def main() -> None:
- """Run the command."""
- parser = argparse.ArgumentParser(
- description="""Output packages with versions from Pipfile.lock in Pipfile format (similar to pip freeze).
-Useful to pin all the dependencies in the Pipfile on a stabilization branch, to be able to upgrade one package that has a security issue."""
- )
- parser.add_argument("--packages", action="store_true", help="Output only the packages section")
- parser.add_argument("--dev-packages", action="store_true", help="Output only the dev-packages section")
- parser.add_argument("--pipfile", default="Pipfile", help="The base Pipfile filename")
- args = parser.parse_args()
-
- packages = {}
- dev_packages = {}
-
- with open(args.pipfile + ".lock", encoding="utf-8") as pipfilelock_file:
- pipfilelock = json.loads(pipfilelock_file.read())
- for pkg, pkg_config in pipfilelock["default"].items():
- packages[pkg] = pkg_config["version"]
- for pkg, pkg_config in pipfilelock["develop"].items():
- dev_packages[pkg] = pkg_config["version"]
-
- config = configparser.ConfigParser()
- config.read(args.pipfile)
-
- if args.packages or not args.packages and not args.dev_packages:
- print("[packages]")
- print("# Lock dependencies")
- for pkg, version in packages.items():
- if pkg not in config["packages"] and f'"{pkg}"' not in config["packages"]:
- quote = '"' if "." in pkg else ""
- print(f'{quote}{pkg}{quote} = "{version}"')
-
- if args.packages and args.dev_packages or not args.packages and not args.dev_packages:
- print()
-
- if args.dev_packages or not args.packages and not args.dev_packages:
- print("[dev-packages]")
- print("# Lock dependencies")
- for pkg, version in dev_packages.items():
- if pkg not in config["dev-packages"] and f'"{pkg}"' not in config["dev-packages"]:
- quote = '"' if "." in pkg else ""
- print(f'{quote}{pkg}{quote} = "{version}"')
-
-
-if __name__ == "__main__":
- main()
diff --git a/c2cciutils/scripts/publish.py b/c2cciutils/scripts/publish.py
deleted file mode 100644
index f3db17aa1..000000000
--- a/c2cciutils/scripts/publish.py
+++ /dev/null
@@ -1,478 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-The publish script.
-"""
-
-import argparse
-import os
-import re
-import subprocess # nosec
-import sys
-from re import Match
-from typing import Optional, cast
-
-import requests
-import security_md
-import yaml
-
-import c2cciutils
-import c2cciutils.configuration
-import c2cciutils.env
-import c2cciutils.lib.docker
-import c2cciutils.lib.oidc
-import c2cciutils.publish
-import c2cciutils.scripts.download_applications
-from c2cciutils.publish import GoogleCalendar
-from c2cciutils.scripts.trigger_image_update import dispatch
-
-
-def match(tpe: str, base_re: str) -> Optional[Match[str]]:
- """
- Return the match for `GITHUB_REF` basically like: `refs//`.
-
- Arguments:
- tpe: The type of ref we want to match (heads, tag, ...)
- base_re: The regular expression to match the value
- """
- if base_re[0] == "^":
- base_re = base_re[1:]
- if base_re[-1] != "$":
- base_re += "$"
- return re.match(f"^refs/{tpe}/{base_re}", os.environ["GITHUB_REF"])
-
-
-def to_version(full_config: c2cciutils.configuration.Configuration, value: str, kind: str) -> str:
- """
- Compute publish version from branch name or tag.
-
- Arguments:
- full_config: The full configuration
- value: The value to be transformed
- kind: The name of the transformer in the configuration
- """
- item_re = c2cciutils.compile_re(
- cast(
- c2cciutils.configuration.VersionTransform, full_config["version"].get(kind + "_to_version_re", [])
- )
- )
- value_match = c2cciutils.match(value, item_re)
- if value_match[0] is not None:
- return c2cciutils.get_value(*value_match)
- return value
-
-
-def main() -> None:
- """
- Run the publish.
- """
- parser = argparse.ArgumentParser(description="Publish the project.")
- parser.add_argument("--group", default="default", help="The publishing group")
- parser.add_argument("--version", help="The version to publish to")
- parser.add_argument(
- "--docker-versions",
- help="The versions to publish on Docker registry, comma separated, ex: 'x,x.y,x.y.z,latest'.",
- )
- parser.add_argument("--snyk-version", help="The version to publish to Snyk")
- parser.add_argument("--branch", help="The branch from which to compute the version")
- parser.add_argument("--tag", help="The tag from which to compute the version")
- parser.add_argument("--dry-run", action="store_true", help="Don't do the publish")
- parser.add_argument(
- "--type",
- help="The type of version, automatically determined if not provided, can be: "
- "rebuild (in case of rebuild), version_tag, version_branch, feature_branch, feature_tag "
- "(for pull request)",
- )
- args = parser.parse_args()
-
- config = c2cciutils.get_config()
- c2cciutils.env.print_environment(config)
-
- # Describe the kind of release we do: rebuild (specified with --type), version_tag, version_branch,
- # feature_branch, feature_tag (for pull request)
- version: str = ""
- ref = os.environ.get("GITHUB_REF", "refs/heads/fake-local")
- local = "GITHUB_REF" not in os.environ
-
- if len([e for e in [args.version, args.branch, args.tag] if e is not None]) > 1:
- print("::error::you specified more than one of the arguments --version, --branch or --tag")
- sys.exit(1)
-
- version_type = args.type
-
- tag_match = c2cciutils.match(
- ref,
- c2cciutils.compile_re(config["version"].get("tag_to_version_re", []), "refs/tags/"),
- )
- branch_match = c2cciutils.match(
- ref,
- c2cciutils.compile_re(config["version"].get("branch_to_version_re", []), "refs/heads/"),
- )
- ref_match = re.match(r"refs/pull/(.*)/merge", ref)
-
- if args.version is not None:
- version = args.version
- elif args.branch is not None:
- version = to_version(config, args.branch, "branch")
- elif args.tag is not None:
- version = to_version(config, args.tag, "tag")
- elif tag_match[0] is not None:
- if version_type is None:
- version_type = "version_tag"
- else:
- print("::warning::you specified the argument --type but not one of --version, --branch or --tag")
- version = c2cciutils.get_value(*tag_match)
- elif branch_match[0] is not None:
- if version_type is None:
- version_type = "version_branch"
- else:
- print("::warning::you specified the argument --type but not one of --version, --branch or --tag")
- version = c2cciutils.get_value(*branch_match)
- elif ref_match is not None:
- version = c2cciutils.get_value(ref_match, {}, ref)
- if version_type is None:
- version_type = "feature_branch"
- elif ref.startswith("refs/heads/"):
- if version_type is None:
- version_type = "feature_branch"
- else:
- print("::warning::you specified the argument --type but not one of --version, --branch or --tag")
- # By the way we replace '/' by '_' because it isn't supported by Docker
- version = "_".join(ref.split("/")[2:])
- elif ref.startswith("refs/tags/"):
- if version_type is None:
- version_type = "feature_tag"
- else:
- print("::warning::you specified the argument --type but not one of --version, --branch or --tag")
- # By the way we replace '/' by '_' because it isn't supported by Docker
- version = "_".join(ref.split("/")[2:])
- else:
- print(
- f"WARNING: {ref} is not supported, only ref starting with 'refs/heads/' or 'refs/tags/' "
- "are supported, ignoring"
- )
- sys.exit(0)
-
- if version_type is None:
- print(
- f"::error::you specified one of the arguments --version, --branch or --tag but not the --type, GitHub ref is: {ref}"
- )
- sys.exit(1)
-
- if version_type is not None:
- if args.dry_run:
- print(f"Create release type {version_type}: {version} (dry run)")
- else:
- print(f"Create release type {version_type}: {version}")
-
- success = True
- pypi_config = cast(
- c2cciutils.configuration.PublishPypiConfig,
- config.get("publish", {}).get("pypi", {}) if config.get("publish", {}).get("pypi", False) else {},
- )
- if pypi_config:
- if pypi_config["packages"]:
- c2cciutils.lib.oidc.pypi_login()
-
- for package in pypi_config["packages"]:
- if package.get("group", c2cciutils.configuration.PUBLISH_PIP_PACKAGE_GROUP_DEFAULT) == args.group:
- publish = version_type in pypi_config.get("versions", [])
- if args.dry_run:
- print(
- f"{'Publishing' if publish else 'Checking'} "
- f"'{package.get('path')}' to pypi, skipping (dry run)"
- )
- else:
- success &= c2cciutils.publish.pip(package, version, version_type, publish)
-
- google_calendar = None
- google_calendar_publish = config.get("publish", {}).get("google_calendar", False) is not False
- google_calendar_config = cast(
- c2cciutils.configuration.PublishGoogleCalendarConfig,
- config.get("publish", {}).get("google_calendar", {}),
- )
-
- docker_config = cast(
- c2cciutils.configuration.PublishDockerConfig,
- config.get("publish", {}).get("docker", {}) if config.get("publish", {}).get("docker", False) else {},
- )
- if docker_config:
- full_repo = c2cciutils.get_repository()
- full_repo_split = full_repo.split("/")
- master_branch, _ = c2cciutils.get_master_branch(full_repo_split)
- security_text = ""
- if local:
- with open("SECURITY.md", encoding="utf-8") as security_file:
- security_text = security_file.read()
- else:
- security_response = requests.get(
- f"https://raw.githubusercontent.com/{full_repo}/{master_branch}/SECURITY.md",
- headers=c2cciutils.add_authorization_header({}),
- timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30")),
- )
- c2cciutils.check_response(security_response, False)
- if security_response.ok:
- security_text = security_response.text
- elif security_response.status_code != 404:
- print(f"::error:: {security_response.status_code} {security_response.text}")
- sys.exit(1)
-
- security = security_md.Security(security_text)
- version_index = security.version_index
- alternate_tag_index = security.alternate_tag_index
-
- row_index = -1
- if version_index >= 0:
- for index, row in enumerate(security.data):
- if row[version_index] == version:
- row_index = index
- break
-
- alt_tags = set()
- if alternate_tag_index >= 0 and row_index >= 0:
- alt_tags = {
- t.strip() for t in security.data[row_index][alternate_tag_index].split(",") if t.strip()
- }
- if version_index >= 0 and security.data[-1][version_index] == version:
- add_latest = True
- for data in security.data:
- row_tags = {t.strip() for t in data[alternate_tag_index].split(",") if t.strip()}
- print(row_tags)
- if "latest" in row_tags:
- print("latest found in ", row_tags)
- add_latest = False
- break
- if add_latest:
- alt_tags.add("latest")
-
- images_src: set[str] = set()
- images_full: list[str] = []
- images_snyk: set[str] = set()
- versions = args.docker_versions.split(",") if args.docker_versions else [version]
- for image_conf in docker_config.get("images", []):
- if (
- image_conf.get("group", c2cciutils.configuration.PUBLISH_DOCKER_IMAGE_GROUP_DEFAULT)
- == args.group
- ):
- for tag_config in image_conf.get(
- "tags", c2cciutils.configuration.PUBLISH_DOCKER_IMAGE_TAGS_DEFAULT
- ):
- tag_src = tag_config.format(version="latest")
- image_source = f"{image_conf['name']}:{tag_src}"
- images_src.add(image_source)
- tag_snyk = tag_config.format(version=args.snyk_version or version).lower()
- image_snyk = f"{image_conf['name']}:{tag_snyk}"
-
- # Workaround since we don't have the business plan
- image_snyk = f"{image_conf['name']}_{tag_snyk}"
-
- if not args.dry_run:
- subprocess.run(["docker", "tag", image_source, image_snyk], check=True)
- images_snyk.add(image_snyk)
- if tag_snyk != tag_src and not args.dry_run:
- subprocess.run(
- [
- "docker",
- "tag",
- image_source,
- f"{image_conf['name']}:{tag_snyk}",
- ],
- check=True,
- )
-
- tags_calendar = []
- for name, conf in {
- **cast(
- dict[str, c2cciutils.configuration.PublishDockerRepository],
- c2cciutils.configuration.DOCKER_REPOSITORY_DEFAULT,
- ),
- **docker_config.get("repository", {}),
- }.items():
- for docker_version in versions:
- tag_dst = tag_config.format(version=docker_version)
- if tag_dst not in tags_calendar:
- tags_calendar.append(tag_dst)
- if version_type in conf.get(
- "versions",
- c2cciutils.configuration.PUBLISH_DOCKER_REPOSITORY_VERSIONS_DEFAULT,
- ):
- tags = [
- tag_config.format(version=alt_tag)
- for alt_tag in [docker_version, *alt_tags]
- ]
-
- if args.dry_run:
- for tag in tags:
- print(
- f"Publishing {image_conf['name']}:{tag} to {name}, skipping (dry run)"
- )
- else:
- success &= c2cciutils.publish.docker(
- conf, name, image_conf, tag_src, tags, images_full
- )
-
- if google_calendar_publish:
- if version_type in google_calendar_config.get(
- "on", c2cciutils.configuration.PUBLISH_GOOGLE_CALENDAR_ON_DEFAULT
- ):
- if not google_calendar:
- google_calendar = GoogleCalendar()
- summary = f"{image_conf['name']}:{', '.join(tags_calendar)}"
- description = "\n".join(
- [
- f"Published the image {image_conf['name']}",
- f"Published on: {', '.join(docker_config['repository'].keys())}",
- f"With tags: {', '.join(tags_calendar)}",
- f"For version type: {version_type}",
- ]
- )
-
- google_calendar.create_event(summary, description)
-
- if args.dry_run:
- sys.exit(0)
-
- dispatch_config = docker_config.get("dispatch", {})
- if dispatch_config is not False and images_full:
- dispatch(
- dispatch_config.get(
- "repository", c2cciutils.configuration.DOCKER_DISPATCH_REPOSITORY_DEFAULT
- ),
- dispatch_config.get(
- "event-type", c2cciutils.configuration.DOCKER_DISPATCH_EVENT_TYPE_DEFAULT
- ),
- images_full,
- )
-
- snyk_exec, env = c2cciutils.snyk_exec()
- for image in images_snyk:
- print(f"::group::Snyk check {image}")
- sys.stdout.flush()
- sys.stderr.flush()
- try:
- if version_type in ("version_branch", "version_tag"):
- monitor_args = docker_config.get("snyk", {}).get(
- "monitor_args",
- c2cciutils.configuration.PUBLISH_DOCKER_SNYK_MONITOR_ARGS_DEFAULT,
- )
- if monitor_args is not False:
- subprocess.run( # pylint: disable=subprocess-run-check
- [
- snyk_exec,
- "container",
- "monitor",
- *monitor_args,
- # Available only on the business plan
- # f"--project-tags=tag={image.split(':')[-1]}",
- image,
- ],
- env=env,
- )
- test_args = docker_config.get("snyk", {}).get(
- "test_args", c2cciutils.configuration.PUBLISH_DOCKER_SNYK_TEST_ARGS_DEFAULT
- )
- snyk_error = False
- if test_args is not False:
- proc = subprocess.run(
- [snyk_exec, "container", "test", *test_args, image],
- check=False,
- env=env,
- )
- if proc.returncode != 0:
- snyk_error = True
- print("::endgroup::")
- if snyk_error:
- print("::error::Critical vulnerability found by Snyk in the published image.")
- except subprocess.CalledProcessError as exception:
- print(f"Error: {exception}")
- print("::endgroup::")
- print("::error::With error")
-
- versions_config, dpkg_config_found = c2cciutils.lib.docker.get_versions_config()
- dpkg_success = True
- for image in images_src:
- dpkg_success &= c2cciutils.lib.docker.check_versions(versions_config.get(image, {}), image)
-
- if not dpkg_success:
- current_versions_in_images: dict[str, dict[str, str]] = {}
- if dpkg_config_found:
- with open("ci/dpkg-versions.yaml", encoding="utf-8") as dpkg_versions_file:
- current_versions_in_images = yaml.load(dpkg_versions_file, Loader=yaml.SafeLoader)
- for image in images_src:
- _, versions_image = c2cciutils.lib.docker.get_dpkg_packages_versions(image)
- for dpkg_package, package_version in versions_image.items():
- if dpkg_package not in current_versions_in_images.get(image, {}):
- current_versions_in_images.setdefault(image, {})[dpkg_package] = str(package_version)
- for dpkg_package in list(current_versions_in_images[image].keys()):
- if dpkg_package not in versions_image:
- del current_versions_in_images[image][dpkg_package]
- if dpkg_config_found:
- print(
- "::error::Some packages have a greater version in the config than in the image."
- )
- print("Current versions of the Debian packages in Docker images:")
- print(yaml.dump(current_versions_in_images, Dumper=yaml.SafeDumper, default_flow_style=False))
- if dpkg_config_found:
- with open("ci/dpkg-versions.yaml", "w", encoding="utf-8") as dpkg_versions_file:
- yaml.dump(
- current_versions_in_images,
- dpkg_versions_file,
- Dumper=yaml.SafeDumper,
- default_flow_style=False,
- )
-
- if dpkg_config_found:
- success = False
-
- helm_config = cast(
- c2cciutils.configuration.PublishHelmConfig,
- config.get("publish", {}).get("helm", {}) if config.get("publish", {}).get("helm", False) else {},
- )
- if helm_config and helm_config["folders"] and version_type in helm_config.get("versions", []):
- c2cciutils.scripts.download_applications.download_c2cciutils_applications("helm/chart-releaser")
-
- owner, repo = full_repo_split
- commit_sha = (
- subprocess.run(["git", "rev-parse", "HEAD"], check=True, stdout=subprocess.PIPE)
- .stdout.strip()
- .decode()
- )
- token = (
- os.environ["GITHUB_TOKEN"].strip()
- if "GITHUB_TOKEN" in os.environ
- else c2cciutils.gopass("gs/ci/github/token/gopass")
- )
- assert token is not None
- if version_type == "version_branch":
- last_tag = (
- subprocess.run(
- ["git", "describe", "--abbrev=0", "--tags"], check=True, stdout=subprocess.PIPE
- )
- .stdout.strip()
- .decode()
- )
- expression = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+$")
- while expression.match(last_tag) is None:
- last_tag = (
- subprocess.run(
- ["git", "describe", "--abbrev=0", "--tags", f"{last_tag}^"],
- check=True,
- stdout=subprocess.PIPE,
- )
- .stdout.strip()
- .decode()
- )
-
- versions = last_tag.split(".")
- versions[-1] = str(int(versions[-1]) + 1)
- version = ".".join(versions)
-
- for folder in helm_config["folders"]:
- success &= c2cciutils.publish.helm(folder, version, owner, repo, commit_sha, token)
-
- if not success:
- sys.exit(1)
-
-
-if __name__ == "__main__":
- main()
diff --git a/c2cciutils/scripts/trigger_image_update.py b/c2cciutils/scripts/trigger_image_update.py
deleted file mode 100644
index 23f7f2877..000000000
--- a/c2cciutils/scripts/trigger_image_update.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-Trigger an image update on the argocd repository.
-"""
-
-import argparse
-import os.path
-import random
-import subprocess # nosec
-import sys
-
-import requests
-import yaml
-
-
-def main() -> None:
- """
- Trigger an image update on the argocd repository.
-
- Only the branch present in the HELM_RELEASE_NAMES environment variable will be considered.
- """
- parser = argparse.ArgumentParser(
- description="""Trigger an image update on the argocd repository.
-
- Only the branch present in the HELM_RELEASE_NAMES environment variable will be considered."""
- )
- parser.add_argument("--version", help="The version to be exported")
- parser.add_argument("--event-type", default="image-update", help="The event name to be triggered")
- parser.add_argument(
- "--repository",
- default="camptocamp/argocd-gs-platform-ch-development-apps",
- help="The repository name to be triggered",
- )
-
- args = parser.parse_args()
-
- if args.version:
- version = args.version
- else:
- ref = os.environ["GITHUB_REF"].split("/")
-
- if ref[1] != "heads":
- print("::error::Not a branch")
- sys.exit(0)
-
- version = "/".join(ref[2:])
-
- if version not in os.environ.get("HELM_RELEASE_NAMES", "").split(","):
- print("::error::Not a release branch")
- sys.exit(0)
-
- images_full = []
- with open("ci/config.yaml", encoding="utf-8") as config_file:
- ci_config = yaml.load(config_file, Loader=yaml.SafeLoader)
- for image_config in ci_config.get("publish", {}).get("docker", {}).get("images", []):
- images_full.append(image_config["name"])
-
- dispatch(args.repository, args.event_type, [f"{image}:{version}" for image in images_full])
-
-
-def dispatch(repository: str, event_type: str, images_full: list[str]) -> None:
- """
- Trigger an image update on the argocd repository.
- """
- id_ = random.randint(1, 100000) # nosec
- print(f"Triggering {event_type}:{id_} on {repository} with {','.join(images_full)}")
-
- response = requests.post(
- f"https://api.github.com/repos/{repository}/dispatches",
- headers={
- "Content-Type": "application/json",
- "Accept": "application/vnd.github.v3+json",
- "Authorization": "token "
- + subprocess.run(
- ["gopass", "show", "gs/ci/github/token/gopass"], check=True, stdout=subprocess.PIPE
- )
- .stdout.decode()
- .strip(),
- },
- json={"event_type": event_type, "client_payload": {"name": " ".join(images_full), "id": id_}},
- timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30")),
- )
- response.raise_for_status()
-
-
-if __name__ == "__main__":
- main()
diff --git a/c2cciutils/scripts/version.py b/c2cciutils/scripts/version.py
deleted file mode 100644
index 92c12d549..000000000
--- a/c2cciutils/scripts/version.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import json
-import os
-import re
-import subprocess # nosec
-
-import multi_repo_automation as mra
-import ruamel.yaml.comments
-
-import c2cciutils
-
-
-def main() -> None:
- """Create a new version with its stabilization branch."""
- args_parser = argparse.ArgumentParser(
- description="Create a new version with its stabilization branch",
- usage="""
-This will:
-- Stash all your changes
-- Checkout the master branch
-- Pull it from origin
-- Push it to a new stabilization branch
-- Checkout a new branch named new-version
-- Do the changes for the new version
- - Update the SECURITY.md config
- - Update the Renovate config
- - Update the audit workflow
- - Create the backport label
-- Push it
-- Create a pull request
-- Go back to your old branch
-
-If you run the tool without any version it will check that everything is OK regarding the SECURITY.md available on GitHub.
- """,
- )
- args_parser.add_argument(
- "--version",
- help="The version to create",
- )
- args_parser.add_argument(
- "--force",
- action="store_true",
- help="Force create the branch and push it",
- )
- args_parser.add_argument(
- "--supported-until",
- help="The date until the version is supported, can also be To be defined or Best effort",
- default="Best effort",
- )
- args_parser.add_argument(
- "--upstream-supported-until",
- help="The date until the version is supported upstream",
- )
- arguments = args_parser.parse_args()
-
- # Get the repo information e.g.:
- # {
- # "name": "camptocamp/c2cciutils",
- # "remote": "origin",
- # "dir": "/home/user/src/c2cciutils",
- # }
- # can be override with a repo.yaml file
- repo = mra.get_repo_config()
-
- # Stash all your changes
- subprocess.run(["git", "stash", "--all", "--message=Stashed by release creation"], check=True)
- old_branch_name = subprocess.run(
- ["git", "rev-parse", "--abbrev-ref", "HEAD"],
- stdout=subprocess.PIPE,
- check=True,
- ).stdout.strip()
-
- # Checkout the master branch
- subprocess.run(["git", "checkout", repo.get("master_branch", "master")], check=True)
-
- # Pull it from origin
- subprocess.run(
- ["git", "pull", repo.get("remote", "origin"), repo.get("master_branch", "master")], check=True
- )
-
- # Push it to a new stabilization branch
- if arguments.version:
- subprocess.run(
- [
- "git",
- "push",
- *(["--force"] if arguments.force else []),
- repo.get("remote", "origin"),
- f"HEAD:{arguments.version}",
- ],
- check=not arguments.force,
- )
-
- version = arguments.version
- branch_name = "new-version" if version is None else f"new-version-{version}"
-
- # Checkout a new branch named new-version
- if arguments.force:
- subprocess.run(["git", "branch", "-D", branch_name]) # pylint: disable=subprocess-run-check
- subprocess.run(["git", "checkout", "-b", branch_name], check=True)
-
- # # # Do the changes for the new version # # #
-
- remotes = [r for r in mra.run(["git", "remote"], stdout=subprocess.PIPE).stdout.split() if r != ""]
- remote_branches = [
- b.strip()[len("remotes/") :]
- for b in mra.run(["git", "branch", "--all"], stdout=subprocess.PIPE).stdout.split()
- if b != "" and b.strip().startswith("remotes/")
- ]
- if "upstream" in remotes:
- remote_branches = [b[len("upstream") + 1 :] for b in remote_branches if b.startswith("upstream/")]
- elif "origin" in remotes:
- remote_branches = [b[len("origin") + 1 :] for b in remote_branches if b.startswith("origin/")]
- else:
- remote_branches = ["/".join(b.split("/")[1:]) for b in remote_branches]
-
- config = c2cciutils.get_config()
- branch_re = c2cciutils.compile_re(config["version"].get("branch_to_version_re", []))
- branches_match = [c2cciutils.match(b, branch_re) for b in remote_branches]
- version_branch = {m.groups()[0] if m.groups() else b: b for m, c, b in branches_match if m is not None}
-
- stabilization_branches = [
- version_branch.get(version, version) for version in mra.get_stabilization_versions(repo)
- ]
- modified_files = []
-
- if version:
- stabilization_branches.append(version)
-
- if os.path.exists("SECURITY.md"):
- modified_files.append("SECURITY.md")
- with mra.Edit("SECURITY.md") as security_md:
- security_md_lines = security_md.data.split("\n")
- index = -1
- for i, line in enumerate(security_md_lines):
- if line.startswith("| "):
- index = i
-
- new_line = f"| {version} | {arguments.supported_until} |"
- if arguments.upstream_supported_until:
- new_line += f" {arguments.upstream_supported_until} |"
-
- security_md.data = "\n".join(
- [*security_md_lines[: index + 1], new_line, *security_md_lines[index + 1 :]]
- )
-
- stabilization_branches_with_master = [*stabilization_branches, repo.get("master_branch", "master")]
-
- for labels in mra.gh_json("label", ["name"], "list"):
- if labels["name"].startswith("backport "):
- if labels["name"].replace("backport ", "") not in stabilization_branches_with_master:
- mra.gh("label", "delete", labels["name"], "--yes")
-
- for branch in stabilization_branches_with_master:
- mra.gh(
- "label",
- "create",
- "--force",
- f"backport {branch}",
- "--color=5aed94",
- f"--description=Backport the pull request to the '{branch}' branch",
- )
-
- if os.path.exists(".github/renovate.json5"):
- modified_files.append(".github/renovate.json5")
- with mra.EditRenovateConfig(".github/renovate.json5") as renovate_config:
- if stabilization_branches:
- if "baseBranches: " in renovate_config.data:
- renovate_config.data = re.sub(
- r"(.*baseBranches: )\[[^\]]*\](.*)",
- rf"\1{json.dumps(stabilization_branches_with_master)}\2",
- renovate_config.data,
- )
- else:
- renovate_config.add(
- f"baseBranches: {json.dumps(stabilization_branches_with_master)},\n", "baseBranches"
- )
-
- if stabilization_branches and os.path.exists(".github/workflows/audit.yaml"):
- modified_files.append(".github/workflows/audit.yaml")
- with mra.EditYAML(".github/workflows/audit.yaml") as yaml:
- for job in yaml["jobs"].values():
- matrix = job.get("strategy", {}).get("matrix", {})
- if "include" in matrix and version:
- new_include = dict(matrix["include"][-1])
- new_include["branch"] = version
- matrix["include"].append(new_include)
-
- if "branch" in matrix and stabilization_branches:
- yaml_stabilization_branches = ruamel.yaml.comments.CommentedSeq(stabilization_branches)
- yaml_stabilization_branches._yaml_add_comment( # pylint: disable=protected-access
- [
- ruamel.yaml.CommentToken("\n\n", ruamel.yaml.error.CommentMark(0), None),
- None,
- None,
- None,
- ],
- len(stabilization_branches) - 1,
- )
- job["strategy"]["matrix"]["branch"] = yaml_stabilization_branches
-
- # Commit the changes
- message = f"Create the new version '{version}'" if version else "Update the supported versions"
- if os.path.exists(".pre-commit-config.yaml"):
- subprocess.run(["pre-commit", "run", "--color=never", "--all-files"], check=False)
- subprocess.run(["git", "add", *modified_files], check=True)
- subprocess.run(["git", "commit", f"--message={message}"], check=True)
-
- # Push it
- subprocess.run(
- [
- "git",
- "push",
- *(["--force"] if arguments.force else []),
- repo.get("remote", "origin"),
- branch_name,
- ],
- check=True,
- )
-
- # Create a pull request
- url = mra.gh(
- "pr",
- "create",
- f"--title={message}",
- "--body=",
- f"--head={branch_name}",
- f"--base={repo.get('master_branch', 'master')}",
- ).strip()
-
- # Go back to your old branch
- subprocess.run(["git", "checkout", old_branch_name, "--"], check=True)
-
- if url:
- subprocess.run([mra.get_browser(), url], check=True)
- else:
- mra.gh("browse")
-
-
-if __name__ == "__main__":
- main()
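
For orientation, the label bookkeeping performed in the script above (drop stale `backport <branch>` labels, then recreate one per maintained branch) can be reproduced without the `multi_repo_automation` wrapper. Below is a minimal sketch using the GitHub CLI directly; the branch list is a made-up example, and the `gh` sub-commands mirror the calls in the deleted script:

```python
#!/usr/bin/env python3
"""Sketch: keep the 'backport <branch>' labels aligned with the maintained branches."""
import json
import subprocess

# Made-up branch list; the deleted script derives it from the remote stabilization branches.
maintained_branches = ["1.6", "1.7", "master"]

# Existing labels, as JSON from the GitHub CLI.
existing = json.loads(
    subprocess.run(
        ["gh", "label", "list", "--json", "name"],
        check=True,
        capture_output=True,
        text=True,
    ).stdout
)

# Delete backport labels that no longer point to a maintained branch.
for label in existing:
    name = label["name"]
    if name.startswith("backport ") and name.removeprefix("backport ") not in maintained_branches:
        subprocess.run(["gh", "label", "delete", name, "--yes"], check=True)

# (Re)create one backport label per maintained branch.
for branch in maintained_branches:
    subprocess.run(
        [
            "gh",
            "label",
            "create",
            "--force",
            f"backport {branch}",
            "--color=5aed94",
            f"--description=Backport the pull request to the '{branch}' branch",
        ],
        check=True,
    )
```
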
diff --git a/ci/config.yaml b/ci/config.yaml
deleted file mode 100644
index 75a77a4a3..000000000
--- a/ci/config.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/camptocamp/c2cciutils/master/c2cciutils/schema.json
-
-publish:
- pypi:
- versions:
- - version_tag
- - version_branch
- - rebuild
diff --git a/ci/requires_release b/ci/requires_release
deleted file mode 100755
index 2f8fcf56d..000000000
--- a/ci/requires_release
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python3
-
-import datetime
-import os
-from distutils.version import LooseVersion
-
-import requests
-
-
-def main() -> None:
- url = "https://pypi.org/pypi/c2cciutils/json"
- data = requests.get(url, timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30"))).json()
- versions = [LooseVersion(v) for v in data["releases"].keys()]
- versions = [v for v in versions if len(v.version) == 4]
- versions = sorted(versions)
- version = versions[-1].vstring
-
- date_str = version.split(".dev")[1][:8]
- date_obj = datetime.datetime.strptime(date_str, "%Y%m%d")
- days = (datetime.datetime.now() - date_obj).days
- required = "true" if days > 30 else "false"
-
- print(f"days: {days}")
- print(f"required: {required}")
- with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as output_file:
- output_file.write(f"days={days}\n")
- output_file.write(f"required={required}\n")
-
-
-if __name__ == "__main__":
- main()
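
The deleted `ci/requires_release` helper relies on `distutils.version.LooseVersion`, which Python 3.12 no longer ships. A rough equivalent built on `packaging.version` might look like the sketch below; the PyPI URL and the `C2CCIUTILS_TIMEOUT`/`GITHUB_OUTPUT` environment variables are taken from the script above, while the assumption that every published release string parses as a PEP 440 version with a `YYYYMMDD` dev segment is mine:

```python
#!/usr/bin/env python3
"""Sketch: check whether the latest dev release on PyPI is older than 30 days."""
import datetime
import os

import requests
from packaging.version import Version

data = requests.get(
    "https://pypi.org/pypi/c2cciutils/json",
    timeout=int(os.environ.get("C2CCIUTILS_TIMEOUT", "30")),
).json()

# Keep the dev releases only (e.g. 1.7.0.dev20240101), sorted oldest to newest.
versions = sorted(v for v in (Version(r) for r in data["releases"]) if v.dev is not None)

# The dev segment encodes the build date as YYYYMMDD.
date_str = str(versions[-1].dev)[:8]
date_obj = datetime.datetime.strptime(date_str, "%Y%m%d")
days = (datetime.datetime.now() - date_obj).days
required = "true" if days > 30 else "false"

print(f"days: {days}")
print(f"required: {required}")
with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as output_file:
    output_file.write(f"days={days}\n")
    output_file.write(f"required={required}\n")
```
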
diff --git a/config.md b/config.md
index ffc7df633..8461a8660 100644
--- a/config.md
+++ b/config.md
@@ -5,14 +5,6 @@ _C2C CI utils configuration file_
## Properties
- **`print_versions`**: Refer to _[#/definitions/print_versions](#definitions/print_versions)_.
-- **`publish`** _(object)_: The publishing configurations. Default: `{"pypi": {"versions": ["version_tag"], "packages": ""}, "docker": {"images": ""}, "helm": {"versions": ["version_tag"], "folders": ""}}`.
- - **`docker`**: Refer to _[#/definitions/publish_docker](#definitions/publish_docker)_.
- - **`pypi`**: Refer to _[#/definitions/publish_pypi](#definitions/publish_pypi)_.
- - **`helm`**: Refer to _[#/definitions/publish_helm](#definitions/publish_helm)_.
- - **`google_calendar`**: Refer to _[#/definitions/publish_google_calendar](#definitions/publish_google_calendar)_.
-- **`version`** _(object)_: The version configurations.
- - **`branch_to_version_re`**: Refer to _[#/definitions/version_transform](#definitions/version_transform)_.
- - **`tag_to_version_re`**: Refer to _[#/definitions/version_transform](#definitions/version_transform)_.
- **`k8s`** _(object)_: Default: `{}`.
- **`k3d`** _(object)_: Default: `{}`.
- **`install-commands`** _(array)_: Default: `[["k3d", "cluster", "create", "test-cluster", "--no-lb", "--no-rollback"]]`.
@@ -21,73 +13,9 @@ _C2C CI utils configuration file_
- **`db`** _(object)_: Database configuration. Default: `{}`.
- **`chart-options`** _(object)_: Can contain additional properties. Default: `{"persistence.enabled": "false", "tls.enabled": "true", "tls.autoGenerated": "true", "auth.postgresPassword": "mySuperTestingPassword", "volumePermissions.enabled": "true"}`.
- **Additional properties** _(string)_
-- **`dpkg`** _(object)_: The configuration used to manage the dpkg packages. Cannot contain additional properties.
- - **`packages_mapping`** _(object)_: The mapping of source package found in the image to package present in repology.org. Can contain additional properties.
- - **Additional properties** _(string)_
- - **`ignored_packages`** _(array)_: The list of packages that should be ignored.
- - **Items** _(string)_
## Definitions
-- **`publish_docker`**: The configuration used to publish on Docker.
- - **One of**
- - _object_: The configuration used to publish on Docker.
- - **`latest`** _(boolean)_: Publish the latest version on tag latest. Default: `true`.
- - **`images`** _(array)_: List of images to be published.
- - **Items** _(object)_
- - **`group`** _(string)_: The image is in the group, should be used with the --group option of c2cciutils-publish script. Default: `"default"`.
- - **`name`** _(string)_: The image name.
- - **`tags`** _(array)_: The tag name, will be formatted with the version=, the image with version=latest should be present when we call the c2cciutils-publish script. Default: `["{version}"]`.
- - **Items** _(string)_
- - **`repository`** _(object)_: The repository where we should publish the images. Can contain additional properties. Default: `{"github": {"server": "ghcr.io", "versions": ["version_tag", "version_branch", "rebuild"]}, "dockerhub": {}}`.
- - **Additional properties** _(object)_
- - **`server`** _(string)_: The server URL.
- - **`versions`** _(array)_: The kind or version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script. Default: `["version_tag", "version_branch", "rebuild", "feature_branch"]`.
- - **Items** _(string)_
- - **`dispatch`**: Send a dispatch event to another repository. Default: `{}`.
- - **One of**
- - _object_: Send a dispatch event to another repository.
- - **`repository`** _(string)_: The repository name to be triggered. Default: `"camptocamp/argocd-gs-gmf-apps"`.
- - **`event-type`** _(string)_: The event type to be triggered. Default: `"image-update"`.
- - : Must be: `false`.
- - **`snyk`** _(object)_: Checks the published images with Snyk.
- - **`monitor_args`**: The arguments to pass to the Snyk container monitor command. Default: `["--app-vulns"]`.
- - **One of**
- - _array_
- - **Items** _(string)_
- - : Must be: `false`.
- - **`test_args`**: The arguments to pass to the Snyk container test command. Default: `["--app-vulns", "--severity-threshold=critical"]`.
- - **One of**
- - _array_
- - **Items** _(string)_
- - : Must be: `false`.
- - : Must be: `false`.
-- **`publish_google_calendar`**: The configuration to publish on Google Calendar. Default: `{}`.
- - **One of**
- - _object_: The configuration to publish on Google Calendar.
- - **`on`** _(array)_: Default: `["version_branch", "version_tag", "rebuild"]`.
- - **Items** _(string)_
- - : Must be: `false`.
-- **`publish_pypi`**: Configuration to publish on pypi. Default: `{}`.
- - **One of**
- - _object_: Configuration to publish on pypi.
- - **`packages`** _(array)_: The configuration of packages that will be published.
- - **Items** _(object)_: The configuration of package that will be published.
- - **`group`** _(string)_: The image is in the group, should be used with the --group option of c2cciutils-publish script. Default: `"default"`.
- - **`path`** _(string)_: The path of the pypi package.
- - **`build_command`** _(array)_: The command used to do the build.
- - **Items** _(string)_
- - **`versions`** _(array)_: The kind or version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script.
- - **Items** _(string)_
- - : Must be: `false`.
-- **`publish_helm`**: Configuration to publish Helm charts on GitHub release.
- - **One of**
- - _object_: Configuration to publish Helm charts on GitHub release.
- - **`folders`** _(array)_: The folders that will be published.
- - **Items** _(string)_
- - **`versions`** _(array)_: The kind or version that should be published, tag, branch or value of the --version argument of the c2cciutils-publish script.
- - **Items** _(string)_
- - : Must be: `false`.
- **`print_versions`** _(object)_: The print versions configuration.
- **`versions`** _(array)_: Default: `[{"name": "python", "cmd": ["python3", "--version"]}, {"name": "pip", "cmd": ["python3", "-m", "pip", "--version"]}, {"name": "node", "prefix": "node ", "cmd": ["node", "--version"]}, {"name": "npm", "prefix": "npm ", "cmd": ["npm", "--version"]}, {"name": "make", "cmd": ["make", "--version"]}, {"name": "docker", "cmd": ["docker", "--version"]}, {"name": "docker compose", "cmd": ["docker", "compose", "version"]}, {"name": "java", "cmd": ["java", "-version"]}, {"name": "helm", "cmd": ["helm", "version"], "prefix": "HELM: "}]`.
- **Items** _(object)_
@@ -95,7 +23,3 @@ _C2C CI utils configuration file_
- **Items** _(string)_
- **`name`** _(string)_: The name.
- **`prefix`** _(string)_: Prefix added when we print the version.
-- **`version_transform`** _(array)_: A version transformer definition.
- - **Items** _(object)_
- - **`from`** _(string)_: The from regular expression.
- - **`to`** _(string)_: The expand regular expression: https://docs.python.org/3/library/re.html#re.Match.expand.
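
The removed `version_transform` definition is just a list of from/to regular-expression pairs that map a branch or tag name to a version via `re.Match.expand`. A minimal sketch of that mechanic, with made-up patterns rather than values from this repository:

```python
import re

# Made-up transformer rules: map a branch name to the version it represents.
branch_to_version_re = [
    {"from": r"release_(\d+\.\d+)", "to": r"\1"},
    {"from": r"(\d+\.\d+)", "to": r"\1"},
]


def branch_to_version(branch: str) -> str | None:
    """Return the version for a branch, or None when no rule matches."""
    for rule in branch_to_version_re:
        match = re.match(rule["from"], branch)
        if match is not None:
            # re.Match.expand resolves back-references such as \1 in the 'to' template.
            return match.expand(rule["to"])
    return None


print(branch_to_version("release_1.7"))  # -> 1.7
print(branch_to_version("feature-x"))  # -> None
```
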
diff --git a/example-project/.github/workflows/main.yaml b/example-project/.github/workflows/main.yaml
index ae5b991e5..4507eccaf 100644
--- a/example-project/.github/workflows/main.yaml
+++ b/example-project/.github/workflows/main.yaml
@@ -55,5 +55,5 @@ jobs:
run: docker build --tag=${GITHUB_REPOSITORY} .
- name: Publish
- run: c2cciutils-publish
+ run: tag-publish
if: env.HAS_SECRETS == 'HAS_SECRETS'
diff --git a/poetry.lock b/poetry.lock
index 0d0f8ecd4..b0d012c0a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,15 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
-
-[[package]]
-name = "annotated-types"
-version = "0.7.0"
-description = "Reusable constraint types to use with typing.Annotated"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
- {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
-]
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
[[package]]
name = "astroid"
@@ -25,40 +14,6 @@ files = [
[package.dependencies]
typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
-[[package]]
-name = "attrs"
-version = "24.2.0"
-description = "Classes Without Boilerplate"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
- {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
-]
-
-[package.extras]
-benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
-tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
-
-[[package]]
-name = "backports-tarfile"
-version = "1.2.0"
-description = "Backport of CPython tarfile module"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"},
- {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"},
-]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"]
-
[[package]]
name = "bandit"
version = "1.7.10"
@@ -108,17 +63,6 @@ typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "
uv = ["uv (>=0.1.18)"]
virtualenv = ["virtualenv (>=20.0.35)"]
-[[package]]
-name = "cachetools"
-version = "5.5.0"
-description = "Extensible memoizing collections and decorators"
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
- {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
-]
-
[[package]]
name = "certifi"
version = "2024.8.30"
@@ -130,96 +74,6 @@ files = [
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
-[[package]]
-name = "cffi"
-version = "1.17.1"
-description = "Foreign Function Interface for Python calling C code."
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
- {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
- {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
- {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
- {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
- {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
- {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
- {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
- {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
- {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
- {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
- {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
- {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
- {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
- {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
- {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
- {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
- {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
- {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
- {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
- {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
- {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
- {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
- {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
- {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
- {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
- {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
- {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
- {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
- {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
- {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
- {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
- {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
- {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
- {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
- {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
- {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
- {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
- {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
- {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
- {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
- {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
- {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
- {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
- {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
- {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
- {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
- {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
- {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
- {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
- {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
- {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
- {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
- {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
- {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
- {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
- {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
- {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
- {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
- {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
- {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
- {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
- {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
- {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
- {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
- {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
- {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
-]
-
-[package.dependencies]
-pycparser = "*"
-
-[[package]]
-name = "chardet"
-version = "5.2.0"
-description = "Universal encoding detector for Python 3"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"},
- {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"},
-]
-
[[package]]
name = "charset-normalizer"
version = "3.4.0"
@@ -345,99 +199,6 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
-[[package]]
-name = "configupdater"
-version = "3.2"
-description = "Parser like ConfigParser but for updating configuration files"
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "ConfigUpdater-3.2-py2.py3-none-any.whl", hash = "sha256:0f65a041627d7693840b4dd743581db4c441c97195298a29d075f91b79539df2"},
- {file = "ConfigUpdater-3.2.tar.gz", hash = "sha256:9fdac53831c1b062929bf398b649b87ca30e7f1a735f3fbf482072804106306b"},
-]
-
-[package.extras]
-testing = ["flake8", "pytest", "pytest-cov", "pytest-randomly", "pytest-xdist", "sphinx"]
-
-[[package]]
-name = "cryptography"
-version = "43.0.3"
-description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
- {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
- {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
- {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
- {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
- {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
- {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
- {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
- {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
- {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
- {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
-]
-
-[package.dependencies]
-cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
-
-[package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
-docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
-nox = ["nox"]
-pep8test = ["check-sdist", "click", "mypy", "ruff"]
-sdist = ["build"]
-ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
-test-randomorder = ["pytest-randomly"]
-
-[[package]]
-name = "debian-inspector"
-version = "31.1.0"
-description = "Utilities to parse Debian package, copyright and control files."
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "debian_inspector-31.1.0-py3-none-any.whl", hash = "sha256:77dfeb34492dd49d8593d4f7146ffa3f71fca703737824e09d7472e0eafca567"},
- {file = "debian_inspector-31.1.0.tar.gz", hash = "sha256:ebcfbc17064f10bd3b6d2122cdbc97b71a494af0ebbafaf9a8ceadfe8b164f99"},
-]
-
-[package.dependencies]
-attrs = ">=19.2,<20.1.0 || >20.1.0"
-chardet = ">=3.0.0"
-
-[package.extras]
-docs = ["Sphinx (>=5.0.2)", "doc8 (>=0.11.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-dark-mode (>=1.3.0)", "sphinx-rtd-theme (>=1.0.0)", "sphinxcontrib-apidoc (>=0.3.0)"]
-testing = ["aboutcode-toolkit (>=7.0.2)", "black", "commoncode", "isort", "pycodestyle (>=2.8.0)", "pytest (>=6,!=7.0.0)", "pytest-xdist (>=2)", "twine"]
-
-[[package]]
-name = "defusedxml"
-version = "0.7.1"
-description = "XML bomb protection for Python stdlib modules"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
- {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
- {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
-]
-
[[package]]
name = "dill"
version = "0.3.9"
@@ -537,172 +298,6 @@ gitdb = ">=4.0.1,<5"
doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"]
test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
-[[package]]
-name = "google-api-core"
-version = "2.22.0"
-description = "Google API client core library"
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "google_api_core-2.22.0-py3-none-any.whl", hash = "sha256:a6652b6bd51303902494998626653671703c420f6f4c88cfd3f50ed723e9d021"},
- {file = "google_api_core-2.22.0.tar.gz", hash = "sha256:26f8d76b96477db42b55fd02a33aae4a42ec8b86b98b94969b7333a2c828bf35"},
-]
-
-[package.dependencies]
-google-auth = ">=2.14.1,<3.0.dev0"
-googleapis-common-protos = ">=1.56.2,<2.0.dev0"
-proto-plus = [
- {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""},
- {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
-]
-protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
-requests = ">=2.18.0,<3.0.0.dev0"
-
-[package.extras]
-async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"]
-grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
-grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
-grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
-
-[[package]]
-name = "google-api-python-client"
-version = "2.151.0"
-description = "Google API Client Library for Python"
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "google_api_python_client-2.151.0-py2.py3-none-any.whl", hash = "sha256:4427b2f47cd88b0355d540c2c52215f68c337f3bc9d6aae1ceeae4525977504c"},
- {file = "google_api_python_client-2.151.0.tar.gz", hash = "sha256:a9d26d630810ed4631aea21d1de3e42072f98240aaf184a8a1a874a371115034"},
-]
-
-[package.dependencies]
-google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0"
-google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0"
-google-auth-httplib2 = ">=0.2.0,<1.0.0"
-httplib2 = ">=0.19.0,<1.dev0"
-uritemplate = ">=3.0.1,<5"
-
-[[package]]
-name = "google-auth"
-version = "2.35.0"
-description = "Google Authentication Library"
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"},
- {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"},
-]
-
-[package.dependencies]
-cachetools = ">=2.0.0,<6.0"
-pyasn1-modules = ">=0.2.1"
-rsa = ">=3.1.4,<5"
-
-[package.extras]
-aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
-enterprise-cert = ["cryptography", "pyopenssl"]
-pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
-reauth = ["pyu2f (>=0.1.5)"]
-requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
-
-[[package]]
-name = "google-auth-httplib2"
-version = "0.2.0"
-description = "Google Authentication Library: httplib2 transport"
-optional = true
-python-versions = "*"
-files = [
- {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"},
- {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"},
-]
-
-[package.dependencies]
-google-auth = "*"
-httplib2 = ">=0.19.0"
-
-[[package]]
-name = "google-auth-oauthlib"
-version = "1.2.1"
-description = "Google Authentication Library"
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "google_auth_oauthlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:2d58a27262d55aa1b87678c3ba7142a080098cbc2024f903c62355deb235d91f"},
- {file = "google_auth_oauthlib-1.2.1.tar.gz", hash = "sha256:afd0cad092a2eaa53cd8e8298557d6de1034c6cb4a740500b5357b648af97263"},
-]
-
-[package.dependencies]
-google-auth = ">=2.15.0"
-requests-oauthlib = ">=0.7.0"
-
-[package.extras]
-tool = ["click (>=6.0.0)"]
-
-[[package]]
-name = "googleapis-common-protos"
-version = "1.65.0"
-description = "Common protobufs used in Google APIs"
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"},
- {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"},
-]
-
-[package.dependencies]
-protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
-
-[package.extras]
-grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
-
-[[package]]
-name = "httplib2"
-version = "0.22.0"
-description = "A comprehensive HTTP client library."
-optional = true
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"},
- {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"},
-]
-
-[package.dependencies]
-pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""}
-
-[[package]]
-name = "id"
-version = "1.4.0"
-description = "A tool for generating OIDC identities"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "id-1.4.0-py3-none-any.whl", hash = "sha256:a0391117c98fa9851ebd2b22df0dc6fd6aacbd89a4ec95c173f1311ca9bb7329"},
- {file = "id-1.4.0.tar.gz", hash = "sha256:23c06772e8bd3e3a44ee3f167868bf5a8e385b0c1e2cc707ad36eb7486b4765b"},
-]
-
-[package.dependencies]
-pydantic = "*"
-requests = "*"
-
-[package.extras]
-dev = ["build", "bump (>=1.3.2)", "id[lint,test]"]
-lint = ["bandit", "interrogate", "mypy", "ruff (<0.4.2)", "types-requests"]
-test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"]
-
-[[package]]
-name = "identify"
-version = "2.6.1"
-description = "File identification library for Python"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"},
- {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"},
-]
-
-[package.extras]
-license = ["ukkonen"]
-
[[package]]
name = "idna"
version = "3.10"
@@ -754,137 +349,6 @@ files = [
[package.extras]
colors = ["colorama (>=0.4.6)"]
-[[package]]
-name = "jaraco-classes"
-version = "3.4.0"
-description = "Utility functions for Python class constructs"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"},
- {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"},
-]
-
-[package.dependencies]
-more-itertools = "*"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
-
-[[package]]
-name = "jaraco-context"
-version = "6.0.1"
-description = "Useful decorators and context managers"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"},
- {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"},
-]
-
-[package.dependencies]
-"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""}
-
-[package.extras]
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
-
-[[package]]
-name = "jaraco-functools"
-version = "4.1.0"
-description = "Functools like those found in stdlib"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"},
- {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"},
-]
-
-[package.dependencies]
-more-itertools = "*"
-
-[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
-cover = ["pytest-cov"]
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-enabler = ["pytest-enabler (>=2.2)"]
-test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"]
-type = ["pytest-mypy"]
-
-[[package]]
-name = "jeepney"
-version = "0.8.0"
-description = "Low-level, pure Python DBus protocol wrapper."
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"},
- {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"},
-]
-
-[package.extras]
-test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"]
-trio = ["async_generator", "trio"]
-
-[[package]]
-name = "json5"
-version = "0.9.25"
-description = "A Python implementation of the JSON5 data format."
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"},
- {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"},
-]
-
-[[package]]
-name = "keyring"
-version = "25.5.0"
-description = "Store and access your passwords safely."
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "keyring-25.5.0-py3-none-any.whl", hash = "sha256:e67f8ac32b04be4714b42fe84ce7dad9c40985b9ca827c592cc303e7c26d9741"},
- {file = "keyring-25.5.0.tar.gz", hash = "sha256:4c753b3ec91717fe713c4edd522d625889d8973a349b0e582622f49766de58e6"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""}
-"jaraco.classes" = "*"
-"jaraco.context" = "*"
-"jaraco.functools" = "*"
-jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""}
-pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""}
-SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}
-
-[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
-completion = ["shtab (>=1.1.0)"]
-cover = ["pytest-cov"]
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-enabler = ["pytest-enabler (>=2.2)"]
-test = ["pyfakefs", "pytest (>=6,!=8.1.*)"]
-type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"]
-
-[[package]]
-name = "markdown"
-version = "3.7"
-description = "Python implementation of John Gruber's Markdown."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"},
- {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"},
-]
-
-[package.dependencies]
-importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
-
-[package.extras]
-docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
-testing = ["coverage", "pyyaml"]
-
[[package]]
name = "markdown-it-py"
version = "3.0.0"
@@ -909,16 +373,6 @@ profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
-[[package]]
-name = "markdown-table"
-version = "2020.12.3"
-description = "markown table generator"
-optional = false
-python-versions = "*"
-files = [
- {file = "markdown-table-2020.12.3.tar.gz", hash = "sha256:df0de8e86d14183b1dab61aaa5a78ad683f8f7ca7dddda182ecbd403b321193f"},
-]
-
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -941,42 +395,6 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
-[[package]]
-name = "more-itertools"
-version = "10.5.0"
-description = "More routines for operating on iterables, beyond itertools"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"},
- {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"},
-]
-
-[[package]]
-name = "multi-repo-automation"
-version = "1.3.0"
-description = "Library for automation updates on multiple repositories."
-optional = true
-python-versions = ">=3.9"
-files = [
- {file = "multi_repo_automation-1.3.0-py3-none-any.whl", hash = "sha256:aea5b368bfcab3ccf488d55066d88ace08d709d16b8d7e9f72a15b0512e729a6"},
- {file = "multi_repo_automation-1.3.0.tar.gz", hash = "sha256:f9fdbd78f83a3a71d61515dcda6309ad84529eec662107b52642d038f2fca4a8"},
-]
-
-[package.dependencies]
-configupdater = "*"
-identify = "*"
-idna = "*"
-json5 = "*"
-PyYAML = "*"
-requests = "*"
-"ruamel.yaml" = "*"
-tomlkit = "*"
-typing_extensions = "*"
-
-[package.extras]
-update-stabilization-branches = ["c2cciutils"]
-
[[package]]
name = "mypy"
version = "1.13.0"
@@ -1041,47 +459,6 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
-[[package]]
-name = "nh3"
-version = "0.2.18"
-description = "Python bindings to the ammonia HTML sanitization library."
-optional = true
-python-versions = "*"
-files = [
- {file = "nh3-0.2.18-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86"},
- {file = "nh3-0.2.18-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307"},
- {file = "nh3-0.2.18-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f"},
- {file = "nh3-0.2.18-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe"},
- {file = "nh3-0.2.18-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a"},
- {file = "nh3-0.2.18-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50"},
- {file = "nh3-0.2.18-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204"},
- {file = "nh3-0.2.18-cp37-abi3-win32.whl", hash = "sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be"},
- {file = "nh3-0.2.18-cp37-abi3-win_amd64.whl", hash = "sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844"},
- {file = "nh3-0.2.18.tar.gz", hash = "sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4"},
-]
-
-[[package]]
-name = "oauthlib"
-version = "3.2.2"
-description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
- {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
-]
-
-[package.extras]
-rsa = ["cryptography (>=3.0.0)"]
-signals = ["blinker (>=1.4.0)"]
-signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
-
[[package]]
name = "packaging"
version = "24.1"
@@ -1118,20 +495,6 @@ files = [
[package.dependencies]
flake8-polyfill = ">=1.0.2,<2"
-[[package]]
-name = "pkginfo"
-version = "1.10.0"
-description = "Query metadata from sdists / bdists / installed packages."
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"},
- {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"},
-]
-
-[package.extras]
-testing = ["pytest", "pytest-cov", "wheel"]
-
[[package]]
name = "platformdirs"
version = "4.3.6"
@@ -1211,68 +574,6 @@ files = [
{file = "prospector_profile_utils-1.9.1.tar.gz", hash = "sha256:008efa6797a85233fd8093dcb9d86f5fa5d89673e431c15cb1496a91c9b2c601"},
]
-[[package]]
-name = "proto-plus"
-version = "1.25.0"
-description = "Beautiful, Pythonic protocol buffers."
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "proto_plus-1.25.0-py3-none-any.whl", hash = "sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961"},
- {file = "proto_plus-1.25.0.tar.gz", hash = "sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91"},
-]
-
-[package.dependencies]
-protobuf = ">=3.19.0,<6.0.0dev"
-
-[package.extras]
-testing = ["google-api-core (>=1.31.5)"]
-
-[[package]]
-name = "protobuf"
-version = "5.28.3"
-description = ""
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24"},
- {file = "protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868"},
- {file = "protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687"},
- {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584"},
- {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135"},
- {file = "protobuf-5.28.3-cp38-cp38-win32.whl", hash = "sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548"},
- {file = "protobuf-5.28.3-cp38-cp38-win_amd64.whl", hash = "sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b"},
- {file = "protobuf-5.28.3-cp39-cp39-win32.whl", hash = "sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535"},
- {file = "protobuf-5.28.3-cp39-cp39-win_amd64.whl", hash = "sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36"},
- {file = "protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed"},
- {file = "protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b"},
-]
-
-[[package]]
-name = "pyasn1"
-version = "0.6.1"
-description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
- {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
-]
-
-[[package]]
-name = "pyasn1-modules"
-version = "0.4.1"
-description = "A collection of ASN.1-based protocols modules"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
- {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.4.6,<0.7.0"
-
[[package]]
name = "pycodestyle"
version = "2.11.1"
@@ -1284,141 +585,6 @@ files = [
{file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
]
-[[package]]
-name = "pycparser"
-version = "2.22"
-description = "C parser in Python"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
- {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
-]
-
-[[package]]
-name = "pydantic"
-version = "2.9.2"
-description = "Data validation using Python type hints"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
- {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
-]
-
-[package.dependencies]
-annotated-types = ">=0.6.0"
-pydantic-core = "2.23.4"
-typing-extensions = [
- {version = ">=4.6.1", markers = "python_version < \"3.13\""},
- {version = ">=4.12.2", markers = "python_version >= \"3.13\""},
-]
-
-[package.extras]
-email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata"]
-
-[[package]]
-name = "pydantic-core"
-version = "2.23.4"
-description = "Core functionality for Pydantic validation and serialization"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
- {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
- {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"},
- {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"},
- {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"},
- {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"},
- {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"},
- {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"},
- {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"},
- {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"},
- {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"},
- {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"},
- {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"},
- {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"},
- {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"},
- {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"},
- {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"},
- {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"},
- {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"},
- {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"},
- {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"},
- {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"},
- {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"},
- {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"},
- {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"},
- {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"},
- {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"},
- {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"},
- {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"},
- {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"},
- {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"},
- {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"},
- {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"},
- {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"},
- {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"},
- {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"},
- {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"},
- {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"},
- {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"},
- {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"},
- {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"},
- {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"},
- {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"},
- {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"},
- {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"},
- {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"},
- {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"},
- {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"},
- {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"},
- {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"},
- {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"},
- {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"},
- {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"},
- {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"},
- {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"},
- {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"},
- {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"},
- {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"},
- {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"},
- {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"},
- {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"},
- {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"},
- {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"},
- {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"},
- {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"},
- {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"},
- {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"},
- {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"},
- {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"},
- {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"},
- {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"},
- {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"},
- {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"},
- {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"},
- {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
-
[[package]]
name = "pydocstyle"
version = "6.3.0"
@@ -1551,20 +717,6 @@ files = [
[package.dependencies]
pylint = ">=1.7"
-[[package]]
-name = "pyparsing"
-version = "3.2.0"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-optional = true
-python-versions = ">=3.9"
-files = [
- {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"},
- {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"},
-]
-
-[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
-
[[package]]
name = "pyproject-hooks"
version = "1.2.0"
@@ -1599,17 +751,6 @@ trove-classifiers = ">=2022.6.26"
[package.extras]
test = ["setuptools (>=60)", "zest.releaser[recommended]"]
-[[package]]
-name = "pywin32-ctypes"
-version = "0.2.3"
-description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"},
- {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"},
-]
-
[[package]]
name = "pyyaml"
version = "6.0.2"
@@ -1672,25 +813,6 @@ files = [
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
-[[package]]
-name = "readme-renderer"
-version = "44.0"
-description = "readme_renderer is a library for rendering readme descriptions for Warehouse"
-optional = true
-python-versions = ">=3.9"
-files = [
- {file = "readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151"},
- {file = "readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1"},
-]
-
-[package.dependencies]
-docutils = ">=0.21.2"
-nh3 = ">=0.2.14"
-Pygments = ">=2.5.1"
-
-[package.extras]
-md = ["cmarkgfm (>=0.8.0)"]
-
[[package]]
name = "requests"
version = "2.32.3"
@@ -1712,38 +834,6 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-[[package]]
-name = "requests-oauthlib"
-version = "2.0.0"
-description = "OAuthlib authentication support for Requests."
-optional = true
-python-versions = ">=3.4"
-files = [
- {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
- {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
-]
-
-[package.dependencies]
-oauthlib = ">=3.0.0"
-requests = ">=2.0.0"
-
-[package.extras]
-rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
-
-[[package]]
-name = "requests-toolbelt"
-version = "1.0.0"
-description = "A utility belt for advanced users of python-requests"
-optional = true
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
- {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
- {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
-]
-
-[package.dependencies]
-requests = ">=2.0.1,<3.0.0"
-
[[package]]
name = "requirements-detector"
version = "1.3.1"
@@ -1761,20 +851,6 @@ packaging = ">=21.3"
semver = ">=3.0.0,<4.0.0"
toml = ">=0.10.2,<0.11.0"
-[[package]]
-name = "rfc3986"
-version = "2.0.0"
-description = "Validating URI References per RFC 3986"
-optional = true
-python-versions = ">=3.7"
-files = [
- {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"},
- {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"},
-]
-
-[package.extras]
-idna2008 = ["idna"]
-
[[package]]
name = "rich"
version = "13.9.4"
@@ -1794,20 +870,6 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.1
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
-[[package]]
-name = "rsa"
-version = "4.9"
-description = "Pure-Python RSA implementation"
-optional = true
-python-versions = ">=3.6,<4"
-files = [
- {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
- {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.1.3"
-
[[package]]
name = "ruamel-yaml"
version = "0.18.6"
@@ -1876,37 +938,6 @@ files = [
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
]
-[[package]]
-name = "secretstorage"
-version = "3.3.3"
-description = "Python bindings to FreeDesktop.org Secret Service API"
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"},
- {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"},
-]
-
-[package.dependencies]
-cryptography = ">=2.0"
-jeepney = ">=0.6"
-
-[[package]]
-name = "security-md"
-version = "0.2.3"
-description = "Common utilities for Camptocamp CI"
-optional = false
-python-versions = ">=3.9"
-files = [
- {file = "security_md-0.2.3-py3-none-any.whl", hash = "sha256:e95e454d7c7b9786a7af16c8d5fa657cdad575d76b74c69d4b1f3f88548bbd82"},
- {file = "security_md-0.2.3.tar.gz", hash = "sha256:cc766d03b01d1f5e49ed616480cc5f92469eb5e278466f71066b7acbd011adad"},
-]
-
-[package.dependencies]
-defusedxml = ">=0.0.0,<1.0.0"
-markdown = ">=3.0,<4.0"
-markdown-table = ">=2020.0.0,<2021.0.0"
-
[[package]]
name = "semver"
version = "3.0.2"
@@ -2032,39 +1063,6 @@ files = [
{file = "trove_classifiers-2024.10.21.16.tar.gz", hash = "sha256:17cbd055d67d5e9d9de63293a8732943fabc21574e4c7b74edf112b4928cf5f3"},
]
-[[package]]
-name = "twine"
-version = "5.1.1"
-description = "Collection of utilities for publishing packages on PyPI"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997"},
- {file = "twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db"},
-]
-
-[package.dependencies]
-importlib-metadata = ">=3.6"
-keyring = ">=15.1"
-pkginfo = ">=1.8.1,<1.11"
-readme-renderer = ">=35.0"
-requests = ">=2.20"
-requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"
-rfc3986 = ">=1.4.0"
-rich = ">=12.0.0"
-urllib3 = ">=1.26.0"
-
-[[package]]
-name = "types-markdown"
-version = "3.7.0.20240822"
-description = "Typing stubs for Markdown"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "types-Markdown-3.7.0.20240822.tar.gz", hash = "sha256:183557c9f4f865bdefd8f5f96a38145c31819271cde111d35557c3bd2069e78d"},
- {file = "types_Markdown-3.7.0.20240822-py3-none-any.whl", hash = "sha256:bec91c410aaf2470ffdb103e38438fbcc53689b00133f19e64869eb138432ad7"},
-]
-
[[package]]
name = "types-pyyaml"
version = "6.0.12.20240917"
@@ -2101,17 +1099,6 @@ files = [
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
-[[package]]
-name = "uritemplate"
-version = "4.1.1"
-description = "Implementation of RFC 6570 URI Templates"
-optional = true
-python-versions = ">=3.6"
-files = [
- {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"},
- {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"},
-]
-
[[package]]
name = "urllib3"
version = "2.2.3"
@@ -2148,15 +1135,7 @@ enabler = ["pytest-enabler (>=2.2)"]
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]
-[extras]
-audit = []
-checks = []
-pr-checks = []
-publish = ["google-api-python-client", "google-auth-httplib2", "google-auth-oauthlib", "id", "twine"]
-publish-plugins = []
-version = ["multi-repo-automation"]
-
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<4.0"
-content-hash = "d1d62d6ba56d86833bae2ebaefea6354b60da07cfe106bc9bea39342466c8a18"
+content-hash = "bb88c491072e6e194a4ead798caae5dc178e91d34de2d234a354ae74d0666f5c"
diff --git a/pyproject.toml b/pyproject.toml
index bdf78d82c..6b4fa322b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,43 +47,17 @@ exclude = ["c2cciutils/node_modules/**/test"]
[tool.poetry.scripts]
c2cciutils = "c2cciutils.scripts.main:main"
c2cciutils-env = "c2cciutils.scripts.env:main"
-c2cciutils-publish = "c2cciutils.scripts.publish:main"
-c2cciutils-version = "c2cciutils.scripts.version:main"
-c2cciutils-clean = "c2cciutils.scripts.clean:main"
-c2cciutils-checks = "c2cciutils.scripts.env:main"
-c2cciutils-google-calendar = "c2cciutils.publish:main_calendar"
c2cciutils-k8s-install = "c2cciutils.scripts.k8s.install:main"
c2cciutils-k8s-db = "c2cciutils.scripts.k8s.db:main"
c2cciutils-k8s-wait = "c2cciutils.scripts.k8s.wait:main"
c2cciutils-k8s-logs = "c2cciutils.scripts.k8s.logs:main"
-c2cciutils-pin-pipenv = "c2cciutils.scripts.pin_pipenv:main"
c2cciutils-docker-logs = "c2cciutils.scripts.docker_logs:main"
-c2cciutils-trigger-image-update = "c2cciutils.scripts.trigger_image_update:main"
-c2cciutils-download-applications = "c2cciutils.scripts.download_applications:main"
-c2cciutils-docker-versions-gen = "c2cciutils.scripts.docker_versions_gen:main"
[tool.poetry.dependencies]
python = ">=3.9,<4.0"
requests = "2.32.3"
-google-api-python-client = { version = "2.151.0", optional = true }
-google-auth-httplib2 = { version = "0.2.0", optional = true }
-google-auth-oauthlib = { version = "1.2.1", optional = true }
"ruamel.yaml" = "0.18.6"
-defusedxml = "0.7.1"
-twine = { version = "5.1.1", optional = true }
-debian-inspector = "31.1.0"
PyYAML = "6.0.2"
-multi-repo-automation = { version="1.3.0", optional = true }
-security-md = "0.2.3"
-id = { version="1.4.0", optional = true }
-
-[tool.poetry.extras]
-audit = []
-checks = []
-publish = ["twine", "google-api-python-client", "google-auth-httplib2", "google-auth-oauthlib", "id"]
-publish_plugins = []
-pr_checks = []
-version = ["multi-repo-automation"]
[tool.poetry.group.dev.dependencies]
prospector = { version = "1.12.1", extras = ["with-bandit", "with-mypy", "with-pyroma"] }
@@ -91,7 +65,6 @@ prospector-profile-duplicated = "1.6.0"
prospector-profile-utils = "1.9.1"
types-requests = "2.32.0.20241016"
types-pyyaml = "6.0.12.20240917"
-types-markdown = "3.7.0.20240822"
[build-system]
requires = [
diff --git a/requirements.txt b/requirements.txt
index 03d157d18..861d5e7c5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,3 +3,4 @@ poetry-plugin-export==1.8.0
poetry-dynamic-versioning[plugin]==1.4.1
poetry-plugin-tweak-dependencies-version==1.5.2
pre-commit==4.0.1
+tag-publish==0.8.0
diff --git a/test/helmchart/Chart.yaml b/test/helmchart/Chart.yaml
deleted file mode 100644
index 4dc9b9891..000000000
--- a/test/helmchart/Chart.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-apiVersion: v2
-appVersion: '1.0'
-description: A Helm chart for Kubernetes
-name: test
-version: 1.0.0