core - use poetry for dependency management (cloud-custodian#5320)
kapilt authored Mar 14, 2020
1 parent 042e7a4 commit a92162f
Showing 64 changed files with 9,794 additions and 623 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-master.yml
@@ -55,7 +55,7 @@ jobs:
uses: actions/cache@v1
with:
path: .tox/${{ matrix.tox-target }}
key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/setup.py') }}
key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/requirement*.txt') }}

- name: Install Test Runner
run: |
2 changes: 1 addition & 1 deletion .gitignore
@@ -2,7 +2,7 @@
*.egg-info/
*.swp
.eggs/
c7n/version.py
# c7n/version.py
src/
deps/
include/
19 changes: 12 additions & 7 deletions Dockerfile
@@ -8,27 +8,32 @@ LABEL name="custodian" \

# Transfer Custodian source into container by directory
# to minimize size
ADD setup.py README.md requirements.txt /src/
ADD pyproject.toml poetry.lock README.md /src/
ADD c7n /src/c7n/
ADD tools/c7n_gcp /src/tools/c7n_gcp
ADD tools/c7n_azure /src/tools/c7n_azure
ADD tools/c7n_kube /src/tools/c7n_kube
ADD tools/c7n_org /src/tools/c7n_org
ADD tools/c7n_mailer /src/tools/c7n_mailer

WORKDIR /src

RUN adduser --disabled-login custodian
RUN apt-get --yes update \
&& apt-get --yes install build-essential --no-install-recommends \
&& pip3 install -r requirements.txt . \
&& pip3 install -r tools/c7n_gcp/requirements.txt tools/c7n_gcp \
&& pip3 install -r tools/c7n_azure/requirements.txt tools/c7n_azure \
&& pip3 install -r tools/c7n_kube/requirements.txt tools/c7n_kube \
&& apt-get --yes install build-essential curl --no-install-recommends \
&& python3 -m venv /usr/local \
&& curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 \
&& . /usr/local/bin/activate \
&& $HOME/.poetry/bin/poetry install --no-dev \
&& cd tools/c7n_azure && $HOME/.poetry/bin/poetry install && cd ../.. \
&& cd tools/c7n_gcp && $HOME/.poetry/bin/poetry install && cd ../.. \
&& cd tools/c7n_kube && $HOME/.poetry/bin/poetry install && cd ../.. \
&& apt-get --yes remove build-essential \
&& apt-get purge --yes --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
&& rm -Rf /var/cache/apt/ \
&& rm -Rf /var/lib/apt/lists/* \
&& rm -Rf /src/ \
&& rm -Rf /root/.cache/ \
&& rm -Rf /root/.poetry \
&& mkdir /output \
&& chown custodian: /output

10 changes: 10 additions & 0 deletions MANIFEST.in
@@ -0,0 +1,10 @@
prune tests
prune tools
prune docs
prune .vscode
prune .github
prune .azure-pipelines
global-exclude .travis.yml .gitignore .drone.yml .dockerignore .coveragerc
global-exclude .codecov.yml requirements*txt tox.ini Makefile .azure-pipelines.yml


57 changes: 47 additions & 10 deletions Makefile
@@ -1,18 +1,56 @@

PKG_SET = tools/c7n_gcp tools/c7n_azure tools/c7n_kube tools/c7n_mailer tools/c7n_logexporter tools/c7n_policystream tools/c7n_trailcreator tools/c7n_org tools/c7n_sphinxext

install:
python3 -m venv .
. bin/activate && pip install -r requirements-dev.txt
. bin/activate && pip install -e .
. bin/activate && pip install -r tools/c7n_mailer/requirements.txt
. bin/activate && pip install -r tools/c7n_azure/requirements.txt
. bin/activate && pip install -r tools/c7n_gcp/requirements.txt
. bin/activate && pip install -r tools/c7n_kube/requirements.txt

test:
./bin/tox -e py27
install-poetry:
poetry install
for pkg in $(PKG_SET); do pushd $$pkg && poetry install && popd; done

pkg-update:
poetry update
for pkg in $(PKG_SET); do pushd $$pkg && poetry update && popd; done

pkg-show-update:
poetry show -o
for pkg in $(PKG_SET); do pushd $$pkg && poetry show -o && popd; done

pkg-freeze-setup:
python3 tools/dev/poetrypkg.py gen-frozensetup -p .
for pkg in $(PKG_SET); do python3 tools/dev/poetrypkg.py gen-frozensetup -p $$pkg; done

test3:
./bin/tox -e py37
pkg-gen-setup:
python3 tools/dev/poetrypkg.py gen-setup -p .
for pkg in $(PKG_SET); do python3 tools/dev/poetrypkg.py gen-setup -p $$pkg; done

pkg-gen-requirements:
# we have to do this without hashes due to https://github.com/pypa/pip/issues/4995
poetry export --dev --without-hashes -f requirements.txt > requirements.txt
for pkg in $(PKG_SET); do pushd $$pkg && poetry export --without-hashes -f requirements.txt > requirements.txt && popd; done

pkg-publish-wheel:
# clean up any artifacts first
rm -f dist/*
for pkg in $(PKG_SET); do pushd $$pkg && rm -f dist/* && popd; done
# increment versions
poetry version patch
for pkg in $(PKG_SET); do pushd $$pkg && poetry version patch && popd; done
# generate setup
@$(MAKE) pkg-gen-setup
# build wheels
python setup.py bdist_wheel
for pkg in $(PKG_SET); do pushd $$pkg && python setup.py bdist_wheel && popd; done
# check wheel
twine check dist/*
for pkg in $(PKG_SET); do pushd $$pkg && twine check dist/* && popd; done
# upload to test pypi
twine upload -r testpypi dist/*
for pkg in $(PKG_SET); do pushd $$pkg && twine upload -r testpypi dist/* && popd; done

test:
./bin/tox -e py38

ftest:
C7N_FUNCTIONAL=yes AWS_DEFAULT_REGION=us-east-2 ./bin/py.test -m functional tests
@@ -35,4 +73,3 @@ lint:

clean:
rm -rf .tox .Python bin include lib pip-selfcheck.json

4 changes: 2 additions & 2 deletions README.md
@@ -102,8 +102,8 @@ As a quick walk through, below are some sample policies for AWS resources.
1. will enforce that no S3 buckets have cross-account access enabled.
1. will terminate any newly launched EC2 instances that do not have an encrypted EBS volume.
1. will tag any EC2 instance that does not have the following tags
"Environment", "AppId", and either "OwnerContact" or "DeptID" to be stopped
in four days.
"Environment", "AppId", and either "OwnerContact" or "DeptID" to
be stopped in four days.

```yaml
policies:
2 changes: 2 additions & 0 deletions c7n/version.py
@@ -0,0 +1,2 @@
# coding: utf-8
version = '0.9'
6 changes: 6 additions & 0 deletions docs/Makefile.sphinx
@@ -96,6 +96,12 @@ html:
rm $(SRCDIR)/tools/c7n-salactus.md
rm $(SRCDIR)/tools/c7n-logexporter.md
rm $(SRCDIR)/tools/c7n-guardian.md
rm $(SRCDIR)/tools/cask.md
rm -R $(SRCDIR)/tools/assets
rm $(SRCDIR)/aws/resources/*rst
rm $(SRCDIR)/azure/resources/*rst
rm $(SRCDIR)/gcp/resources/*rst


@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
1 change: 1 addition & 0 deletions docs/source/developer/index.rst
@@ -7,4 +7,5 @@ This section is for developers who are contributing to custodian.

* :ref:`developer-installing`
* :ref:`developer-tests`
* :ref:`developer-packaging`
* :ref:`developer-documentation`
41 changes: 22 additions & 19 deletions docs/source/developer/installing.rst
@@ -6,33 +6,34 @@ Installing Prerequisites
Installing Prerequisites
------------------------

Cloud Custodian supports Python 2.7, 3.6, and 3.7.
To develop the Custodian, you will need to have a make/C toolchain, Python 3.7 and some basic Python tools.
Cloud Custodian supports Python 3.6, 3.7, 3.8, and above. To develop the
Custodian, you will need a make/C toolchain, Python 3, and some basic
Python tools.

We strongly recommend any development be done in Python 3.

Install Python 3.7
~~~~~~~~~~~~~~~~~~
Install Python 3
~~~~~~~~~~~~~~~~

You'll need to have a Python 3.7 environment set up.
You'll need to have a Python 3 environment set up.
You may have a preferred way of doing this.
Here are instructions for a way to do it on Ubuntu and Mac OS X.

On Ubuntu
*********

On most recent versions of Ubuntu, Python 3.6 is included by default.
To get Python 3.7, first add the deadsnakes package repository:
On most recent versions of Ubuntu, Python 3 is included by default.

To get Python 3.8, first add the deadsnakes package repository:

.. code-block:: bash
$ sudo add-apt-repository ppa:deadsnakes/ppa
Next, install python3.7 and the development headers for it:
Next, install python3.8 and the development headers for it:

.. code-block:: bash
$ sudo apt-get install python3.7 python3.7-dev
$ sudo apt-get install python3.8 python3.8-dev
Then, install ``pip``:

@@ -44,8 +45,8 @@ When this is complete you should be able to check that you have pip properly installed:

.. code-block::
$ python3.7 -m pip --version
pip 9.0.1 from /usr/lib/python3/dist-packages (python 3.7)
$ python3.8 -m pip --version
pip 9.0.1 from /usr/lib/python3/dist-packages (python 3.8)
(your exact version numbers will likely differ)

@@ -88,7 +89,7 @@ Then build the software with `tox <https://tox.readthedocs.io/en/latest/>`_:
$ tox
Tox creates a sandboxed "virtual environment" ("virtualenv") for each Python version, 2.7, 3.6, and 3.7.
Tox creates a sandboxed "virtual environment" ("virtualenv") for each Python version: 3.6, 3.7, and 3.8.
These are stored in the ``.tox/`` directory.
It then runs the test suite under all versions of Python, per the ``tox.ini`` file.
If tox is unable to find a Python executable on your system for one of the supported versions, it will fail for that environment.
@@ -98,7 +99,7 @@ You can run the test suite in a single environment with the ``-e`` flag:

.. code-block:: bash
$ tox -e py37
$ tox -e py38
To access the executables installed in one or the other virtual environment,
source the virtualenv into your current shell, e.g.:
@@ -113,11 +114,13 @@ You should then have, e.g., the ``custodian`` command available:
(py37)$ custodian -h
You'll also be able to invoke `nosetests
<http://nose.readthedocs.io/en/latest/>`_ or `pytest
<https://docs.pytest.org/en/latest/>`_ directly with the arguments of your
choosing, e.g.:
You'll also be able to invoke `pytest <https://docs.pytest.org/en/latest/>`_ directly
with the arguments of your choosing, e.g.:

.. code-block:: bash
(py37) $ pytest tests/test_s3.py -x
(py37) $ pytest tests/test_s3.py -x -k replication
Note that you'll need to have environment variables set up appropriately,
per the tox.ini, for provider credentials.
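
A minimal sketch of what that can look like (the variable names and values
below are illustrative placeholders, not the authoritative list; check
tox.ini for the setenv entries it actually uses):

.. code-block:: bash

   # Illustrative placeholders only; consult tox.ini for the real values.
   (py37) $ AWS_DEFAULT_REGION=us-east-1 AWS_ACCESS_KEY_ID=foo \
       AWS_SECRET_ACCESS_KEY=bar pytest tests/test_s3.py -x -k replication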

90 changes: 90 additions & 0 deletions docs/source/developer/packaging.rst
@@ -0,0 +1,90 @@
.. _developer-packaging:

Packaging Custodian
===================

Custodian has moved to using ``poetry`` (https://python-poetry.org/) for
managing dependencies and providing repeatable installs. It's not
typically required for developers, as we maintain setuptools/pip/tox
compatible environments; however, familiarity is needed when making
changes to the dependency graph (adding, updating, or removing
dependencies), as all of the setup.py/requirements files are generated
artifacts.
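
As a sketch of what that means in practice (the package name and version
below are illustrative placeholders, not real Custodian dependencies),
changing a dependency means updating pyproject.toml/poetry.lock via poetry
and then regenerating the derived files:

.. code-block:: bash

   # "some-lib" is a placeholder dependency used only for illustration.
   poetry add "some-lib>=1.0"        # updates pyproject.toml and poetry.lock
   make pkg-gen-requirements         # re-export requirements.txt files
   make pkg-gen-setup                # regenerate setup.py files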

The reasoning behind the move to poetry was the need for better tooling
to freeze the custodian dependency graph when publishing packages to
pypi, so that releases remain repeatably installable at a future date
in spite of changes to the underlying dependency graph, some of which
may not obey semantic versioning principles. Additionally, with the
growth of providers and other tools, we wanted better holistic
management of release automation across the set of packages. After
experimenting with a few tools in the ecosystem, including building our
own, the maintainers settled on poetry as one that offered a superior
ux, was actively maintained, and had a reasonable python api for
additional release management activities.

Our additional tooling around poetry helps automate management across
the half-dozen custodian packages, as well as keeping the requirements
and setup.py files in sync. We continue to use setuptools/pip in our CI
infrastructure as it offers significant speed benefits [0]. To ensure
the poetry install is exercised as part of CI, we maintain the main
docker image via poetry.
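
For reference, the setuptools/pip path continues to work against those
generated files; a minimal sketch of what CI effectively does (tox
drives the equivalent steps):

.. code-block:: bash

   # Install from the generated artifacts rather than via poetry;
   # requirements.txt and setup.py are both produced from the poetry metadata.
   python3 -m venv .venv
   . .venv/bin/activate
   pip install -r requirements.txt
   pip install -e .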

Usage
-----

We maintain several makefile targets that can be used as a front end to
poetry.

- `make install-poetry` an alternative custodian installation method;
  assumes poetry is already installed.

- `make pkg-show-update` shows available updates to packages in the
  poetry lockfiles.

- `make pkg-update` attempts to update dependencies across the tree; it
  should be followed by the gen-requirements/gen-setup targets below.

- `make pkg-gen-requirements` regenerates the requirements.txt files
  from the poetry lockfiles.

- `make pkg-gen-setup` generates setup.py files from pyproject.toml;
  this will carry over semver constraints.

- `make pkg-freeze-setup` generates setup.py files from pyproject.toml
  with all dependencies frozen in setup.py. Note this is not currently
  transitive on the dep graph, just direct dependencies.

- `make pkg-publish-wheel` increments versions, builds wheels, lints,
  and publishes the build to testpypi via twine.

The underlying script that provides additional poetry/packaging
automation specific to custodian is in tools/dev/poetrypkg.py.
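
The make targets above wrap invocations of this script; for example
(commands taken from the Makefile in this commit, with tools/c7n_gcp as
the example package):

.. code-block:: bash

   # Regenerate a package's setup.py from its pyproject.toml, and a frozen
   # variant with all direct dependencies pinned.
   python3 tools/dev/poetrypkg.py gen-setup -p tools/c7n_gcp
   python3 tools/dev/poetrypkg.py gen-frozensetup -p tools/c7n_gcp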

- [0] poetry will call out to pip as a subprocess per package to
control the exact versions installed, as pip does not have a public
api.


Caveats
-------

To maintain in-repo dependencies between packages, we specify all
intra-repo dependencies as dev dependencies with relative directory
source paths. When we generate setup.py files we do so without any dev
deps; during generation each source-directory dev dep is resolved to
the latest version, either frozen or semver compatible.

One interesting consequence of source directory dependencies in poetry
is that they break any attempt to distribute/publish a package, even if
they are only `dev` deps. Per the pyproject.toml spec and the
build-system PEP, poetry will be invoked during install, and the
invocation/installation of poetry as a build system is handled
transparently by pip. Simply resolving/parsing the pyproject.toml dev
dependencies will cause a poetry failure on a source distribution
install, since installing an sdist is actually a wheel compilation.

As a result of this publishing limitation, we only publish wheels
instead of sdists, which avoids the build system entirely, since a
wheel is an extractable installation container format.
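
Concretely, the ``pkg-publish-wheel`` target above only ever builds and
uploads wheel artifacts, roughly:

.. code-block:: bash

   # Wheel-only publishing, as wired up in the Makefile's pkg-publish-wheel
   # target; no sdist is built or uploaded.
   python setup.py bdist_wheel
   twine check dist/*
   twine upload -r testpypi dist/*
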
2 changes: 1 addition & 1 deletion docs/source/index.rst
@@ -99,5 +99,5 @@ Navigate below to your cloud provider and get started with Cloud Custodian!
developer/installing.rst
developer/tests.rst
developer/documentation.rst

developer/packaging.rst
