diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7e0bc4a8..726dd10c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -20,6 +20,19 @@ env: PIP_DISABLE_PIP_VERSION_CHECK: "1" # Reduce noise in logs jobs: + pre-commit: + env: + SKIP: pytest,pytype,no-commit-to-branch + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: 3.11 + - uses: pre-commit/action@v3.0.1 + - uses: pre-commit-ci/lite-action@v1.0.3 + if: always() + test: strategy: # See: https://github.com/xenserver/python-libs/pull/26#discussion_r1179482169 @@ -28,12 +41,12 @@ jobs: fail-fast: false matrix: include: - - python-version: '3.6' - os: ubuntu-20.04 - - python-version: '3.10' - os: ubuntu-22.04 - python-version: '3.11' os: ubuntu-22.04 + - python-version: '3.12' + os: ubuntu-22.04 + - python-version: '3.13' + os: ubuntu-22.04 runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 @@ -58,21 +71,6 @@ jobs: pip install 'virtualenv<20.22' 'tox==4.5.1' tox-gh-actions tox --workdir .github/workflows/.tox --recreate - # tox >= 4.0.0 is needed for using optional-dependencies from pyproject.toml, which is - # is not available for python <= 3.6, so use the python3.8 of Ubuntu-20.04 to install it: - - name: Run tox for 3.6 and 3.8 on ${{ matrix.os }}'s 3.8 to get 'extras' from pyproject.toml) - if: ${{ matrix.python-version == 2.7 || matrix.python-version == 3.6 }} - run: | - set -xv;curl -sSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py - python3.8 get-pip.py - # The alternative is installing python3-pip but we don't need full pip function for now: - # sudo apt-get update && sudo apt-get install -y python3-pip - # Let tox-gh-actions get the environment(s) to run tests with from tox.ini: - # Use tox==4.5.1: tox>=4 is needed for reading the extras from pyproject.toml - # Warning: tox>=4.5.2 depends on virutalenv>=20.23, which breaks Python 2.7: - python3.8 -m pip install 'virtualenv<20.22' 'tox==4.5.1' tox-gh-actions - tox --workdir .github/workflows/.tox --recreate - - name: Select the coverage file for upload if: | ( matrix.python-version == '3.6' || matrix.python-version == '3.11' ) && diff --git a/.gitignore b/.gitignore index 4ef9fdd1..ba6694d1 100644 --- a/.gitignore +++ b/.gitignore @@ -28,13 +28,14 @@ __pycache__/ # private vscode workspace settings: .vscode/settings.json +# Virtualenv +.venv/ + # temporary file from tests/test_cpio.py (to be fixed later) /data # Output from mypy --txt-report /index.txt -htmlcov/ - # Pip editable install directory python_libs.egg-info/ diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 00000000..de8a471d --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,259 @@ +# Example markdownlint configuration with all properties set to their default value + +# Default state for all rules +default: true + +# Path to configuration file to extend +extends: null + +# MD001/heading-increment : Heading levels should only increment by one level at a time : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md001.md +MD001: true + +# MD003/heading-style : Heading style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md003.md +MD003: + # Heading style + style: "consistent" + +# MD004/ul-style : Unordered list style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md004.md +MD004: + # List style + style: "consistent" + +# MD005/list-indent : Inconsistent indentation for list items at the same level : 
https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md005.md +MD005: true + +# MD007/ul-indent : Unordered list indentation : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md007.md +MD007: + # Spaces for indent + indent: 2 + # Whether to indent the first level of the list + start_indented: false + # Spaces for first level indent (when start_indented is set) + start_indent: 2 + +# MD009/no-trailing-spaces : Trailing spaces : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md009.md +MD009: + # Spaces for line break + br_spaces: 2 + # Allow spaces for empty lines in list items + list_item_empty_lines: false + # Include unnecessary breaks + strict: false + +# MD010/no-hard-tabs : Hard tabs : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md010.md +MD010: + # Include code blocks + code_blocks: true + # Fenced code languages to ignore + ignore_code_languages: [] + # Number of spaces for each hard tab + spaces_per_tab: 1 + +# MD011/no-reversed-links : Reversed link syntax : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md011.md +MD011: true + +# MD012/no-multiple-blanks : Multiple consecutive blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md012.md +MD012: + # Consecutive blank lines + maximum: 1 + +# MD013/line-length : Line length : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md013.md +MD013: + # Number of characters + line_length: 362 + # Number of characters for headings + heading_line_length: 210 + # Number of characters for code blocks + code_block_line_length: 278 + # Include code blocks + code_blocks: true + # Include tables + tables: true + # Include headings + headings: true + # Strict length checking + strict: false + # Stern length checking + stern: false + +# MD014/commands-show-output : Dollar signs used before commands without showing output : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md014.md +MD014: true + +# MD018/no-missing-space-atx : No space after hash on atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md018.md +MD018: true + +# MD019/no-multiple-space-atx : Multiple spaces after hash on atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md019.md +MD019: true + +# MD020/no-missing-space-closed-atx : No space inside hashes on closed atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md020.md +MD020: true + +# MD021/no-multiple-space-closed-atx : Multiple spaces inside hashes on closed atx style heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md021.md +MD021: true + +# MD022/blanks-around-headings : Headings should be surrounded by blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md022.md +MD022: + # Blank lines above heading + lines_above: 1 + # Blank lines below heading + lines_below: 1 + +# MD023/heading-start-left : Headings must start at the beginning of the line : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md023.md +MD023: true + +# MD024/no-duplicate-heading : Multiple headings with the same content : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md024.md +MD024: + # Only check sibling headings + allow_different_nesting: false + # Only check sibling headings + siblings_only: false + +# MD025/single-title/single-h1 : Multiple top-level headings in the same document : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md025.md +MD025: + # Heading level + level: 1 + # RegExp for 
matching title in front matter + front_matter_title: "^\\s*title\\s*[:=]" + +# MD026/no-trailing-punctuation : Trailing punctuation in heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md026.md +MD026: + # Punctuation characters + punctuation: ".,;:!。,;:!" + +# MD027/no-multiple-space-blockquote : Multiple spaces after blockquote symbol : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md027.md +MD027: true + +# MD028/no-blanks-blockquote : Blank line inside blockquote : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md028.md +MD028: true + +# MD029/ol-prefix : Ordered list item prefix : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md029.md +MD029: + # List style + style: "one_or_ordered" + +# MD030/list-marker-space : Spaces after list markers : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md030.md +MD030: + # Spaces for single-line unordered list items + ul_single: 1 + # Spaces for single-line ordered list items + ol_single: 1 + # Spaces for multi-line unordered list items + ul_multi: 1 + # Spaces for multi-line ordered list items + ol_multi: 1 + +# MD031/blanks-around-fences : Fenced code blocks should be surrounded by blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md031.md +MD031: + # Include list items + list_items: true + +# MD032/blanks-around-lists : Lists should be surrounded by blank lines : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md032.md +MD032: true + +# MD033/no-inline-html : Inline HTML : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md033.md +MD033: + # Allowed elements + allowed_elements: [img, kbd] + +# MD034/no-bare-urls : Bare URL used : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md034.md +MD034: true + +# MD035/hr-style : Horizontal rule style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md035.md +MD035: + # Horizontal rule style + style: "consistent" + +# MD036/no-emphasis-as-heading : Emphasis used instead of a heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md036.md +MD036: + # Punctuation characters + punctuation: ".,;:!?。,;:!?" 
+ +# MD037/no-space-in-emphasis : Spaces inside emphasis markers : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md037.md +MD037: true + +# MD038/no-space-in-code : Spaces inside code span elements : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md038.md +MD038: true + +# MD039/no-space-in-links : Spaces inside link text : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md039.md +MD039: true + +# MD040/fenced-code-language : Fenced code blocks should have a language specified : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md040.md +MD040: + # List of languages + allowed_languages: [] + # Require language only + language_only: false + +# MD041/first-line-heading/first-line-h1 : First line in a file should be a top-level heading : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md041.md +MD041: + # Heading level + level: 1 + # RegExp for matching title in front matter + front_matter_title: "^\\s*title\\s*[:=]" + +# MD042/no-empty-links : No empty links : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md042.md +MD042: true + +# MD044/proper-names : Proper names should have the correct capitalization : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md044.md +MD044: + # List of proper names + names: [] + # Include code blocks + code_blocks: true + # Include HTML elements + html_elements: true + +# MD045/no-alt-text : Images should have alternate text (alt text) : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md045.md +MD045: false + +# MD046/code-block-style : Code block style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md046.md +MD046: + # Block style + style: "consistent" + +# MD047/single-trailing-newline : Files should end with a single newline character : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md047.md +MD047: true + +# MD048/code-fence-style : Code fence style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md048.md +MD048: + # Code fence style + style: "consistent" + +# MD049/emphasis-style : Emphasis style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md049.md +MD049: + # Emphasis style + style: "consistent" + +# MD050/strong-style : Strong style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md050.md +MD050: + # Strong style + style: "consistent" + +# MD051/link-fragments : Link fragments should be valid : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md051.md +MD051: true + +# MD052/reference-links-images : Reference links and images should use a label that is defined : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md052.md +MD052: + # Include shortcut syntax + shortcut_syntax: false + +# MD053/link-image-reference-definitions : Link and image reference definitions should be needed : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md053.md +MD053: + # Ignored definitions + ignored_definitions: + - "//" + +# MD054/link-image-style : Link and image style : https://github.com/DavidAnson/markdownlint/blob/v0.32.1/doc/md054.md +MD054: + # Allow autolinks + autolink: true + # Allow inline links and images + inline: true + # Allow full reference links and images + full: true + # Allow collapsed reference links and images + collapsed: true + # Allow shortcut reference links and images + shortcut: true + # Allow URLs as inline links + url_inline: true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4dea24a9..7011e537 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,7 +37,7 @@ # https://github.com/dexpota/cheatsheets/blob/master/pre-commit exclude: "^tests/data" fail_fast: true -default_stages: [commit] +default_stages: [pre-commit] repos: - repo: local hooks: @@ -47,7 +47,7 @@ repos: types: [binary] language: fail - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: no-commit-to-branch args: [--branch, master] @@ -62,11 +62,21 @@ repos: - isort +- repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 # Use the ref you want to point at + hooks: + # Enforce that `# type: ignore` annotations always occur with specific codes. + # Sample annotation: # type: ignore[attr-defined,name-defined] + - id: python-check-blanket-type-ignore + + - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.10.0 + rev: v1.13.0 hooks: - id: mypy additional_dependencies: + - pyfakefs + - pytest_httpserver - pytest-subprocess - types-mock - types-six @@ -74,14 +84,19 @@ repos: - repo: https://github.com/rcmdnk/pyproject-pre-commit - rev: v0.1.9 + rev: v0.3.4 hooks: - id: shellcheck - - id: mdformat-check - exclude: README-Unicode.md - language: python + + +- repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.42.0 + hooks: + - id: markdownlint + + - repo: https://github.com/pycqa/pylint - rev: v2.17.4 + rev: v3.3.1 hooks: - id: pylint args: @@ -93,21 +108,18 @@ repos: ] log_file: ".git/pre-commit-pylint.log" additional_dependencies: - - pyfakefs + - setuptools - six - mock - pandas + - pyfakefs - pytest_forked + - pytest_httpserver - toml + + - repo: local hooks: - - id: run-pyre - name: run-pyre (expect this to take 30 seconds) - entry: python pyre_runner.py - types: [python] - language: python - log_file: ".git/pre-commit-pyre.log" - additional_dependencies: [pyre-check,mock] - id: pytype name: pytype (may take up to two minutes) entry: sh -c "pytype >/dev/tty" @@ -116,24 +128,29 @@ repos: language: python language_version: python3.8 require_serial: true - additional_dependencies: [pytype] + additional_dependencies: + - pytype + - pyfakefs + - pytest + - pytest_forked + - pytest_httpserver + - pytest_localftpserver + - pytest-subprocess + + - id: pytest name: Check pytest unit tests pass types: [python] # entry: sh -c "pytest -x -rf --new-first --show-capture=all >/dev/tty" - entry: sh -c "tox -e py38-covcombine >/dev/tty" + entry: sh -c "tox >/dev/tty" verbose: true language: python require_serial: true pass_filenames: false -- repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.10.0 # Use the ref you want to point at - hooks: - # Enforce that `# type: ignore` annotations always occur with specific codes. - # Sample annotations: # type: ignore[attr-defined] # type: ignore[attr-defined,name-defined] - - id: python-check-blanket-type-ignore + + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -147,3 +164,10 @@ repos: - id: mixed-line-ending args: ['--fix=lf'] description: Replace line endings with LF + + +# Run the CSpell command line spell checker +- repo: https://github.com/streetsidesoftware/cspell-cli + rev: v8.16.0 + hooks: + - id: cspell diff --git a/.pylintrc b/.pylintrc index dc41d223..f2734d90 100644 --- a/.pylintrc +++ b/.pylintrc @@ -46,7 +46,7 @@ load-plugins= # W0141 - Use of builtin functions # -# For now, priorize Python3 mirgration as the use of pylint (e.g. enconding warnings). 
+# Disable warnings that are not severe, but warn e.g about missing encoding: # disable=W0142,W0703,C0111,R0201,W0603,W0613,W0212,W0141, bad-option-value, # Skip complaining about suppressions for older option values @@ -65,6 +65,7 @@ disable=W0142,W0703,C0111,R0201,W0603,W0613,W0212,W0141, len-as-condition, no-else-return, raise-missing-from, + too-many-positional-arguments, too-many-branches, too-many-nested-blocks, too-many-statements, @@ -222,8 +223,9 @@ defining-attr-methods=__init__,__new__,setUp [DESIGN] -# Maximum number of arguments for function / method -max-args=100 +# Maximum number of arguments for function / method. +# defaults to: max-args=5 +max-args=10 # Argument names that match this expression will be ignored. Default to name # with leading underscore diff --git a/.vscode/ltex.dictionary.en-US.txt b/.vscode/ltex.dictionary.en-US.txt index f92c872c..4420060e 100644 --- a/.vscode/ltex.dictionary.en-US.txt +++ b/.vscode/ltex.dictionary.en-US.txt @@ -1,25 +1,217 @@ -codecov +abstractmethod +accessorclass +actrc +addopts +answerfile +authhandler +autolinks +autouse +binstr +biosdevname +booleaness +bootdir +builtins +caplog +CLOEXEC +cmdline +CodeCov +configparser covcombine +covcp +coveragerc coverallsapp cpio cpiofile cpioinfo +crashkernel +curstate +datamodel +deadsnakes +dinfo +diskutil +distutils +DOTQUAD +drule +dunder +editenv +eliloader +ENOENT +ENVLOGDIR +errkey +errmsg +eths euxv +extlinux +EXTLINUX +faulthandler +fcoe +filehandle +Filesyste +FilesystemAccessor +filterwarnings +fstype +ftpaccessor +ftpserver +getpid +getpreferredencoding +getuid +grubdir +grubenv +hardlinks +host-cpu-tune +hpet +httpaccessor +httpserver +hwdata ibft +icdiff ifrename +igmp +inputtext +intify +isort +keywrd kname +koji +kwargs lastboot +laststate +levelname +lfile +libc +libexec linkname +linktext +localftpserver logbuf +ltex +lzma MACPCI +markdownlint +mboot +mdreport +Mellanox +memoryhelper +menuentry +minidom +mountingaccessor +mountpoint +multiboot +mypy +Navi nektos +netdev +netns +newc +newdefault +newhw +NEWNET +nfspath +nics +nodelist +nodocker +nolvm +noqueue +norules +oldstate +ord organisation +outerr +outkey +passman +pathexists +pciids +pcis +pcpus +pipx +Popen +ppns +precheck +proc +pvsproxy +pycache +pyenv pyfakefs +pylance +pyliblzma +pylint +Pylint +pylintrc +pylzma pyproject +pyright +pytest +PYTHONDEVMODE +pythonpath PYTHONWARNINGS +pytype +qdisc +qlen +Radeon +rckey +readbuffer +readline +repofiles +REPOMD +retcode +retrans rulefile +sbdf +SBDF +sbdfi +setdefault +setenv +SETFD +setuptools +shellcheck +signoff +somestring +sourcery +srule +strerror +stringio +subcls +subdevice +subvendor +sysconfig +tboot +Tboot +templatedir +testfile +testpaths +textiowrapper +timeo +tmpfs +tmultiboot tname tokenless +Traceback +treeinfo +ucode +ucontext +uidmap +urllib +usecase +usecases +vcpus +vcputune +venv +verhash +vfat +videoclass virtualenv +virtualenvs virutalenv +VPRO workdir +xapi +XAPI +XCP-ng +xcp-python-libs +xcptestlib +xelogging +xenopsd +XenServer +xensource +xmldoc +xmlunwrap diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 910602de..626a6e11 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,3 +1,14 @@ +# Development setup + +## Create a virtual environment with the test dependencies + +```bash +python -m venv .venv +. 
.venv/bin/activate +pip install pip-tools==7.3.0 +pip-compile --extra=test,mypy,pyright,pytype,tox -o - pyproject.toml | pip install -r /dev/stdin +``` + ## Development setup on Fedora 37 On Fedora 37, the `tox` rpm installs all Python versions. @@ -7,8 +18,26 @@ But this `tox` is older, so install `tox==4.5.1` using `pip` (see below) sudo dnf install tox;sudo rpm -e tox ``` +But preferably use `tox` from the virtual environment instead. + +## Development setup on Ubuntu 24.04 + +Prefer the virtual environment. Alternatively, an option is to use `pipx`: + +```bash +sudo apt install pipx +pipx install tox; pipx install 'pytest<7';pipx install pylint +pipx inject pytest pytest-{forked,localftpserver,pythonpath,subprocess,timeout} pyfakefs pytest_httpserver six mock +pipx inject pylint pyfakefs six mock pytest{,_forked,-localftpserver} +``` + +Use the `deadsnakes` PPA to install Python versions like 3.8 and 3.11 (see below). + ## Development setup on Ubuntu 22.04 +Usage of the `deadsnakes` PPA to install +other python versions. + ```bash sudo apt update sudo apt install software-properties-common python{2,3}-dev @@ -19,15 +48,18 @@ sudo apt-get install -y python3.{8,11}{,-distutils} Installation of additional python versions for testing different versions: - If `deadsnakes/ppa` does not work, e.g. for Python 3.6, `conda` or `pyenv` can be used. - For instructions, see https://realpython.com/intro-to-pyenv: - ```yaml + For instructions, see <https://realpython.com/intro-to-pyenv>: + + ```bash sudo apt install -y build-essential xz-utils zlib1g-dev \ lib{bz2,ffi,lzma,readline,ssl,sqlite3}-dev curl https://pyenv.run | bash # add displayed commands to .bashrc ~/.pyenv/bin/pyenv install 3.{6,8,11} && ~/.pyenv/bin/pyenv local 3.{6,8,11} # builds them ``` + - For testing on newer Ubuntu which has `python2-dev`, but not `pip2`, install `pip2` this way: - ```yml + + ```bash curl https://bootstrap.pypa.io/pip/2.7/get-pip.py --output get-pip.py;sudo python2 get-pip.py ``` @@ -48,15 +80,14 @@ This project uses `tox` to run the tests for different python versions. Intro: > _"Managing a Project's Virtual environments with `tox` - > A comprehensive beginner's introduction to `tox`":_ -> https://www.seanh.cc/2018/09/01/tox-tutorial/ +> `tox` runs `pytest`, `pylint` and static analysis using `mypy`, `pytype`, and `pyright`. Links: -- https://mypy.readthedocs.io/en/stable/ -- https://microsoft.github.io/pyright/ -- https://google.github.io/pytype/ -- https://pyre-check.org/ +- <https://mypy.readthedocs.io/en/stable/> +- <https://microsoft.github.io/pyright/> +- <https://google.github.io/pytype/> With `tox`, developers can run the full test suite for Python 2.7 and 3.x. The same test suite is used in GitHub CI: @@ -74,15 +105,14 @@ Explanation: ## Installation of all development dependencies -Using pip-tools, you can extract the requirements and extras from `pyptoject.toml`: +Using pip-tools, you can extract the requirements and extras from `pyproject.toml`: ```bash PYTHON=python3.10 -EXTRAS=.,test,mypy,pyre,pytype,tox -PFLAGS="--no-warn-conflicts" +EXTRAS=.,test,mypy,pyright,pytype,tox $PYTHON -m pip install pip-tools==7.3.0 -$PYTHON -m piptools compile --extra=$EXTRAS -o - pyproject.toml | - $PYTHON -m pip install -r /dev/stdin $PFLAGS +$PYTHON -m piptools compile --extra=$EXTRAS -o - pyproject.toml | + $PYTHON -m pip install -r /dev/stdin --no-warn-conflicts ``` With this, you can run most of the CI tests run by `tox` and GitHub CI also from the shell.
@@ -113,13 +143,15 @@ pip install "pytest<7" pytest-picked pytest-sugar pytest-clarity # pytest-icdiff ``` To verify or extract the dependencies and extras configured in `pyproject.toml` and `tox.ini` -for specific `tox` environments, you can use https://pypi.org/project/tox-current-env/: +for specific `tox` environments, you can use +<https://pypi.org/project/tox-current-env/>: ```bash tox --print-deps-to=pytype-deps.txt --print-extras-to=pytype-extras.txt -e pytype ``` -For more information to debug `pytest` test suites see: https://stribny.name/blog/pytest/ +For more information on debugging `pytest` test suites, see +<https://stribny.name/blog/pytest/>. ## Running GitHub actions locally using `act` @@ -159,7 +191,7 @@ To run `podman` as your user, run these as your user: ```bash systemctl enable --now --user podman.socket # Only configures the podman socket systemctl start --user podman.socket # Start the docker-compatible unix socket -# Ubuntu only, Fedora 37 configures it already with more unqualifies search registries: +# Ubuntu only, Fedora 37 configures it already with more unqualified search registries: echo 'unqualified-search-registries = ["docker.io"]' | sudo tee -a /etc/containers/registries.conf sudo touch /etc/containers/nodocker # Quiet the docker emulation notification echo "--container-daemon-socket unix://$XDG_RUNTIME_DIR/podman/podman.sock" >>~/.actrc @@ -177,20 +209,20 @@ docker run -it --rm alpine:latest grep NAME /etc/os-release Ubuntu 22.04 LTS uses `iptables-nft` by default. Switch to `iptables-legacy` so that Docker will work: -https://crapts.org/2022/05/15/install-docker-in-wsl2-with-ubuntu-22-04-lts/ +<https://crapts.org/2022/05/15/install-docker-in-wsl2-with-ubuntu-22-04-lts/> ### Copy selection on selecting test (without need for Ctrl-C) On traditional X11 and KDE Plasma, selected text is automatically copied -to the X selection/clipboard for pasting it. To use this engrained behavior +to the X selection/clipboard for pasting it. To use this engrained behaviour on Windows as well, it seems the only reliable way to have it for all apps is a `AutoHotKey` script: -- https://www.ilovefreesoftware.com/30/tutorial/auto-copy-selected-text-clipboard-windows-10.html +- <https://www.ilovefreesoftware.com/30/tutorial/auto-copy-selected-text-clipboard-windows-10.html> While individual extensions for VS Code, Firefox, chrome do work partially, they either don't cover the Firefox URL bar, the VS Code terminal and so on: -- https://addons.mozilla.org/en-GB/firefox/addon/copy-on-select -- https://marketplace.visualstudio.com/items?itemName=dinhani.copy-on-select (VS Code) -- https://www.jackofalladmins.com/knowledge%20bombs/dev%20dungeon/windows-terminal-copy-selection/ +- <https://addons.mozilla.org/en-GB/firefox/addon/copy-on-select> +- <https://marketplace.visualstudio.com/items?itemName=dinhani.copy-on-select> (VS Code) +- <https://www.jackofalladmins.com/knowledge%20bombs/dev%20dungeon/windows-terminal-copy-selection/> diff --git a/README-Unicode.md b/README-Unicode.md index 82e0999f..23881a11 100644 --- a/README-Unicode.md +++ b/README-Unicode.md @@ -1,40 +1,64 @@ -Python3 Unicode migration in the XCP package -============================================ +# Python3 Unicode migration in the XCP package + +## Problem + +Python3.6 on XS8 does not have an all-encompassing default UTF-8 mode for I/O. + +Newer Python versions have a UTF-8 mode that they even enable by default. +Python3.6 only enables UTF-8 for I/O when a UTF-8 locale is used. +See below for more background info on the UTF-8 mode. + +For situations where UTF-8 is not enabled, we have to specify UTF-8 explicitly. + +Such a situation happens when LANG or LC_* variables are not set for UTF-8. +XAPI plugins like auto-cert-kit are in this situation. + +Example: +For reading UTF-8 files like the `pciids` file, add `encoding="utf-8"`. +This applies especially to `open()` and `Popen()` when files may contain UTF-8. + +This also applies when en/decoding to/from `urllib`, which uses bytes.
+`urllib` has to use bytes as HTTP data can of course also be binary, e.g. compressed. + +## Migrating `subprocess.Popen()` -Migrating `subprocess.Popen()` ------------------------------- With Python3, the `stdin`, `stdout` and `stderr` pipes for `Popen()` default to `bytes`(binary mode). Binary mode is much safer because it foregoes the encode/decode. The for working with strings, existing users need to either enable text mode (when safe, it will attempt to decode and encode!) or be able to use bytes instead. For cases where the data is guaranteed to be pure ASCII, such as when resting the `proc.stdout` of `lspci -nm`, it is sufficient to use: + ```py open(["lspci, "-nm"], stdout=subprocess.PIPE, universal_newlines=True) ``` + This is possible because `universal_newlines=True` is accepted by Python2 and Python3. On Python3, it also enables text mode for `subprocess.PIPE` streams (newline conversion not needed, but text mode is needed) -Migrating `builtins.open()` ---------------------------- +## Migrating `builtins.open()` + On Python3, `builtins.open()` can be used in a number of modes: + - Binary mode (when `"b"` is in `mode`): `read()` and `write()` use `bytes`. - Text mode (Python3 default up to Python 3.6), when UTF-8 character encoding is not set by the locale -- UTF-8 mode (default since Python 3.7): https://peps.python.org/pep-0540/ +- UTF-8 mode (default since Python 3.7): When no Unicode locale in force, like in XAPI plugins, Python3 will be in text mode or UTF-8 (since Python 3.7, but existing XS is on 3.6): -* By default, `read()` on files `open()`ed without selecting binary mode attempts +- By default, `read()` on files `open()`ed without selecting binary mode attempts to decode the data into the Python3 Unicode string type. This fails for binary data. Any `ord() >= 128`, when no UTF-8 locale is active With Python 3.6, triggers `UnicodeDecodeError`. -* Thus, all `open()` calls which might open binary files have to be converted to binary +- Thus, all `open()` calls which might open binary files have to be converted to binary or UTF-8 mode unless the caller is sure he is opening an ASCII file. But even then, enabling an error handler to handle decoding errors is recommended: + ```py open(filename, errors="replace") ``` + But neither `errors=` nor `encoding=` is accepted by Python2, so a wrapper is likely best. ### Binary mode @@ -43,10 +67,6 @@ When decoding bytes to strings is not needed, binary mode can be great because i However, when strings need to returned from the library, something like `bytes.decode(errors="ignore")` to get strings is needed. -### Text mode - -Text mode using the `ascii` codec should be only used when it is ensured that the data can only consist of ASCII characters (0-127). Sadly, it is the default in Python 3.6 when the Python interpreter was not started using an UTF-8 locale for the LC_CTYPE locale category (set by LC_ALL, LC_CTYPE, LANG environment variables, overriding each other in that order) - ### UTF-8 mode Most if the time, the `UTF-8` codec should be used since even simple text files which are even documented to contain only ASCII characters like `"/usr/share/hwdata/pci.ids"` in fact __do__ contain UTF-8 characters. @@ -54,10 +74,11 @@ Most if the time, the `UTF-8` codec should be used since even simple text files Some files or some output data from commands even contains legacy `ISO-8859-1` chars, and even the `UTF-8` codec would raise `UnicodeDecodeError` for these. 
When this is known to be the case, `encoding="iso-8859-1` could be tried (not tested yet). -### Problems +### Problems With the locale set to C (XAPI plugins have that), Python's default mode changes between 3.6 and 3.7: + ```py for i in 3.{6,7,10,11};do echo -n "3.$i: "; LC_ALL=C python3.$i -c 'import locale,sys;print(locale.getpreferredencoding())';done @@ -66,7 +87,9 @@ for i in 3.{6,7,10,11};do echo -n "3.$i: "; 3.10: UTF-8 3.11: utf-8 ``` + This has the effect that in Python 3.6, the default codec for XAPI plugins is `ascii`: + ```py for i in 2.7 3.{6,7};do echo "$i:"; LC_ALL=C python$i -c 'open("/usr/share/hwdata/pci.ids").read()';done @@ -79,6 +102,7 @@ Traceback (most recent call last): UnicodeDecodeError: 'ascii' codec can't decode byte 0xc2 in position 97850: ordinal not in range(128) 3.7: ``` + This error means that the `'ascii' codec` cannot handle input ord() >= 128, and as some Video cards use `²` to reference their power, the `ascii` codec chokes on them. It means `xcp.pci.PCIIds()` cannot use `open("/usr/share/hwdata/pci.ids").read()`. @@ -86,6 +110,7 @@ It means `xcp.pci.PCIIds()` cannot use `open("/usr/share/hwdata/pci.ids").read() While Python 3.7 and newer use UTF-8 mode by default, it does not set up an error handler for `UnicodeDecodeError`. As it happens, some older tools output ISO-8859-1 characters hard-coded and these aren't valid UTF-8 sequences, and even newer Python versions need error handlers to not fail: + ```py echo -e "\0262" # ISO-8859-1 for: "²" python3 -c 'open(".text").read()' @@ -133,41 +158,48 @@ tests/test_bootloader.py line 38 in TestLinuxBootloader.setUp() tests/test_pci.py line 96 in TestPCIIds.test_videoclass_by_mock_calls() tests/test_pci.py line 110 in TestPCIIds.mock_lspci_using_open_testfile() ``` -Of course, `xcp/net/ifrename` won't be affected but it would be good to fix the + +Of course, `xcp/net/ifrename` won't be affected, but it would be good to fix the warning for them as well in an intelligent way. See the proposal for that below. There are a couple of possibilities and Python because 2.7 does not support the arguments we need to pass to ensure that all users of open() will work, we need to make passing the arguments conditional on Python >= 3. -1. Overriding `open()`, while technically working would not only affect xcp.python but the entire program: +1. Overriding `open()`. + While technically working, it would affect the entire interpreter: + ```py if sys.version_info >= (3, 0): original_open = __builtins__["open"] - def uopen(*args, **kwargs): + def utf8_open(*args, **kwargs): if "b" not in (args[1] \ if len(args) >= 2 else kwargs.get("mode", "")): kwargs.setdefault("encoding", "UTF-8") kwargs.setdefault("errors", "replace") return original_open(*args, **kwargs) - __builtins__["open"] = uopen + __builtins__["open"] = utf8_open ``` + 2. This is sufficient but is not very nice: + ```py # xcp/utf8mode.py if sys.version_info >= (3, 0): open_utf8args = {"encoding": "utf-8", "errors": "replace"} else: open_utf8args = {} - # xcp/{cmd,pci,environ?,logger?}.py tests/test_{pci,biodevname?,...?}.py + # xcp/{cmd,pci,environ?,logger?}.py tests/test_{pci,biosdevname?,...?}.py + from .utf8mode import open_utf8args ... - open(filename) + open(filename, **open_utf8args) ``` + But, `pylint` will still warn about these lines, so I propose: 3. 
Instead, use a wrapper function, which will also silence the `pylint` warnings at the locations which have been changed to use it: + ```py # xcp/utf8mode.py if sys.version_info >= (3, 0): @@ -177,7 +209,7 @@ to make passing the arguments conditional on Python >= 3. return open(*args, encoding="utf-8", errors="replace", **kwargs) else: utf8open = open - # xcp/{cmd,pci,environ?,logger?}.py tests/test_{pci,biodevname?,...?}.py + # xcp/{cmd,pci,environ?,logger?}.py tests/test_{pci,biosdevname?,...?}.py + from .utf8mode import utf8open ... - open(filename) @@ -189,4 +221,4 @@ Using the 3rd option, the `pylint` warnings for the changed locations explicitly disabling them. PS: Since utf8open() still returns a context-manager, `with open(...) as f:` -would still work. \ No newline at end of file +would still work. diff --git a/README.md b/README.md index 2f24022a..b58744d3 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ +# Common XenServer/XCP-ng Python classes + [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit)](https://github.com/pre-commit/pre-commit) [![](https://img.shields.io/badge/python-2.7_%7C_3.6_%7C_3.7_%7C_3.8_%7C_3.9_%7C_3.10_%7C_3.11+-blue.svg)](https://www.python.org/downloads/) [![codecov](https://codecov.io/gh/xenserver/python-libs/branch/master/graph/badge.svg?token=6WKVLDXJFN)](https://codecov.io/gh/xenserver/python-libs) [![](https://img.shields.io/badge/License-BSD--2--Cause%20%26%20MIT-brightgreen)](https://github.com/xenserver/python-libs/blob/master/LICENSE) -# Common XenServer/XCP-ng Python classes - The `xcp` directory contains the Common XenServer and XCP-ng Python packages. -They are intented for use in XenServer and XCP-ng Dom0 only and deal with logging, +They are for use in XenServer and XCP-ng Dom0 only and deal with logging, Hardware/PCI, networking, and other Dom0 tasks. The package name is `python-libs` which is also the `rpm` package name in XenServer. @@ -18,29 +18,28 @@ It depends on `six`, and on Python 2.7, also `configparser` and `pyliblzma`. ## Test-driven Development (TDD) Model +Please see [CONTRIBUTING.md] for installing a local development environment. + This package has CI which can be run locally but is also run in GitHub CI to ensure Test-driven development. -The Continuous Integration Tests feature: +CI features: - Combined coverage testing of Python 2.7 and Python 3.8 code branches - Automatic Upload of the combined coverage to CodeCov (from the GitHub Workflow) - Checking of the combined coverage against the diff to master: Fails if changes are not covered! -- Pylint report in the GitHub Action Summary page, with Warning and Error annotatios, even in the code review. -- Check that changes don't generate pylint warnings (if warning classes which are enabled in .pylintrc) -- Static analysis using `mypy`, `pyre` and `pytype` +- Pylint report in the GitHub Action Summary page, with Warning and Error annotations, even in the code review. +- Check that changes don't generate pylint warnings (if warning classes which are enabled in `.pylintrc`) +- Static analysis using `mypy`, `pylint`, `pyright` and `pytype` This enforces that any change (besides whitespace): -- has code coverage and +- Has code coverage, - does not introduce a `pylint` warning which is not disabled in `.pylintrc` - does not introduce a type of static analysis warning which is currently suppressed. 
## Status Summary -- The warnings shown on the GitHub Actions Summary Page indicate the remaining - work for full Pyhon3 support (excluding missing tests). - ## `Pylint` results from GitHub CI in GitHub Actions page A step of the GitHub workflow produces a browser-friendly `pylint` report: @@ -72,7 +71,6 @@ For the installation of the general development dependencies, visit [INSTALL.md] - Test with `python2.7 -m pytest` - Run `mypy` (without any arguments - The configuration is in `pyproject.toml`) - Run `./pytype_runner.py` -- Run `./pyre_runner.py` - Run `tox -e py36-lint` and fix any `Pylint` warnings - Run `tox -e py310-covcombine-check` and fix any missing diff-coverage. - Run `tox` for the full CI test suite @@ -80,19 +78,19 @@ For the installation of the general development dependencies, visit [INSTALL.md] - Commit with `--signoff` on a new branch and push it and check the triggered GitHub Action run succeeds. - Open a new PR -The list of `virtualenvs` configured in tox can be shown using this command: `tox -av` +The list of `virtualenvs` configured in `tox` can be shown using this command: `tox -av` -```yaml +```ml $ tox -av default environments: py36-lint -> Run in a py36 virtualenv: Run pylint and fail on warnings remaining on lines in the diff to master -py311-pyre -> Run in a py311 virtualenv: Run pyre for static analyis, only passes using: tox -e py311-pyre -py38-pytype -> Run in a py38 virtualenv: Run pytype for static analyis, intro: https://youtu.be/abvW0mOrDiY -py310-covcombine-check -> Run in a py310 virtualenv: Generate combined coverage reports with py27-test coverage merged Run mypy for static analyis +py311-pyright -> Run in a py311 virtualenv: Run pyright for static analysis +py38-pytype -> Run in a py38 virtualenv: Run pytype for static analysis, intro: https://youtu.be/abvW0mOrDiY +py310-covcombine-check -> Run in a py310 virtualenv: Generate combined coverage reports with py27-test coverage merged Run mypy for static analysis additional environments: -cov -> Run in a python virtualenv: Generate coverage html reports (incl. diff-cover) for this environment -covcp -> Run in a python virtualenv: Copy the generated .converage and coverage.xml to the UPLOAD_DIR dir +cov -> Run in a python virtualenv: Generate coverage html reports for this environment +covcp -> Run in a python virtualenv: Copy the generated .coverage and coverage.xml to the UPLOAD_DIR dir fox -> Run in a python virtualenv: Generate combined coverage html reports and open them in firefox mdreport -> Run in a python virtualenv: Make a test report (which is shown in the GitHub Actions Summary Page) test -> Run in a python virtualenv: Run pytest in this environment with --cov for use in other stages @@ -100,7 +98,7 @@ test -> Run in a python virtualenv: Run pytest in this environ If you have only one version of Python3, that works too. Use: `tox -e py-test` -## Static analysis using mypy, pyre, pyright and pytype +## Static analysis using mypy, pylint, pyright and pytype The preconditions for using static analysis with `mypy` (which passes now but has only a few type comments) and `pyright` are present now and `mypy` is enabled in `tox` @@ -108,24 +106,27 @@ which runs the tests in GitHub CI as well. But of course, because they code is l still not yet typed, no strict checks can be enabled so far. However, every checker which is possible now, is enabled. -Checking the contents of untyped functions is enabled for all but four modules which -would need more work. 
Look for `check_untyped_defs = false` in `pytproject.toml`. +Checking the contents of not typed functions is enabled for all but four modules which +would need more work. Look for `check_untyped_defs = false` in `pyproject.toml`. The goal or final benefit would be to have it to ensure internal type correctness and code quality but also to use static analysis to check the interoperability with the calling code. -## Type annotations: Use Type comments for now! +## Type annotations: Use Type comments for now Python2.7 can't support the type annotation syntax, but until all users are migrated, annotations in comments (type comments) can be used. They are supported by tools like `mypy` and `pyright` (VS Code): -Quoting from https://stackoverflow.com/questions/53306458/python-3-type-hints-in-python-2: +Quoting from <https://stackoverflow.com/questions/53306458/python-3-type-hints-in-python-2>: -> Function annotations were introduced in [PEP 3107](https://www.python.org/dev/peps/pep-3107/) for Python 3.0. The usage of annotations as type hints was formalized in in [PEP 484](https://www.python.org/dev/peps/pep-0484/) for Python 3.5+. +> Function annotations were introduced in [PEP 3107](https://www.python.org/dev/peps/pep-3107/) for Python 3.0. The usage of annotations as type hints was formalized in [PEP 484](https://www.python.org/dev/peps/pep-0484/) for Python 3.5+. > -> Any version before 3.0 then will not support the syntax you are using for type hints at all. However, PEP 484 [offers a workaround](https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code), which some editors may choose to honor. In your case, the hints would look like this: +> Python < 3.0 does not support the type hints syntax, but +> [PEP 484](https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code) +> introduces type comments that are equally supported and are otherwise ignored. + These type comments look like this: ```py def get_default_device(use_gpu=True): @@ -133,7 +134,7 @@ def get_default_device(use_gpu=True): ... ``` -Many type checkers support this syntax: mypy, pyright/pylance, pytype +Many type checkers support this syntax: mypy, pyright, pytype As proof, these examples show how the comment below triggers the checks: @@ -171,30 +172,19 @@ xcp/xmlunwrap.py Completed in 0.604sec ``` -See https://github.com/xenserver/python-libs/pull/23 for the context of this example. - -## Special open TODOs: +See <https://github.com/xenserver/python-libs/pull/23> for the context of this example. -Charset encoding/string handling: -See [README-Unicode.md](README-Unicode.md) for details: +## Guidelines -- What's more: When code is called from a xapi plugin (such as ACK), when such code - attempts to read text files like the `pciids` file, and there is a Unicode char - it int, and the locale is not set up to be UTF-8 (because xapi plugins are started - from xapi), the UTF-8 decoder has to be explicitly enabled for these files, - bese by adding `encoding="utf-8"` to the arguments of these specific `open()` calls, - to have valid Unicode text strings, e.g. `xcp.pci`, for regular text processing. -- TODO: More to be opened for all remaining `open()` and `Popen()` users, - as well as ensuring that users of `urllib` are able to work with they bytes - it returns (there is no option to use text mode, data may be gzip-encoded!) +Character set encoding/string handling: +See [README-Unicode.md](README-Unicode.md) for details on Unicode support.
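A minimal sketch of this guideline, assuming a hypothetical `open_utf8` helper (README-Unicode.md proposes a similar `utf8open` wrapper): it forces the UTF-8 codec with a lenient error handler on Python 3 and falls back to plain `open()` on Python 2.7.

```py
import sys


def open_utf8(path, mode="r", **kwargs):
    # Hypothetical helper: on Python 3, read text with an explicit UTF-8 codec
    # and a lenient error handler; Python 2.7 accepts neither `encoding=` nor
    # `errors=`, so it falls through to a plain open() there.
    if sys.version_info >= (3, 0) and "b" not in mode:
        kwargs.setdefault("encoding", "utf-8")
        kwargs.setdefault("errors", "replace")
    return open(path, mode, **kwargs)


# pci.ids is documented as ASCII but does contain UTF-8 (and some ISO-8859-1)
# characters, so read it through the wrapper even when no UTF-8 locale is set:
with open_utf8("/usr/share/hwdata/pci.ids") as f:
    vendor_lines = [line for line in f if not line.startswith(("#", "\t"))]
```

With `errors="replace"`, stray non-UTF-8 bytes no longer raise `UnicodeDecodeError`; they are replaced, which is usually acceptable for lookups in such data files.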
## Users -- https://github.com/xenserver/host-installer - - /opt/xensource/installer/ (has copies of `cpiofile.py`, `repository.py` (with `accessor.py`) -https://github.com/xcp-ng-rpms/host-upgrade-plugin ([koji](https://koji.xcp-ng.org/packageinfo?packageID=104)): +- [host-installer](https://github.com/xenserver/host-installer) +- [host-upgrade-plugin](https://github.com/xcp-ng-rpms/host-upgrade-plugin) ([koji](https://koji.xcp-ng.org/packageinfo?packageID=104)): - /etc/xapi.d/plugins/prepare_host_upgrade.py -https://github.com/xapi-project/xen-api (`xapi-core.rpm` and `xenopsd.rpm`) +- [xapi](https://github.com/xapi-project/xen-api) (`xapi-core.rpm` and `xenopsd.rpm`) - /etc/xapi.d/extensions/pool_update.apply - /etc/xapi.d/extensions/pool_update.precheck - /etc/xapi.d/plugins/disk-space @@ -207,16 +197,16 @@ See [README-Unicode.md](README-Unicode.md) for details: - xenserver-release-config/[xcp-ng-release-config](https://koji.xcp-ng.org/rpminfo?rpmID=10250) - /opt/xensource/libexec/fcoe_driver - /opt/xensource/libexec/xen-cmdline -- https://github.com/xcp-ng-rpms/interface-rename: +- <https://github.com/xcp-ng-rpms/interface-rename>: - /etc/sysconfig/network-scripts/interface-rename.py - /opt/xensource/bin/interface-rename - pvsproxy (Proprietary) - /usr/libexec/xapi-storage-script/volume/org.xen.xapi.storage.tmpfs/memoryhelper.py -- https://github.com/xenserver/linux-guest-loader (not installed by default anymore) +- <https://github.com/xenserver/linux-guest-loader> (not installed by default anymore) - /opt/xensource/libexec/eliloader.py -- https://github.com/xcp-ng-rpms/vcputune +- <https://github.com/xcp-ng-rpms/vcputune> - /opt/xensource/bin/host-cpu-tune -- The ACK xenapi plugin see: https://github.com/xenserver/python-libs/pull/21 +- The ACK xapi plugin. See: <https://github.com/xenserver/python-libs/pull/21> Verification: diff --git a/cspell.json b/cspell.json new file mode 100644 index 00000000..a25c1f71 --- /dev/null +++ b/cspell.json @@ -0,0 +1,48 @@ +// http://cspell.org/docs/getting-started/#2-add-words-to-the-project-dictionary +{ + "version": "0.2", + "language": "en-GB,en-US", + "dictionaryDefinitions": [ + { + "name": "cspell-workspace-dictionary", + "path": ".vscode/ltex.dictionary.en-US.txt", + "addWords": true + } + ], + "dictionaries": ["cspell-workspace-dictionary"], + "ignorePaths": [ + ".actrc", + ".pylintrc", + "Makefile", + "pyproject.toml", + "stubs/", + "tests/xcptestlib_unshare.py", + "tox.ini", + "xcp/cpiofile.py", + "xcp/net/ifrename/dynamic.py", + "xcp/net/ifrename/logic.py", + "xcp/net/ifrename/util.py", + "xcp/net/ip.py", + "xcp/repository.py", + "xcp-python-libs.spec.in" + ], + "ignoreRegExpList": [ + // exclude regular expressions from re.compile(pattern) + "re.compile(.*)", + // exclude MAC addresses + "MAC(.*)" + ], + "ignoreWords": [ + // In a log message, fix in final commit + "Succesfully", + // Volume label used in tests + "vgdorj", + // Unclear words in test method names + "nosrules", + "rshp", + "siobuff", + // Used to test an invalid quad + "efgh", + "ijkl" + ] +} diff --git a/pylint_runner.py b/pylint_runner.py index b013f66f..4f040256 100755 --- a/pylint_runner.py +++ b/pylint_runner.py @@ -6,21 +6,15 @@ - Markdown Reports for showing them in the GitHub Actions Summary - a pylint.txt for diff-quality to ensure no regressions in diffs. -Pylint for Python2 does not support JSONReporter, so this wrapper only supports -the native Python3 checks, not the 2to3 conversion checks selected by the --py3k -options provied only in the Pylint for Python2. -The older pylint-2.16 could be checked if it supports both.
- -The output for GitHub of this script is fitered for putting the -focus on severen warnings for the Python3 transition, expecially -the encoding warnings are important. +The output for GitHub of this script is filtered for focussing on severe warnings. +Especially, the encoding warnings are important. On stdout, the format used by GitHub to generate error annotations us used. These error annotations are shown on the top of the GitHub Action summary page and are also shown in the diff view at the their code locations. It also generates a markdown report including two Markdown -tables (one for summary, one with the individual erros) +tables (one for summary, one with the individual errors) which can be viewed locally and is also shown in the GitHub Action's Summary Report. """ @@ -57,10 +51,10 @@ def cleanup_results_dict(r, sym): r["symbol"] = sym[:32] r["message"] = r["message"][:96] try: - dotpos = r["obj"].rindex(".") + 1 + dot_pos = r["obj"].rindex(".") + 1 except ValueError: - dotpos = 0 - r["obj"] = r["obj"][dotpos:].split("test_")[-1][:16] + dot_pos = 0 + r["obj"] = r["obj"][dot_pos:].split("test_")[-1][:16] suppress_msg = ["Unused variable 'e'"] # type: list[str] @@ -99,7 +93,7 @@ def cleanup_results_dict(r, sym): # This is illegal according to: # https://docs.python.org/3/reference/datamodel.html#object.__hash__ # -# Reference: pylint3 removed the --py3k checker "because the transition is bedind us": +# Reference: pylint3 removed the --py3k checker "because the transition is behind us": # https://github.com/pylint-dev/pylint/blob/main/pylint/extensions/eq_without_hash.py # # But some checks are still useful in python3 after all, and this is the remnant of it. @@ -147,7 +141,6 @@ def pylint_project(check_dirs: List[str], errorlog: TextIO, branch_url: str): lineno = r["line"] # Write errors in the format for diff-quality to check against regressions: errorlog.write(f"{path}:{lineno}: [{msg_id}({sym}), {r['obj']}] {msg}\n") - # For suggestions to fix existing warnings, be more focussed on serverity: if not msg: continue @@ -214,7 +207,7 @@ def main(dirs: List[str], output_file: str, pylint_logfile: str, branch_url: str with open(pylint_logfile, "w", encoding="utf-8") as txt_out: panda_overview, panda_results = pylint_project(dirs, txt_out, branch_url) - # Write the panda dable to a markdown output file: + # Write the panda table to a markdown output file: summary_file = output_file or os.environ.get("GITHUB_STEP_SUMMARY") if not summary_file: return @@ -225,22 +218,22 @@ def main(dirs: List[str], output_file: str, pylint_logfile: str, branch_url: str def write_results_as_markdown_tables(branch_url, fp, panda_overview, panda_results): me = os.path.basename(__file__) - mylink = f"[{me}]({branch_url}/{me})" + link = f"[{me}]({branch_url}/{me})" # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-markdown-content - fp.write(f"### PyLint breakdown from {mylink} on **xcp/\\*\\*/*.py**\n") + fp.write(f"### PyLint breakdown from {link} on **xcp/\\*\\*/*.py**\n") fp.write(panda_overview.to_markdown()) - fp.write(f"\n### PyLint results from {mylink} on **xcp/\\*\\*/*.py**\n") + fp.write(f"\n### PyLint results from {link} on **xcp/\\*\\*/*.py**\n") fp.write(panda_results.to_markdown()) if __name__ == "__main__": - ghblob_url = "https://github.com/xenserver/python-libs/blob/master" + github_blob_url = "https://github.com/xenserver/python-libs/blob/master" server_url = os.environ.get("GITHUB_SERVER_URL", None) repository = 
os.environ.get("GITHUB_REPOSITORY", None) if server_url and repository: # https://github.com/orgs/community/discussions/5251 only set on Pull requests: branch = os.environ.get("GITHUB_HEAD_REF", None) or os.environ.get("GITHUB_REF_NAME", None) - ghblob_url = f"{server_url}/{repository}/blob/{branch}" + github_blob_url = f"{server_url}/{repository}/blob/{branch}" # Like the previous run-pylint.sh, check the xcp module by default: dirs_to_check = sys.argv[1:] if len(sys.argv) > 1 else ["xcp", "tests"] @@ -256,4 +249,4 @@ def write_results_as_markdown_tables(branch_url, fp, panda_overview, panda_resul pylint_txt = os.environ.get("ENVLOGDIR", ".tox") + "/pylint.txt" print("Checking:", str(dirs_to_check) + "; Writing report to:", step_summary) - main(dirs_to_check, step_summary, pylint_txt, ghblob_url) + main(dirs_to_check, step_summary, pylint_txt, github_blob_url) diff --git a/pyproject.toml b/pyproject.toml index 474a6e80..fee467da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,11 +71,6 @@ mypy = [ "types-six", "types-toml", ] -# pyre introduced two false-postives recently, pin it to prevent further surprises: -pyre = [ - "pyre-check == 0.9.21", - "pyre-extensions == 0.0.30", -] pytype = [ "pandas", "pytype", @@ -116,11 +111,12 @@ show_error_context = true error_summary = true files = ["xcp", "tests/test_*.py"] mypy_path = "stubs" -python_version = "3.10" +python_version = "3.11" warn_redundant_casts = true warn_return_any = true warn_unreachable = true warn_unused_configs = true +explicit_package_bases = true disallow_any_unimported = true disallow_any_explicit = false disallow_any_generics = true @@ -198,7 +194,7 @@ exclude = [ extraPaths = ["stubs"] include = ["xcp", "tests"] pythonPlatform = "Linux" -pythonVersion = "3.6" +pythonVersion = "3.11" reportFunctionMemberAccess = true reportGeneralTypeIssues = "warning" reportOptionalMemberAccess = "warning" @@ -211,7 +207,7 @@ stubPath = "stubs" inputs = ["xcp", "tests", "./*.py"] keepgoing = true platform = "linux" -python_version = "3.10" +python_version = "3.11" pythonpath = ".:stubs" disable = ["ignored-type-comment"] overriding_parameter_count_checks = true diff --git a/pyre_runner.py b/pyre_runner.py deleted file mode 100755 index a67d2427..00000000 --- a/pyre_runner.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -""" -Run a one-time pyre static analysis check without needing a .pyre_configuration -Gets the paths dynamically so it can be used in tox and GitHub CI -""" -import os -import sys -import time - -import mock - -me = os.path.basename(__file__) + ":" - -pyre_typesched = os.environ.get("PYRE_TYPESHED", None) -if pyre_typesched and os.path.exists(pyre_typesched + "/stdlib/os/path.pyi"): - print("Using {env:PYRE_TYPESHED}:", pyre_typesched) -else: - pyre_typesched = sys.path[-1] + "/mypy/typeshed" - if os.path.exists(pyre_typesched + "/stdlib/os/path.pyi"): - print("Using python_lib:", pyre_typesched) - else: - pyre_typesched = "/tmp/typeshed" - if os.path.exists(pyre_typesched + "/stdlib/os/path.pyi"): - print("Using:", pyre_typesched) - else: - clone = "git clone --depth 1 https://github.com/python/typeshed " - print(me, "Falling back to:", clone + pyre_typesched) - ret = os.system(clone + pyre_typesched) - if ret or not os.path.exists(pyre_typesched + "/stdlib/os/path.pyi"): - print(me, "Could not find or clone typeshed, giving up.") - sys.exit(0) - -command = ( - "pyre", - "--source-directory", - "xcp", - "--source-directory", - "tests", - "--search-path", - "stubs", - "--search-path", - ".", - 
"--search-path", - os.path.dirname(mock.__file__), - "--typeshed", - pyre_typesched, - "check", -) -cmd = " ".join(command) -print(me, "Running:", cmd) -start_time = time.time() -ret = os.system(cmd) -duration = time.time() - start_time -r = os.waitstatus_to_exitcode(ret) # type: ignore[module-addr] # newer versions have it -if r == 0: - print(me, f"OK pyre took: {duration:.1f}s") -else: - print(me, "Ran:", cmd) - print(me, "exit code:", r) - if os.environ.get("ACT", None): - time.sleep(10) -sys.exit(r) diff --git a/pytest.ini b/pytest.ini index e07456ee..0906991a 100644 --- a/pytest.ini +++ b/pytest.ini @@ -8,10 +8,11 @@ # These are the most of the needed pytest plugins, unfortunately this list does # not support ;python_version<=3.0 or ;python_version>3.0. Therefore, it can # only list plugins available for all tested python versions (2.7, 3.6 ... 3.11): +# pytest-localftpserver is also used, but its installation is not checked +# to to its installation not being detected on Ubuntu 24.04: required_plugins = pytest_httpserver pytest-forked - pytest-localftpserver pytest-pythonpath pytest-subprocess pytest-timeout diff --git a/pytype_runner.py b/pytype_runner.py index 3ee62e98..113971c1 100755 --- a/pytype_runner.py +++ b/pytype_runner.py @@ -18,15 +18,15 @@ def generate_github_annotation(match: re.Match[str], branch_url: str) -> Tuple[s func = match.group(3) msg = match.group(4) assert isinstance(msg, str) - msg_splitpos = msg.find(" ", 21) + msg_split_pos = msg.find(" ", 21) file = match.group(1) - linktext = os.path.basename(file).split(".")[0] - source_link = f"[`{linktext}`]({branch_url}/{file}#L{lineno})" + link_text = os.path.basename(file).split(".")[0] + source_link = f"[`{link_text}`]({branch_url}/{file}#L{lineno})" row = { "Location": source_link, "Function": f"`{func}`", "Error code": code, - "Error message": msg[:msg_splitpos] + "
" + msg[msg_splitpos + 1 :], + "Error message": msg[:msg_split_pos] + "
" + msg[msg_split_pos + 1 :], "Error description": "", } # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message @@ -49,8 +49,8 @@ def skip_uninteresting_lines(line: str) -> bool: return True if line[0] == "[": pos = line.rfind(os.getcwd()) - printfrom = pos + len(os.getcwd()) + 1 if pos > 0 else line.index("]") + 2 - info("PROGRESS: " + line[1:].split("]")[0] + ": " + line[printfrom:]) + print_from = pos + len(os.getcwd()) + 1 if pos > 0 else line.index("]") + 2 + info("PROGRESS: " + line[1:].split("]")[0] + ": " + line[print_from:]) return True if line.startswith("ninja: "): line = line[7:] @@ -77,7 +77,7 @@ def run_pytype(command: List[str], branch_url: str, errorlog: TextIO, results): ok = True while ok: for key, _ in sel.select(): - line = key.fileobj.readline() # type: ignore + line = key.fileobj.readline() # type: ignore[union-attr] if not line: ok = False break @@ -139,17 +139,17 @@ def run_pytype_and_parse_annotations(xfail_files: List[str], branch_url: str): def to_markdown(me, fp, returncode, results, branch_url): - mylink = f"[{me}]({branch_url}/{me}.py)" + my_link = f"[{me}]({branch_url}/{me}.py)" pytype_link = "[pytype](https://google.github.io/pytype)" if len(results) or returncode: - fp.write(f"\n#### {mylink} reports these {pytype_link} error messages:\n") + fp.write(f"\n#### {my_link} reports these {pytype_link} error messages:\n") fp.write(pd.DataFrame(results).to_markdown()) else: - fp.write(f"\n#### Congratulations, {mylink} reports no {pytype_link} errors.\n") + fp.write(f"\n#### Congratulations, {my_link} reports no {pytype_link} errors.\n") fp.write("\n") -def setup_and_run_pytype_action(scriptname: str): +def setup_and_run_pytype_action(script_name: str): config = load("pyproject.toml") pytype = config["tool"].get("pytype") xfail_files = pytype.get("xfail", []) if pytype else [] @@ -163,16 +163,16 @@ def setup_and_run_pytype_action(scriptname: str): branch = os.environ.get("GITHUB_HEAD_REF", None) or os.environ.get("GITHUB_REF_NAME", None) filelink_baseurl = f"{server_url}/{repository}/blob/{branch}" retcode, results = run_pytype_and_parse_annotations(xfail_files, filelink_baseurl) - # Write the panda dable to a markdown output file: + # Write the panda table to a markdown output file: summary_file = os.environ.get("GITHUB_STEP_SUMMARY", None) if summary_file: with open(summary_file, "w", encoding="utf-8") as fp: - to_markdown(scriptname, fp, retcode, results, filelink_baseurl) + to_markdown(script_name, fp, retcode, results, filelink_baseurl) else: - to_markdown(scriptname, sys.stdout, retcode, results, filelink_baseurl) + to_markdown(script_name, sys.stdout, retcode, results, filelink_baseurl) if __name__ == "__main__": script_basename = os.path.basename(__file__).split(".")[0] basicConfig(format=script_basename + ": %(message)s", level=INFO) - sys.exit(setup_and_run_pytype_action(scriptname=script_basename)) + sys.exit(setup_and_run_pytype_action(script_name=script_basename)) diff --git a/stubs/pyfakefs/__init__.pyi b/stubs/pyfakefs/__init__.pyi deleted file mode 100644 index e69de29b..00000000 diff --git a/stubs/pyfakefs/fake_filesystem.pyi b/stubs/pyfakefs/fake_filesystem.pyi deleted file mode 100644 index 466eb8b1..00000000 --- a/stubs/pyfakefs/fake_filesystem.pyi +++ /dev/null @@ -1,151 +0,0 @@ -from _typeshed import Incomplete # pylint: disable=import-error - - -def set_uid(uid) -> None: ... -def set_gid(gid) -> None: ... -def reset_ids() -> None: ... -def is_root(): ... 
- - -class FakeFilesystem: - path_separator: Incomplete - alternative_path_separator: Incomplete - patcher: Incomplete - is_windows_fs: Incomplete - is_macos: Incomplete - is_case_sensitive: Incomplete - root: Incomplete - cwd: Incomplete - umask: Incomplete - open_files: Incomplete - mount_points: Incomplete - dev_null: Incomplete - def __init__( - self, - path_separator=..., - total_size: Incomplete | None = ..., - patcher: Incomplete | None = ..., - ) -> None: ... - @property - def is_linux(self): ... - def reset(self, total_size: Incomplete | None = ...) -> None: ... - def pause(self) -> None: ... - def resume(self) -> None: ... - def line_separator(self): ... - def raise_os_error( - self, errno, filename: Incomplete | None = ..., winerror: Incomplete | None = ... - ) -> None: ... - def raise_io_error(self, errno, filename: Incomplete | None = ...) -> None: ... - def add_mount_point(self, path, total_size: Incomplete | None = ...): ... - def get_disk_usage(self, path: Incomplete | None = ...): ... - def set_disk_usage(self, total_size, path: Incomplete | None = ...) -> None: ... - def change_disk_usage(self, usage_change, file_path, st_dev) -> None: ... - def stat(self, entry_path, follow_symlinks: bool = ...): ... - def chmod(self, path, mode, follow_symlinks: bool = ...) -> None: ... - def utime( - self, - path, - times: Incomplete | None = ..., - ns: Incomplete | None = ..., - follow_symlinks: bool = ..., - ) -> None: ... - def get_open_file(self, file_des): ... - def has_open_file(self, file_object): ... - def normcase(self, path): ... - def normpath(self, path): ... - def absnormpath(self, path): ... - def splitpath(self, path): ... - def splitdrive(self, path): ... - def joinpaths(self, *paths): ... - def ends_with_path_separator(self, file_path): ... - def is_filepath_ending_with_separator(self, path): ... - def exists(self, file_path, check_link: bool = ...): ... - def resolve_path(self, file_path, allow_fd: bool = ..., raw_io: bool = ...): ... - def get_object_from_normpath(self, file_path, check_read_perm: bool = ...): ... - def get_object(self, file_path, check_read_perm: bool = ...): ... - def resolve( - self, - file_path, - follow_symlinks: bool = ..., - allow_fd: bool = ..., - check_read_perm: bool = ..., - ): ... - def lresolve(self, path): ... - def add_object(self, file_path, file_object, error_fct: Incomplete | None = ...) -> None: ... - def rename(self, old_file_path, new_file_path, force_replace: bool = ...) -> None: ... - def remove_object(self, file_path) -> None: ... - def make_string_path(self, path): ... - def create_dir(self, directory_path, perm_bits=...): ... - def create_file( - self, - file_path, - st_mode=..., - contents: str = ..., - st_size: Incomplete | None = ..., - create_missing_dirs: bool = ..., - apply_umask: bool = ..., - encoding: Incomplete | None = ..., - errors: Incomplete | None = ..., - side_effect: Incomplete | None = ..., - ): ... - def add_real_file( - self, source_path, read_only: bool = ..., target_path: Incomplete | None = ... - ): ... - def add_real_symlink(self, source_path, target_path: Incomplete | None = ...): ... - def add_real_directory( - self, - source_path, - read_only: bool = ..., - lazy_read: bool = ..., - target_path: Incomplete | None = ..., - ): ... - def add_real_paths( - self, path_list, read_only: bool = ..., lazy_dir_read: bool = ... - ) -> None: ... 
- def create_file_internally( - self, - file_path, - st_mode=..., - contents: str = ..., - st_size: Incomplete | None = ..., - create_missing_dirs: bool = ..., - apply_umask: bool = ..., - encoding: Incomplete | None = ..., - errors: Incomplete | None = ..., - read_from_real_fs: bool = ..., - raw_io: bool = ..., - side_effect: Incomplete | None = ..., - ): ... - def create_symlink(self, file_path, link_target, create_missing_dirs: bool = ...): ... - def link(self, old_path, new_path, follow_symlinks: bool = ...): ... - def readlink(self, path): ... - def makedir(self, dir_name, mode=...) -> None: ... - def makedirs(self, dir_name, mode=..., exist_ok: bool = ...) -> None: ... - def isdir(self, path, follow_symlinks: bool = ...): ... - def isfile(self, path, follow_symlinks: bool = ...): ... - def islink(self, path): ... - def confirmdir(self, target_directory): ... - def remove(self, path) -> None: ... - def rmdir(self, target_directory, allow_symlink: bool = ...) -> None: ... - def listdir(self, target_directory): ... - -class FakeFileOpen: - __name__: str - filesystem: Incomplete - raw_io: Incomplete - def __init__( - self, filesystem, delete_on_close: bool = ..., use_io: bool = ..., raw_io: bool = ... - ) -> None: ... - def __call__(self, *args, **kwargs): ... - def call( - self, - file_, - mode: str = ..., - buffering: int = ..., - encoding: Incomplete | None = ..., - errors: Incomplete | None = ..., - newline: Incomplete | None = ..., - closefd: bool = ..., - opener: Incomplete | None = ..., - open_modes: Incomplete | None = ..., - ): ... diff --git a/stubs/pyfakefs/fake_filesystem_unittest.pyi b/stubs/pyfakefs/fake_filesystem_unittest.pyi deleted file mode 100644 index aade18b0..00000000 --- a/stubs/pyfakefs/fake_filesystem_unittest.pyi +++ /dev/null @@ -1,20 +0,0 @@ -from types import ModuleType -from typing import Any, Callable, Dict, List, Optional, Union - -from _typeshed import Incomplete # pylint: disable=import-error - -Patcher = Incomplete -PatchMode = Incomplete - -def patchfs( - _func: Optional[Incomplete] = ..., - *, - additional_skip_names: Optional[List[Union[str, ModuleType]]] = ..., - modules_to_reload: Optional[List[ModuleType]] = ..., - modules_to_patch: Optional[Dict[str, ModuleType]] = ..., - allow_root_user: bool = ..., - use_known_patches: bool = ..., - patch_open_code: PatchMode = ..., - patch_default_args: bool = ..., - use_cache: bool = ..., -) -> Callable[[Any], Any]: ... diff --git a/stubs/pytest.pyi b/stubs/pytest.pyi deleted file mode 100644 index 6e31256b..00000000 --- a/stubs/pytest.pyi +++ /dev/null @@ -1,8 +0,0 @@ -# pylint: disable=reimported,no-name-in-module,unused-import,function-redefined.redefined-builtin -from _pytest.python_api import raises -from _typeshed import Incomplete as fixture -from _typeshed import Incomplete as mark - -def skip(msg: str = "", *, allow_module_level: bool = False): ... 
- -__all__ = ["mark", "fixture", "skip", "raises"] diff --git a/stubs/pytest_httpserver.pyi b/stubs/pytest_httpserver.pyi deleted file mode 100644 index 0ee27850..00000000 --- a/stubs/pytest_httpserver.pyi +++ /dev/null @@ -1,134 +0,0 @@ -import abc -from enum import Enum -from ssl import SSLContext -from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional, Pattern, Tuple, Union - -# pylint: disable=import-error, no-name-in-module, super-init-not-called, multiple-statements, too-few-public-methods, invalid-name, line-too-long -from _typeshed import Incomplete - -from werkzeug.wrappers import Request, Response - -URI_DEFAULT: str -METHOD_ALL: str -HEADERS_T = Union[Mapping[str, Union[str, Iterable[str]]], Iterable[Tuple[str, str]]] -HVMATCHER_T = Callable[[str, Optional[str], str], bool] - -class Error(Exception): ... -class NoHandlerError(Error): ... -class HTTPServerError(Error): ... -class NoMethodFoundForMatchingHeaderValueError(Error): ... - -class WaitingSettings: - raise_assertions: Incomplete - stop_on_nohandler: Incomplete - timeout: Incomplete - def __init__(self, raise_assertions: bool = ..., stop_on_nohandler: bool = ..., timeout: float = ...) -> None: ... - -class Waiting: - def __init__(self) -> None: ... - def complete(self, result: bool): ... - @property - def result(self) -> bool: ... - @property - def elapsed_time(self) -> float: ... - -class HeaderValueMatcher: - DEFAULT_MATCHERS: MutableMapping[str, Callable[[Optional[str], str], bool]] - matchers: Incomplete - def __init__(self, matchers: Optional[Mapping[str, Callable[[Optional[str], str], bool]]] = ...) -> None: ... - @staticmethod - def authorization_header_value_matcher(actual: Optional[str], expected: str) -> bool: ... - @staticmethod - def default_header_value_matcher(actual: Optional[str], expected: str) -> bool: ... - def __call__(self, header_name: str, actual: Optional[str], expected: str) -> bool: ... - -class URIPattern(abc.ABC, metaclass=abc.ABCMeta): - @abc.abstractmethod - def match(self, uri: str) -> bool: ... - -class RequestMatcher: - uri: Incomplete - method: Incomplete - query_string: Incomplete - query_matcher: Incomplete - json: Incomplete - headers: Incomplete - data: Incomplete - data_encoding: Incomplete - header_value_matcher: Incomplete - # def __init__(self) - def match_data(self, request: Request) -> bool: ... - def match_uri(self, request: Request) -> bool: ... - def match_json(self, request: Request) -> bool: ... - def match(self, request: Request) -> bool: ... - -class RequestHandlerBase(abc.ABC, metaclass=abc.ABCMeta): - def respond_with_json(self, response_json, status: int = ..., headers: Optional[Mapping[str, str]] = ..., content_type: str = ...): ... - def respond_with_data(self, response_data: Union[str, bytes] = ..., status: int = ..., headers: Optional[HEADERS_T] = ..., mimetype: Optional[str] = ..., content_type: Optional[str] = ...): ... - @abc.abstractmethod - def respond_with_response(self, response: Response): ... - -class RequestHandler(RequestHandlerBase): - matcher: Incomplete - request_handler: Incomplete - def __init__(self, matcher: RequestMatcher) -> None: ... - def respond(self, request: Request) -> Response: ... - def respond_with_handler(self, func: Callable[[Request], Response]): ... - def respond_with_response(self, response: Response): ... 
- -class HandlerType(Enum): - PERMANENT: str - ONESHOT: str - ORDERED: str - -class HTTPServerBase(abc.ABC, metaclass=abc.ABCMeta): - host: Incomplete - port: Incomplete - server: Incomplete - server_thread: Incomplete - assertions: Incomplete - handler_errors: Incomplete - log: Incomplete - ssl_context: Incomplete - no_handler_status_code: int - def __init__(self, host: str, port: int, ssl_context: Optional[SSLContext] = ...) -> None: ... - def clear(self) -> None: ... - def clear_assertions(self) -> None: ... - def clear_handler_errors(self) -> None: ... - def clear_log(self) -> None: ... - def url_for(self, suffix: str): ... - def create_matcher(self, *args, **kwargs) -> RequestMatcher: ... - def thread_target(self) -> None: ... - def is_running(self) -> bool: ... - def start(self) -> None: ... - def stop(self) -> None: ... - def add_assertion(self, obj) -> None: ... - def check(self) -> None: ... - def check_assertions(self) -> None: ... - def check_handler_errors(self) -> None: ... - def respond_nohandler(self, request: Request, extra_message: str = ...): ... - @abc.abstractmethod - def dispatch(self, request: Request) -> Response: ... - def application(self, request: Request): ... - def __enter__(self): ... - def __exit__(self, *args, **kwargs) -> None: ... - @staticmethod - def format_host(host): ... - -class HTTPServer(HTTPServerBase): - DEFAULT_LISTEN_HOST: str - DEFAULT_LISTEN_PORT: int - ordered_handlers: Incomplete - oneshot_handlers: Incomplete - handlers: Incomplete - permanently_failed: bool - default_waiting_settings: Incomplete - def __init__(self, host=..., port=..., ssl_context: Optional[SSLContext] = ..., default_waiting_settings: Optional[WaitingSettings] = ...) -> None: ... - def clear(self) -> None: ... - def clear_all_handlers(self) -> None: ... - def expect_request(self, uri: Union[str, URIPattern, Pattern[str]], method: str = ..., data: Union[str, bytes, None] = ..., data_encoding: str = ..., header_value_matcher: Optional[HVMATCHER_T] = ..., handler_type: HandlerType = ..., json: Any = ...) -> RequestHandler: ... - def format_matchers(self) -> str: ... - def respond_nohandler(self, request: Request, extra_message: str = ...): ... - def respond_permanent_failure(self): ... - def dispatch(self, request: Request) -> Response: ... - def wait(self, raise_assertions: Optional[bool] = ..., stop_on_nohandler: Optional[bool] = ..., timeout: Optional[float] = ...): ... diff --git a/stubs/werkzeug/wrappers.pyi b/stubs/werkzeug/wrappers.pyi deleted file mode 100644 index f4a46d17..00000000 --- a/stubs/werkzeug/wrappers.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from _typeshed import Incomplete # pylint: disable=import-error,no-name-in-module -Request = Incomplete -Response = Incomplete diff --git a/tests/conftest.py b/tests/conftest.py index 1a2a4974..a0ff0d1d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,10 +5,10 @@ This module is run automatically by pytest to define and enable fixtures. """ -# pyre-ignore-all-errors[21] import warnings -import pytest # pyre does not find the module when run by tox -e py311-pyre +import pytest + @pytest.fixture(autouse=True) def set_warnings(): @@ -24,14 +24,12 @@ def set_warnings(): One purpose of this fixture that with it, we can globally enable Development Mode (https://docs.python.org/3/library/devmode.html) - using setenv = PYTHONDEVMODE=yes in tox.int which enables further + using setenv:PYTHONDEVMODE=yes in tox.ini which enables further run-time checking during tests. 
- By additionally using setenv = PYTHONWARNINGS=ignore in tox.ini, - we can disabling the Deprecation warnings wihch pytest plugins exhibit - (which we are not interested in, those are not our responsiblity). - and this fixture will still enable the default warning filter to - have e.g. ResourceWarning checks enabled. + Using setenv:PYTHONWARNINGS=ignore in tox.ini, we disable the Deprecation + warnings caused by pytest plugins. This fixture still enables the default + warning filter to have e.g. ResourceWarning checks enabled. Another nice effect is that also during interactive pytest use, the default warning filter also provides checking of ResourceWarning: diff --git a/tests/httpserver_testcase.py b/tests/httpserver_testcase.py index b9dc5ceb..cad59524 100644 --- a/tests/httpserver_testcase.py +++ b/tests/httpserver_testcase.py @@ -1,4 +1,5 @@ import os +import sys import unittest from typing import Callable, Optional @@ -11,9 +12,10 @@ from pytest_httpserver import HTTPServer from werkzeug.wrappers import Request, Response - ErrorHandler = Optional[Callable[[Request], Response]] + ErrorHandler = Optional[Callable[[Request], Response | None]] except ImportError: pytest.skip(allow_module_level=True) + sys.exit(0) # Let pyright know that this is a dead end class HTTPServerTestCase(unittest.TestCase): @@ -31,7 +33,7 @@ def tearDownClass(cls): @classmethod def serve_file(cls, root, file_path, error_handler=None, real_path=None): - # type:(str, str, Optional[Callable[[Request], Response]], Optional[str]) -> None + # type:(str, str, ErrorHandler, Optional[str]) -> None """Expect a GET request and handle it using the local pytest_httpserver.HTTPServer""" def handle_get(request): diff --git a/tests/test_accessor.py b/tests/test_accessor.py index 93f5d609..885d5762 100644 --- a/tests/test_accessor.py +++ b/tests/test_accessor.py @@ -1,14 +1,16 @@ import unittest - -from pyfakefs.fake_filesystem import FakeFilesystem +from typing import TYPE_CHECKING import xcp.accessor from .test_mountingaccessor import check_binary_read, check_binary_write +if TYPE_CHECKING: + import pyfakefs + def test_file_accessor(fs): - # type:(FakeFilesystem) -> None + # type(pyfakefs.fake_filesystem.FakeFilesystem) -> None """Test FileAccessor.writeFile(), .openAddress and .access using pyfakefs""" accessor = xcp.accessor.createAccessor("file://repo/", False) assert isinstance(accessor, xcp.accessor.FileAccessor) @@ -18,7 +20,7 @@ def test_file_accessor(fs): class TestAccessor(unittest.TestCase): def setUp(self): - """Provide the refrence content of the repo/.treeinfo file for check_repo_access()""" + """Provide the reference content of the repo/.treeinfo file for check_repo_access()""" with open("tests/data/repo/.treeinfo", "rb") as dot_treeinfo: self.reference_treeinfo = dot_treeinfo.read() @@ -35,6 +37,7 @@ def check_repo_access(self, a): self.assertFalse(a.access('no_such_file')) self.assertEqual(a.lastError, 404) a.finish() + a.finish() # Cover the code handling a 2nd call of accessor.finish() def test_filesystem_accessor_access(self): """Test FilesystemAccessor.access()""" diff --git a/tests/test_bootloader.py b/tests/test_bootloader.py index 82500336..6c31d00a 100644 --- a/tests/test_bootloader.py +++ b/tests/test_bootloader.py @@ -41,15 +41,14 @@ def test_grub2(self): universal_newlines=True) assert proc.stdout for line in proc.stdout: - # pylint: disable-next=deprecated-method - self.assertRegexpMatches(line, r"^(5a6,13$|>)") + self.assertRegex(line, r"^(5a6,13$|>)") # Replaces the removed assertRegexpMatches call
proc.stdout.close() proc.wait() class TestLinuxBootloader(unittest.TestCase): def setUp(self): - self.tmpdir = mkdtemp(prefix="testbl") + self.tmpdir = mkdtemp(prefix="test-bootloader") bootdir = os.path.join(self.tmpdir, "boot") grubdir = os.path.join(bootdir, "grub") os.makedirs(grubdir) diff --git a/tests/test_cpiofile.py b/tests/test_cpiofile.py index cd36a2f2..0371d2df 100644 --- a/tests/test_cpiofile.py +++ b/tests/test_cpiofile.py @@ -1,5 +1,4 @@ # suppress false positive on pytest missing pytest.raises(): -# pyre-ignore-all-errors[16] """ Test various modes of creating and extracting CpioFile using different compression types, opening the archive as stream and as file, using pyfakefs as filesystem without diff --git a/tests/test_ftpaccessor.py b/tests/test_ftpaccessor.py index a5b0b303..340b759c 100644 --- a/tests/test_ftpaccessor.py +++ b/tests/test_ftpaccessor.py @@ -21,13 +21,14 @@ from io import BytesIO import pytest -import pytest_localftpserver # pylint: disable=unused-import # Ensure that it is installed +import pytest_localftpserver # Ensure that it is installed from six import ensure_binary, ensure_str import xcp.accessor binary_data = b"\x80\x91\xaa\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xcc\xdd\xee\xff" text_data = "✋➔Hello Accessor from the 🗺, download and verify ✅ me!" +assert pytest_localftpserver def upload_textfile(ftpserver, accessor): diff --git a/tests/test_httpaccessor.py b/tests/test_httpaccessor.py index d0248f7f..c6bd38fa 100644 --- a/tests/test_httpaccessor.py +++ b/tests/test_httpaccessor.py @@ -43,7 +43,6 @@ def http_get_request_data(self, url, read_file, error_handler): def assert_http_get_request_data(self, url, read_file, error_handler): # type:(str, str, ErrorHandler) -> HTTPAccessor - # pyre-ignore[23]: silence false positive with self.http_get_request_data(url, read_file, error_handler) as (httpaccessor, ref): http_accessor_filehandle = httpaccessor.openAddress(read_file) if sys.version_info >= (3, 0): diff --git a/tests/test_ifrename_logic.py b/tests/test_ifrename_logic.py index 5d445f3c..6e38dcd0 100644 --- a/tests/test_ifrename_logic.py +++ b/tests/test_ifrename_logic.py @@ -1,4 +1,4 @@ -# pyright: reportGeneralTypeIssues=false +# pyright: reportGeneralTypeIssues=false,reportAttributeAccessIssue=false # pytype: disable=attribute-error from __future__ import print_function from __future__ import unicode_literals @@ -77,7 +77,7 @@ def test_newhw_norules_1eth(self): def test_newhw_norules_2eth(self): """ Two previously unrecognised nics, with no other rules. Expecting - them to be renamed to eth0 and 1 respectivly + them to be renamed to eth0 and 1 respectively """ eth0 = MACPCI("ab:cd:ef:12:34:56","0000:00:0f.0","side-12-eth1") @@ -96,7 +96,7 @@ def test_newhw_norules_2eth(self): def test_newhw_1srule_1eth(self): """ - One previously unrecognised nic with a static rule refering to it. + One previously unrecognised nic with a static rule referring to it. Expecting it to be named to eth0 as per the static rule. """ @@ -336,7 +336,7 @@ def test_usecase5(self): def test_CA_94279(self): """ - CA-94279 occured because the logic did no honour the order paramater + CA-94279 occurred because the logic did no honor the order parameter from biosdevname if it was the only deciding factor. (It appears rare for the embedded network cards to have a greater PCI sbdf than the non- embedded cards) @@ -528,11 +528,11 @@ def setUp(self): Set up a lot of MACPCI objects. 
This reflection magic creates many self.cXXX objects where XXX - represents the indicies of mac, pci and eth names. + represents the indices of mac, pci and eth names. e.g. self.c123 means the 1st mac, 2nd pci and 3rd eth self.c221 means the 2nd mac, 2nd pci and 1st eth - In addition, set up equivelent self.sXXX objects which have a kname + In addition, set up equivalent self.sXXX objects which have a kname set to None and a tname set to the 'eth' """ @@ -558,7 +558,7 @@ def tearDown(self): self.siobuff.close() - def test_srule_eth_unaliased(self): + def test_srule_eth_not_aliased(self): rename([self.s111], [], [], []) def test_srule_eth_alias(self): diff --git a/tests/test_ifrename_static.py b/tests/test_ifrename_static.py index 3471c796..d6a6c33a 100644 --- a/tests/test_ifrename_static.py +++ b/tests/test_ifrename_static.py @@ -136,7 +136,7 @@ def test_single_ppn_slot(self): self.assertEqual(sr.formulae, {"eth0": ("ppn", "p2p3")}) self.assertEqual(sr.rules, []) - def test_single_oldsytle_ppn_slot(self): + def test_single_old_style_ppn_slot(self): # CA-82901 - Accept old-style PPNs but translate them to new-style fd = StringIO('eth0:ppn="pci2p3"') sr = StaticRules(fd = fd) @@ -455,7 +455,7 @@ def test_one_valid(self): def test_one_invalid_method(self): sr = StaticRules() - sr.formulae = {"eth0": ("ppf", "foobaz"), + sr.formulae = {"eth0": ("ppf", "foobar"), } self.assertEqual(sr.write(False), "") diff --git a/tests/test_logger.py b/tests/test_logger.py index af8cd5c3..3a14521d 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -10,8 +10,16 @@ from xcp.logger import openLog -def test_openLog_mock_open(): - """Cover xcp.logger.openLog.open_with_codec_handling and check the arguments used for open()""" +def test_openLog_mock_open(fs): + # type(FakeFilesystem) -> None + """ + - Covers xcp.logger.openLog.open_with_codec_handling and + - checks the arguments it uses for open() + + Because it needs to call openLog(), which creates a logfile, this test uses + the pytest pyfakefs fixture 'fs', which creates it in a virtual fs. + With it, this test does not create a log file on the filesystem of the host.
+ """ fh = StringIO() with patch("xcp.compat.open", mock_open()) as open_mock: open_mock.return_value = fh @@ -31,3 +39,4 @@ def test_openLog_mock_stdin(): assert openLog("test.log") is True os.close(slave_fd) os.close(master_fd) + open_mock.assert_called_once_with("test.log", "a", **open_utf8) diff --git a/tests/test_mac.py b/tests/test_mac.py index 7976a3bf..1524db3f 100644 --- a/tests/test_mac.py +++ b/tests/test_mac.py @@ -45,7 +45,7 @@ def test_colon_too_many_octets(self): MAC(val) self.assertFalse(MAC.is_valid(val)) - def test_dash_too_few_octetes(self): + def test_dash_too_few_octets(self): val = "00-00-00-00-00" with self.assertRaises(ValueError): MAC(val) @@ -190,7 +190,7 @@ def test_random(self): self.assertEqual(mac.as_string("-", True), "15-52-4A-B4-0C-FF") self.assertEqual(mac.as_string(".", True), "1552.4AB4.0CFF") -class TestCompaisons(unittest.TestCase): +class TestComparisons(unittest.TestCase): def test_equal(self): @@ -218,7 +218,7 @@ def test_unequal(self): self.assertFalse(m1 > m2) self.assertFalse(m1 >= m2) -class TestHashability(unittest.TestCase): +class TestHashing(unittest.TestCase): def test_keys(self): diff --git a/tests/test_mountingaccessor.py b/tests/test_mountingaccessor.py index 9dea2d3f..54bb6303 100644 --- a/tests/test_mountingaccessor.py +++ b/tests/test_mountingaccessor.py @@ -20,6 +20,7 @@ import pytest pytest.skip(allow_module_level=True) + sys.exit(0) # Let pyright know that this is a dead end binary_data = b"\x00\x1b\x5b\x95\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xcc\xdd\xee\xff" diff --git a/tests/test_pci.py b/tests/test_pci.py index 1b723538..d3278507 100644 --- a/tests/test_pci.py +++ b/tests/test_pci.py @@ -11,6 +11,8 @@ if sys.version_info >= (3, 6): from pytest_subprocess.fake_process import FakeProcess +else: + pytest.skip(allow_module_level=True) class TestInvalid(unittest.TestCase): @@ -31,6 +33,7 @@ class TestValid(unittest.TestCase): def test_null_with_segment(self): + assert PCI.is_valid("0000:00:00.0") is True c = PCI("0000:00:00.0") self.assertEqual(c.segment, 0) @@ -43,6 +46,7 @@ def test_null_with_segment(self): def test_null_without_segment(self): + assert PCI.is_valid("00:00.0") is True c = PCI("00:00.0") self.assertEqual(c.segment, 0) @@ -54,6 +58,7 @@ def test_null_without_segment(self): def test_valid(self): + assert PCI.is_valid("8765:43:1f.3") is True c = PCI("8765:43:1f.3") self.assertEqual(c.segment, 0x8765) @@ -61,14 +66,31 @@ def test_valid(self): self.assertEqual(c.device, 0x1f) self.assertEqual(c.function, 0x3) + def test_valid_index(self): + + assert PCI.is_valid("8765:43:1f.3[0]") is True + assert PCI.is_valid("1234:56:01.7[1]") is True + c = PCI("1234:56:01.7[1]") + + self.assertEqual(c.segment, 0x1234) + self.assertEqual(c.bus, 0x56) + self.assertEqual(c.device, 0x01) + self.assertEqual(c.function, 0x7) + self.assertEqual(c.index, 0x1) + def test_equality(self): self.assertEqual(PCI("0000:00:00.0"), PCI("00:00.0")) + assert PCI("1234:56:01.7[1]") != PCI("1234:56:01.7[2]") + assert PCI("1234:56:01.2") >= PCI("1234:56:01.2") + assert PCI("1234:56:01.1") <= PCI("1234:56:01.2") + assert PCI("1234:56:01.3") > PCI("1234:56:01.2") + assert PCI("1234:56:01.1") < PCI("1234:56:02.2") if sys.version_info >= (2, 7): def assert_videoclass_devices(ids, devs): # type: (PCIIds, PCIDevices) -> None - """Verification function for checking the otuput of PCIDevices.findByClass()""" + """Verification function for checking the output of PCIDevices.findByClass()""" video_class = ids.lookupClass('Display controller') assert video_class == 
["03"] sorted_devices = sorted(devs.findByClass(video_class), @@ -76,6 +98,7 @@ def assert_videoclass_devices(ids, devs): # type: (PCIIds, PCIDevices) -> None # Assert devs.findByClass() finding 3 GPUs from tests/data/lspci-mn in our mocked PCIIds DB: assert len(sorted_devices) == 3 + assert len(devs.findByClass("03", "80")) == 2 # For each of the found devices, assert these expected values: for (video_dev, @@ -164,6 +187,7 @@ def test_videoclass_by_mock_calls(fp): def mock_lspci_using_open_testfile(fp): """Mock xcp.pci.PCIDevices.Popen() using open(tests/data/lspci-mn)""" with open("tests/data/lspci-mn", "rb") as fake_data: - assert isinstance(fp, FakeProcess) + if sys.version_info >= (3, 6): + assert isinstance(fp, FakeProcess) fp.register_subprocess(["lspci", "-mn"], stdout=fake_data.read()) return PCIDevices() diff --git a/tests/test_sbdfi_to_nic.py b/tests/test_sbdfi_to_nic.py index 5bb9a5af..018a02bf 100644 --- a/tests/test_sbdfi_to_nic.py +++ b/tests/test_sbdfi_to_nic.py @@ -35,7 +35,7 @@ def check_raises(exc_type, func, sbdfi, nic_list): def test_sbdf_index(): - """Test all possible uses and xcp.pci.pci_sbdfi_to_nic() and raised Execptions""" + """Test all possible uses and xcp.pci.pci_sbdfi_to_nic() and raised Exceptions""" assert pci_sbdfi_to_nic("0000:01:00.0", nics) == nics[0] assert pci_sbdfi_to_nic("0000:01:00.0[0]", nics) == nics[0] assert pci_sbdfi_to_nic("0000:01:00.0[1]", nics) == nics[1] diff --git a/tox.ini b/tox.ini index d7386335..0491f56e 100644 --- a/tox.ini +++ b/tox.ini @@ -1,11 +1,17 @@ [tox] -# This is the order how tox runs the tests when used interactively during development. -# Run the tests which uncover issues most often first! For example: -# 1. python 2.7 and 3.10 coverage test for changed, but not covered lines and mypy check -# 2. python 3.6 test and pylint warnings from changed lines -# 3. pytype (needs Python 3.8 for best results) -# 4. pyre and pyright checks, pytest test report as markdown for GitHub Actions summary -envlist = py38-covcombine-check, py36-lint-test, py310-pytype, py311-pyre-mdreport +# Set the envlist: +# Defines the environments that tox runs by default (when -e is not used) +# +# Default order of execution when using `tox` without `-e `: +# +# 1. pytest using python 3.11 for code coverage with diff-cover and mypy check +# 2. pytype (needs Python 3.10 or newer) +# 3. pylint and pyright checks (any modern Python version is fine) +# +# .github/workflows/main.yml is set up to test with 3.11, 3.12 and 3.13 in parallel. 
+# Therefore, use three environments: One with 3.11, one with 3.12 and one with 3.13: +# +envlist = py311-covcp-check-mdreport, py312-cov-pytype, py313-cov-lint-pyright isolated_build = true skip_missing_interpreters = true requires = @@ -28,9 +34,9 @@ commands = # https://github.com/actions/toolkit/blob/main/docs/commands.md#problem-matchers echo "::add-matcher::.github/workflows/PYTHONWARNINGS-problemMatcher.json" pytest --cov -v --new-first -x --show-capture=all -rA - sh -c 'ls -l {env:COVERAGE_FILE}' sh -c 'if [ -n "{env:PYTEST_MD_REPORT_OUTPUT}" -a -n "{env:GITHUB_STEP_SUMMARY}" ];then \ - sed -i "s/tests\(.*py\)/[&](&)/" {env:PYTEST_MD_REPORT_OUTPUT}; sed "/title/,/\/style/d" \ + mkdir -p $(dirname "{env:GITHUB_STEP_SUMMARY:.git/sum.md}"); \ + sed "s/tests\(.*py\)/[&](&)/" \ {env:PYTEST_MD_REPORT_OUTPUT} >{env:GITHUB_STEP_SUMMARY:.git/sum.md};fi' [testenv] @@ -42,15 +48,15 @@ description = Run in a {basepython} virtualenv: lint: {[lint]description} mdreport: Make a test report (which is shown in the GitHub Actions Summary Page) test: {[test]description} - # https://pypi.org/project/pyre-check/ pyre intro: https://youtu.be/0FSXS5kw2m4 - pyre: Run pyre for static analyis, only passes using: tox -e py311-pyre + # See https://microsoft.github.io/pyright/#/configuration for more in pyright + pyright: Run pyright for static analyis check: Run mypy for static analyis pytype: Run pytype for static analyis, intro: https://youtu.be/abvW0mOrDiY # checkers(mypy) need the pytest dependices as well: extras = {check,pytype}: {[check]extras} - {cov,covcp,covcombine,fox,check,lint,test,pytype,pyre,mdreport}: {[test]extras} - {cov,covcp,covcombine,fox}: {[cov]extras} + {cov,covcp,covcombine,fox,check,lint,test,pytype,pyright,mdreport}: {[test]extras} + {cov,covcp,covcombine,fox}: {[cov]extras} deps = mdreport: pytest-md-report {py27-test,py27-cov}: pyftpdlib @@ -58,9 +64,7 @@ deps = {cov,covcp,covcombine,fox}: coverage[toml] {cov,covcp,covcombine,fox}: diff-cover {lint,fox}: {[lint]deps} - pyre: pyre-check - pyre: pyre-extensions - pyre: pyright + pyright: pyright pytype: {[pytype]deps} allowlist_externals = {cov,covcp,covcombine,fox,check,lint,test,mdreport}: echo @@ -81,7 +85,6 @@ passenv = covcp: HOME check: MYPY_FORCE_COLOR check: MYPY_FORCE_TERMINAL_WIDTH - pyre: PYRE_TYPESHED {fox,check,pytype}: TERM fox: DISPLAY fox: XAUTHORITY @@ -104,7 +107,7 @@ setenv = {[cov]setenv} commands = lint: {[lint]commands} - pyre: {[pyre]commands} + pyright: {[pyright]commands} check: {[check]commands} pytype: {[pytype]commands} {cov,covcp,covcombine,check,fox,test,mdreport}: {[test]commands} @@ -189,6 +192,8 @@ python = 3.9: py39 3.10: py310 3.11: py311 + 3.12: py312 + 3.13: py313 [check] extras = mypy @@ -200,17 +205,17 @@ commands = ignore = W191,W293,W504,E101,E126,E127,E201,E202,E203,E221,E222,E226,E227,E241,E251,E261,E262,E265,E301,E302,E303,E305,E722,W391,E401,E402,E741 max-line-length = 129 -[pyre] +[pyright] commands = - pyre: python3.11 --version -V # Needs py311-pyre, does not work with py310-pyre - python pyre_runner.py - -pyright + pyright --version + # To make pyright checks optional, change the next line to '-pyright': + pyright [pytype] deps = pytype pandas commands = - python3.10 -V # Needs python <= 3.10, and 3.10 is needed to parse new "|" syntax + # Needs python >= 3.10: Needed to parse the newer syntax for "Type2 | Type2" pytype --version # Runs pytype -j auto -k --config .github/workflows/pytype.cfg and parses the output: python3 pytype_runner.py # When switching versions, update 
.github/workflows/pytype.cfg too! diff --git a/xcp/accessor.py b/xcp/accessor.py index 9583dfe7..7da5076a 100644 --- a/xcp/accessor.py +++ b/xcp/accessor.py @@ -24,7 +24,6 @@ """accessor - provide common interface to access methods""" import errno -# pyre-ignore-all-errors[6,16] import ftplib import io import os @@ -315,7 +314,8 @@ def finish(self): self.cleanup = False self.ftp = None - def access(self, path): # pylint: disable=arguments-differ,arguments-renamed + # pylint: disable-next=arguments-differ,arguments-renamed + def access(self, path): # pyright: ignore[reportIncompatibleMethodOverride] try: logger.debug("Testing "+path) self._cleanup() @@ -395,11 +395,11 @@ def __repr__(self): return "" % self.baseAddress -# Tuple passed in tests to isinstanc(val, ...Types) to check types: +# Tuple passed in tests to isinstance(val, ...Types) to check types: MountingAccessorTypes = (DeviceAccessor, NFSAccessor) """Tuple for type checking in unit tests testing subclasses of MountingAccessor""" -# Tuple passed in tests to isinstanc(val, ...Types) to check types: +# Tuple passed in tests to isinstance(val, ...Types) to check types: LocalTypes = (DeviceAccessor, NFSAccessor, FileAccessor) diff --git a/xcp/bootloader.py b/xcp/bootloader.py index 353cc7ff..81721c82 100644 --- a/xcp/bootloader.py +++ b/xcp/bootloader.py @@ -32,7 +32,6 @@ import xcp.cmd -# pyre-ignore-all-errors[21] try: # xenserver-release.rpm puts a branding.py into our xcp installation directory: from xcp import branding # type:ignore[attr-defined] # pytype: disable=import-error except ImportError: # For CI, use stubs/branding.py (./stubs is added to pythonpath) diff --git a/xcp/compat.py b/xcp/compat.py index 9a08772e..bdb2bec6 100644 --- a/xcp/compat.py +++ b/xcp/compat.py @@ -38,7 +38,7 @@ def open_with_codec_handling(filename, mode="r", encoding="utf-8", **kwargs): def open_with_codec_handling(filename, mode="r", encoding="", errors="", **kwargs): # type:(str, str, str, str, str) -> IO[Any] - """open() wrapper to pass mode and **kwargs to open(), ignores endcoding and errors args""" + """open() wrapper to pass mode and **kwargs to open(), ignores encoding and errors args""" _ = encoding _ = errors return open(filename, mode, **kwargs) diff --git a/xcp/cpiofile.py b/xcp/cpiofile.py index 995da94c..ff8f5c7c 100644 --- a/xcp/cpiofile.py +++ b/xcp/cpiofile.py @@ -36,8 +36,6 @@ Derived from Lars Gustäbel's tarfile.py """ from __future__ import print_function -# pyre is not as good as other static analysis tools in inferring the correct types: -# pyre-ignore-all-errors[6,16] __version__ = "0.1" __author__ = "Simon Rowe" diff --git a/xcp/net/biosdevname.py b/xcp/net/biosdevname.py index 9c1427c4..a2bb548b 100644 --- a/xcp/net/biosdevname.py +++ b/xcp/net/biosdevname.py @@ -51,7 +51,7 @@ def all_devices_all_names(): Get all information, including all names, for all devices. Returns a dictionary of devices, indexed by current kernel name. All entries will be string to string mappings, with the exception of - 'BIOS device' which will be a dictonary of policies to names. + 'BIOS device' which will be a dictionary of policies to names. """ devices = {} @@ -90,7 +90,7 @@ def all_devices_all_names(): def has_ppn_quirks(bdn_dicts): # CA-75599 - Assert that no devices share the same SMBIOS Instance. 
Some - # BIOSes have multiple different nics with the same value set, which causes + # BIOSes have multiple different NICs with the same value set, which causes # biosdevname to mis-name its physical policy names (emXX, pciXpX etc) smbios_instances = set() diff --git a/xcp/net/ifrename/logic.py b/xcp/net/ifrename/logic.py index 476dc2d7..f294bfbe 100644 --- a/xcp/net/ifrename/logic.py +++ b/xcp/net/ifrename/logic.py @@ -33,7 +33,7 @@ list of MACPCI objects in form ethXXX|side-XXX-ethXX->(mac, pci) [in] last_state - Last boot state (post rename) of network cards on the machine list of MACPCI objects in form (mac, pci)->ethXX -[in] old_state - Any older nics which have disappeared in the meantime +[in] old_state - Any older NICs which have disappeared in the meantime list of MACPCI objects in form (mac, pci)->ethXX [out] transactions diff --git a/xcp/net/ifrename/static.py b/xcp/net/ifrename/static.py index 76ab3723..2c1092c9 100644 --- a/xcp/net/ifrename/static.py +++ b/xcp/net/ifrename/static.py @@ -184,7 +184,7 @@ def load_and_parse(self): # If no validators, assume label method = "label" - # If we have a validator, test the valididy + # If we have a validator, test the validity else: if method in StaticRules.validators: if StaticRules.validators[method].match(value) is None: @@ -215,7 +215,7 @@ def generate(self, state): """ # CA-75599 - check that state has no shared ppns. - # See net.biodevname.has_ppn_quirks() for full reason + # See net.biosdevname.has_ppn_quirks() for full reason ppns = [ x.ppn for x in state if x.ppn is not None ] ppn_quirks = ( len(ppns) != len(set(ppns)) ) @@ -311,7 +311,7 @@ def write(self, header = True): LOG.warning("Method %s not recognised. Ignoring" % (method,)) continue - # If we have a validator, test the valididy + # If we have a validator, test the validity if method in StaticRules.validators: if StaticRules.validators[method].match(value) is None: LOG.warning("Invalid %s value '%s'. Ignoring" diff --git a/xcp/net/ip.py b/xcp/net/ip.py index 55b7e832..e9b068c6 100644 --- a/xcp/net/ip.py +++ b/xcp/net/ip.py @@ -85,7 +85,7 @@ def ip_link_set_name(src_name, dst_name): # if the device was up before, bring it back up if isup: - # Performace note: if we are doing an intermediate rename to + # Performance note: if we are doing an intermediate rename to # move a device sideways, we shouldn't bring it back until it has # its final name. 
However, I can't think of a non-hacky way of doing # this with the current implementation diff --git a/xcp/net/mac.py b/xcp/net/mac.py index 0d72a93c..976ffc68 100644 --- a/xcp/net/mac.py +++ b/xcp/net/mac.py @@ -44,7 +44,7 @@ class MAC(object): @classmethod def is_valid(cls, addr): """ - Static method to assertain whether addr is a recognised MAC address or + Static method to ascertain whether addr is a recognised MAC address or not """ try: @@ -134,7 +134,7 @@ def as_string(self, sep = ".", upper = False): if sep == ".": # this is a hack but I cant think of an easy way of - # manipulating self.octetes + # manipulating self.octets res = "%0.4x.%0.4x.%0.4x" % ( (self.integer >> 32) & 0xffff, (self.integer >> 16) & 0xffff, (self.integer ) & 0xffff ) @@ -146,7 +146,7 @@ def as_string(self, sep = ".", upper = False): res = ':'.join([ "%0.2x" % o for o in self.octets]) else: - raise ValueError("'%s' is not a valid seperator" % sep) + raise ValueError("'%s' is not a valid separator" % sep) if upper: return res.upper() diff --git a/xcp/pci.py b/xcp/pci.py index ef5cec29..1e41e345 100644 --- a/xcp/pci.py +++ b/xcp/pci.py @@ -50,8 +50,7 @@ class PCI(object): @classmethod def is_valid(cls, addr): """ - Static method to assertain whether addr is a recognised PCI address - or not + Static method to ascertain whether addr is a recognised PCI address """ try: PCI(addr)
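A minimal usage sketch of the PCI address parsing exercised by the new assertions in tests/test_pci.py above; it assumes only names that appear in this diff (xcp.pci.PCI, PCI.is_valid(), and the segment/bus/device/function/index attributes), and the expected values are copied from those assertions:

    from xcp.pci import PCI

    # Plain SBDF addresses and SBDF addresses with an index suffix are both accepted:
    assert PCI.is_valid("8765:43:1f.3") is True
    assert PCI.is_valid("1234:56:01.7[1]") is True

    # The fields parsed from "1234:56:01.7[1]", as asserted in test_valid_index():
    dev = PCI("1234:56:01.7[1]")
    assert (dev.segment, dev.bus, dev.device, dev.function, dev.index) == (0x1234, 0x56, 0x01, 0x7, 0x1)

    # PCI objects compare by address; a missing segment defaults to 0000,
    # and the index takes part in equality:
    assert PCI("0000:00:00.0") == PCI("00:00.0")
    assert PCI("1234:56:01.7[1]") != PCI("1234:56:01.7[2]")
    assert PCI("1234:56:01.1") < PCI("1234:56:02.2")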