Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

drop support for python 3.8 #281

Merged
merged 1 commit on Jul 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,10 @@ jobs:
main-windows:
uses: asottile/workflows/.github/workflows/[email protected]
with:
env: '["py38"]'
env: '["py39"]'
os: windows-latest
main-linux:
uses: asottile/workflows/.github/workflows/[email protected]
with:
env: '["py38", "py39", "py310", "py311", "py312"]'
env: '["py39", "py310", "py311", "py312"]'
os: ubuntu-latest
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ repos:
rev: v3.13.0
hooks:
- id: reorder-python-imports
args: [--py38-plus, --add-import, 'from __future__ import annotations']
args: [--py39-plus, --add-import, 'from __future__ import annotations']
- repo: https://github.com/asottile/add-trailing-comma
rev: v3.1.0
hooks:
Expand All @@ -26,7 +26,7 @@ repos:
rev: v3.16.0
hooks:
- id: pyupgrade
args: [--py38-plus]
args: [--py39-plus]
- repo: https://github.com/hhatto/autopep8
rev: v2.3.1
hooks:
Expand Down
11 changes: 1 addition & 10 deletions add_trailing_comma/_ast_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,4 @@ def ast_parse(contents_text: str) -> ast.Module:


def ast_to_offset(node: ast.AST) -> Offset:
candidates = [node]
while candidates:
candidate = candidates.pop()
if hasattr(candidate, 'lineno'):
return Offset(candidate.lineno, candidate.col_offset)
elif hasattr(candidate, '_fields'): # pragma: <3.9 cover
for field in reversed(candidate._fields):
candidates.append(getattr(candidate, field))
else:
raise AssertionError(node)
return Offset(node.lineno, node.col_offset)
8 changes: 3 additions & 5 deletions add_trailing_comma/_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,10 @@
import ast
import collections
import pkgutil
from collections.abc import Iterable
from typing import Callable
from typing import Iterable
from typing import List
from typing import NamedTuple
from typing import Protocol
from typing import Tuple
from typing import TypeVar

from tokenize_rt import Offset
Expand All @@ -22,8 +20,8 @@ class State(NamedTuple):


AST_T = TypeVar('AST_T', bound=ast.AST)
TokenFunc = Callable[[int, List[Token]], None]
ASTFunc = Callable[[State, AST_T], Iterable[Tuple[Offset, TokenFunc]]]
TokenFunc = Callable[[int, list[Token]], None]
ASTFunc = Callable[[State, AST_T], Iterable[tuple[Offset, TokenFunc]]]

FUNCS = collections.defaultdict(list)

Expand Down
4 changes: 2 additions & 2 deletions add_trailing_comma/_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@

import argparse
import sys
from typing import Iterable
from typing import Sequence
from collections.abc import Iterable
from collections.abc import Sequence

from tokenize_rt import src_to_tokens
from tokenize_rt import Token
Expand Down
33 changes: 16 additions & 17 deletions add_trailing_comma/_plugins/_with.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
from __future__ import annotations

import ast
import sys
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand All @@ -15,20 +14,20 @@
from add_trailing_comma._token_helpers import fix_brace


if sys.version_info >= (3, 9): # pragma: >=3.9 cover
def _fix_with(i: int, tokens: list[Token]) -> None:
def _fix_with(i: int, tokens: list[Token]) -> None:
i += 1
if tokens[i].name == 'UNIMPORTANT_WS':
i += 1
if tokens[i].name == 'UNIMPORTANT_WS':
i += 1
if tokens[i].src == '(':
fix = find_simple(i, tokens)
# only fix if outer parens are for the with items (next is ':')
if fix is not None and tokens[fix.braces[-1] + 1].src == ':':
fix_brace(tokens, fix, add_comma=True, remove_comma=True)
if tokens[i].src == '(':
fix = find_simple(i, tokens)
# only fix if outer parens are for the with items (next is ':')
if fix is not None and tokens[fix.braces[-1] + 1].src == ':':
fix_brace(tokens, fix, add_comma=True, remove_comma=True)

@register(ast.With)
def visit_With(
state: State,
node: ast.With,
) -> Iterable[tuple[Offset, TokenFunc]]:
yield ast_to_offset(node), _fix_with

@register(ast.With)
def visit_With(
state: State,
node: ast.With,
) -> Iterable[tuple[Offset, TokenFunc]]:
yield ast_to_offset(node), _fix_with
2 changes: 1 addition & 1 deletion add_trailing_comma/_plugins/calls.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import ast
import functools
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand Down
2 changes: 1 addition & 1 deletion add_trailing_comma/_plugins/classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import ast
import functools
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand Down
2 changes: 1 addition & 1 deletion add_trailing_comma/_plugins/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import ast
import functools
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand Down
2 changes: 1 addition & 1 deletion add_trailing_comma/_plugins/imports.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import annotations

import ast
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand Down
4 changes: 2 additions & 2 deletions add_trailing_comma/_plugins/literals.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import ast
import functools
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import NON_CODING_TOKENS
from tokenize_rt import Offset
Expand Down Expand Up @@ -91,7 +91,7 @@ def _fix_tuple_py38(
tokens: list[Token],
*,
one_el_tuple: bool,
) -> None: # pragma: >=3.8 cover
) -> None:
fix = find_simple(i, tokens)

# for tuples we *must* find a comma, otherwise it is not a tuple
Expand Down
2 changes: 1 addition & 1 deletion add_trailing_comma/_plugins/match.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import ast
import functools
import sys
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand Down
2 changes: 1 addition & 1 deletion add_trailing_comma/_plugins/pep695.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import ast
import sys
from typing import Iterable
from collections.abc import Iterable

from tokenize_rt import Offset
from tokenize_rt import Token
Expand Down
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ classifiers =
packages = find:
install_requires =
tokenize-rt>=3.0.1
python_requires = >=3.8
python_requires = >=3.9

[options.packages.find]
exclude =
Expand Down
Loading