MNT: require py39, enable pyupgrade for ruff, upgrade lang features, bump numpy min version (#614)
theOehrly committed Jul 16, 2024
1 parent 04ec86a commit b41d646
Showing 14 changed files with 39 additions and 57 deletions.
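
The hunks below repeat a small set of pyupgrade-style rewrites that ruff's "UP" rule group enforces once Python 3.9 is the minimum: PEP 585 built-in generics instead of typing.Dict/List/Tuple/Type, '' instead of str(), zero-argument super(), f-strings, and open() without an explicit 'r'. The following sketch is only illustrative (pages and NoLapDataError mirror names from the diff; the rest is made up for demonstration):

from collections.abc import Iterable   # was: from typing import Iterable
from typing import Optional            # Optional/Union still come from typing

# PEP 585 built-in generics replace typing.Dict/List/Tuple/Type on Python 3.9+
pages: dict[str, str] = {'session_info': 'SessionInfo.jsonStream'}

def first_message(lines: Iterable[str]) -> Optional[str]:
    # empty-string literals replace str(), f-strings replace %-formatting
    for line in lines:
        return f"message: {line}"
    return None

class NoLapDataError(Exception):
    def __init__(self, *args):
        # zero-argument super() replaces super(NoLapDataError, self)
        super().__init__("Failed to load session because the API did not "
                         "provide any usable data.")

# open() without an explicit 'r', because read mode is the default
with open(__file__) as fobj:
    first_line = fobj.readline()
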
2 changes: 1 addition & 1 deletion .github/workflows/selective_cache_persist.yml
@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: [ '3.8-minver', '3.8', '3.9', '3.10', '3.11', '3.12']
+ python-version: [ '3.9-minver', '3.9', '3.10', '3.11', '3.12']
#
name: Persist cache for ${{ matrix.python-version }}
steps:
3 changes: 1 addition & 2 deletions .github/workflows/tests.yml
@@ -16,10 +16,9 @@ jobs:
matrix:
include:
- name-suffix: "(Minimum Versions)"
python-version: "3.8"
python-version: "3.9"
cache-suffix: "-minver"
extra-requirements: "-c requirements/minver.txt"
- python-version: "3.8"
- python-version: "3.9"
- python-version: "3.10"
- python-version: "3.11"
9 changes: 4 additions & 5 deletions fastf1/_api.py
@@ -4,7 +4,6 @@
import json
import zlib
from typing import (
- Dict,
Optional,
Union
)
@@ -29,15 +28,15 @@

base_url = 'https://livetiming.formula1.com'

- headers: Dict[str, str] = {
+ headers: dict[str, str] = {
'Host': 'livetiming.formula1.com',
'Connection': 'close',
'TE': 'identity',
'User-Agent': 'BestHTTP',
'Accept-Encoding': 'gzip, identity',
}

- pages: Dict[str, str] = {
+ pages: dict[str, str] = {
'session_data': 'SessionData.json', # track + session status + lap count
'session_info': 'SessionInfo.jsonStream', # more rnd
'archive_status': 'ArchiveStatus.json', # rnd=1880327548
@@ -83,7 +82,7 @@ def make_path(wname, wdate, sname, sdate):


# define all empty columns for timing data
- EMPTY_LAPS = {'Time': pd.NaT, 'Driver': str(), 'LapTime': pd.NaT,
+ EMPTY_LAPS = {'Time': pd.NaT, 'Driver': '', 'LapTime': pd.NaT,
'NumberOfLaps': np.nan, 'NumberOfPitStops': np.nan,
'PitOutTime': pd.NaT, 'PitInTime': pd.NaT,
'Sector1Time': pd.NaT, 'Sector2Time': pd.NaT,
@@ -92,7 +91,7 @@ def make_path(wname, wdate, sname, sdate):
'SpeedI1': np.nan, 'SpeedI2': np.nan, 'SpeedFL': np.nan,
'SpeedST': np.nan, 'IsPersonalBest': False}

- EMPTY_STREAM = {'Time': pd.NaT, 'Driver': str(), 'Position': np.nan,
+ EMPTY_STREAM = {'Time': pd.NaT, 'Driver': '', 'Position': np.nan,
'GapToLeader': np.nan, 'IntervalToPositionAhead': np.nan}


23 changes: 9 additions & 14 deletions fastf1/core.py
@@ -43,15 +43,13 @@
import re
import typing
import warnings
+ from collections.abc import Iterable
from functools import cached_property
from typing import (
Any,
Callable,
- Iterable,
- List,
Literal,
Optional,
- Tuple,
Union
)

@@ -925,8 +923,8 @@ def add_driver_ahead(self, drop_existing: bool = True) -> "Telemetry":
)

if ((d['Date'].shape != dtd['Date'].shape)
- or np.any((d['Date'].values
- != dtd['Date'].values))):
+ or np.any(d['Date'].values
+ != dtd['Date'].values)):
dtd = dtd.resample_channels(new_date_ref=d["Date"])

# indices need to match as .join works index-on-index
@@ -1510,7 +1508,7 @@ def _load_laps_data(self, livedata=None):
elif not len(d2):
result = d1.copy()
result.reset_index(drop=True, inplace=True)
- result['Compound'] = str()
+ result['Compound'] = ''
result['TyreLife'] = np.nan
result['Stint'] = 0
result['New'] = False
@@ -3298,7 +3296,7 @@ def pick_accurate(self) -> "Laps":
"""
return self[self['IsAccurate']]

- def split_qualifying_sessions(self) -> List[Optional["Laps"]]:
+ def split_qualifying_sessions(self) -> list[Optional["Laps"]]:
"""Splits a lap object into individual laps objects for each
qualifying session.
@@ -3357,7 +3355,7 @@ def split_qualifying_sessions(self) -> List[Optional["Laps"]]:
return laps

def iterlaps(self, require: Optional[Iterable] = None) \
- -> Iterable[Tuple[int, "Lap"]]:
+ -> Iterable[tuple[int, "Lap"]]:
"""Iterator for iterating over all laps in self.
This method wraps :meth:`pandas.DataFrame.iterrows`.
@@ -3765,16 +3763,13 @@ class NoLapDataError(Exception):
after processing the result.
"""
def __init__(self, *args):
- super(NoLapDataError, self).__init__("Failed to load session because "
- "the API did not provide any "
- "usable data.")
+ super().__init__("Failed to load session because the API did not "
+ "provide any usable data.")


class InvalidSessionError(Exception):
"""Raised if no session for the specified event name, type and year
can be found."""

def __init__(self, *args):
- super(InvalidSessionError, self).__init__(
- "No matching session can be found."
- )
+ super().__init__("No matching session can be found.")
6 changes: 2 additions & 4 deletions fastf1/ergast/interface.py
@@ -1,10 +1,8 @@
import copy
import json
from typing import (
- List,
Literal,
Optional,
- Type,
Union
)

@@ -252,7 +250,7 @@ class ErgastSimpleResponse(ErgastResponseMixin, ErgastResultFrame):
_internal_names_set = set(_internal_names)

@property
- def _constructor(self) -> Type["ErgastResultFrame"]:
+ def _constructor(self) -> type["ErgastResultFrame"]:
# drop from ErgastSimpleResponse to ErgastResultFrame, removing the
# ErgastResponseMixin because a slice of the data is no longer a full
# response and pagination, ... is therefore not supported anymore
@@ -363,7 +361,7 @@ def description(self) -> ErgastResultFrame:
return self._description

@property
- def content(self) -> List[ErgastResultFrame]:
+ def content(self) -> list[ErgastResultFrame]:
"""A ``list`` of :class:`ErgastResultFrame` that contain the main
response data.
9 changes: 4 additions & 5 deletions fastf1/events.py
@@ -194,7 +194,6 @@
from typing import (
Literal,
Optional,
- Type,
Union
)

@@ -633,7 +632,7 @@ def _get_schedule_ff1(year):
data[f'session{j+1}_date'][i] = pd.Timestamp(date)
data[f'session{j+1}_date_Utc'][i] = pd.Timestamp(date_utc)

- str().capitalize()
+ ''.capitalize()

df = pd.DataFrame(data)
# change column names from snake_case to UpperCamelCase
@@ -894,7 +893,7 @@ def __init__(self, *args, year: int = 0,
self[col] = self[col].astype(_type)

@property
- def _constructor_sliced_horizontal(self) -> Type["Event"]:
+ def _constructor_sliced_horizontal(self) -> type["Event"]:
return Event

def is_testing(self):
@@ -981,10 +980,10 @@ def _matcher_strings(ev):
max_index = i

if max_ratio != 100:
- _logger.warning((
+ _logger.warning(
"Correcting user input "
f"'{user_input}' to'{self.loc[max_index].EventName}'"
- ))
+ )

return self.loc[max_index]

8 changes: 3 additions & 5 deletions fastf1/internals/pandas_extensions.py
@@ -1,5 +1,3 @@
- from typing import List
-
import numpy as np
from pandas import (
DataFrame,
@@ -35,7 +33,7 @@

def create_df_fast(
*,
- arrays: List[np.ndarray],
+ arrays: list[np.ndarray],
columns: list,
fallback: bool = True
) -> DataFrame:
@@ -71,7 +69,7 @@ def create_df_fast(


def _fallback_create_df(
- arrays: List[np.ndarray],
+ arrays: list[np.ndarray],
columns: list
) -> DataFrame:
data = {col: arr for col, arr in zip(columns, arrays)}
@@ -87,7 +85,7 @@ def _fallback_if_unsupported(func):

@_fallback_if_unsupported
def _unsafe_create_df_fast(
- arrays: List[np.ndarray],
+ arrays: list[np.ndarray],
columns: list
) -> DataFrame:
# Implements parts of pandas' internal DataFrame creation mechanics
2 changes: 1 addition & 1 deletion fastf1/livetiming/__main__.py
@@ -15,7 +15,7 @@ def save(args):


def convert(args):
- with open(args.input, 'r') as infile:
+ with open(args.input) as infile:
messages = infile.readlines()
data, ec = messages_from_raw(messages)
with open(args.output, 'w') as outfile:
6 changes: 2 additions & 4 deletions fastf1/livetiming/client.py
@@ -3,10 +3,8 @@
import json
import logging
import time
- from typing import (
- Iterable,
- Optional
- )
+ from collections.abc import Iterable
+ from typing import Optional

import requests

2 changes: 1 addition & 1 deletion fastf1/livetiming/data.py
@@ -95,7 +95,7 @@ def load(self):
next_data = None
else:
# read a new file as next file
- with open(next_file, 'r') as fobj:
+ with open(next_file) as fobj:
next_data = fobj.readlines()

if current_data is None:
13 changes: 5 additions & 8 deletions fastf1/req.py
@@ -29,10 +29,7 @@
import re
import sys
import time
- from typing import (
- Optional,
- Tuple
- )
+ from typing import Optional

import requests
from requests_cache import CacheMixin
@@ -524,8 +521,8 @@ def _enable_default_cache(cls):
try:
os.mkdir(cache_dir, mode=0o0700)
except Exception as err:
_logger.error("Failed to create cache directory {0}. "
"Error {1}".format(cache_dir, err))
_logger.error(f"Failed to create cache directory "
f"{cache_dir}. Error {err}")
raise

# Enable cache with default
@@ -632,7 +629,7 @@ def ci_mode(cls, enabled: bool):
cls._ci_mode = enabled

@classmethod
- def get_cache_info(cls) -> Tuple[Optional[str], Optional[int]]:
+ def get_cache_info(cls) -> tuple[Optional[str], Optional[int]]:
"""Returns information about the cache directory and its size.
If the cache is not configured, None will be returned for both the
@@ -658,7 +655,7 @@ def _convert_size(cls, size_bytes): # https://stackoverflow.com/questions/51940
i = int(math.floor(math.log(size_bytes, 1024)))
p = math.pow(1024, i)
s = round(size_bytes / p, 2)
return "%s %s" % (s, size_name[i])
return f"{s} {size_name[i]}"

@classmethod
def _get_size(cls, start_path='.'): # https://stackoverflow.com/questions/1392413/calculating-a-directorys-size-using-python # noqa: E501
6 changes: 2 additions & 4 deletions fastf1/utils.py
@@ -3,9 +3,7 @@
import warnings
from functools import reduce
from typing import (
- Dict,
Optional,
- Tuple,
Union
)

@@ -22,7 +20,7 @@
def delta_time(
reference_lap: "fastf1.core.Lap",
compare_lap: "fastf1.core.Lap"
- ) -> Tuple[pd.Series, "fastf1.core.Telemetry", "fastf1.core.Telemetry"]:
+ ) -> tuple[pd.Series, "fastf1.core.Telemetry", "fastf1.core.Telemetry"]:
"""Calculates the delta time of a given lap, along the 'Distance' axis
of the reference lap.
@@ -114,7 +112,7 @@ def mini_pro(stream):
return delta, ref, comp


- def recursive_dict_get(d: Dict, *keys: str, default_none: bool = False):
+ def recursive_dict_get(d: dict, *keys: str, default_none: bool = False):
"""Recursive dict get. Can take an arbitrary number of keys and returns an
empty dict if any key does not exist.
https://stackoverflow.com/a/28225747"""
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -12,11 +12,11 @@ readme = "README.md"
license = { file = "LICENSE" }

# minimum python version additionally needs to be changed in the test matrix
- requires-python = ">=3.8"
+ requires-python = ">=3.9"
# minimum package versions additionally need to be changed in requirements/minver.txt
dependencies = [
"matplotlib>=3.5.1,<4.0.0",
"numpy>=1.21.5,<3.0.0",
"numpy>=1.23.1,<3.0.0",
"pandas>=1.4.1,<3.0.0",
"python-dateutil",
"requests>=2.28.1",
@@ -85,6 +85,7 @@ select = [
"E",
"F",
"W",
"UP",
"NPY201"
]

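For reference, a quick sanity check of an environment against the new floors declared above (Python >= 3.9, numpy >= 1.23.1) could look like the sketch below; it is not part of the commit and assumes the third-party packaging library is installed:

import sys

import numpy as np
from packaging.version import Version

# mirrors requires-python = ">=3.9" and "numpy>=1.23.1,<3.0.0" from pyproject.toml
assert sys.version_info >= (3, 9), "fastf1 now requires Python 3.9 or newer"
assert Version(np.__version__) >= Version("1.23.1"), \
    "numpy 1.23.1 is the new minimum supported version"
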
2 changes: 1 addition & 1 deletion requirements/minver.txt
@@ -1,5 +1,5 @@
matplotlib==3.5.1
- numpy==1.21.5
+ numpy==1.23.1
pandas==1.4.1
requests==2.28.1
requests-cache==1.0.0
