Commit

Merge branch 'master' into patch-3
theOehrly authored Jul 25, 2024
2 parents 43809b0 + 93bfdde commit 530d631
Showing 34 changed files with 150 additions and 159 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/docs.yml
@@ -11,6 +11,11 @@ on:
     types: [ released ]
 
 
+env:
+  # Set environment variable with value from configuration variable
+  FASTF1_DOCS_ERGAST_BACKEND_OVERRIDE: ${{ vars.FASTF1_DOCS_ERGAST_BACKEND_OVERRIDE }}
+
+
 jobs:
   build_docs:
     name: Build Documentation
2 changes: 1 addition & 1 deletion .github/workflows/selective_cache_persist.yml
@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [ '3.8-minver', '3.8', '3.9', '3.10', '3.11', '3.12']
+        python-version: [ '3.9-minver', '3.9', '3.10', '3.11', '3.12']
       #
     name: Persist cache for ${{ matrix.python-version }}
     steps:
8 changes: 6 additions & 2 deletions .github/workflows/tests.yml
@@ -8,6 +8,11 @@ on:
   pull_request:
 
 
+env:
+  # Set environment variable with value from configuration variable
+  FASTF1_TEST_ERGAST_BACKEND_OVERRIDE: ${{ vars.FASTF1_TEST_ERGAST_BACKEND_OVERRIDE }}
+
+
 jobs:
   run-code-tests:
     runs-on: ubuntu-latest
@@ -16,10 +21,9 @@ jobs:
       matrix:
         include:
           - name-suffix: "(Minimum Versions)"
-            python-version: "3.8"
+            python-version: "3.9"
             cache-suffix: "-minver"
             extra-requirements: "-c requirements/minver.txt"
-          - python-version: "3.8"
           - python-version: "3.9"
           - python-version: "3.10"
           - python-version: "3.11"
8 changes: 8 additions & 0 deletions docs/conf.py
@@ -22,6 +22,14 @@
 sys.path.append(os.path.abspath('extensions'))
 
 
+ERGAST_BACKEND_OVERRIDE = os.environ.get("FASTF1_DOCS_ERGAST_BACKEND_OVERRIDE")
+
+if ERGAST_BACKEND_OVERRIDE:
+    import fastf1.ergast
+
+    fastf1.ergast.interface.BASE_URL = ERGAST_BACKEND_OVERRIDE
+
+
 # -- FastF1 specific config --------------------------------------------------
 # ignore warning on import of fastf1.api
 warnings.filterwarnings(action='ignore',
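For context: the override above works because fastf1.ergast.interface.BASE_URL is a module-level constant that is read when each request URL is built, so reassigning it after import redirects all later Ergast traffic. A minimal sketch of the same override in a standalone script (the mirror URL here is a hypothetical placeholder, not a real backend):

    # Sketch only: the URL below is a hypothetical stand-in for an
    # Ergast-compatible backend.
    import fastf1.ergast

    fastf1.ergast.interface.BASE_URL = 'https://ergast-mirror.example/api/f1'

    # Queries made through the wrapper from here on go to the override,
    # e.g. ergast.get_constructor_standings()
    ergast = fastf1.ergast.Ergast()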
8 changes: 5 additions & 3 deletions docs/ergast.rst
@@ -206,17 +206,19 @@ response. When 'pandas' is selected as result type, these endpoints return a
 :class:`~fastf1.ergast.interface.ErgastMultiResponse`. One such endpoint is
 the constructor standings endpoint.
 
+.. TODO: the following doctests are skipped because of the broken Ergast API
+
 .. doctest::
 
-    >>> standings = ergast.get_constructor_standings()
+    >>> standings = ergast.get_constructor_standings()  # doctest: +SKIP
 
 Called without any 'season' specifier, it returns standings for multiple
 seasons. An overview over the returned data is available as a ``.description``
 of the response:
 
 .. doctest::
 
-    >>> standings.description
+    >>> standings.description  # doctest: +SKIP
        season  round
     0    1958     11
     1    1959      9
@@ -237,7 +239,7 @@ The first element in ``.content`` is associated with the first row of the
 
 .. doctest::
 
-    >>> standings.content[0]
+    >>> standings.content[0]  # doctest: +SKIP
       position positionText  ... constructorName constructorNationality
    0         1            1  ...         Vanwall                British
    1         2            2  ...         Ferrari                Italian
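A brief note on the directive used above: "# doctest: +SKIP" is standard doctest syntax; the example still renders in the documentation but is never executed, which fits an API that is currently unreliable. A self-contained sketch (the function and values are invented for illustration):

    def double(x: int) -> int:
        """Return twice x.

        >>> double(2)
        4
        >>> double(unstable_remote_value())  # doctest: +SKIP
        84
        """
        return 2 * x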
18 changes: 12 additions & 6 deletions fastf1/_api.py
@@ -4,7 +4,6 @@
 import json
 import zlib
 from typing import (
-    Dict,
     Optional,
     Union
 )
@@ -28,16 +27,16 @@
 _logger = get_logger('api')
 
 base_url = 'https://livetiming.formula1.com'
+base_url_mirror = 'https://livetiming-mirror.fastf1.dev'
 
-headers: Dict[str, str] = {
-    'Host': 'livetiming.formula1.com',
+headers: dict[str, str] = {
     'Connection': 'close',
     'TE': 'identity',
     'User-Agent': 'BestHTTP',
     'Accept-Encoding': 'gzip, identity',
 }
 
-pages: Dict[str, str] = {
+pages: dict[str, str] = {
     'session_data': 'SessionData.json',  # track + session status + lap count
     'session_info': 'SessionInfo.jsonStream',  # more rnd
     'archive_status': 'ArchiveStatus.json',  # rnd=1880327548
@@ -83,7 +82,7 @@ def make_path(wname, wdate, sname, sdate):
 
 
 # define all empty columns for timing data
-EMPTY_LAPS = {'Time': pd.NaT, 'Driver': str(), 'LapTime': pd.NaT,
+EMPTY_LAPS = {'Time': pd.NaT, 'Driver': '', 'LapTime': pd.NaT,
               'NumberOfLaps': np.nan, 'NumberOfPitStops': np.nan,
               'PitOutTime': pd.NaT, 'PitInTime': pd.NaT,
               'Sector1Time': pd.NaT, 'Sector2Time': pd.NaT,
@@ -92,7 +91,7 @@ def make_path(wname, wdate, sname, sdate):
               'SpeedI1': np.nan, 'SpeedI2': np.nan, 'SpeedFL': np.nan,
               'SpeedST': np.nan, 'IsPersonalBest': False}
 
-EMPTY_STREAM = {'Time': pd.NaT, 'Driver': str(), 'Position': np.nan,
+EMPTY_STREAM = {'Time': pd.NaT, 'Driver': '', 'Position': np.nan,
                 'GapToLeader': np.nan, 'IntervalToPositionAhead': np.nan}
 
 
@@ -1699,7 +1698,14 @@ def fetch_page(path, name):
     page = pages[name]
     is_stream = 'jsonStream' in page
     is_z = '.z.' in page
+
     r = Cache.requests_get(base_url + path + pages[name], headers=headers)
+
+    if r.status_code >= 400:
+        _logger.debug(f"Falling back to livetiming mirror ({base_url_mirror})")
+        r = Cache.requests_get(base_url_mirror + path + pages[name],
+                               headers=headers)
+
     if r.status_code == 200:
         raw = r.content.decode('utf-8-sig')
         if is_stream:
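A note on the fallback added to fetch_page: judging from the hunk, the mirror is tried only when the primary host answers with an HTTP error status (400 or above); a connection-level failure in the first call would still raise. A standalone sketch of the same pattern with plain requests (the host constants match those defined earlier in this file; the timeout value is an assumption):

    import requests

    PRIMARY = 'https://livetiming.formula1.com'
    MIRROR = 'https://livetiming-mirror.fastf1.dev'

    def fetch_with_fallback(path: str) -> requests.Response:
        # Try the primary host first; on an HTTP error response, retry the
        # same path on the mirror and return the mirror's answer as-is.
        r = requests.get(PRIMARY + path, timeout=10)
        if r.status_code >= 400:
            r = requests.get(MIRROR + path, timeout=10)
        return r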
36 changes: 16 additions & 20 deletions fastf1/core.py
Expand Up @@ -43,15 +43,13 @@
import re
import typing
import warnings
from collections.abc import Iterable
from functools import cached_property
from typing import (
Any,
Callable,
Iterable,
List,
Literal,
Optional,
Tuple,
Union
)

@@ -925,8 +923,8 @@ def add_driver_ahead(self, drop_existing: bool = True) -> "Telemetry":
             )
 
         if ((d['Date'].shape != dtd['Date'].shape)
-                or np.any((d['Date'].values
-                           != dtd['Date'].values))):
+                or np.any(d['Date'].values
+                          != dtd['Date'].values)):
             dtd = dtd.resample_channels(new_date_ref=d["Date"])
 
         # indices need to match as .join works index-on-index
@@ -1510,7 +1508,7 @@ def _load_laps_data(self, livedata=None):
         elif not len(d2):
             result = d1.copy()
             result.reset_index(drop=True, inplace=True)
-            result['Compound'] = str()
+            result['Compound'] = ''
             result['TyreLife'] = np.nan
             result['Stint'] = 0
             result['New'] = False
@@ -2245,7 +2243,7 @@ def _drivers_from_f1_api(self, *, livedata=None):
         except Exception as exc:
             _logger.warning("Failed to load extended driver information!")
             _logger.debug("Exception while loading driver list", exc_info=exc)
-            driver_info = {}
+            return None
         else:
             driver_info = collections.defaultdict(list)
 
@@ -2275,11 +2273,12 @@ def _drivers_from_f1_api(self, *, livedata=None):
                             driver_info['LastName']):
             driver_info['FullName'].append(f"{first} {last}")
 
-        # driver info is required for joining on index (used as index),
-        # therefore drop rows where driver number is unavailable as they have
-        # an invalid index
-        return pd.DataFrame(driver_info, index=driver_info['DriverNumber']) \
-            .dropna(subset=['DriverNumber'])
+        # driver info is required for joining on index (used as index),
+        # therefore drop rows where driver number is unavailable as they
+        # have an invalid index
+        return pd.DataFrame(
+            driver_info, index=driver_info['DriverNumber']
+        ).dropna(subset=['DriverNumber'])
 
     def _drivers_results_from_ergast(
             self, *, load_drivers=False, load_results=False
@@ -3295,7 +3294,7 @@ def pick_accurate(self) -> "Laps":
         """
         return self[self['IsAccurate']]
 
-    def split_qualifying_sessions(self) -> List[Optional["Laps"]]:
+    def split_qualifying_sessions(self) -> list[Optional["Laps"]]:
         """Splits a lap object into individual laps objects for each
         qualifying session.
@@ -3354,7 +3353,7 @@ def split_qualifying_sessions(self) -> list[Optional["Laps"]]:
         return laps
 
     def iterlaps(self, require: Optional[Iterable] = None) \
-            -> Iterable[Tuple[int, "Lap"]]:
+            -> Iterable[tuple[int, "Lap"]]:
         """Iterator for iterating over all laps in self.
 
         This method wraps :meth:`pandas.DataFrame.iterrows`.
@@ -3762,16 +3761,13 @@ class NoLapDataError(Exception):
     after processing the result.
     """
     def __init__(self, *args):
-        super(NoLapDataError, self).__init__("Failed to load session because "
-                                             "the API did not provide any "
-                                             "usable data.")
+        super().__init__("Failed to load session because the API did not "
+                         "provide any usable data.")
 
 
 class InvalidSessionError(Exception):
     """Raised if no session for the specified event name, type and year
     can be found."""
 
     def __init__(self, *args):
-        super(InvalidSessionError, self).__init__(
-            "No matching session can be found."
-        )
+        super().__init__("No matching session can be found.")
10 changes: 5 additions & 5 deletions fastf1/ergast/interface.py
@@ -1,10 +1,8 @@
 import copy
 import json
 from typing import (
-    List,
     Literal,
     Optional,
-    Type,
     Union
 )
 
@@ -18,6 +16,7 @@
 
 
 BASE_URL = 'https://ergast.com/api/f1'
+TIMEOUT = 5.0
 HEADERS = {'User-Agent': f'FastF1/{__version_short__}'}
 
 
@@ -252,7 +251,7 @@ class ErgastSimpleResponse(ErgastResponseMixin, ErgastResultFrame):
     _internal_names_set = set(_internal_names)
 
     @property
-    def _constructor(self) -> Type["ErgastResultFrame"]:
+    def _constructor(self) -> type["ErgastResultFrame"]:
         # drop from ErgastSimpleResponse to ErgastResultFrame, removing the
         # ErgastResponseMixin because a slice of the data is no longer a full
         # response and pagination, ... is therefore not supported anymore
@@ -363,7 +362,7 @@ def description(self) -> ErgastResultFrame:
         return self._description
 
     @property
-    def content(self) -> List[ErgastResultFrame]:
+    def content(self) -> list[ErgastResultFrame]:
         """A ``list`` of :class:`ErgastResultFrame` that contain the main
         response data.
@@ -489,7 +488,8 @@ def _build_url(
     @classmethod
     def _get(cls, url: str, params: dict) -> Union[dict, list]:
         # request data from ergast and load the returned json data.
-        r = Cache.requests_get(url, headers=HEADERS, params=params)
+        r = Cache.requests_get(url, headers=HEADERS, params=params,
+                               timeout=TIMEOUT)
         if r.status_code == 200:
             try:
                 return json.loads(r.content.decode('utf-8'))
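The new TIMEOUT constant puts an upper bound on how long an Ergast request may block: requests waits indefinitely when no timeout is given. A small sketch of the behavior the constant guards against (plain requests; the exact endpoint is shown only as an example):

    import requests

    try:
        r = requests.get('https://ergast.com/api/f1/2024.json', timeout=5.0)
    except requests.exceptions.Timeout:
        # Raised when no response arrives within 5 seconds; without the
        # timeout argument the call could hang forever.
        r = None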
5 changes: 2 additions & 3 deletions fastf1/events.py
@@ -194,7 +194,6 @@
 from typing import (
     Literal,
     Optional,
-    Type,
     Union
 )
 
@@ -624,7 +623,7 @@ def _get_schedule_ff1(year):
             data[f'session{j+1}_date'][i] = pd.Timestamp(date)
             data[f'session{j+1}_date_Utc'][i] = pd.Timestamp(date_utc)
 
-    str().capitalize()
+    ''.capitalize()
 
     df = pd.DataFrame(data)
     # change column names from snake_case to UpperCamelCase
@@ -885,7 +884,7 @@ def __init__(self, *args, year: int = 0,
                 self[col] = self[col].astype(_type)
 
     @property
-    def _constructor_sliced_horizontal(self) -> Type["Event"]:
+    def _constructor_sliced_horizontal(self) -> type["Event"]:
         return Event
 
     def is_testing(self):
3 changes: 1 addition & 2 deletions fastf1/internals/fuzzy.py
@@ -1,5 +1,4 @@
 import warnings
-from typing import List
 
 import numpy as np
 
@@ -15,7 +14,7 @@
 
 def fuzzy_matcher(
         query: str,
-        reference: List[List[str]],
+        reference: list[list[str]],
         abs_confidence: float = 0.0,
         rel_confidence: float = 0.0
 ) -> (int, bool):
8 changes: 3 additions & 5 deletions fastf1/internals/pandas_extensions.py
@@ -1,5 +1,3 @@
-from typing import List
-
 import numpy as np
 from pandas import (
     DataFrame,
@@ -35,7 +33,7 @@
 
 def create_df_fast(
         *,
-        arrays: List[np.ndarray],
+        arrays: list[np.ndarray],
         columns: list,
         fallback: bool = True
 ) -> DataFrame:
@@ -71,7 +69,7 @@ def create_df_fast(
 
 
 def _fallback_create_df(
-        arrays: List[np.ndarray],
+        arrays: list[np.ndarray],
         columns: list
 ) -> DataFrame:
     data = {col: arr for col, arr in zip(columns, arrays)}
@@ -87,7 +85,7 @@ def _fallback_if_unsupported(func):
 
 @_fallback_if_unsupported
 def _unsafe_create_df_fast(
-        arrays: List[np.ndarray],
+        arrays: list[np.ndarray],
         columns: list
 ) -> DataFrame:
     # Implements parts of pandas' internal DataFrame creation mechanics
2 changes: 1 addition & 1 deletion fastf1/livetiming/__main__.py
@@ -15,7 +15,7 @@ def save(args):
 
 
 def convert(args):
-    with open(args.input, 'r') as infile:
+    with open(args.input) as infile:
         messages = infile.readlines()
     data, ec = messages_from_raw(messages)
     with open(args.output, 'w') as outfile:
6 changes: 2 additions & 4 deletions fastf1/livetiming/client.py
@@ -3,10 +3,8 @@
 import json
 import logging
 import time
-from typing import (
-    Iterable,
-    Optional
-)
+from collections.abc import Iterable
+from typing import Optional
 
 import requests
 
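The import shuffle here mirrors the one in fastf1/core.py: the typing aliases for container ABCs are deprecated since Python 3.9, so Iterable now comes from collections.abc while typing keeps purely static constructs such as Optional. A short illustration of the convention (the helper is invented):

    from collections.abc import Iterable  # runtime ABCs: Iterable, Mapping, ...
    from typing import Optional           # static-only: Optional, Union, Literal

    def first_message(messages: Iterable[str]) -> Optional[str]:
        # Return the first message, or None if the iterable is empty.
        return next(iter(messages), None)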
[Diff truncated: remaining changed files not shown]
