Reformat using linters (#269)
deedy5 authored Mar 17, 2024
1 parent 4d0982c commit 0ba3277
Showing 44 changed files with 476 additions and 471 deletions.
12 changes: 12 additions & 0 deletions .github/workflows/build-and-test.yaml
@@ -15,6 +15,18 @@ permissions:
contents: write

jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- run: |
pip install mypy ruff
make lint
sdist:
name: Build sdist wheel
runs-on: ubuntu-latest
3 changes: 3 additions & 0 deletions .gitignore
@@ -20,3 +20,6 @@ curl-*/
.preprocessed
include/
.DS_Store

.mypy_cache/
.ruff_cache/
9 changes: 9 additions & 0 deletions Makefile
@@ -39,6 +39,15 @@ build: .preprocessed
pip install build
python -m build --wheel

lint:
ruff check
ruff format --diff
mypy .

format:
ruff check --fix
ruff format

test:
python -bb -m pytest tests/unittest

22 changes: 13 additions & 9 deletions benchmark/benchmark.py
@@ -1,22 +1,25 @@
import asyncio
import queue
import threading
import time
import pandas as pd
import requests
from io import BytesIO

import aiohttp
import httpx
import pandas as pd
import pycurl
import requests
import tls_client
import httpx

import curl_cffi
import curl_cffi.requests
import queue
import threading
from io import BytesIO

# import uvloop
# uvloop.install()

results = []


class FakePycurlSession:
def __init__(self):
self.c = pycurl.Curl()
@@ -68,16 +71,17 @@ def __del__(self):
print("One worker, {}: ".format(size), stats)

df = pd.DataFrame(results)
df.to_csv("single_worker.csv", index=False, float_format='%.4f')
df.to_csv("single_worker.csv", index=False, float_format="%.4f")

results = []


def worker(q, done, SessionClass):
s = SessionClass()
while not done.is_set():
try:
url = q.get_nowait()
except:
except Exception:
continue
s.get(url)
q.task_done()
@@ -156,4 +160,4 @@ async def test_asyncs_workers():
print("10 Workers, {}: ".format(size), stats)

df = pd.DataFrame(results)
df.to_csv("multiple_workers.csv", index=False, float_format='%.4f')
df.to_csv("multiple_workers.csv", index=False, float_format="%.4f")
4 changes: 2 additions & 2 deletions benchmark/server.py
@@ -1,9 +1,9 @@
import os

from starlette.applications import Starlette
from starlette.responses import Response, PlainTextResponse
from starlette.responses import PlainTextResponse
from starlette.routing import Route


random_1k = os.urandom(1 * 1024)
random_20k = os.urandom(20 * 1024)
random_200k = os.urandom(200 * 1024)
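For reference, a minimal Starlette app in the same shape as this benchmark server; the route path and app wiring below are assumptions for illustration, not the file's actual contents:

```python
# Hedged sketch: serve a fixed random payload over a single route.
import os

from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.routing import Route

random_20k = os.urandom(20 * 1024)

async def payload_20k(request):
    # PlainTextResponse accepts bytes directly.
    return PlainTextResponse(random_20k)

app = Starlette(routes=[Route("/20k", payload_20k)])
# Run with an ASGI server, e.g.: uvicorn server:app
```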
12 changes: 6 additions & 6 deletions curl_cffi/__init__.py
@@ -14,11 +14,11 @@
]

import _cffi_backend # noqa: F401 # required by _wrapper
# This line includes _wrapper.so into the wheel
from ._wrapper import ffi, lib # type: ignore

from .const import CurlInfo, CurlMOpt, CurlOpt, CurlECode, CurlHttpVersion, CurlWsFlag
from .curl import Curl, CurlError, CurlMime
from .aio import AsyncCurl
from .__version__ import __curl_version__, __description__, __title__, __version__ # noqa: F401

from .__version__ import __title__, __version__, __description__, __curl_version__
# This line includes _wrapper.so into the wheel
from ._wrapper import ffi, lib
from .aio import AsyncCurl
from .const import CurlECode, CurlHttpVersion, CurlInfo, CurlMOpt, CurlOpt, CurlWsFlag
from .curl import Curl, CurlError, CurlMime
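The re-exports above keep the public surface importable from the package root; a minimal usage sketch of the low-level handle (the URL is a placeholder):

```python
# Minimal sketch using names re-exported from curl_cffi (placeholder URL).
from io import BytesIO

from curl_cffi import Curl, CurlOpt

buffer = BytesIO()
c = Curl()
c.setopt(CurlOpt.URL, b"https://example.com")
c.setopt(CurlOpt.WRITEDATA, buffer)
c.perform()
c.close()
print(len(buffer.getvalue()), "bytes received")
```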
2 changes: 1 addition & 1 deletion curl_cffi/__version__.py
@@ -1,6 +1,6 @@
from importlib import metadata
from .curl import Curl

from .curl import Curl

__title__ = "curl_cffi"
__description__ = metadata.metadata("curl_cffi")["Summary"]
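The reordered import above feeds the `importlib.metadata` lookup used for `__description__`; a small sketch of that pattern (assumes curl_cffi is installed; the `version()` call shows the same mechanism and is not necessarily this file's code):

```python
# Sketch: reading installed-distribution metadata via importlib.metadata.
from importlib import metadata

print(metadata.metadata("curl_cffi")["Summary"])  # package description
print(metadata.version("curl_cffi"))              # installed version string
```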
59 changes: 32 additions & 27 deletions curl_cffi/_asyncio_selector.py
@@ -3,9 +3,10 @@
running select in a thread and defining these methods on the running event loop.
This factors out the functionality of AddThreadSelectorEventLoop
into a standalone SelectorThread object which can be attached to any running event loop.
Vendored from tornado v6.1.0-64-g289c834b (PR #3029)
Vendored from tornado v6.4.0
Redistributed under license Apache-2.0
"""

import asyncio
import atexit
import errno
@@ -17,21 +18,22 @@
from typing import (
Any,
Callable,
Union,
Optional,
Dict,
List,
Optional,
Protocol,
Set,
Tuple,
Dict,
) # noqa: F401

from typing import Set # noqa: F401
TypeVar,
Union,
)

from typing import Protocol
_T = TypeVar("_T")


class _HasFileno(Protocol):
def fileno(self) -> int:
pass
return 0


_FileDescriptorLike = Union[int, _HasFileno]
@@ -50,11 +52,12 @@ def _atexit_callback() -> None:
loop._waker_w.send(b"a")
except BlockingIOError:
pass
# If we don't join our (daemon) thread here, we may get a deadlock
# during interpreter shutdown. I don't really understand why. This
# deadlock happens every time in CI (both travis and appveyor) but
# I've never been able to reproduce locally.
loop._thread.join()
if loop._thread is not None:
# If we don't join our (daemon) thread here, we may get a deadlock
# during interpreter shutdown. I don't really understand why. This
# deadlock happens every time in CI (both travis and appveyor) but
# I've never been able to reproduce locally.
loop._thread.join()
_selector_loops.clear()


@@ -78,9 +81,9 @@ def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None:
self._real_loop = real_loop

self._select_cond = threading.Condition()
self._select_args: Optional[
Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]]
] = None
self._select_args: Optional[Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]]] = (
None
)
self._closing_selector = False
self._thread: Optional[threading.Thread] = None
self._thread_manager_handle = self._thread_manager()
@@ -93,9 +96,7 @@ async def thread_manager_anext() -> None:
# When the loop starts, start the thread. Not too soon because we can't
# clean up if we get to this point but the event loop is closed without
# starting.
self._real_loop.call_soon(
lambda: self._real_loop.create_task(thread_manager_anext())
)
self._real_loop.call_soon(lambda: self._real_loop.create_task(thread_manager_anext()))

self._readers: Dict[_FileDescriptorLike, Callable] = {}
self._writers: Dict[_FileDescriptorLike, Callable] = {}
@@ -235,9 +236,7 @@ def _run_select(self) -> None:
# Swallow it too for consistency.
pass

def _handle_select(
self, rs: List[_FileDescriptorLike], ws: List[_FileDescriptorLike]
) -> None:
def _handle_select(self, rs: List[_FileDescriptorLike], ws: List[_FileDescriptorLike]) -> None:
for r in rs:
self._handle_event(r, self._readers)
for w in ws:
@@ -328,17 +327,23 @@ def close(self) -> None:
self._real_loop.close()

def add_reader(
self, fd: _FileDescriptorLike, callback: Callable[..., None], *args: Any
self,
fd: "_FileDescriptorLike",
callback: Callable[..., None],
*args: Any, # type: ignore
) -> None:
return self._selector.add_reader(fd, callback, *args)

def add_writer(
self, fd: _FileDescriptorLike, callback: Callable[..., None], *args: Any
self,
fd: "_FileDescriptorLike",
callback: Callable[..., None],
*args: Any, # type: ignore
) -> None:
return self._selector.add_writer(fd, callback, *args)

def remove_reader(self, fd: _FileDescriptorLike) -> bool:
def remove_reader(self, fd: "_FileDescriptorLike") -> bool:
return self._selector.remove_reader(fd)

def remove_writer(self, fd: _FileDescriptorLike) -> bool:
def remove_writer(self, fd: "_FileDescriptorLike") -> bool:
return self._selector.remove_writer(fd)
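`_HasFileno` above is a `typing.Protocol`, so the check is structural: anything exposing `fileno() -> int`, such as a socket, satisfies `_FileDescriptorLike` without inheriting from it. A standalone sketch of the idea (names are illustrative):

```python
# Standalone sketch of structural typing with Protocol.
import socket
from typing import Protocol, Union

class HasFileno(Protocol):
    def fileno(self) -> int:
        return 0

FileDescriptorLike = Union[int, HasFileno]

def as_fd(fd: FileDescriptorLike) -> int:
    # Plain ints pass through; sockets qualify because they define fileno().
    return fd if isinstance(fd, int) else fd.fileno()

s = socket.socket()
print(as_fd(3), as_fd(s))
s.close()
```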
29 changes: 14 additions & 15 deletions curl_cffi/aio.py
@@ -2,12 +2,12 @@
import sys
import warnings
from contextlib import suppress
from typing import Any
from weakref import WeakSet, WeakKeyDictionary
from typing import Any, Dict, Set
from weakref import WeakKeyDictionary, WeakSet

from ._wrapper import ffi, lib # type: ignore
from ._wrapper import ffi, lib
from .const import CurlMOpt
from .curl import Curl, DEFAULT_CACERT
from .curl import DEFAULT_CACERT, Curl

__all__ = ["AsyncCurl"]

@@ -52,10 +52,11 @@ def _close_selector_and_loop():
_selectors.pop(asyncio_loop, None)
selector_loop.close()

asyncio_loop.close = _close_selector_and_loop # type: ignore # mypy bug - assign a function to method
asyncio_loop.close = _close_selector_and_loop
return selector_loop

else:

def _get_selector(loop) -> asyncio.AbstractEventLoop:
return loop

@@ -77,7 +78,7 @@ def _get_selector(loop) -> asyncio.AbstractEventLoop:


@ffi.def_extern()
def timer_function(curlm, timeout_ms: int, clientp: Any):
def timer_function(curlm, timeout_ms: int, clientp: "AsyncCurl"):
"""
see: https://curl.se/libcurl/c/CURLMOPT_TIMERFUNCTION.html
"""
@@ -98,9 +99,8 @@ def timer_function(curlm, timeout_ms: int, clientp: Any):
async_curl._timers.add(timer)



@ffi.def_extern()
def socket_function(curl, sockfd: int, what: int, clientp: Any, data: Any):
def socket_function(curl, sockfd: int, what: int, clientp: "AsyncCurl", data: Any):
async_curl = ffi.from_handle(clientp)
loop = async_curl.loop

@@ -118,6 +118,7 @@ def socket_function(curl, sockfd: int, what: int, clientp: Any, data: Any):
if what & CURL_POLL_REMOVE:
async_curl._sockfds.remove(sockfd)


class AsyncCurl:
"""Wrapper around curl_multi handle to provide asyncio support. It uses the libcurl
socket_action APIs."""
@@ -130,14 +131,12 @@ def __init__(self, cacert: str = "", loop=None):
"""
self._curlm = lib.curl_multi_init()
self._cacert = cacert or DEFAULT_CACERT
self._curl2future = {} # curl to future map
self._curl2curl = {} # c curl to Curl
self._sockfds = set() # sockfds
self.loop = _get_selector(
loop if loop is not None else asyncio.get_running_loop()
)
self._curl2future: Dict[Curl, asyncio.Future] = {} # curl to future map
self._curl2curl: Dict[ffi.CData, Curl] = {} # c curl to Curl
self._sockfds: Set[int] = set() # sockfds
self.loop = _get_selector(loop if loop is not None else asyncio.get_running_loop())
self._checker = self.loop.create_task(self._force_timeout())
self._timers = WeakSet()
self._timers: WeakSet[asyncio.TimerHandle] = WeakSet()
self._setup()

def _setup(self):
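The explicit container annotations added above (`Dict[...]`, `Set[int]`, `WeakSet[asyncio.TimerHandle]`) are what lets the new mypy job check element types; a tiny illustration (the names below are illustrative, not aio.py's):

```python
# Illustration: with element types declared, mypy can reject bad inserts.
from typing import Set

sockfds: Set[int] = set()
sockfds.add(7)        # fine
sockfds.add("seven")  # runs at runtime, but mypy reports: incompatible type "str"; expected "int"
```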
12 changes: 6 additions & 6 deletions curl_cffi/const.py
@@ -545,9 +545,9 @@ class CurlHttpVersion(IntEnum):
class CurlWsFlag(IntEnum):
"""``CURL_WS_FLAG`` constancs extracted from libcurl, see comments for details"""

TEXT = (1<<0)
BINARY = (1<<1)
CONT = (1<<2)
CLOSE = (1<<3)
PING = (1<<4)
OFFSET = (1<<5)
TEXT = 1 << 0
BINARY = 1 << 1
CONT = 1 << 2
CLOSE = 1 << 3
PING = 1 << 4
OFFSET = 1 << 5
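Rewriting the flags as `1 << n` makes the single-bit layout explicit; since `CurlWsFlag` is an `IntEnum`, the members compose with bitwise OR, e.g.:

```python
# Sketch: combining single-bit websocket flags.
from curl_cffi.const import CurlWsFlag

flags = CurlWsFlag.TEXT | CurlWsFlag.CLOSE
assert flags & CurlWsFlag.TEXT
assert flags & CurlWsFlag.CLOSE
assert not (flags & CurlWsFlag.BINARY)
print(bin(flags))  # 0b1001
```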