Remove unused ignores (#1690)
* Remove unused ignores

* Add misc ignore comments and remove cast

* Revert cast change
kumaranvpl authored Aug 16, 2024
1 parent 16417cc commit 52123ae
Showing 24 changed files with 41 additions and 41 deletions.
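
Context for the change: mypy reports `# type: ignore` comments that no longer suppress any error when run with `--warn-unused-ignores` (or `warn_unused_ignores = true` in its config). The snippet below is a hypothetical illustration, not FastStream code, of the kind of comment this commit deletes.

# unused_ignore_demo.py -- hypothetical example, not FastStream code.
def double(x: int) -> int:
    return x * 2

ok = double(2)  # type: ignore[arg-type]
# `double(2)` already type-checks, so with --warn-unused-ignores mypy reports:
#   error: Unused "type: ignore" comment
bad = double("2")  # type: ignore[arg-type]  # this one is still needed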
6 changes: 3 additions & 3 deletions faststream/_compat.py
@@ -38,7 +38,7 @@ def is_test_env() -> bool:
 orjson = None

 try:
-    import ujson  # type: ignore[no-redef]
+    import ujson
 except ImportError:
     ujson = None

@@ -94,7 +94,7 @@ def raise_fastapi_validation_error(errors: List[Any], body: AnyDict) -> Never:
     from pydantic.annotated_handlers import (
         GetJsonSchemaHandler as GetJsonSchemaHandler,
     )
-    from pydantic_core.core_schema import (  # type: ignore[attr-defined]
+    from pydantic_core.core_schema import (
         with_info_plain_validator_function as with_info_plain_validator_function,
     )
 else:
@@ -190,7 +190,7 @@ def with_info_plain_validator_function(  # type: ignore[misc]
     from anyio import ExceptionGroup as ExceptionGroup  # type: ignore[attr-defined]
 else:
     if sys.version_info < (3, 11):
-        from exceptiongroup import (  # type: ignore[assignment,no-redef]
+        from exceptiongroup import (
             ExceptionGroup as ExceptionGroup,
         )
     else:
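
The `ujson` hunk above is the usual optional-dependency fallback. A self-contained sketch of the same shape, with hypothetical names rather than FastStream's `_compat` module:

# optional_json.py -- hypothetical sketch of the try/except import fallback above.
import json
from typing import Any

try:
    import orjson  # optional speed-up, used when installed
except ImportError:
    orjson = None

def loads(raw: bytes) -> Any:
    # Fall back to the standard library when the optional package is missing.
    if orjson is not None:
        return orjson.loads(raw)
    return json.loads(raw)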
4 changes: 2 additions & 2 deletions faststream/asyncapi/message.py
@@ -19,7 +19,7 @@ def parse_handler_params(
     body = get_model_schema(
         create_model(  # type: ignore[call-overload]
             model.__name__,
-            **call.flat_params,  # type: ignore[arg-type]
+            **call.flat_params,
         ),
         prefix=prefix,
         exclude=tuple(call.custom_fields.keys()),
@@ -119,7 +119,7 @@ def get_model_schema(
     else:
         param_body[DEF_KEY] = defs

-    original_title = param.title if PYDANTIC_V2 else param.field_info.title  # type: ignore[attr-defined]
+    original_title = param.title if PYDANTIC_V2 else param.field_info.title

     if original_title:
         use_original_model = True
2 changes: 1 addition & 1 deletion faststream/asyncapi/schema/info.py
@@ -78,7 +78,7 @@ def __get_pydantic_core_schema__(
         source : the source
         handler : the handler
         """
-        return with_info_plain_validator_function(cls._validate)  # type: ignore[no-any-return]
+        return with_info_plain_validator_function(cls._validate)


 class ContactDict(TypedDict, total=False):
4 changes: 2 additions & 2 deletions faststream/broker/fastapi/get_dependant.py
@@ -52,10 +52,10 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":

     from faststream._compat import PydanticUndefined

-    params = dependant.query_params + dependant.body_params  # type: ignore[attr-defined]
+    params = dependant.query_params + dependant.body_params

     for d in dependant.dependencies:
-        params.extend(d.query_params + d.body_params)  # type: ignore[attr-defined]
+        params.extend(d.query_params + d.body_params)

     params_unique = {}
     for p in params:
4 changes: 2 additions & 2 deletions faststream/broker/fastapi/route.py
@@ -114,12 +114,12 @@ def __init__(
         else:
             handler = call  # type: ignore[assignment]

-        self.handler = broker.subscriber(  # type: ignore[assignment,call-arg]
+        self.handler = broker.subscriber(  # type: ignore[call-arg]
             *extra,
             dependencies=list(dependencies),
             **handle_kwargs,
         )(
-            handler,  # type: ignore[arg-type]
+            handler,
         )

4 changes: 2 additions & 2 deletions faststream/broker/fastapi/router.py
@@ -374,7 +374,7 @@ def after_startup(
         Callable[["AppType"], Awaitable[None]],
     ]:
         """Register a function to be executed after startup."""
-        self._after_startup_hooks.append(to_async(func))  # type: ignore
+        self._after_startup_hooks.append(to_async(func))
         return func

     @overload
@@ -400,7 +400,7 @@ def on_broker_shutdown(
         Callable[["AppType"], Awaitable[None]],
     ]:
         """Register a function to be executed before broker stop."""
-        self._on_shutdown_hooks.append(to_async(func))  # type: ignore
+        self._on_shutdown_hooks.append(to_async(func))
         return func

     @abstractmethod
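
The two hunks above append `to_async(func)` to hook lists so that sync and async callbacks can share one registry. FastStream's real helper may differ (it could off-load to a thread pool, for example); this is only a hypothetical sketch of such an adapter:

# to_async_sketch.py -- hypothetical adapter, not FastStream's own implementation.
import asyncio
from functools import wraps
from typing import Any, Awaitable, Callable, Union

def to_async(
    func: Callable[..., Union[Any, Awaitable[Any]]],
) -> Callable[..., Awaitable[Any]]:
    # Already a coroutine function: return it unchanged.
    if asyncio.iscoroutinefunction(func):
        return func

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        # Run the sync callable inline; a real implementation might delegate
        # it to a worker thread instead.
        return func(*args, **kwargs)

    return wrapper

# Usage: both hooks end up awaitable, so they can live in one list.
hooks = [to_async(print), to_async(asyncio.sleep)]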
4 changes: 2 additions & 2 deletions faststream/broker/publisher/usecase.py
@@ -148,8 +148,8 @@ def get_payloads(self) -> List[Tuple["AnyDict", str]]:
                 model=create_model("Fake"),
                 response_model=create_model(  # type: ignore[call-overload]
                     "",
-                    __config__=get_config_base(),  # type: ignore[arg-type]
-                    **params,  # type: ignore[arg-type]
+                    __config__=get_config_base(),
+                    **params,
                 ),
                 params=params,
             )
2 changes: 1 addition & 1 deletion faststream/broker/utils.py
@@ -121,4 +121,4 @@ def resolve_custom_func(

     else:
         name = tuple(original_params.items())[1][0]
-        return partial(to_async(custom_func), **{name: default_func})  # type: ignore
+        return partial(to_async(custom_func), **{name: default_func})
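
The `resolve_custom_func` hunk binds a keyword argument whose name is only known at runtime. A generic illustration of the same trick, with made-up names:

# partial_kwarg.py -- generic illustration, names are made up.
import inspect
from functools import partial
from typing import Callable

def custom_decoder(raw: bytes, default: Callable[[bytes], str]) -> str:
    return default(raw).upper()

# Mirror `tuple(original_params.items())[1][0]`: take the second parameter's name...
second_param = list(inspect.signature(custom_decoder).parameters)[1]

# ...and bind the fallback to it by keyword, just as
# partial(to_async(custom_func), **{name: default_func}) does above.
bound = partial(custom_decoder, **{second_param: bytes.decode})
print(bound(b"hello"))  # -> HELLO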
4 changes: 2 additions & 2 deletions faststream/cli/main.py
@@ -177,7 +177,7 @@ def _run(
     with suppress(ImportError):
         import uvloop

-        uvloop.install()  # type: ignore[attr-defined]
+        uvloop.install()

     try:
         anyio.run(
@@ -252,7 +252,7 @@ def publish(
 async def publish_message(broker: "BrokerUsecase[Any, Any]", extra: "AnyDict") -> Any:
     try:
         async with broker:
-            return await broker.publish(**extra)  # type: ignore[union-attr]
+            return await broker.publish(**extra)
     except Exception as e:
         typer.echo(f"Error when broker was publishing: {e}")
         sys.exit(1)
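
`uvloop.install()` swaps asyncio's default event loop policy for uvloop's, so the `attr-defined` ignore was evidently no longer needed with current stubs. A minimal standalone sketch of the optional-uvloop pattern used in the hunk above:

# run_with_uvloop.py -- minimal sketch of the optional-uvloop pattern above.
import asyncio
from contextlib import suppress

with suppress(ImportError):
    import uvloop  # optional accelerator; silently skipped when not installed

    uvloop.install()  # use uvloop's event loop policy for asyncio.run()

async def main() -> None:
    await asyncio.sleep(0)  # stand-in for real work

if __name__ == "__main__":
    asyncio.run(main())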
6 changes: 3 additions & 3 deletions faststream/confluent/client.py
@@ -100,7 +100,7 @@ def __init__(
             }
         )

-        self.producer = Producer(self.config, logger=self.logger)  # type: ignore[call-arg]
+        self.producer = Producer(self.config, logger=self.logger)

     async def stop(self) -> None:
         """Stop the Kafka producer and flush remaining messages."""
@@ -267,7 +267,7 @@ def __init__(
             }
         )

-        self.consumer = Consumer(self.config, logger=self.logger)  # type: ignore[call-arg]
+        self.consumer = Consumer(self.config, logger=self.logger)

     @property
     def topics_to_create(self) -> List[str]:
@@ -336,7 +336,7 @@ async def getmany(
     ) -> Tuple[Message, ...]:
         """Consumes a batch of messages from Kafka and groups them by topic and partition."""
         raw_messages: List[Optional[Message]] = await call_or_await(
-            self.consumer.consume,  # type: ignore[arg-type]
+            self.consumer.consume,
             num_messages=max_records or 10,
             timeout=timeout,
         )
2 changes: 1 addition & 1 deletion faststream/confluent/publisher/usecase.py
@@ -140,7 +140,7 @@ async def publish(

 class BatchPublisher(LogicPublisher[Tuple[Message, ...]]):
     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: Union["SendableMessage", Iterable["SendableMessage"]],
         *extra_messages: "SendableMessage",
2 changes: 1 addition & 1 deletion faststream/confluent/schemas/partition.py
@@ -36,4 +36,4 @@ def to_confluent(self) -> ConfluentPartition:
             kwargs["metadata"] = self.metadata
         if self.leader_epoch is not None:
             kwargs["leader_epoch"] = self.leader_epoch
-        return ConfluentPartition(**kwargs)  # type: ignore[arg-type]
+        return ConfluentPartition(**kwargs)
2 changes: 1 addition & 1 deletion faststream/confluent/subscriber/usecase.py
@@ -203,7 +203,7 @@ async def _consume(self) -> None:
                     connected = True

                 if msg is not None:
-                    await self.consume(msg)  # type: ignore[arg-type]
+                    await self.consume(msg)

     @property
     def topic_names(self) -> List[str]:
2 changes: 1 addition & 1 deletion faststream/confluent/testing.py
@@ -69,7 +69,7 @@ def create_publisher_fake_subscriber(
             def publisher_response_subscriber(msg: Any) -> None:
                 pass

-            broker.setup_subscriber(sub)  # type: ignore[arg-type]
+            broker.setup_subscriber(sub)

         return sub.calls[0].handler

2 changes: 1 addition & 1 deletion faststream/kafka/publisher/usecase.py
@@ -195,7 +195,7 @@ async def publish(

 class BatchPublisher(LogicPublisher[Tuple["ConsumerRecord", ...]]):
     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: Annotated[
             Union["SendableMessage", Iterable["SendableMessage"]],
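
The `override` ignores removed in the confluent and kafka publisher hunks relate to mypy's Liskov check: a subclass method whose signature is incompatible with the base class gets an `[override]` error. A hypothetical example, unrelated to FastStream's publishers, of when that ignore is and is not required:

# override_demo.py -- hypothetical classes, not FastStream's.
from typing_extensions import override

class Publisher:
    async def publish(self, message: str) -> None: ...

class BatchPublisher(Publisher):
    @override
    async def publish(self, message: str, *extra_messages: str) -> None:
        # Extra variadic parameters keep the override compatible, so no
        # `# type: ignore[override]` is needed; changing `message` to an
        # incompatible type (e.g. bytes) would trigger the error again.
        ...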
2 changes: 1 addition & 1 deletion faststream/nats/testing.py
@@ -46,7 +46,7 @@ async def _fake_connect(  # type: ignore[override]
         *args: Any,
         **kwargs: Any,
     ) -> AsyncMock:
-        broker.stream = AsyncMock()  # type: ignore[assignment]
+        broker.stream = AsyncMock()
         broker._js_producer = broker._producer = FakeProducer(broker)  # type: ignore[assignment]
         return AsyncMock()

6 changes: 3 additions & 3 deletions faststream/rabbit/publisher/asyncapi.py
@@ -48,7 +48,7 @@ def get_schema(self) -> Dict[str, Channel]:

         return {
             self.name: Channel(
-                description=self.description,  # type: ignore[attr-defined]
+                description=self.description,
                 publish=Operation(
                     bindings=OperationBinding(
                         amqp=amqp.OperationBinding(
@@ -76,7 +76,7 @@ def get_schema(self) -> Dict[str, Channel]:
                 bindings=ChannelBinding(
                     amqp=amqp.ChannelBinding(
                         **{
-                            "is": "routingKey",  # type: ignore
+                            "is": "routingKey",
                             "queue": amqp.Queue(
                                 name=self.queue.name,
                                 durable=self.queue.durable,
@@ -90,7 +90,7 @@ def get_schema(self) -> Dict[str, Channel]:
                             amqp.Exchange(type="default", vhost=self.virtual_host)
                             if not self.exchange.name
                             else amqp.Exchange(
-                                type=self.exchange.type.value,  # type: ignore
+                                type=self.exchange.type.value,
                                 name=self.exchange.name,
                                 durable=self.exchange.durable,
                                 autoDelete=self.exchange.auto_delete,
6 changes: 3 additions & 3 deletions faststream/rabbit/subscriber/asyncapi.py
@@ -25,7 +25,7 @@ def get_schema(self) -> Dict[str, Channel]:

         return {
             self.name: Channel(
-                description=self.description,  # type: ignore[attr-defined]
+                description=self.description,
                 subscribe=Operation(
                     bindings=OperationBinding(
                         amqp=amqp.OperationBinding(
@@ -45,7 +45,7 @@ def get_schema(self) -> Dict[str, Channel]:
                 bindings=ChannelBinding(
                     amqp=amqp.ChannelBinding(
                         **{
-                            "is": "routingKey",  # type: ignore
+                            "is": "routingKey",
                             "queue": amqp.Queue(
                                 name=self.queue.name,
                                 durable=self.queue.durable,
@@ -59,7 +59,7 @@ def get_schema(self) -> Dict[str, Channel]:
                             amqp.Exchange(type="default", vhost=self.virtual_host)
                             if not self.exchange.name
                             else amqp.Exchange(
-                                type=self.exchange.type.value,  # type: ignore
+                                type=self.exchange.type.value,
                                 name=self.exchange.name,
                                 durable=self.exchange.durable,
                                 autoDelete=self.exchange.auto_delete,
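
Both rabbit AsyncAPI hunks above keep the `**{"is": "routingKey"}` unpacking: `is` is a Python keyword, so it cannot be written as a normal keyword argument and has to go through a dict. A hypothetical pydantic sketch of the same trick (not the real AsyncAPI binding models):

# reserved_kwarg.py -- hypothetical model, not the real AsyncAPI bindings.
from pydantic import BaseModel, Field

class ChannelBinding(BaseModel):
    # `is` is reserved, so the field name carries an alias instead.
    is_: str = Field(alias="is")
    queue: str

# ChannelBinding(is="routingKey", queue="logs")  # SyntaxError: `is` is a keyword
binding = ChannelBinding(**{"is": "routingKey", "queue": "logs"})
print(binding.is_)  # -> routingKey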
2 changes: 1 addition & 1 deletion faststream/rabbit/testing.py
@@ -186,7 +186,7 @@ def __init__(self, broker: RabbitBroker) -> None:
         self.broker = broker

     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: "AioPikaSendableMessage" = "",
         exchange: Union["RabbitExchange", str, None] = None,
2 changes: 1 addition & 1 deletion faststream/redis/fastapi/fastapi.py
@@ -668,7 +668,7 @@ def subscriber(  # type: ignore[override]
         )

     @override
-    def publisher(  # type: ignore[override]
+    def publisher(
         self,
         channel: Annotated[
             Union[str, PubSub, None],
6 changes: 3 additions & 3 deletions faststream/redis/publisher/usecase.py
@@ -104,7 +104,7 @@ def add_prefix(self, prefix: str) -> None:
         self.channel = channel

     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: Annotated[
             "SendableMessage",
@@ -231,7 +231,7 @@ def add_prefix(self, prefix: str) -> None:
         self.list = list_sub

     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: Annotated[
             "SendableMessage",
@@ -405,7 +405,7 @@ def add_prefix(self, prefix: str) -> None:
         self.stream = stream_sub

     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: Annotated[
             "SendableMessage",
2 changes: 1 addition & 1 deletion faststream/redis/subscriber/usecase.py
@@ -147,7 +147,7 @@ def _make_response_publisher(
         )

     @override
-    async def start(  # type: ignore[override]
+    async def start(
         self,
         *args: Any,
     ) -> None:
4 changes: 2 additions & 2 deletions faststream/redis/testing.py
@@ -56,7 +56,7 @@ async def _fake_connect(  # type: ignore[override]
         *args: Any,
         **kwargs: Any,
     ) -> AsyncMock:
-        broker._producer = FakeProducer(broker)  # type: ignore[assignment]
+        broker._producer = FakeProducer(broker)
         connection = MagicMock()

         pub_sub = AsyncMock()
@@ -86,7 +86,7 @@ def __init__(self, broker: RedisBroker) -> None:
         self.broker = broker

     @override
-    async def publish(  # type: ignore[override]
+    async def publish(
         self,
         message: "SendableMessage",
         *,
2 changes: 1 addition & 1 deletion faststream/testing/broker.py
@@ -93,7 +93,7 @@ async def __aexit__(self, *args: Any) -> None:

         # TODO: remove useless middlewares filter
         middlewares: Tuple[BrokerMiddleware[Any], ...] = (
-            CriticalLogMiddleware(  # type: ignore[arg-type]
+            CriticalLogMiddleware(
                 logger=self.broker.logger,
                 log_level=self.broker._msg_log_level,
             ),
