diff --git a/.github/workflows/pr_tests.yaml b/.github/workflows/pr_tests.yaml
index 607769b996..9519e3dbc6 100644
--- a/.github/workflows/pr_tests.yaml
+++ b/.github/workflows/pr_tests.yaml
@@ -44,7 +44,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
pydantic-version: ["pydantic-v1", "pydantic-v2"]
fail-fast: false
@@ -480,7 +480,7 @@ jobs:
version: "latest"
- uses: actions/setup-python@v5
with:
- python-version: "3.8"
+ python-version: "3.12"
- name: Get coverage files
uses: actions/download-artifact@v4
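
Note on the workflow change above: Python 3.8 reached end of life in October 2024, so the PR test matrix now starts at 3.9 and the coverage job's pinned interpreter moves from 3.8 to 3.12. A quick sanity check that no job still references the dropped version, as a sketch (it assumes PyYAML is installed and reads the job layout generically rather than by job name):

```python
import yaml  # assumption: PyYAML is available

with open(".github/workflows/pr_tests.yaml") as f:
    workflow = yaml.safe_load(f)

# Walk every job's strategy matrix and assert 3.8 is gone.
for name, job in workflow.get("jobs", {}).items():
    matrix = (job.get("strategy") or {}).get("matrix") or {}
    versions = matrix.get("python-version", [])
    assert "3.8" not in versions, f"job {name!r} still lists Python 3.8"
```
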
diff --git a/CITATION.cff b/CITATION.cff
index cfc7b23a6a..9e01da744e 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -10,7 +10,7 @@ type: software
authors:
- given-names: Nikita
family-names: Pastukhov
- email: diementros@yandex.com
+ email: nikita@pastukhov-dev.ru
- given-names: Davor
family-names: Runje
email: davor@airt.ai
diff --git a/docs/create_api_docs.py b/docs/create_api_docs.py
index 64dafda5ff..fe21b9b4fd 100644
--- a/docs/create_api_docs.py
+++ b/docs/create_api_docs.py
@@ -63,7 +63,8 @@ def _get_submodules(package_name: str) -> List[str]:
def _import_submodules(
- module_name: str, include_public_api_only: bool = False
+ module_name: str,
+ include_public_api_only: bool = False,
) -> Optional[List[ModuleType]]:
def _import_module(name: str) -> Optional[ModuleType]:
try:
@@ -89,7 +90,8 @@ def _import_module(name: str) -> Optional[ModuleType]:
def _import_functions_and_classes(
- m: ModuleType, include_public_api_only: bool = False
+ m: ModuleType,
+ include_public_api_only: bool = False,
) -> List[Tuple[str, Union[FunctionType, Type[Any]]]]:
funcs_and_classes = []
if not include_public_api_only:
@@ -112,20 +114,23 @@ def _is_private(name: str) -> bool:
def _import_all_members(
- module_name: str, include_public_api_only: bool = False
+ module_name: str,
+ include_public_api_only: bool = False,
) -> List[str]:
submodules = _import_submodules(
- module_name, include_public_api_only=include_public_api_only
+ module_name,
+ include_public_api_only=include_public_api_only,
)
members: List[Tuple[str, Union[FunctionType, Type[Any]]]] = list(
itertools.chain(
*[
_import_functions_and_classes(
- m, include_public_api_only=include_public_api_only
+ m,
+ include_public_api_only=include_public_api_only,
)
for m in submodules
- ]
- )
+ ],
+ ),
)
names = [
@@ -168,9 +173,8 @@ def _get_api_summary_item(x: str) -> str:
if x.endswith("."):
indent = " " * (4 * (len(xs) - 1 + 1))
return f"{indent}- {xs[-2]}"
- else:
- indent = " " * (4 * (len(xs) + 1))
- return f"{indent}- [{xs[-1]}](api/{'/'.join(xs)}.md)"
+ indent = " " * (4 * (len(xs) + 1))
+ return f"{indent}- [{xs[-1]}](api/{'/'.join(xs)}.md)"
def _get_api_summary(members: List[str]) -> str:
@@ -236,7 +240,9 @@ def _load_submodules(
def _update_single_api_doc(
- symbol: Union[FunctionType, Type[Any]], docs_path: Path, module_name: str
+ symbol: Union[FunctionType, Type[Any]],
+ docs_path: Path,
+ module_name: str,
) -> None:
en_docs_path = docs_path / "docs" / "en"
@@ -263,11 +269,15 @@ def _update_single_api_doc(
def _update_api_docs(
- symbols: List[Union[FunctionType, Type[Any]]], docs_path: Path, module_name: str
+ symbols: List[Union[FunctionType, Type[Any]]],
+ docs_path: Path,
+ module_name: str,
) -> None:
for symbol in symbols:
_update_single_api_doc(
- symbol=symbol, docs_path=docs_path, module_name=module_name
+ symbol=symbol,
+ docs_path=docs_path,
+ module_name=module_name,
)
@@ -284,8 +294,8 @@ def _generate_api_docs_for_module(root_path: Path, module_name: str) -> Tuple[st
"""
public_api_summary = _get_api_summary(
_add_all_submodules(
- _import_all_members(module_name, include_public_api_only=True)
- )
+ _import_all_members(module_name, include_public_api_only=True),
+ ),
)
# Using public_api/ symlink pointing to api/ because of the issue
# https://github.com/mkdocs/mkdocs/issues/1974
@@ -308,7 +318,7 @@ def _generate_api_docs_for_module(root_path: Path, module_name: str) -> Tuple[st
_update_api_docs(symbols, root_path, module_name)
- # todo: fix the problem and remove this
+ # TODO: fix the problem and remove this
src = """ - [ContactDict](api/faststream/asyncapi/schema/info/ContactDict.md)
"""
dst = """ - [ContactDict](api/faststream/asyncapi/schema/info/ContactDict.md)
diff --git a/docs/docs.py b/docs/docs.py
index f5a9f0e7a9..045c665068 100644
--- a/docs/docs.py
+++ b/docs/docs.py
@@ -22,7 +22,7 @@
CONFIG = BASE_DIR / "mkdocs.yml"
DOCS_DIR = BASE_DIR / "docs"
LANGUAGES_DIRS = tuple(
- filter(lambda f: f.is_dir() and f.name not in IGNORE_DIRS, DOCS_DIR.iterdir())
+ filter(lambda f: f.is_dir() and f.name not in IGNORE_DIRS, DOCS_DIR.iterdir()),
)
BUILD_DIR = BASE_DIR / "site"
@@ -125,7 +125,7 @@ def add(path=typer.Argument(...)):
not_exists.append(i)
file.write_text(
f"# {title or get_default_title(file)} \n"
- "{! " + get_in_progress(i.name) + " !}"
+ "{! " + get_in_progress(i.name) + " !}",
)
typer.echo(f"{file} - write `in progress`")
@@ -134,7 +134,7 @@ def add(path=typer.Argument(...)):
file = i / path
file.write_text(
f"# {title or get_default_title(file)} \n"
- "{! " + get_missing_translation(i.name) + " !}"
+ "{! " + get_missing_translation(i.name) + " !}",
)
typer.echo(f"{file} - write `missing translation`")
@@ -172,9 +172,8 @@ def mv(path: str = typer.Argument(...), new_path: str = typer.Argument(...)):
@app.command()
def update_readme() -> None:
"""Update README.md by expanding embeddings in docs/docs/en/index.md."""
- # todo: fix this function
+ # TODO: fix this function
typer.echo("Skipping updating README.md for now")
- return None
# typer.echo(f"Updating README.md")
# expand_markdown(input_markdown_path=EN_INDEX_PATH, output_markdown_path=README_PATH)
@@ -208,9 +207,9 @@ def update_contributing():
(
f"> **_NOTE:_** This is an auto-generated file. Please edit {relative_path} instead.",
*content,
- )
+ ),
)
- + "\n"
+ + "\n",
)
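
The docs.py hunks apply the same conventions: trailing commas at multi-line call sites, `TODO` casing, and removal of a redundant `return None` from `update_readme` (only commented-out code followed it, and a Python function that falls off the end of its body returns None implicitly, so behavior is unchanged). A two-function illustration of that equivalence:

```python
def with_explicit_return() -> None:
    print("Skipping updating README.md for now")
    return None  # redundant: the next definition behaves identically


def with_implicit_return() -> None:
    print("Skipping updating README.md for now")
```
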
diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md
deleted file mode 100644
index b41d3fa972..0000000000
--- a/docs/docs/SUMMARY.md
+++ /dev/null
@@ -1,1167 +0,0 @@
----
-search:
- exclude: true
----
-- [Features](faststream.md)
-- Tutorial
- - [Getting Started](getting-started/index.md)
- - [Subscription and Serialization](getting-started/subscription/index.md)
- - [Annotation Serialization](getting-started/subscription/annotation.md)
- - [Pydantic Serialization](getting-started/subscription/pydantic.md)
- - [Filtering](getting-started/subscription/filtering.md)
- - [Testing](getting-started/subscription/test.md)
- - [Publishing](getting-started/publishing/index.md)
- - [Broker Publish](getting-started/publishing/broker.md)
- - [Decorator](getting-started/publishing/decorator.md)
- - [Object Decorator](getting-started/publishing/object.md)
- - [Direct Publishing](getting-started/publishing/direct.md)
- - [Testing](getting-started/publishing/test.md)
- - [Routers](getting-started/routers/index.md)
- - [Dependencies](getting-started/dependencies/index.md)
- - [Context](getting-started/context/index.md)
- - [Existing Fields](getting-started/context/existed.md)
- - [Custom Context](getting-started/context/custom.md)
- - [Fields Access](getting-started/context/fields.md)
- - [Extra Options](getting-started/context/extra.md)
- - [Custom Serialization](getting-started/serialization/index.md)
- - [Parser](getting-started/serialization/parser.md)
- - [Decoder](getting-started/serialization/decoder.md)
- - [Examples](getting-started/serialization/examples.md)
- - [Lifespan](getting-started/lifespan/index.md)
- - [Hooks](getting-started/lifespan/hooks.md)
- - [Context](getting-started/lifespan/context.md)
- - [Testing](getting-started/lifespan/test.md)
- - [Middlewares](getting-started/middlewares/index.md)
- - [Exception Middleware](getting-started/middlewares/exception.md)
- - AsyncAPI
- - [Schema Export](getting-started/asyncapi/export.md)
- - [Schema Hosting](getting-started/asyncapi/hosting.md)
- - [Customize Information](getting-started/asyncapi/custom.md)
- - Integrations
- - [HTTP Async Frameworks](getting-started/integrations/frameworks/index.md)
- - [FastAPI Plugin](getting-started/integrations/fastapi/index.md)
- - [Django](getting-started/integrations/django/index.md)
- - [CLI](getting-started/cli/index.md)
- - [ASGI](getting-started/asgi.md)
- - [OpenTelemetry](getting-started/opentelemetry/index.md)
- - [Prometheus](getting-started/prometheus/index.md)
- - [Logging](getting-started/logging.md)
- - [Config Management](getting-started/config/index.md)
- - [Task Scheduling](scheduling.md)
- - [FastStream Project Template](getting-started/template/index.md)
-- [Kafka](kafka/kafka.md)
- - [AIOKafka](kafka/index.md)
- - [Subscription](kafka/Subscriber/index.md)
- - [Batch Subscriber](kafka/Subscriber/batch_subscriber.md)
- - [Publishing](kafka/Publisher/index.md)
- - [Batch Publishing](kafka/Publisher/batch_publisher.md)
- - [Publish With Key](kafka/Publisher/using_a_key.md)
- - [Acknowledgement](kafka/ack.md)
- - [Message Information](kafka/message.md)
- - [Security Configuration](kafka/security.md)
- - [Confluent](confluent/index.md)
- - [Subscription](confluent/Subscriber/index.md)
- - [Batch Subscriber](confluent/Subscriber/batch_subscriber.md)
- - [Publishing](confluent/Publisher/index.md)
- - [Batch Publishing](confluent/Publisher/batch_publisher.md)
- - [Publish With Key](confluent/Publisher/using_a_key.md)
- - [Acknowledgement](confluent/ack.md)
- - [Message Information](confluent/message.md)
- - [Security Configuration](confluent/security.md)
- - [Additional Configuration](confluent/additional-configuration.md)
- - [How-To](howto/kafka/index.md)
- - [Kafka RPC](howto/kafka/rpc.md)
-- [RabbitMQ](rabbit/index.md)
- - [Subscription](rabbit/examples/index.md)
- - [Direct](rabbit/examples/direct.md)
- - [Fanout](rabbit/examples/fanout.md)
- - [Topic](rabbit/examples/topic.md)
- - [Headers](rabbit/examples/headers.md)
- - [Stream](rabbit/examples/stream.md)
- - [Publishing](rabbit/publishing.md)
- - [RPC](rabbit/rpc.md)
- - [Acknowledgement](rabbit/ack.md)
- - [Declare Queue/Exchange](rabbit/declare.md)
- - [Message Information](rabbit/message.md)
- - [Security Configuration](rabbit/security.md)
-- [NATS](nats/index.md)
- - Subscription
- - [Direct](nats/examples/direct.md)
- - [Pattern](nats/examples/pattern.md)
- - [JetStream](nats/jetstream/index.md)
- - [Pull Subscriber](nats/jetstream/pull.md)
- - [Key-Value Storage](nats/jetstream/key-value.md)
- - [Object Storage](nats/jetstream/object.md)
- - [Acknowledgement](nats/jetstream/ack.md)
- - [Publishing](nats/publishing/index.md)
- - [RPC](nats/rpc.md)
- - [Message Information](nats/message.md)
- - [How-To](howto/nats/index.md)
- - [DynaConf](howto/nats/dynaconf.md)
-        - [In-Progress](howto/nats/in-progress.md)
-- [Redis](redis/index.md)
- - [Pub/Sub](redis/pubsub/index.md)
- - [Subscription](redis/pubsub/subscription.md)
- - [Publishing](redis/pubsub/publishing.md)
- - [List](redis/list/index.md)
- - [Subscription](redis/list/subscription.md)
- - [Publishing](redis/list/publishing.md)
- - [Batching](redis/list/batch.md)
- - [Streams](redis/streams/index.md)
- - [Subscription](redis/streams/subscription.md)
- - [Publishing](redis/streams/publishing.md)
- - [Groups](redis/streams/groups.md)
- - [Batching](redis/streams/batch.md)
- - [Acknowledgement](redis/streams/ack.md)
- - [RPC](redis/rpc.md)
- - [Message Information](redis/message.md)
- - [Security Configuration](redis/security.md)
-- [Reference - Code API](api/index.md)
- - Public API
- - faststream
- - [BaseMiddleware](public_api/faststream/BaseMiddleware.md)
- - [Context](public_api/faststream/Context.md)
- - [Depends](public_api/faststream/Depends.md)
- - [ExceptionMiddleware](public_api/faststream/ExceptionMiddleware.md)
- - [FastStream](public_api/faststream/FastStream.md)
- - [Header](public_api/faststream/Header.md)
- - [Path](public_api/faststream/Path.md)
- - [Response](public_api/faststream/Response.md)
- - [TestApp](public_api/faststream/TestApp.md)
- - [apply_types](public_api/faststream/apply_types.md)
- - asgi
- - [AsgiFastStream](public_api/faststream/asgi/AsgiFastStream.md)
- - [AsgiResponse](public_api/faststream/asgi/AsgiResponse.md)
- - [get](public_api/faststream/asgi/get.md)
- - [make_asyncapi_asgi](public_api/faststream/asgi/make_asyncapi_asgi.md)
- - [make_ping_asgi](public_api/faststream/asgi/make_ping_asgi.md)
- - asyncapi
- - [get_app_schema](public_api/faststream/asyncapi/get_app_schema.md)
- - [get_asyncapi_html](public_api/faststream/asyncapi/get_asyncapi_html.md)
- - confluent
- - [KafkaBroker](public_api/faststream/confluent/KafkaBroker.md)
- - [KafkaPublisher](public_api/faststream/confluent/KafkaPublisher.md)
- - [KafkaResponse](public_api/faststream/confluent/KafkaResponse.md)
- - [KafkaRoute](public_api/faststream/confluent/KafkaRoute.md)
- - [KafkaRouter](public_api/faststream/confluent/KafkaRouter.md)
- - [TestApp](public_api/faststream/confluent/TestApp.md)
- - [TestKafkaBroker](public_api/faststream/confluent/TestKafkaBroker.md)
- - [TopicPartition](public_api/faststream/confluent/TopicPartition.md)
- - kafka
- - [KafkaBroker](public_api/faststream/kafka/KafkaBroker.md)
- - [KafkaPublisher](public_api/faststream/kafka/KafkaPublisher.md)
- - [KafkaResponse](public_api/faststream/kafka/KafkaResponse.md)
- - [KafkaRoute](public_api/faststream/kafka/KafkaRoute.md)
- - [KafkaRouter](public_api/faststream/kafka/KafkaRouter.md)
- - [TestApp](public_api/faststream/kafka/TestApp.md)
- - [TestKafkaBroker](public_api/faststream/kafka/TestKafkaBroker.md)
- - [TopicPartition](public_api/faststream/kafka/TopicPartition.md)
- - nats
- - [AckPolicy](public_api/faststream/nats/AckPolicy.md)
- - [ConsumerConfig](public_api/faststream/nats/ConsumerConfig.md)
- - [DeliverPolicy](public_api/faststream/nats/DeliverPolicy.md)
- - [DiscardPolicy](public_api/faststream/nats/DiscardPolicy.md)
- - [ExternalStream](public_api/faststream/nats/ExternalStream.md)
- - [JStream](public_api/faststream/nats/JStream.md)
- - [KvWatch](public_api/faststream/nats/KvWatch.md)
- - [NatsBroker](public_api/faststream/nats/NatsBroker.md)
- - [NatsPublisher](public_api/faststream/nats/NatsPublisher.md)
- - [NatsResponse](public_api/faststream/nats/NatsResponse.md)
- - [NatsRoute](public_api/faststream/nats/NatsRoute.md)
- - [NatsRouter](public_api/faststream/nats/NatsRouter.md)
- - [ObjWatch](public_api/faststream/nats/ObjWatch.md)
- - [Placement](public_api/faststream/nats/Placement.md)
- - [PullSub](public_api/faststream/nats/PullSub.md)
- - [RePublish](public_api/faststream/nats/RePublish.md)
- - [ReplayPolicy](public_api/faststream/nats/ReplayPolicy.md)
- - [RetentionPolicy](public_api/faststream/nats/RetentionPolicy.md)
- - [StorageType](public_api/faststream/nats/StorageType.md)
- - [StreamConfig](public_api/faststream/nats/StreamConfig.md)
- - [StreamSource](public_api/faststream/nats/StreamSource.md)
- - [TestApp](public_api/faststream/nats/TestApp.md)
- - [TestNatsBroker](public_api/faststream/nats/TestNatsBroker.md)
- - opentelemetry
- - [Baggage](public_api/faststream/opentelemetry/Baggage.md)
- - [TelemetryMiddleware](public_api/faststream/opentelemetry/TelemetryMiddleware.md)
- - [TelemetrySettingsProvider](public_api/faststream/opentelemetry/TelemetrySettingsProvider.md)
- - rabbit
- - [ExchangeType](public_api/faststream/rabbit/ExchangeType.md)
- - [QueueType](public_api/faststream/rabbit/QueueType.md)
- - [RabbitBroker](public_api/faststream/rabbit/RabbitBroker.md)
- - [RabbitExchange](public_api/faststream/rabbit/RabbitExchange.md)
- - [RabbitPublisher](public_api/faststream/rabbit/RabbitPublisher.md)
- - [RabbitQueue](public_api/faststream/rabbit/RabbitQueue.md)
- - [RabbitResponse](public_api/faststream/rabbit/RabbitResponse.md)
- - [RabbitRoute](public_api/faststream/rabbit/RabbitRoute.md)
- - [RabbitRouter](public_api/faststream/rabbit/RabbitRouter.md)
- - [ReplyConfig](public_api/faststream/rabbit/ReplyConfig.md)
- - [TestApp](public_api/faststream/rabbit/TestApp.md)
- - [TestRabbitBroker](public_api/faststream/rabbit/TestRabbitBroker.md)
- - redis
- - [ListSub](public_api/faststream/redis/ListSub.md)
- - [PubSub](public_api/faststream/redis/PubSub.md)
- - [RedisBroker](public_api/faststream/redis/RedisBroker.md)
- - [RedisPublisher](public_api/faststream/redis/RedisPublisher.md)
- - [RedisResponse](public_api/faststream/redis/RedisResponse.md)
- - [RedisRoute](public_api/faststream/redis/RedisRoute.md)
- - [RedisRouter](public_api/faststream/redis/RedisRouter.md)
- - [StreamSub](public_api/faststream/redis/StreamSub.md)
- - [TestApp](public_api/faststream/redis/TestApp.md)
- - [TestRedisBroker](public_api/faststream/redis/TestRedisBroker.md)
- - All API
- - faststream
- - [BaseMiddleware](api/faststream/BaseMiddleware.md)
- - [Context](api/faststream/Context.md)
- - [Depends](api/faststream/Depends.md)
- - [ExceptionMiddleware](api/faststream/ExceptionMiddleware.md)
- - [FastStream](api/faststream/FastStream.md)
- - [Header](api/faststream/Header.md)
- - [Path](api/faststream/Path.md)
- - [Response](api/faststream/Response.md)
- - [TestApp](api/faststream/TestApp.md)
- - [apply_types](api/faststream/apply_types.md)
- - app
- - [FastStream](api/faststream/app/FastStream.md)
- - [catch_startup_validation_error](api/faststream/app/catch_startup_validation_error.md)
- - asgi
- - [AsgiFastStream](api/faststream/asgi/AsgiFastStream.md)
- - [AsgiResponse](api/faststream/asgi/AsgiResponse.md)
- - [get](api/faststream/asgi/get.md)
- - [make_asyncapi_asgi](api/faststream/asgi/make_asyncapi_asgi.md)
- - [make_ping_asgi](api/faststream/asgi/make_ping_asgi.md)
- - app
- - [AsgiFastStream](api/faststream/asgi/app/AsgiFastStream.md)
- - [cast_uvicorn_params](api/faststream/asgi/app/cast_uvicorn_params.md)
- - factories
- - [make_asyncapi_asgi](api/faststream/asgi/factories/make_asyncapi_asgi.md)
- - [make_ping_asgi](api/faststream/asgi/factories/make_ping_asgi.md)
- - handlers
- - [get](api/faststream/asgi/handlers/get.md)
- - response
- - [AsgiResponse](api/faststream/asgi/response/AsgiResponse.md)
- - websocket
- - [WebSocketClose](api/faststream/asgi/websocket/WebSocketClose.md)
- - asyncapi
- - [get_app_schema](api/faststream/asyncapi/get_app_schema.md)
- - [get_asyncapi_html](api/faststream/asyncapi/get_asyncapi_html.md)
- - abc
- - [AsyncAPIOperation](api/faststream/asyncapi/abc/AsyncAPIOperation.md)
- - generate
- - [get_app_schema](api/faststream/asyncapi/generate/get_app_schema.md)
- - [get_broker_channels](api/faststream/asyncapi/generate/get_broker_channels.md)
- - [get_broker_server](api/faststream/asyncapi/generate/get_broker_server.md)
- - message
- - [get_model_schema](api/faststream/asyncapi/message/get_model_schema.md)
- - [get_response_schema](api/faststream/asyncapi/message/get_response_schema.md)
- - [parse_handler_params](api/faststream/asyncapi/message/parse_handler_params.md)
- - proto
- - [AsyncAPIApplication](api/faststream/asyncapi/proto/AsyncAPIApplication.md)
- - [AsyncAPIProto](api/faststream/asyncapi/proto/AsyncAPIProto.md)
- - schema
- - [Channel](api/faststream/asyncapi/schema/Channel.md)
- - [ChannelBinding](api/faststream/asyncapi/schema/ChannelBinding.md)
- - [Components](api/faststream/asyncapi/schema/Components.md)
- - [Contact](api/faststream/asyncapi/schema/Contact.md)
- - [ContactDict](api/faststream/asyncapi/schema/ContactDict.md)
- - [CorrelationId](api/faststream/asyncapi/schema/CorrelationId.md)
- - [ExternalDocs](api/faststream/asyncapi/schema/ExternalDocs.md)
- - [ExternalDocsDict](api/faststream/asyncapi/schema/ExternalDocsDict.md)
- - [Info](api/faststream/asyncapi/schema/Info.md)
- - [License](api/faststream/asyncapi/schema/License.md)
- - [LicenseDict](api/faststream/asyncapi/schema/LicenseDict.md)
- - [Message](api/faststream/asyncapi/schema/Message.md)
- - [Operation](api/faststream/asyncapi/schema/Operation.md)
- - [OperationBinding](api/faststream/asyncapi/schema/OperationBinding.md)
- - [Reference](api/faststream/asyncapi/schema/Reference.md)
- - [Schema](api/faststream/asyncapi/schema/Schema.md)
- - [SecuritySchemaComponent](api/faststream/asyncapi/schema/SecuritySchemaComponent.md)
- - [Server](api/faststream/asyncapi/schema/Server.md)
- - [ServerBinding](api/faststream/asyncapi/schema/ServerBinding.md)
- - [Tag](api/faststream/asyncapi/schema/Tag.md)
- - [TagDict](api/faststream/asyncapi/schema/TagDict.md)
- - bindings
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/ChannelBinding.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/OperationBinding.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/ServerBinding.md)
- - amqp
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md)
- - [Exchange](api/faststream/asyncapi/schema/bindings/amqp/Exchange.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md)
- - [Queue](api/faststream/asyncapi/schema/bindings/amqp/Queue.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md)
- - kafka
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md)
- - main
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/main/OperationBinding.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/main/ServerBinding.md)
- - nats
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md)
- - redis
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md)
- - sqs
- - [ChannelBinding](api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md)
- - [OperationBinding](api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md)
- - [ServerBinding](api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md)
- - channels
- - [Channel](api/faststream/asyncapi/schema/channels/Channel.md)
- - info
- - [Contact](api/faststream/asyncapi/schema/info/Contact.md)
- - [ContactDict](api/faststream/asyncapi/schema/info/ContactDict.md)
- - [EmailStr](api/faststream/asyncapi/schema/info/EmailStr.md)
- - [Info](api/faststream/asyncapi/schema/info/Info.md)
- - [License](api/faststream/asyncapi/schema/info/License.md)
- - [LicenseDict](api/faststream/asyncapi/schema/info/LicenseDict.md)
- - main
- - [Components](api/faststream/asyncapi/schema/main/Components.md)
- - [Schema](api/faststream/asyncapi/schema/main/Schema.md)
- - message
- - [CorrelationId](api/faststream/asyncapi/schema/message/CorrelationId.md)
- - [Message](api/faststream/asyncapi/schema/message/Message.md)
- - operations
- - [Operation](api/faststream/asyncapi/schema/operations/Operation.md)
- - security
- - [OauthFlowObj](api/faststream/asyncapi/schema/security/OauthFlowObj.md)
- - [OauthFlows](api/faststream/asyncapi/schema/security/OauthFlows.md)
- - [SecuritySchemaComponent](api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md)
- - servers
- - [Server](api/faststream/asyncapi/schema/servers/Server.md)
- - [ServerVariable](api/faststream/asyncapi/schema/servers/ServerVariable.md)
- - utils
- - [ExternalDocs](api/faststream/asyncapi/schema/utils/ExternalDocs.md)
- - [ExternalDocsDict](api/faststream/asyncapi/schema/utils/ExternalDocsDict.md)
- - [Parameter](api/faststream/asyncapi/schema/utils/Parameter.md)
- - [Reference](api/faststream/asyncapi/schema/utils/Reference.md)
- - [Tag](api/faststream/asyncapi/schema/utils/Tag.md)
- - [TagDict](api/faststream/asyncapi/schema/utils/TagDict.md)
- - site
- - [get_asyncapi_html](api/faststream/asyncapi/site/get_asyncapi_html.md)
- - [serve_app](api/faststream/asyncapi/site/serve_app.md)
- - utils
- - [resolve_payloads](api/faststream/asyncapi/utils/resolve_payloads.md)
- - [to_camelcase](api/faststream/asyncapi/utils/to_camelcase.md)
- - broker
- - acknowledgement_watcher
- - [BaseWatcher](api/faststream/broker/acknowledgement_watcher/BaseWatcher.md)
- - [CounterWatcher](api/faststream/broker/acknowledgement_watcher/CounterWatcher.md)
- - [EndlessWatcher](api/faststream/broker/acknowledgement_watcher/EndlessWatcher.md)
- - [OneTryWatcher](api/faststream/broker/acknowledgement_watcher/OneTryWatcher.md)
- - [WatcherContext](api/faststream/broker/acknowledgement_watcher/WatcherContext.md)
- - [get_watcher](api/faststream/broker/acknowledgement_watcher/get_watcher.md)
- - core
- - abc
- - [ABCBroker](api/faststream/broker/core/abc/ABCBroker.md)
- - logging
- - [LoggingBroker](api/faststream/broker/core/logging/LoggingBroker.md)
- - usecase
- - [BrokerUsecase](api/faststream/broker/core/usecase/BrokerUsecase.md)
- - fastapi
- - [StreamMessage](api/faststream/broker/fastapi/StreamMessage.md)
- - [StreamRouter](api/faststream/broker/fastapi/StreamRouter.md)
- - context
- - [Context](api/faststream/broker/fastapi/context/Context.md)
- - get_dependant
- - [get_fastapi_dependant](api/faststream/broker/fastapi/get_dependant/get_fastapi_dependant.md)
- - [get_fastapi_native_dependant](api/faststream/broker/fastapi/get_dependant/get_fastapi_native_dependant.md)
- - route
- - [StreamMessage](api/faststream/broker/fastapi/route/StreamMessage.md)
- - [build_faststream_to_fastapi_parser](api/faststream/broker/fastapi/route/build_faststream_to_fastapi_parser.md)
- - [make_fastapi_execution](api/faststream/broker/fastapi/route/make_fastapi_execution.md)
- - [wrap_callable_to_fastapi_compatible](api/faststream/broker/fastapi/route/wrap_callable_to_fastapi_compatible.md)
- - router
- - [StreamRouter](api/faststream/broker/fastapi/router/StreamRouter.md)
- - message
- - [AckStatus](api/faststream/broker/message/AckStatus.md)
- - [SourceType](api/faststream/broker/message/SourceType.md)
- - [StreamMessage](api/faststream/broker/message/StreamMessage.md)
- - [decode_message](api/faststream/broker/message/decode_message.md)
- - [encode_message](api/faststream/broker/message/encode_message.md)
- - [gen_cor_id](api/faststream/broker/message/gen_cor_id.md)
- - middlewares
- - [BaseMiddleware](api/faststream/broker/middlewares/BaseMiddleware.md)
- - [ExceptionMiddleware](api/faststream/broker/middlewares/ExceptionMiddleware.md)
- - base
- - [BaseMiddleware](api/faststream/broker/middlewares/base/BaseMiddleware.md)
- - exception
- - [BaseExceptionMiddleware](api/faststream/broker/middlewares/exception/BaseExceptionMiddleware.md)
- - [ExceptionMiddleware](api/faststream/broker/middlewares/exception/ExceptionMiddleware.md)
- - [ignore_handler](api/faststream/broker/middlewares/exception/ignore_handler.md)
- - logging
- - [CriticalLogMiddleware](api/faststream/broker/middlewares/logging/CriticalLogMiddleware.md)
- - proto
- - [EndpointProto](api/faststream/broker/proto/EndpointProto.md)
- - [SetupAble](api/faststream/broker/proto/SetupAble.md)
- - publisher
- - fake
- - [FakePublisher](api/faststream/broker/publisher/fake/FakePublisher.md)
- - proto
- - [BasePublisherProto](api/faststream/broker/publisher/proto/BasePublisherProto.md)
- - [ProducerProto](api/faststream/broker/publisher/proto/ProducerProto.md)
- - [PublisherProto](api/faststream/broker/publisher/proto/PublisherProto.md)
- - usecase
- - [PublisherUsecase](api/faststream/broker/publisher/usecase/PublisherUsecase.md)
- - response
- - [Response](api/faststream/broker/response/Response.md)
- - [ensure_response](api/faststream/broker/response/ensure_response.md)
- - router
- - [ArgsContainer](api/faststream/broker/router/ArgsContainer.md)
- - [BrokerRouter](api/faststream/broker/router/BrokerRouter.md)
- - [SubscriberRoute](api/faststream/broker/router/SubscriberRoute.md)
- - schemas
- - [NameRequired](api/faststream/broker/schemas/NameRequired.md)
- - subscriber
- - call_item
- - [HandlerItem](api/faststream/broker/subscriber/call_item/HandlerItem.md)
- - mixins
- - [ConcurrentMixin](api/faststream/broker/subscriber/mixins/ConcurrentMixin.md)
- - [TasksMixin](api/faststream/broker/subscriber/mixins/TasksMixin.md)
- - proto
- - [SubscriberProto](api/faststream/broker/subscriber/proto/SubscriberProto.md)
- - usecase
- - [SubscriberUsecase](api/faststream/broker/subscriber/usecase/SubscriberUsecase.md)
- - types
- - [PublisherMiddleware](api/faststream/broker/types/PublisherMiddleware.md)
- - utils
- - [MultiLock](api/faststream/broker/utils/MultiLock.md)
- - [default_filter](api/faststream/broker/utils/default_filter.md)
- - [get_watcher_context](api/faststream/broker/utils/get_watcher_context.md)
- - [process_msg](api/faststream/broker/utils/process_msg.md)
- - [resolve_custom_func](api/faststream/broker/utils/resolve_custom_func.md)
- - wrapper
- - call
- - [HandlerCallWrapper](api/faststream/broker/wrapper/call/HandlerCallWrapper.md)
- - proto
- - [WrapperProto](api/faststream/broker/wrapper/proto/WrapperProto.md)
- - cli
- - docs
- - app
- - [gen](api/faststream/cli/docs/app/gen.md)
- - [serve](api/faststream/cli/docs/app/serve.md)
- - main
- - [main](api/faststream/cli/main/main.md)
- - [publish](api/faststream/cli/main/publish.md)
- - [publish_message](api/faststream/cli/main/publish_message.md)
- - [run](api/faststream/cli/main/run.md)
- - [version_callback](api/faststream/cli/main/version_callback.md)
- - supervisors
- - asgi_multiprocess
- - [ASGIMultiprocess](api/faststream/cli/supervisors/asgi_multiprocess/ASGIMultiprocess.md)
- - basereload
- - [BaseReload](api/faststream/cli/supervisors/basereload/BaseReload.md)
- - multiprocess
- - [Multiprocess](api/faststream/cli/supervisors/multiprocess/Multiprocess.md)
- - utils
- - [get_subprocess](api/faststream/cli/supervisors/utils/get_subprocess.md)
- - [set_exit](api/faststream/cli/supervisors/utils/set_exit.md)
- - [subprocess_started](api/faststream/cli/supervisors/utils/subprocess_started.md)
- - watchfiles
- - [ExtendedFilter](api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md)
- - [WatchReloader](api/faststream/cli/supervisors/watchfiles/WatchReloader.md)
- - utils
- - imports
- - [get_app_path](api/faststream/cli/utils/imports/get_app_path.md)
- - [import_from_string](api/faststream/cli/utils/imports/import_from_string.md)
- - [import_object](api/faststream/cli/utils/imports/import_object.md)
- - [try_import_app](api/faststream/cli/utils/imports/try_import_app.md)
- - logs
- - [LogLevels](api/faststream/cli/utils/logs/LogLevels.md)
- - [get_log_level](api/faststream/cli/utils/logs/get_log_level.md)
- - [set_log_level](api/faststream/cli/utils/logs/set_log_level.md)
- - parser
- - [is_bind_arg](api/faststream/cli/utils/parser/is_bind_arg.md)
- - [parse_cli_args](api/faststream/cli/utils/parser/parse_cli_args.md)
- - [remove_prefix](api/faststream/cli/utils/parser/remove_prefix.md)
- - confluent
- - [KafkaBroker](api/faststream/confluent/KafkaBroker.md)
- - [KafkaPublisher](api/faststream/confluent/KafkaPublisher.md)
- - [KafkaResponse](api/faststream/confluent/KafkaResponse.md)
- - [KafkaRoute](api/faststream/confluent/KafkaRoute.md)
- - [KafkaRouter](api/faststream/confluent/KafkaRouter.md)
- - [TestApp](api/faststream/confluent/TestApp.md)
- - [TestKafkaBroker](api/faststream/confluent/TestKafkaBroker.md)
- - [TopicPartition](api/faststream/confluent/TopicPartition.md)
- - broker
- - [KafkaBroker](api/faststream/confluent/broker/KafkaBroker.md)
- - broker
- - [KafkaBroker](api/faststream/confluent/broker/broker/KafkaBroker.md)
- - logging
- - [KafkaLoggingBroker](api/faststream/confluent/broker/logging/KafkaLoggingBroker.md)
- - registrator
- - [KafkaRegistrator](api/faststream/confluent/broker/registrator/KafkaRegistrator.md)
- - client
- - [AsyncConfluentConsumer](api/faststream/confluent/client/AsyncConfluentConsumer.md)
- - [AsyncConfluentProducer](api/faststream/confluent/client/AsyncConfluentProducer.md)
- - [BatchBuilder](api/faststream/confluent/client/BatchBuilder.md)
- - [check_msg_error](api/faststream/confluent/client/check_msg_error.md)
- - [create_topics](api/faststream/confluent/client/create_topics.md)
- - config
- - [BrokerAddressFamily](api/faststream/confluent/config/BrokerAddressFamily.md)
- - [BuiltinFeatures](api/faststream/confluent/config/BuiltinFeatures.md)
- - [ClientDNSLookup](api/faststream/confluent/config/ClientDNSLookup.md)
- - [CompressionCodec](api/faststream/confluent/config/CompressionCodec.md)
- - [CompressionType](api/faststream/confluent/config/CompressionType.md)
- - [ConfluentConfig](api/faststream/confluent/config/ConfluentConfig.md)
- - [ConfluentFastConfig](api/faststream/confluent/config/ConfluentFastConfig.md)
- - [Debug](api/faststream/confluent/config/Debug.md)
- - [GroupProtocol](api/faststream/confluent/config/GroupProtocol.md)
- - [IsolationLevel](api/faststream/confluent/config/IsolationLevel.md)
- - [OffsetStoreMethod](api/faststream/confluent/config/OffsetStoreMethod.md)
- - [SASLOAUTHBearerMethod](api/faststream/confluent/config/SASLOAUTHBearerMethod.md)
- - [SecurityProtocol](api/faststream/confluent/config/SecurityProtocol.md)
- - fastapi
- - [Context](api/faststream/confluent/fastapi/Context.md)
- - [KafkaRouter](api/faststream/confluent/fastapi/KafkaRouter.md)
- - fastapi
- - [KafkaRouter](api/faststream/confluent/fastapi/fastapi/KafkaRouter.md)
- - message
- - [ConsumerProtocol](api/faststream/confluent/message/ConsumerProtocol.md)
- - [FakeConsumer](api/faststream/confluent/message/FakeConsumer.md)
- - [KafkaMessage](api/faststream/confluent/message/KafkaMessage.md)
- - opentelemetry
- - [KafkaTelemetryMiddleware](api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md)
- - middleware
- - [KafkaTelemetryMiddleware](api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md)
- - provider
- - [BaseConfluentTelemetrySettingsProvider](api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md)
- - [BatchConfluentTelemetrySettingsProvider](api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md)
- - [ConfluentTelemetrySettingsProvider](api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md)
- - [telemetry_attributes_provider_factory](api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md)
- - parser
- - [AsyncConfluentParser](api/faststream/confluent/parser/AsyncConfluentParser.md)
- - prometheus
- - [KafkaPrometheusMiddleware](api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md)
- - middleware
- - [KafkaPrometheusMiddleware](api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md)
- - provider
- - [BaseConfluentMetricsSettingsProvider](api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md)
- - [BatchConfluentMetricsSettingsProvider](api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md)
- - [ConfluentMetricsSettingsProvider](api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md)
- - [settings_provider_factory](api/faststream/confluent/prometheus/provider/settings_provider_factory.md)
- - publisher
- - asyncapi
- - [AsyncAPIBatchPublisher](api/faststream/confluent/publisher/asyncapi/AsyncAPIBatchPublisher.md)
- - [AsyncAPIDefaultPublisher](api/faststream/confluent/publisher/asyncapi/AsyncAPIDefaultPublisher.md)
- - [AsyncAPIPublisher](api/faststream/confluent/publisher/asyncapi/AsyncAPIPublisher.md)
- - producer
- - [AsyncConfluentFastProducer](api/faststream/confluent/publisher/producer/AsyncConfluentFastProducer.md)
- - usecase
- - [BatchPublisher](api/faststream/confluent/publisher/usecase/BatchPublisher.md)
- - [DefaultPublisher](api/faststream/confluent/publisher/usecase/DefaultPublisher.md)
- - [LogicPublisher](api/faststream/confluent/publisher/usecase/LogicPublisher.md)
- - response
- - [KafkaResponse](api/faststream/confluent/response/KafkaResponse.md)
- - router
- - [KafkaPublisher](api/faststream/confluent/router/KafkaPublisher.md)
- - [KafkaRoute](api/faststream/confluent/router/KafkaRoute.md)
- - [KafkaRouter](api/faststream/confluent/router/KafkaRouter.md)
- - schemas
- - [TopicPartition](api/faststream/confluent/schemas/TopicPartition.md)
- - params
- - [ConsumerConnectionParams](api/faststream/confluent/schemas/params/ConsumerConnectionParams.md)
- - partition
- - [TopicPartition](api/faststream/confluent/schemas/partition/TopicPartition.md)
- - security
- - [parse_security](api/faststream/confluent/security/parse_security.md)
- - subscriber
- - asyncapi
- - [AsyncAPIBatchSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIBatchSubscriber.md)
- - [AsyncAPIConcurrentDefaultSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md)
- - [AsyncAPIDefaultSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md)
- - [AsyncAPISubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPISubscriber.md)
- - factory
- - [create_subscriber](api/faststream/confluent/subscriber/factory/create_subscriber.md)
- - usecase
- - [BatchSubscriber](api/faststream/confluent/subscriber/usecase/BatchSubscriber.md)
- - [ConcurrentDefaultSubscriber](api/faststream/confluent/subscriber/usecase/ConcurrentDefaultSubscriber.md)
- - [DefaultSubscriber](api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md)
- - [LogicSubscriber](api/faststream/confluent/subscriber/usecase/LogicSubscriber.md)
- - testing
- - [FakeProducer](api/faststream/confluent/testing/FakeProducer.md)
- - [MockConfluentMessage](api/faststream/confluent/testing/MockConfluentMessage.md)
- - [TestKafkaBroker](api/faststream/confluent/testing/TestKafkaBroker.md)
- - [build_message](api/faststream/confluent/testing/build_message.md)
- - constants
- - [ContentTypes](api/faststream/constants/ContentTypes.md)
- - exceptions
- - [AckMessage](api/faststream/exceptions/AckMessage.md)
- - [FastStreamException](api/faststream/exceptions/FastStreamException.md)
- - [HandlerException](api/faststream/exceptions/HandlerException.md)
- - [IgnoredException](api/faststream/exceptions/IgnoredException.md)
- - [NackMessage](api/faststream/exceptions/NackMessage.md)
- - [OperationForbiddenError](api/faststream/exceptions/OperationForbiddenError.md)
- - [RejectMessage](api/faststream/exceptions/RejectMessage.md)
- - [SetupError](api/faststream/exceptions/SetupError.md)
- - [SkipMessage](api/faststream/exceptions/SkipMessage.md)
- - [StopApplication](api/faststream/exceptions/StopApplication.md)
- - [StopConsume](api/faststream/exceptions/StopConsume.md)
- - [SubscriberNotFound](api/faststream/exceptions/SubscriberNotFound.md)
- - [ValidationError](api/faststream/exceptions/ValidationError.md)
- - kafka
- - [KafkaBroker](api/faststream/kafka/KafkaBroker.md)
- - [KafkaPublisher](api/faststream/kafka/KafkaPublisher.md)
- - [KafkaResponse](api/faststream/kafka/KafkaResponse.md)
- - [KafkaRoute](api/faststream/kafka/KafkaRoute.md)
- - [KafkaRouter](api/faststream/kafka/KafkaRouter.md)
- - [TestApp](api/faststream/kafka/TestApp.md)
- - [TestKafkaBroker](api/faststream/kafka/TestKafkaBroker.md)
- - [TopicPartition](api/faststream/kafka/TopicPartition.md)
- - broker
- - [KafkaBroker](api/faststream/kafka/broker/KafkaBroker.md)
- - broker
- - [KafkaBroker](api/faststream/kafka/broker/broker/KafkaBroker.md)
- - logging
- - [KafkaLoggingBroker](api/faststream/kafka/broker/logging/KafkaLoggingBroker.md)
- - registrator
- - [KafkaRegistrator](api/faststream/kafka/broker/registrator/KafkaRegistrator.md)
- - exceptions
- - [BatchBufferOverflowException](api/faststream/kafka/exceptions/BatchBufferOverflowException.md)
- - fastapi
- - [Context](api/faststream/kafka/fastapi/Context.md)
- - [KafkaRouter](api/faststream/kafka/fastapi/KafkaRouter.md)
- - fastapi
- - [KafkaRouter](api/faststream/kafka/fastapi/fastapi/KafkaRouter.md)
- - message
- - [ConsumerProtocol](api/faststream/kafka/message/ConsumerProtocol.md)
- - [FakeConsumer](api/faststream/kafka/message/FakeConsumer.md)
- - [KafkaAckableMessage](api/faststream/kafka/message/KafkaAckableMessage.md)
- - [KafkaMessage](api/faststream/kafka/message/KafkaMessage.md)
- - opentelemetry
- - [KafkaTelemetryMiddleware](api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md)
- - middleware
- - [KafkaTelemetryMiddleware](api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md)
- - provider
- - [BaseKafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md)
- - [BatchKafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md)
- - [KafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md)
- - [telemetry_attributes_provider_factory](api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md)
- - parser
- - [AioKafkaBatchParser](api/faststream/kafka/parser/AioKafkaBatchParser.md)
- - [AioKafkaParser](api/faststream/kafka/parser/AioKafkaParser.md)
- - prometheus
- - [KafkaPrometheusMiddleware](api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md)
- - middleware
- - [KafkaPrometheusMiddleware](api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md)
- - provider
- - [BaseKafkaMetricsSettingsProvider](api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md)
- - [BatchKafkaMetricsSettingsProvider](api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md)
- - [KafkaMetricsSettingsProvider](api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md)
- - [settings_provider_factory](api/faststream/kafka/prometheus/provider/settings_provider_factory.md)
- - publisher
- - asyncapi
- - [AsyncAPIBatchPublisher](api/faststream/kafka/publisher/asyncapi/AsyncAPIBatchPublisher.md)
- - [AsyncAPIDefaultPublisher](api/faststream/kafka/publisher/asyncapi/AsyncAPIDefaultPublisher.md)
- - [AsyncAPIPublisher](api/faststream/kafka/publisher/asyncapi/AsyncAPIPublisher.md)
- - producer
- - [AioKafkaFastProducer](api/faststream/kafka/publisher/producer/AioKafkaFastProducer.md)
- - usecase
- - [BatchPublisher](api/faststream/kafka/publisher/usecase/BatchPublisher.md)
- - [DefaultPublisher](api/faststream/kafka/publisher/usecase/DefaultPublisher.md)
- - [LogicPublisher](api/faststream/kafka/publisher/usecase/LogicPublisher.md)
- - response
- - [KafkaResponse](api/faststream/kafka/response/KafkaResponse.md)
- - router
- - [KafkaPublisher](api/faststream/kafka/router/KafkaPublisher.md)
- - [KafkaRoute](api/faststream/kafka/router/KafkaRoute.md)
- - [KafkaRouter](api/faststream/kafka/router/KafkaRouter.md)
- - schemas
- - params
- - [ConsumerConnectionParams](api/faststream/kafka/schemas/params/ConsumerConnectionParams.md)
- - security
- - [parse_security](api/faststream/kafka/security/parse_security.md)
- - subscriber
- - asyncapi
- - [AsyncAPIBatchSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIBatchSubscriber.md)
- - [AsyncAPIConcurrentDefaultSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md)
- - [AsyncAPIDefaultSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md)
- - [AsyncAPISubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPISubscriber.md)
- - factory
- - [create_subscriber](api/faststream/kafka/subscriber/factory/create_subscriber.md)
- - usecase
- - [BatchSubscriber](api/faststream/kafka/subscriber/usecase/BatchSubscriber.md)
- - [ConcurrentDefaultSubscriber](api/faststream/kafka/subscriber/usecase/ConcurrentDefaultSubscriber.md)
- - [DefaultSubscriber](api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md)
- - [LogicSubscriber](api/faststream/kafka/subscriber/usecase/LogicSubscriber.md)
- - testing
- - [FakeProducer](api/faststream/kafka/testing/FakeProducer.md)
- - [TestKafkaBroker](api/faststream/kafka/testing/TestKafkaBroker.md)
- - [build_message](api/faststream/kafka/testing/build_message.md)
- - log
- - formatter
- - [ColourizedFormatter](api/faststream/log/formatter/ColourizedFormatter.md)
- - [expand_log_field](api/faststream/log/formatter/expand_log_field.md)
- - logging
- - [ExtendedFilter](api/faststream/log/logging/ExtendedFilter.md)
- - [get_broker_logger](api/faststream/log/logging/get_broker_logger.md)
- - [set_logger_fmt](api/faststream/log/logging/set_logger_fmt.md)
- - nats
- - [AckPolicy](api/faststream/nats/AckPolicy.md)
- - [ConsumerConfig](api/faststream/nats/ConsumerConfig.md)
- - [DeliverPolicy](api/faststream/nats/DeliverPolicy.md)
- - [DiscardPolicy](api/faststream/nats/DiscardPolicy.md)
- - [ExternalStream](api/faststream/nats/ExternalStream.md)
- - [JStream](api/faststream/nats/JStream.md)
- - [KvWatch](api/faststream/nats/KvWatch.md)
- - [NatsBroker](api/faststream/nats/NatsBroker.md)
- - [NatsPublisher](api/faststream/nats/NatsPublisher.md)
- - [NatsResponse](api/faststream/nats/NatsResponse.md)
- - [NatsRoute](api/faststream/nats/NatsRoute.md)
- - [NatsRouter](api/faststream/nats/NatsRouter.md)
- - [ObjWatch](api/faststream/nats/ObjWatch.md)
- - [Placement](api/faststream/nats/Placement.md)
- - [PullSub](api/faststream/nats/PullSub.md)
- - [RePublish](api/faststream/nats/RePublish.md)
- - [ReplayPolicy](api/faststream/nats/ReplayPolicy.md)
- - [RetentionPolicy](api/faststream/nats/RetentionPolicy.md)
- - [StorageType](api/faststream/nats/StorageType.md)
- - [StreamConfig](api/faststream/nats/StreamConfig.md)
- - [StreamSource](api/faststream/nats/StreamSource.md)
- - [TestApp](api/faststream/nats/TestApp.md)
- - [TestNatsBroker](api/faststream/nats/TestNatsBroker.md)
- - broker
- - [NatsBroker](api/faststream/nats/broker/NatsBroker.md)
- - broker
- - [NatsBroker](api/faststream/nats/broker/broker/NatsBroker.md)
- - logging
- - [NatsLoggingBroker](api/faststream/nats/broker/logging/NatsLoggingBroker.md)
- - registrator
- - [NatsRegistrator](api/faststream/nats/broker/registrator/NatsRegistrator.md)
- - fastapi
- - [Context](api/faststream/nats/fastapi/Context.md)
- - [NatsRouter](api/faststream/nats/fastapi/NatsRouter.md)
- - fastapi
- - [NatsRouter](api/faststream/nats/fastapi/fastapi/NatsRouter.md)
- - helpers
- - [KVBucketDeclarer](api/faststream/nats/helpers/KVBucketDeclarer.md)
- - [OSBucketDeclarer](api/faststream/nats/helpers/OSBucketDeclarer.md)
- - [StreamBuilder](api/faststream/nats/helpers/StreamBuilder.md)
- - bucket_declarer
- - [KVBucketDeclarer](api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md)
- - obj_storage_declarer
- - [OSBucketDeclarer](api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md)
- - object_builder
- - [StreamBuilder](api/faststream/nats/helpers/object_builder/StreamBuilder.md)
- - message
- - [NatsBatchMessage](api/faststream/nats/message/NatsBatchMessage.md)
- - [NatsKvMessage](api/faststream/nats/message/NatsKvMessage.md)
- - [NatsMessage](api/faststream/nats/message/NatsMessage.md)
- - [NatsObjMessage](api/faststream/nats/message/NatsObjMessage.md)
- - opentelemetry
- - [NatsTelemetryMiddleware](api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md)
- - middleware
- - [NatsTelemetryMiddleware](api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md)
- - provider
- - [BaseNatsTelemetrySettingsProvider](api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md)
- - [NatsBatchTelemetrySettingsProvider](api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md)
- - [NatsTelemetrySettingsProvider](api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md)
- - [telemetry_attributes_provider_factory](api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md)
- - parser
- - [BatchParser](api/faststream/nats/parser/BatchParser.md)
- - [JsParser](api/faststream/nats/parser/JsParser.md)
- - [KvParser](api/faststream/nats/parser/KvParser.md)
- - [NatsBaseParser](api/faststream/nats/parser/NatsBaseParser.md)
- - [NatsParser](api/faststream/nats/parser/NatsParser.md)
- - [ObjParser](api/faststream/nats/parser/ObjParser.md)
- - prometheus
- - [NatsPrometheusMiddleware](api/faststream/nats/prometheus/NatsPrometheusMiddleware.md)
- - middleware
- - [NatsPrometheusMiddleware](api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md)
- - provider
- - [BaseNatsMetricsSettingsProvider](api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md)
- - [BatchNatsMetricsSettingsProvider](api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md)
- - [NatsMetricsSettingsProvider](api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md)
- - [settings_provider_factory](api/faststream/nats/prometheus/provider/settings_provider_factory.md)
- - publisher
- - asyncapi
- - [AsyncAPIPublisher](api/faststream/nats/publisher/asyncapi/AsyncAPIPublisher.md)
- - producer
- - [NatsFastProducer](api/faststream/nats/publisher/producer/NatsFastProducer.md)
- - [NatsJSFastProducer](api/faststream/nats/publisher/producer/NatsJSFastProducer.md)
- - usecase
- - [LogicPublisher](api/faststream/nats/publisher/usecase/LogicPublisher.md)
- - response
- - [NatsResponse](api/faststream/nats/response/NatsResponse.md)
- - router
- - [NatsPublisher](api/faststream/nats/router/NatsPublisher.md)
- - [NatsRoute](api/faststream/nats/router/NatsRoute.md)
- - [NatsRouter](api/faststream/nats/router/NatsRouter.md)
- - schemas
- - [JStream](api/faststream/nats/schemas/JStream.md)
- - [KvWatch](api/faststream/nats/schemas/KvWatch.md)
- - [ObjWatch](api/faststream/nats/schemas/ObjWatch.md)
- - [PullSub](api/faststream/nats/schemas/PullSub.md)
- - js_stream
- - [JStream](api/faststream/nats/schemas/js_stream/JStream.md)
- - [compile_nats_wildcard](api/faststream/nats/schemas/js_stream/compile_nats_wildcard.md)
- - [is_subject_match_wildcard](api/faststream/nats/schemas/js_stream/is_subject_match_wildcard.md)
- - kv_watch
- - [KvWatch](api/faststream/nats/schemas/kv_watch/KvWatch.md)
- - obj_watch
- - [ObjWatch](api/faststream/nats/schemas/obj_watch/ObjWatch.md)
- - pull_sub
- - [PullSub](api/faststream/nats/schemas/pull_sub/PullSub.md)
- - security
- - [parse_security](api/faststream/nats/security/parse_security.md)
- - subscriber
- - asyncapi
- - [AsyncAPIBatchPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md)
- - [AsyncAPIConcurrentCoreSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md)
- - [AsyncAPIConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md)
- - [AsyncAPIConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md)
- - [AsyncAPICoreSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md)
- - [AsyncAPIKeyValueWatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md)
- - [AsyncAPIObjStoreWatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md)
- - [AsyncAPIPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md)
- - [AsyncAPIStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md)
- - [AsyncAPISubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPISubscriber.md)
- - factory
- - [create_subscriber](api/faststream/nats/subscriber/factory/create_subscriber.md)
- - subscription
- - [UnsubscribeAdapter](api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md)
- - [Unsubscriptable](api/faststream/nats/subscriber/subscription/Unsubscriptable.md)
- - [Watchable](api/faststream/nats/subscriber/subscription/Watchable.md)
- - usecase
- - [BatchPullStreamSubscriber](api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md)
- - [ConcurrentCoreSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md)
- - [ConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md)
- - [ConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md)
- - [CoreSubscriber](api/faststream/nats/subscriber/usecase/CoreSubscriber.md)
- - [KeyValueWatchSubscriber](api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md)
- - [LogicSubscriber](api/faststream/nats/subscriber/usecase/LogicSubscriber.md)
- - [ObjStoreWatchSubscriber](api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md)
- - [PullStreamSubscriber](api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md)
- - [PushStreamSubscription](api/faststream/nats/subscriber/usecase/PushStreamSubscription.md)
- - testing
- - [FakeProducer](api/faststream/nats/testing/FakeProducer.md)
- - [PatchedMessage](api/faststream/nats/testing/PatchedMessage.md)
- - [TestNatsBroker](api/faststream/nats/testing/TestNatsBroker.md)
- - [build_message](api/faststream/nats/testing/build_message.md)
- - opentelemetry
- - [Baggage](api/faststream/opentelemetry/Baggage.md)
- - [TelemetryMiddleware](api/faststream/opentelemetry/TelemetryMiddleware.md)
- - [TelemetrySettingsProvider](api/faststream/opentelemetry/TelemetrySettingsProvider.md)
- - baggage
- - [Baggage](api/faststream/opentelemetry/baggage/Baggage.md)
- - consts
- - [MessageAction](api/faststream/opentelemetry/consts/MessageAction.md)
- - middleware
- - [BaseTelemetryMiddleware](api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md)
- - [TelemetryMiddleware](api/faststream/opentelemetry/middleware/TelemetryMiddleware.md)
- - provider
- - [TelemetrySettingsProvider](api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md)
- - prometheus
- - [BasePrometheusMiddleware](api/faststream/prometheus/BasePrometheusMiddleware.md)
- - [ConsumeAttrs](api/faststream/prometheus/ConsumeAttrs.md)
- - [MetricsSettingsProvider](api/faststream/prometheus/MetricsSettingsProvider.md)
- - container
- - [MetricsContainer](api/faststream/prometheus/container/MetricsContainer.md)
- - manager
- - [MetricsManager](api/faststream/prometheus/manager/MetricsManager.md)
- - middleware
- - [BasePrometheusMiddleware](api/faststream/prometheus/middleware/BasePrometheusMiddleware.md)
- - [PrometheusMiddleware](api/faststream/prometheus/middleware/PrometheusMiddleware.md)
- - provider
- - [MetricsSettingsProvider](api/faststream/prometheus/provider/MetricsSettingsProvider.md)
- - types
- - [ConsumeAttrs](api/faststream/prometheus/types/ConsumeAttrs.md)
- - [ProcessingStatus](api/faststream/prometheus/types/ProcessingStatus.md)
- - [PublishingStatus](api/faststream/prometheus/types/PublishingStatus.md)
- - rabbit
- - [ExchangeType](api/faststream/rabbit/ExchangeType.md)
- - [QueueType](api/faststream/rabbit/QueueType.md)
- - [RabbitBroker](api/faststream/rabbit/RabbitBroker.md)
- - [RabbitExchange](api/faststream/rabbit/RabbitExchange.md)
- - [RabbitPublisher](api/faststream/rabbit/RabbitPublisher.md)
- - [RabbitQueue](api/faststream/rabbit/RabbitQueue.md)
- - [RabbitResponse](api/faststream/rabbit/RabbitResponse.md)
- - [RabbitRoute](api/faststream/rabbit/RabbitRoute.md)
- - [RabbitRouter](api/faststream/rabbit/RabbitRouter.md)
- - [ReplyConfig](api/faststream/rabbit/ReplyConfig.md)
- - [TestApp](api/faststream/rabbit/TestApp.md)
- - [TestRabbitBroker](api/faststream/rabbit/TestRabbitBroker.md)
- - broker
- - [RabbitBroker](api/faststream/rabbit/broker/RabbitBroker.md)
- - broker
- - [RabbitBroker](api/faststream/rabbit/broker/broker/RabbitBroker.md)
- - logging
- - [RabbitLoggingBroker](api/faststream/rabbit/broker/logging/RabbitLoggingBroker.md)
- - registrator
- - [RabbitRegistrator](api/faststream/rabbit/broker/registrator/RabbitRegistrator.md)
- - fastapi
- - [Context](api/faststream/rabbit/fastapi/Context.md)
- - [RabbitRouter](api/faststream/rabbit/fastapi/RabbitRouter.md)
- - router
- - [RabbitRouter](api/faststream/rabbit/fastapi/router/RabbitRouter.md)
- - helpers
- - declarer
- - [RabbitDeclarer](api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md)
- - message
- - [RabbitMessage](api/faststream/rabbit/message/RabbitMessage.md)
- - opentelemetry
- - [RabbitTelemetryMiddleware](api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md)
- - middleware
- - [RabbitTelemetryMiddleware](api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md)
- - provider
- - [RabbitTelemetrySettingsProvider](api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md)
- - parser
- - [AioPikaParser](api/faststream/rabbit/parser/AioPikaParser.md)
- - prometheus
- - [RabbitPrometheusMiddleware](api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md)
- - middleware
- - [RabbitPrometheusMiddleware](api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md)
- - provider
- - [RabbitMetricsSettingsProvider](api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md)
- - publisher
- - asyncapi
- - [AsyncAPIPublisher](api/faststream/rabbit/publisher/asyncapi/AsyncAPIPublisher.md)
- - producer
- - [AioPikaFastProducer](api/faststream/rabbit/publisher/producer/AioPikaFastProducer.md)
- - usecase
- - [LogicPublisher](api/faststream/rabbit/publisher/usecase/LogicPublisher.md)
- - [PublishKwargs](api/faststream/rabbit/publisher/usecase/PublishKwargs.md)
- - [RequestPublishKwargs](api/faststream/rabbit/publisher/usecase/RequestPublishKwargs.md)
- - response
- - [RabbitResponse](api/faststream/rabbit/response/RabbitResponse.md)
- - router
- - [RabbitPublisher](api/faststream/rabbit/router/RabbitPublisher.md)
- - [RabbitRoute](api/faststream/rabbit/router/RabbitRoute.md)
- - [RabbitRouter](api/faststream/rabbit/router/RabbitRouter.md)
- - schemas
- - [BaseRMQInformation](api/faststream/rabbit/schemas/BaseRMQInformation.md)
- - [ExchangeType](api/faststream/rabbit/schemas/ExchangeType.md)
- - [QueueType](api/faststream/rabbit/schemas/QueueType.md)
- - [RabbitExchange](api/faststream/rabbit/schemas/RabbitExchange.md)
- - [RabbitQueue](api/faststream/rabbit/schemas/RabbitQueue.md)
- - [ReplyConfig](api/faststream/rabbit/schemas/ReplyConfig.md)
- - constants
- - [ExchangeType](api/faststream/rabbit/schemas/constants/ExchangeType.md)
- - exchange
- - [RabbitExchange](api/faststream/rabbit/schemas/exchange/RabbitExchange.md)
- - proto
- - [BaseRMQInformation](api/faststream/rabbit/schemas/proto/BaseRMQInformation.md)
- - queue
- - [ClassicQueueArgs](api/faststream/rabbit/schemas/queue/ClassicQueueArgs.md)
- - [CommonQueueArgs](api/faststream/rabbit/schemas/queue/CommonQueueArgs.md)
- - [QueueClassicTypeSpecificArgs](api/faststream/rabbit/schemas/queue/QueueClassicTypeSpecificArgs.md)
- - [QueueQuorumTypeSpecificArgs](api/faststream/rabbit/schemas/queue/QueueQuorumTypeSpecificArgs.md)
- - [QueueStreamTypeSpecificArgs](api/faststream/rabbit/schemas/queue/QueueStreamTypeSpecificArgs.md)
- - [QueueType](api/faststream/rabbit/schemas/queue/QueueType.md)
- - [QuorumQueueArgs](api/faststream/rabbit/schemas/queue/QuorumQueueArgs.md)
- - [RabbitQueue](api/faststream/rabbit/schemas/queue/RabbitQueue.md)
- - [SharedQueueClassicAndQuorumArgs](api/faststream/rabbit/schemas/queue/SharedQueueClassicAndQuorumArgs.md)
- - [StreamQueueArgs](api/faststream/rabbit/schemas/queue/StreamQueueArgs.md)
- - reply
- - [ReplyConfig](api/faststream/rabbit/schemas/reply/ReplyConfig.md)
- - security
- - [parse_security](api/faststream/rabbit/security/parse_security.md)
- - subscriber
- - asyncapi
- - [AsyncAPISubscriber](api/faststream/rabbit/subscriber/asyncapi/AsyncAPISubscriber.md)
- - factory
- - [create_subscriber](api/faststream/rabbit/subscriber/factory/create_subscriber.md)
- - usecase
- - [LogicSubscriber](api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md)
- - testing
- - [FakeProducer](api/faststream/rabbit/testing/FakeProducer.md)
- - [PatchedMessage](api/faststream/rabbit/testing/PatchedMessage.md)
- - [TestRabbitBroker](api/faststream/rabbit/testing/TestRabbitBroker.md)
- - [apply_pattern](api/faststream/rabbit/testing/apply_pattern.md)
- - [build_message](api/faststream/rabbit/testing/build_message.md)
- - utils
- - [build_url](api/faststream/rabbit/utils/build_url.md)
- - [build_virtual_host](api/faststream/rabbit/utils/build_virtual_host.md)
- - [is_routing_exchange](api/faststream/rabbit/utils/is_routing_exchange.md)
- - redis
- - [ListSub](api/faststream/redis/ListSub.md)
- - [PubSub](api/faststream/redis/PubSub.md)
- - [RedisBroker](api/faststream/redis/RedisBroker.md)
- - [RedisPublisher](api/faststream/redis/RedisPublisher.md)
- - [RedisResponse](api/faststream/redis/RedisResponse.md)
- - [RedisRoute](api/faststream/redis/RedisRoute.md)
- - [RedisRouter](api/faststream/redis/RedisRouter.md)
- - [StreamSub](api/faststream/redis/StreamSub.md)
- - [TestApp](api/faststream/redis/TestApp.md)
- - [TestRedisBroker](api/faststream/redis/TestRedisBroker.md)
- - broker
- - broker
- - [RedisBroker](api/faststream/redis/broker/broker/RedisBroker.md)
- - logging
- - [RedisLoggingBroker](api/faststream/redis/broker/logging/RedisLoggingBroker.md)
- - registrator
- - [RedisRegistrator](api/faststream/redis/broker/registrator/RedisRegistrator.md)
- - fastapi
- - [Context](api/faststream/redis/fastapi/Context.md)
- - [RedisRouter](api/faststream/redis/fastapi/RedisRouter.md)
- - fastapi
- - [RedisRouter](api/faststream/redis/fastapi/fastapi/RedisRouter.md)
- - message
- - [BatchListMessage](api/faststream/redis/message/BatchListMessage.md)
- - [BatchStreamMessage](api/faststream/redis/message/BatchStreamMessage.md)
- - [DefaultListMessage](api/faststream/redis/message/DefaultListMessage.md)
- - [DefaultStreamMessage](api/faststream/redis/message/DefaultStreamMessage.md)
- - [ListMessage](api/faststream/redis/message/ListMessage.md)
- - [PubSubMessage](api/faststream/redis/message/PubSubMessage.md)
- - [RedisBatchListMessage](api/faststream/redis/message/RedisBatchListMessage.md)
- - [RedisBatchStreamMessage](api/faststream/redis/message/RedisBatchStreamMessage.md)
- - [RedisListMessage](api/faststream/redis/message/RedisListMessage.md)
- - [RedisMessage](api/faststream/redis/message/RedisMessage.md)
- - [RedisStreamMessage](api/faststream/redis/message/RedisStreamMessage.md)
- - [StreamMessage](api/faststream/redis/message/StreamMessage.md)
- - [UnifyRedisDict](api/faststream/redis/message/UnifyRedisDict.md)
- - [UnifyRedisMessage](api/faststream/redis/message/UnifyRedisMessage.md)
- - opentelemetry
- - [RedisTelemetryMiddleware](api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md)
- - middleware
- - [RedisTelemetryMiddleware](api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md)
- - provider
- - [RedisTelemetrySettingsProvider](api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md)
- - parser
- - [RawMessage](api/faststream/redis/parser/RawMessage.md)
- - [RedisBatchListParser](api/faststream/redis/parser/RedisBatchListParser.md)
- - [RedisBatchStreamParser](api/faststream/redis/parser/RedisBatchStreamParser.md)
- - [RedisListParser](api/faststream/redis/parser/RedisListParser.md)
- - [RedisPubSubParser](api/faststream/redis/parser/RedisPubSubParser.md)
- - [RedisStreamParser](api/faststream/redis/parser/RedisStreamParser.md)
- - [SimpleParser](api/faststream/redis/parser/SimpleParser.md)
- - prometheus
- - [RedisPrometheusMiddleware](api/faststream/redis/prometheus/RedisPrometheusMiddleware.md)
- - middleware
- - [RedisPrometheusMiddleware](api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md)
- - provider
- - [BaseRedisMetricsSettingsProvider](api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md)
- - [BatchRedisMetricsSettingsProvider](api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md)
- - [RedisMetricsSettingsProvider](api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md)
- - [settings_provider_factory](api/faststream/redis/prometheus/provider/settings_provider_factory.md)
- - publisher
- - asyncapi
- - [AsyncAPIChannelPublisher](api/faststream/redis/publisher/asyncapi/AsyncAPIChannelPublisher.md)
- - [AsyncAPIListBatchPublisher](api/faststream/redis/publisher/asyncapi/AsyncAPIListBatchPublisher.md)
- - [AsyncAPIListPublisher](api/faststream/redis/publisher/asyncapi/AsyncAPIListPublisher.md)
- - [AsyncAPIPublisher](api/faststream/redis/publisher/asyncapi/AsyncAPIPublisher.md)
- - [AsyncAPIStreamPublisher](api/faststream/redis/publisher/asyncapi/AsyncAPIStreamPublisher.md)
- - producer
- - [RedisFastProducer](api/faststream/redis/publisher/producer/RedisFastProducer.md)
- - usecase
- - [ChannelPublisher](api/faststream/redis/publisher/usecase/ChannelPublisher.md)
- - [ListBatchPublisher](api/faststream/redis/publisher/usecase/ListBatchPublisher.md)
- - [ListPublisher](api/faststream/redis/publisher/usecase/ListPublisher.md)
- - [LogicPublisher](api/faststream/redis/publisher/usecase/LogicPublisher.md)
- - [StreamPublisher](api/faststream/redis/publisher/usecase/StreamPublisher.md)
- - response
- - [RedisResponse](api/faststream/redis/response/RedisResponse.md)
- - router
- - [RedisPublisher](api/faststream/redis/router/RedisPublisher.md)
- - [RedisRoute](api/faststream/redis/router/RedisRoute.md)
- - [RedisRouter](api/faststream/redis/router/RedisRouter.md)
- - schemas
- - [ListSub](api/faststream/redis/schemas/ListSub.md)
- - [PubSub](api/faststream/redis/schemas/PubSub.md)
- - [StreamSub](api/faststream/redis/schemas/StreamSub.md)
- - list_sub
- - [ListSub](api/faststream/redis/schemas/list_sub/ListSub.md)
- - proto
- - [RedisAsyncAPIProtocol](api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md)
- - [validate_options](api/faststream/redis/schemas/proto/validate_options.md)
- - pub_sub
- - [PubSub](api/faststream/redis/schemas/pub_sub/PubSub.md)
- - stream_sub
- - [StreamSub](api/faststream/redis/schemas/stream_sub/StreamSub.md)
- - security
- - [parse_security](api/faststream/redis/security/parse_security.md)
- - subscriber
- - asyncapi
- - [AsyncAPIChannelSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIChannelSubscriber.md)
- - [AsyncAPIListBatchSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIListBatchSubscriber.md)
- - [AsyncAPIListSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIListSubscriber.md)
- - [AsyncAPIStreamBatchSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamBatchSubscriber.md)
- - [AsyncAPIStreamSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamSubscriber.md)
- - [AsyncAPISubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPISubscriber.md)
- - factory
- - [create_subscriber](api/faststream/redis/subscriber/factory/create_subscriber.md)
- - usecase
- - [BatchListSubscriber](api/faststream/redis/subscriber/usecase/BatchListSubscriber.md)
- - [BatchStreamSubscriber](api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md)
- - [ChannelSubscriber](api/faststream/redis/subscriber/usecase/ChannelSubscriber.md)
- - [ListSubscriber](api/faststream/redis/subscriber/usecase/ListSubscriber.md)
- - [LogicSubscriber](api/faststream/redis/subscriber/usecase/LogicSubscriber.md)
- - [StreamSubscriber](api/faststream/redis/subscriber/usecase/StreamSubscriber.md)
- - testing
- - [ChannelVisitor](api/faststream/redis/testing/ChannelVisitor.md)
- - [FakeProducer](api/faststream/redis/testing/FakeProducer.md)
- - [ListVisitor](api/faststream/redis/testing/ListVisitor.md)
- - [StreamVisitor](api/faststream/redis/testing/StreamVisitor.md)
- - [TestRedisBroker](api/faststream/redis/testing/TestRedisBroker.md)
- - [Visitor](api/faststream/redis/testing/Visitor.md)
- - [build_message](api/faststream/redis/testing/build_message.md)
- - security
- - [BaseSecurity](api/faststream/security/BaseSecurity.md)
- - [SASLGSSAPI](api/faststream/security/SASLGSSAPI.md)
- - [SASLOAuthBearer](api/faststream/security/SASLOAuthBearer.md)
- - [SASLPlaintext](api/faststream/security/SASLPlaintext.md)
- - [SASLScram256](api/faststream/security/SASLScram256.md)
- - [SASLScram512](api/faststream/security/SASLScram512.md)
- - testing
- - [TestApp](api/faststream/testing/TestApp.md)
- - app
- - [TestApp](api/faststream/testing/app/TestApp.md)
- - broker
- - [TestBroker](api/faststream/testing/broker/TestBroker.md)
- - [patch_broker_calls](api/faststream/testing/broker/patch_broker_calls.md)
- - types
- - [LoggerProto](api/faststream/types/LoggerProto.md)
- - [StandardDataclass](api/faststream/types/StandardDataclass.md)
- - utils
- - [Context](api/faststream/utils/Context.md)
- - [ContextRepo](api/faststream/utils/ContextRepo.md)
- - [Depends](api/faststream/utils/Depends.md)
- - [Header](api/faststream/utils/Header.md)
- - [NoCast](api/faststream/utils/NoCast.md)
- - [Path](api/faststream/utils/Path.md)
- - [apply_types](api/faststream/utils/apply_types.md)
- - ast
- - [find_ast_node](api/faststream/utils/ast/find_ast_node.md)
- - [find_withitems](api/faststream/utils/ast/find_withitems.md)
- - [get_withitem_calls](api/faststream/utils/ast/get_withitem_calls.md)
- - [is_contains_context_name](api/faststream/utils/ast/is_contains_context_name.md)
- - classes
- - [Singleton](api/faststream/utils/classes/Singleton.md)
- - context
- - [Context](api/faststream/utils/context/Context.md)
- - [ContextRepo](api/faststream/utils/context/ContextRepo.md)
- - [Header](api/faststream/utils/context/Header.md)
- - [Path](api/faststream/utils/context/Path.md)
- - builders
- - [Context](api/faststream/utils/context/builders/Context.md)
- - [Header](api/faststream/utils/context/builders/Header.md)
- - [Path](api/faststream/utils/context/builders/Path.md)
- - repository
- - [ContextRepo](api/faststream/utils/context/repository/ContextRepo.md)
- - types
- - [Context](api/faststream/utils/context/types/Context.md)
- - [resolve_context_by_name](api/faststream/utils/context/types/resolve_context_by_name.md)
- - data
- - [filter_by_dict](api/faststream/utils/data/filter_by_dict.md)
- - functions
- - [call_or_await](api/faststream/utils/functions/call_or_await.md)
- - [drop_response_type](api/faststream/utils/functions/drop_response_type.md)
- - [fake_context](api/faststream/utils/functions/fake_context.md)
- - [return_input](api/faststream/utils/functions/return_input.md)
- - [sync_fake_context](api/faststream/utils/functions/sync_fake_context.md)
- - [timeout_scope](api/faststream/utils/functions/timeout_scope.md)
- - [to_async](api/faststream/utils/functions/to_async.md)
- - no_cast
- - [NoCast](api/faststream/utils/no_cast/NoCast.md)
- - nuid
- - [NUID](api/faststream/utils/nuid/NUID.md)
- - path
- - [compile_path](api/faststream/utils/path/compile_path.md)
-- [FastStream People](faststream-people.md)
-- Contributing
- - [Development](getting-started/contributing/CONTRIBUTING.md)
- - [Documentation](getting-started/contributing/docs.md)
-- [Release Notes](release.md)
\ No newline at end of file
diff --git a/docs/docs/en/api/faststream/BaseMiddleware.md b/docs/docs/en/api/faststream/BaseMiddleware.md
deleted file mode 100644
index 21145bf983..0000000000
--- a/docs/docs/en/api/faststream/BaseMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.BaseMiddleware
diff --git a/docs/docs/en/api/faststream/Context.md b/docs/docs/en/api/faststream/Context.md
deleted file mode 100644
index c6400b1e56..0000000000
--- a/docs/docs/en/api/faststream/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.Context
diff --git a/docs/docs/en/api/faststream/Depends.md b/docs/docs/en/api/faststream/Depends.md
deleted file mode 100644
index c0704687e8..0000000000
--- a/docs/docs/en/api/faststream/Depends.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: fast_depends.use.Depends
diff --git a/docs/docs/en/api/faststream/ExceptionMiddleware.md b/docs/docs/en/api/faststream/ExceptionMiddleware.md
deleted file mode 100644
index a5e2038f22..0000000000
--- a/docs/docs/en/api/faststream/ExceptionMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.ExceptionMiddleware
diff --git a/docs/docs/en/api/faststream/FastStream.md b/docs/docs/en/api/faststream/FastStream.md
deleted file mode 100644
index 8d79ba3921..0000000000
--- a/docs/docs/en/api/faststream/FastStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.FastStream
diff --git a/docs/docs/en/api/faststream/Header.md b/docs/docs/en/api/faststream/Header.md
deleted file mode 100644
index 98bdb592a7..0000000000
--- a/docs/docs/en/api/faststream/Header.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.Header
diff --git a/docs/docs/en/api/faststream/Path.md b/docs/docs/en/api/faststream/Path.md
deleted file mode 100644
index 7716f47c23..0000000000
--- a/docs/docs/en/api/faststream/Path.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.Path
diff --git a/docs/docs/en/api/faststream/Response.md b/docs/docs/en/api/faststream/Response.md
deleted file mode 100644
index 3475e3f584..0000000000
--- a/docs/docs/en/api/faststream/Response.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.Response
diff --git a/docs/docs/en/api/faststream/TestApp.md b/docs/docs/en/api/faststream/TestApp.md
deleted file mode 100644
index 2301790c21..0000000000
--- a/docs/docs/en/api/faststream/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.TestApp
diff --git a/docs/docs/en/api/faststream/app/FastStream.md b/docs/docs/en/api/faststream/app/FastStream.md
deleted file mode 100644
index 24235253c2..0000000000
--- a/docs/docs/en/api/faststream/app/FastStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.app.FastStream
diff --git a/docs/docs/en/api/faststream/app/catch_startup_validation_error.md b/docs/docs/en/api/faststream/app/catch_startup_validation_error.md
deleted file mode 100644
index a53e4686f9..0000000000
--- a/docs/docs/en/api/faststream/app/catch_startup_validation_error.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.app.catch_startup_validation_error
diff --git a/docs/docs/en/api/faststream/apply_types.md b/docs/docs/en/api/faststream/apply_types.md
deleted file mode 100644
index 9dc4603bd2..0000000000
--- a/docs/docs/en/api/faststream/apply_types.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: fast_depends.use.inject
diff --git a/docs/docs/en/api/faststream/asgi/AsgiFastStream.md b/docs/docs/en/api/faststream/asgi/AsgiFastStream.md
deleted file mode 100644
index 49a94bd574..0000000000
--- a/docs/docs/en/api/faststream/asgi/AsgiFastStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.AsgiFastStream
diff --git a/docs/docs/en/api/faststream/asgi/AsgiResponse.md b/docs/docs/en/api/faststream/asgi/AsgiResponse.md
deleted file mode 100644
index 4814f18557..0000000000
--- a/docs/docs/en/api/faststream/asgi/AsgiResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.AsgiResponse
diff --git a/docs/docs/en/api/faststream/asgi/app/AsgiFastStream.md b/docs/docs/en/api/faststream/asgi/app/AsgiFastStream.md
deleted file mode 100644
index 9d58b9576c..0000000000
--- a/docs/docs/en/api/faststream/asgi/app/AsgiFastStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.app.AsgiFastStream
diff --git a/docs/docs/en/api/faststream/asgi/app/cast_uvicorn_params.md b/docs/docs/en/api/faststream/asgi/app/cast_uvicorn_params.md
deleted file mode 100644
index 1431e2c833..0000000000
--- a/docs/docs/en/api/faststream/asgi/app/cast_uvicorn_params.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.app.cast_uvicorn_params
diff --git a/docs/docs/en/api/faststream/asgi/factories/make_asyncapi_asgi.md b/docs/docs/en/api/faststream/asgi/factories/make_asyncapi_asgi.md
deleted file mode 100644
index e96de51b01..0000000000
--- a/docs/docs/en/api/faststream/asgi/factories/make_asyncapi_asgi.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.factories.make_asyncapi_asgi
diff --git a/docs/docs/en/api/faststream/asgi/factories/make_ping_asgi.md b/docs/docs/en/api/faststream/asgi/factories/make_ping_asgi.md
deleted file mode 100644
index fb163d02a1..0000000000
--- a/docs/docs/en/api/faststream/asgi/factories/make_ping_asgi.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.factories.make_ping_asgi
diff --git a/docs/docs/en/api/faststream/asgi/get.md b/docs/docs/en/api/faststream/asgi/get.md
deleted file mode 100644
index 044c05ed81..0000000000
--- a/docs/docs/en/api/faststream/asgi/get.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.get
diff --git a/docs/docs/en/api/faststream/asgi/handlers/get.md b/docs/docs/en/api/faststream/asgi/handlers/get.md
deleted file mode 100644
index 8f3c04a050..0000000000
--- a/docs/docs/en/api/faststream/asgi/handlers/get.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.handlers.get
diff --git a/docs/docs/en/api/faststream/asgi/make_asyncapi_asgi.md b/docs/docs/en/api/faststream/asgi/make_asyncapi_asgi.md
deleted file mode 100644
index 5e57a1a2db..0000000000
--- a/docs/docs/en/api/faststream/asgi/make_asyncapi_asgi.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.make_asyncapi_asgi
diff --git a/docs/docs/en/api/faststream/asgi/make_ping_asgi.md b/docs/docs/en/api/faststream/asgi/make_ping_asgi.md
deleted file mode 100644
index 5c24aaef19..0000000000
--- a/docs/docs/en/api/faststream/asgi/make_ping_asgi.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.make_ping_asgi
diff --git a/docs/docs/en/api/faststream/asgi/response/AsgiResponse.md b/docs/docs/en/api/faststream/asgi/response/AsgiResponse.md
deleted file mode 100644
index 037739b09d..0000000000
--- a/docs/docs/en/api/faststream/asgi/response/AsgiResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.response.AsgiResponse
diff --git a/docs/docs/en/api/faststream/asgi/websocket/WebSocketClose.md b/docs/docs/en/api/faststream/asgi/websocket/WebSocketClose.md
deleted file mode 100644
index 130ee9a59a..0000000000
--- a/docs/docs/en/api/faststream/asgi/websocket/WebSocketClose.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asgi.websocket.WebSocketClose
diff --git a/docs/docs/en/api/faststream/asyncapi/abc/AsyncAPIOperation.md b/docs/docs/en/api/faststream/asyncapi/abc/AsyncAPIOperation.md
deleted file mode 100644
index 1e80c37541..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/abc/AsyncAPIOperation.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.abc.AsyncAPIOperation
diff --git a/docs/docs/en/api/faststream/asyncapi/generate/get_app_schema.md b/docs/docs/en/api/faststream/asyncapi/generate/get_app_schema.md
deleted file mode 100644
index 07475ef5a8..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/generate/get_app_schema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.generate.get_app_schema
diff --git a/docs/docs/en/api/faststream/asyncapi/generate/get_broker_channels.md b/docs/docs/en/api/faststream/asyncapi/generate/get_broker_channels.md
deleted file mode 100644
index f5788bae0b..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/generate/get_broker_channels.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.generate.get_broker_channels
diff --git a/docs/docs/en/api/faststream/asyncapi/generate/get_broker_server.md b/docs/docs/en/api/faststream/asyncapi/generate/get_broker_server.md
deleted file mode 100644
index 5f652d5b59..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/generate/get_broker_server.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.generate.get_broker_server
diff --git a/docs/docs/en/api/faststream/asyncapi/get_app_schema.md b/docs/docs/en/api/faststream/asyncapi/get_app_schema.md
deleted file mode 100644
index 03d7e4466b..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/get_app_schema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.get_app_schema
diff --git a/docs/docs/en/api/faststream/asyncapi/get_asyncapi_html.md b/docs/docs/en/api/faststream/asyncapi/get_asyncapi_html.md
deleted file mode 100644
index 1ed4ce5500..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/get_asyncapi_html.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.get_asyncapi_html
diff --git a/docs/docs/en/api/faststream/asyncapi/message/get_model_schema.md b/docs/docs/en/api/faststream/asyncapi/message/get_model_schema.md
deleted file mode 100644
index 0099721324..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/message/get_model_schema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.message.get_model_schema
diff --git a/docs/docs/en/api/faststream/asyncapi/message/get_response_schema.md b/docs/docs/en/api/faststream/asyncapi/message/get_response_schema.md
deleted file mode 100644
index e297370d01..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/message/get_response_schema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.message.get_response_schema
diff --git a/docs/docs/en/api/faststream/asyncapi/message/parse_handler_params.md b/docs/docs/en/api/faststream/asyncapi/message/parse_handler_params.md
deleted file mode 100644
index ffaf1cf7dc..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/message/parse_handler_params.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.message.parse_handler_params
diff --git a/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIApplication.md b/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIApplication.md
deleted file mode 100644
index da1715119d..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIApplication.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.proto.AsyncAPIApplication
diff --git a/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIProto.md b/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIProto.md
deleted file mode 100644
index 6905c2d82f..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/proto/AsyncAPIProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.proto.AsyncAPIProto
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Channel.md b/docs/docs/en/api/faststream/asyncapi/schema/Channel.md
deleted file mode 100644
index 4d3b7e83a3..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Channel.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Channel
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/ChannelBinding.md
deleted file mode 100644
index 4aaf57e584..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Components.md b/docs/docs/en/api/faststream/asyncapi/schema/Components.md
deleted file mode 100644
index 9dc785c35e..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Components.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Components
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Contact.md b/docs/docs/en/api/faststream/asyncapi/schema/Contact.md
deleted file mode 100644
index ded05c314d..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Contact.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Contact
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/ContactDict.md b/docs/docs/en/api/faststream/asyncapi/schema/ContactDict.md
deleted file mode 100644
index 4170e564f6..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/ContactDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.ContactDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/CorrelationId.md b/docs/docs/en/api/faststream/asyncapi/schema/CorrelationId.md
deleted file mode 100644
index cd12cdbba6..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/CorrelationId.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.CorrelationId
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/ExternalDocs.md b/docs/docs/en/api/faststream/asyncapi/schema/ExternalDocs.md
deleted file mode 100644
index 7899164431..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/ExternalDocs.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.ExternalDocs
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/ExternalDocsDict.md b/docs/docs/en/api/faststream/asyncapi/schema/ExternalDocsDict.md
deleted file mode 100644
index d80a12b10f..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/ExternalDocsDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.ExternalDocsDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Info.md b/docs/docs/en/api/faststream/asyncapi/schema/Info.md
deleted file mode 100644
index 62eb9e4832..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Info.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Info
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/License.md b/docs/docs/en/api/faststream/asyncapi/schema/License.md
deleted file mode 100644
index adb11654e4..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/License.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.License
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/LicenseDict.md b/docs/docs/en/api/faststream/asyncapi/schema/LicenseDict.md
deleted file mode 100644
index 7c200c4ac7..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/LicenseDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.LicenseDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Message.md b/docs/docs/en/api/faststream/asyncapi/schema/Message.md
deleted file mode 100644
index f04adf939f..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Message
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Operation.md b/docs/docs/en/api/faststream/asyncapi/schema/Operation.md
deleted file mode 100644
index 2d43f05b89..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Operation.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Operation
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/OperationBinding.md
deleted file mode 100644
index 0dc2099b66..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Reference.md b/docs/docs/en/api/faststream/asyncapi/schema/Reference.md
deleted file mode 100644
index 778b70e548..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Reference.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Reference
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Schema.md b/docs/docs/en/api/faststream/asyncapi/schema/Schema.md
deleted file mode 100644
index a496f56769..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Schema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Schema
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/SecuritySchemaComponent.md b/docs/docs/en/api/faststream/asyncapi/schema/SecuritySchemaComponent.md
deleted file mode 100644
index 61c0a83bf7..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/SecuritySchemaComponent.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.SecuritySchemaComponent
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Server.md b/docs/docs/en/api/faststream/asyncapi/schema/Server.md
deleted file mode 100644
index e0d028314e..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Server.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Server
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/ServerBinding.md
deleted file mode 100644
index 8dcaba6701..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/Tag.md b/docs/docs/en/api/faststream/asyncapi/schema/Tag.md
deleted file mode 100644
index 0c32584f58..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/Tag.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.Tag
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/TagDict.md b/docs/docs/en/api/faststream/asyncapi/schema/TagDict.md
deleted file mode 100644
index ebb68351e0..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/TagDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.TagDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/ChannelBinding.md
deleted file mode 100644
index 51a5ed6586..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/OperationBinding.md
deleted file mode 100644
index 37a28843be..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/ServerBinding.md
deleted file mode 100644
index d91efbfe52..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
deleted file mode 100644
index 6c5c546126..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.amqp.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
deleted file mode 100644
index b81a881827..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.amqp.Exchange
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
deleted file mode 100644
index 5b9b34dd78..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.amqp.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/Queue.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
deleted file mode 100644
index 395a7aedb0..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.amqp.Queue
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
deleted file mode 100644
index 0daa6510ec..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.amqp.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
deleted file mode 100644
index f327d3147e..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.kafka.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
deleted file mode 100644
index adaa645db0..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.kafka.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
deleted file mode 100644
index e52855bd45..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.kafka.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
deleted file mode 100644
index a2a8872d64..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.main.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
deleted file mode 100644
index 1e597b1757..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.main.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
deleted file mode 100644
index 4dacad7825..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.main.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
deleted file mode 100644
index 11135ad968..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.nats.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
deleted file mode 100644
index 8e0cd8acb1..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.nats.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
deleted file mode 100644
index 7d95811c44..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.nats.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
deleted file mode 100644
index fef00d4e8a..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.redis.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
deleted file mode 100644
index 81b906045b..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.redis.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
deleted file mode 100644
index 7d12316c85..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.redis.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
deleted file mode 100644
index 4a255559db..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.sqs.ChannelBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
deleted file mode 100644
index 6a438685b4..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.sqs.OperationBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md b/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
deleted file mode 100644
index f6a200b3f7..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.bindings.sqs.ServerBinding
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/channels/Channel.md b/docs/docs/en/api/faststream/asyncapi/schema/channels/Channel.md
deleted file mode 100644
index 7e8a913786..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/channels/Channel.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.channels.Channel
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/info/Contact.md b/docs/docs/en/api/faststream/asyncapi/schema/info/Contact.md
deleted file mode 100644
index 2dfb0d074e..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/info/Contact.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.info.Contact
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/info/ContactDict.md b/docs/docs/en/api/faststream/asyncapi/schema/info/ContactDict.md
deleted file mode 100644
index adcd40891f..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/info/ContactDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.info.ContactDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/info/Info.md b/docs/docs/en/api/faststream/asyncapi/schema/info/Info.md
deleted file mode 100644
index 88201af129..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/info/Info.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.info.Info
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/info/License.md b/docs/docs/en/api/faststream/asyncapi/schema/info/License.md
deleted file mode 100644
index ad564b3886..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/info/License.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.info.License
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/info/LicenseDict.md b/docs/docs/en/api/faststream/asyncapi/schema/info/LicenseDict.md
deleted file mode 100644
index 29fab879e4..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/info/LicenseDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.info.LicenseDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/main/Components.md b/docs/docs/en/api/faststream/asyncapi/schema/main/Components.md
deleted file mode 100644
index 782ed0e625..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/main/Components.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.main.Components
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/main/Schema.md b/docs/docs/en/api/faststream/asyncapi/schema/main/Schema.md
deleted file mode 100644
index 1280877df1..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/main/Schema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.main.Schema
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/message/CorrelationId.md b/docs/docs/en/api/faststream/asyncapi/schema/message/CorrelationId.md
deleted file mode 100644
index 7693915525..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/message/CorrelationId.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.message.CorrelationId
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/message/Message.md b/docs/docs/en/api/faststream/asyncapi/schema/message/Message.md
deleted file mode 100644
index e3959190b0..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/message/Message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.message.Message
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/operations/Operation.md b/docs/docs/en/api/faststream/asyncapi/schema/operations/Operation.md
deleted file mode 100644
index 0af1c63cfe..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/operations/Operation.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.operations.Operation
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/security/OauthFlowObj.md b/docs/docs/en/api/faststream/asyncapi/schema/security/OauthFlowObj.md
deleted file mode 100644
index ea6ad87db9..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/security/OauthFlowObj.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.security.OauthFlowObj
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/security/OauthFlows.md b/docs/docs/en/api/faststream/asyncapi/schema/security/OauthFlows.md
deleted file mode 100644
index 0c429487fb..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/security/OauthFlows.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.security.OauthFlows
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md b/docs/docs/en/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
deleted file mode 100644
index 779e70fdd6..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.security.SecuritySchemaComponent
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/servers/Server.md b/docs/docs/en/api/faststream/asyncapi/schema/servers/Server.md
deleted file mode 100644
index 5af6199d20..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/servers/Server.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.servers.Server
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/servers/ServerVariable.md b/docs/docs/en/api/faststream/asyncapi/schema/servers/ServerVariable.md
deleted file mode 100644
index 51f99bd3bc..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/servers/ServerVariable.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.servers.ServerVariable
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/utils/ExternalDocs.md b/docs/docs/en/api/faststream/asyncapi/schema/utils/ExternalDocs.md
deleted file mode 100644
index 207668a5c5..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/utils/ExternalDocs.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.utils.ExternalDocs
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md b/docs/docs/en/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
deleted file mode 100644
index fc5cedfb73..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.utils.ExternalDocsDict
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/utils/Parameter.md b/docs/docs/en/api/faststream/asyncapi/schema/utils/Parameter.md
deleted file mode 100644
index 05cc2f3ba3..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/utils/Parameter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.utils.Parameter
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/utils/Reference.md b/docs/docs/en/api/faststream/asyncapi/schema/utils/Reference.md
deleted file mode 100644
index a47fd931df..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/utils/Reference.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.utils.Reference
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/utils/Tag.md b/docs/docs/en/api/faststream/asyncapi/schema/utils/Tag.md
deleted file mode 100644
index cf558e756d..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/utils/Tag.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.utils.Tag
diff --git a/docs/docs/en/api/faststream/asyncapi/schema/utils/TagDict.md b/docs/docs/en/api/faststream/asyncapi/schema/utils/TagDict.md
deleted file mode 100644
index 412546da6f..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/schema/utils/TagDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.schema.utils.TagDict
diff --git a/docs/docs/en/api/faststream/asyncapi/site/get_asyncapi_html.md b/docs/docs/en/api/faststream/asyncapi/site/get_asyncapi_html.md
deleted file mode 100644
index 69af839e6c..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/site/get_asyncapi_html.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.site.get_asyncapi_html
diff --git a/docs/docs/en/api/faststream/asyncapi/site/serve_app.md b/docs/docs/en/api/faststream/asyncapi/site/serve_app.md
deleted file mode 100644
index c5a1a726e8..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/site/serve_app.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.site.serve_app
diff --git a/docs/docs/en/api/faststream/asyncapi/utils/resolve_payloads.md b/docs/docs/en/api/faststream/asyncapi/utils/resolve_payloads.md
deleted file mode 100644
index 23aeedd082..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/utils/resolve_payloads.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.utils.resolve_payloads
diff --git a/docs/docs/en/api/faststream/asyncapi/utils/to_camelcase.md b/docs/docs/en/api/faststream/asyncapi/utils/to_camelcase.md
deleted file mode 100644
index 42cbdf9f29..0000000000
--- a/docs/docs/en/api/faststream/asyncapi/utils/to_camelcase.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.asyncapi.utils.to_camelcase
diff --git a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/BaseWatcher.md b/docs/docs/en/api/faststream/broker/acknowledgement_watcher/BaseWatcher.md
deleted file mode 100644
index d0c27d17d9..0000000000
--- a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/BaseWatcher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.acknowledgement_watcher.BaseWatcher
diff --git a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/CounterWatcher.md b/docs/docs/en/api/faststream/broker/acknowledgement_watcher/CounterWatcher.md
deleted file mode 100644
index e299f4c442..0000000000
--- a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/CounterWatcher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.acknowledgement_watcher.CounterWatcher
diff --git a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/EndlessWatcher.md b/docs/docs/en/api/faststream/broker/acknowledgement_watcher/EndlessWatcher.md
deleted file mode 100644
index b3aac70921..0000000000
--- a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/EndlessWatcher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.acknowledgement_watcher.EndlessWatcher
diff --git a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/OneTryWatcher.md b/docs/docs/en/api/faststream/broker/acknowledgement_watcher/OneTryWatcher.md
deleted file mode 100644
index 4baa0bdd9c..0000000000
--- a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/OneTryWatcher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.acknowledgement_watcher.OneTryWatcher
diff --git a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/WatcherContext.md b/docs/docs/en/api/faststream/broker/acknowledgement_watcher/WatcherContext.md
deleted file mode 100644
index ee1ef8643b..0000000000
--- a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/WatcherContext.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.acknowledgement_watcher.WatcherContext
diff --git a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/get_watcher.md b/docs/docs/en/api/faststream/broker/acknowledgement_watcher/get_watcher.md
deleted file mode 100644
index 9f6869bcf5..0000000000
--- a/docs/docs/en/api/faststream/broker/acknowledgement_watcher/get_watcher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.acknowledgement_watcher.get_watcher
diff --git a/docs/docs/en/api/faststream/broker/core/abc/ABCBroker.md b/docs/docs/en/api/faststream/broker/core/abc/ABCBroker.md
deleted file mode 100644
index 88b39efd40..0000000000
--- a/docs/docs/en/api/faststream/broker/core/abc/ABCBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.core.abc.ABCBroker
diff --git a/docs/docs/en/api/faststream/broker/core/logging/LoggingBroker.md b/docs/docs/en/api/faststream/broker/core/logging/LoggingBroker.md
deleted file mode 100644
index b10dd8bc3f..0000000000
--- a/docs/docs/en/api/faststream/broker/core/logging/LoggingBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.core.logging.LoggingBroker
diff --git a/docs/docs/en/api/faststream/broker/core/usecase/BrokerUsecase.md b/docs/docs/en/api/faststream/broker/core/usecase/BrokerUsecase.md
deleted file mode 100644
index 0e791c5c38..0000000000
--- a/docs/docs/en/api/faststream/broker/core/usecase/BrokerUsecase.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.core.usecase.BrokerUsecase
diff --git a/docs/docs/en/api/faststream/broker/fastapi/StreamMessage.md b/docs/docs/en/api/faststream/broker/fastapi/StreamMessage.md
deleted file mode 100644
index 2124b279ea..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/StreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.StreamMessage
diff --git a/docs/docs/en/api/faststream/broker/fastapi/StreamRouter.md b/docs/docs/en/api/faststream/broker/fastapi/StreamRouter.md
deleted file mode 100644
index 32a8e8743d..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/StreamRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.StreamRouter
diff --git a/docs/docs/en/api/faststream/broker/fastapi/context/Context.md b/docs/docs/en/api/faststream/broker/fastapi/context/Context.md
deleted file mode 100644
index f4240bb0da..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/context/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.context.Context
diff --git a/docs/docs/en/api/faststream/broker/fastapi/get_dependant/get_fastapi_dependant.md b/docs/docs/en/api/faststream/broker/fastapi/get_dependant/get_fastapi_dependant.md
deleted file mode 100644
index 1f5d3d1e77..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/get_dependant/get_fastapi_dependant.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.get_dependant.get_fastapi_dependant
diff --git a/docs/docs/en/api/faststream/broker/fastapi/get_dependant/get_fastapi_native_dependant.md b/docs/docs/en/api/faststream/broker/fastapi/get_dependant/get_fastapi_native_dependant.md
deleted file mode 100644
index f3d6a05e39..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/get_dependant/get_fastapi_native_dependant.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.get_dependant.get_fastapi_native_dependant
diff --git a/docs/docs/en/api/faststream/broker/fastapi/route/StreamMessage.md b/docs/docs/en/api/faststream/broker/fastapi/route/StreamMessage.md
deleted file mode 100644
index 0fbed89be9..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/route/StreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.route.StreamMessage
diff --git a/docs/docs/en/api/faststream/broker/fastapi/route/build_faststream_to_fastapi_parser.md b/docs/docs/en/api/faststream/broker/fastapi/route/build_faststream_to_fastapi_parser.md
deleted file mode 100644
index dc05bb190e..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/route/build_faststream_to_fastapi_parser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.route.build_faststream_to_fastapi_parser
diff --git a/docs/docs/en/api/faststream/broker/fastapi/route/make_fastapi_execution.md b/docs/docs/en/api/faststream/broker/fastapi/route/make_fastapi_execution.md
deleted file mode 100644
index f9a0fdd712..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/route/make_fastapi_execution.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.route.make_fastapi_execution
diff --git a/docs/docs/en/api/faststream/broker/fastapi/route/wrap_callable_to_fastapi_compatible.md b/docs/docs/en/api/faststream/broker/fastapi/route/wrap_callable_to_fastapi_compatible.md
deleted file mode 100644
index ab7081c711..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/route/wrap_callable_to_fastapi_compatible.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.route.wrap_callable_to_fastapi_compatible
diff --git a/docs/docs/en/api/faststream/broker/fastapi/router/StreamRouter.md b/docs/docs/en/api/faststream/broker/fastapi/router/StreamRouter.md
deleted file mode 100644
index d1f017acc6..0000000000
--- a/docs/docs/en/api/faststream/broker/fastapi/router/StreamRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.router.StreamRouter
diff --git a/docs/docs/en/api/faststream/broker/message/AckStatus.md b/docs/docs/en/api/faststream/broker/message/AckStatus.md
deleted file mode 100644
index 412a61de84..0000000000
--- a/docs/docs/en/api/faststream/broker/message/AckStatus.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.message.AckStatus
diff --git a/docs/docs/en/api/faststream/broker/message/SourceType.md b/docs/docs/en/api/faststream/broker/message/SourceType.md
deleted file mode 100644
index fd242902f9..0000000000
--- a/docs/docs/en/api/faststream/broker/message/SourceType.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.message.SourceType
diff --git a/docs/docs/en/api/faststream/broker/message/StreamMessage.md b/docs/docs/en/api/faststream/broker/message/StreamMessage.md
deleted file mode 100644
index 800059b91d..0000000000
--- a/docs/docs/en/api/faststream/broker/message/StreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.message.StreamMessage
diff --git a/docs/docs/en/api/faststream/broker/message/decode_message.md b/docs/docs/en/api/faststream/broker/message/decode_message.md
deleted file mode 100644
index a5904b1458..0000000000
--- a/docs/docs/en/api/faststream/broker/message/decode_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.message.decode_message
diff --git a/docs/docs/en/api/faststream/broker/message/encode_message.md b/docs/docs/en/api/faststream/broker/message/encode_message.md
deleted file mode 100644
index ed34f0ceb1..0000000000
--- a/docs/docs/en/api/faststream/broker/message/encode_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.message.encode_message
diff --git a/docs/docs/en/api/faststream/broker/message/gen_cor_id.md b/docs/docs/en/api/faststream/broker/message/gen_cor_id.md
deleted file mode 100644
index 5e4c2a4622..0000000000
--- a/docs/docs/en/api/faststream/broker/message/gen_cor_id.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.message.gen_cor_id
diff --git a/docs/docs/en/api/faststream/broker/middlewares/BaseMiddleware.md b/docs/docs/en/api/faststream/broker/middlewares/BaseMiddleware.md
deleted file mode 100644
index d81c2fbf20..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/BaseMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.BaseMiddleware
diff --git a/docs/docs/en/api/faststream/broker/middlewares/ExceptionMiddleware.md b/docs/docs/en/api/faststream/broker/middlewares/ExceptionMiddleware.md
deleted file mode 100644
index 1fa11b80fc..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/ExceptionMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.ExceptionMiddleware
diff --git a/docs/docs/en/api/faststream/broker/middlewares/base/BaseMiddleware.md b/docs/docs/en/api/faststream/broker/middlewares/base/BaseMiddleware.md
deleted file mode 100644
index 8502288249..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/base/BaseMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.base.BaseMiddleware
diff --git a/docs/docs/en/api/faststream/broker/middlewares/exception/BaseExceptionMiddleware.md b/docs/docs/en/api/faststream/broker/middlewares/exception/BaseExceptionMiddleware.md
deleted file mode 100644
index 7ab0a414d0..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/exception/BaseExceptionMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.exception.BaseExceptionMiddleware
diff --git a/docs/docs/en/api/faststream/broker/middlewares/exception/ExceptionMiddleware.md b/docs/docs/en/api/faststream/broker/middlewares/exception/ExceptionMiddleware.md
deleted file mode 100644
index 0abf119ab3..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/exception/ExceptionMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.exception.ExceptionMiddleware
diff --git a/docs/docs/en/api/faststream/broker/middlewares/exception/ignore_handler.md b/docs/docs/en/api/faststream/broker/middlewares/exception/ignore_handler.md
deleted file mode 100644
index 425561dcba..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/exception/ignore_handler.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.exception.ignore_handler
diff --git a/docs/docs/en/api/faststream/broker/middlewares/logging/CriticalLogMiddleware.md b/docs/docs/en/api/faststream/broker/middlewares/logging/CriticalLogMiddleware.md
deleted file mode 100644
index 829368d699..0000000000
--- a/docs/docs/en/api/faststream/broker/middlewares/logging/CriticalLogMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.middlewares.logging.CriticalLogMiddleware
diff --git a/docs/docs/en/api/faststream/broker/proto/EndpointProto.md b/docs/docs/en/api/faststream/broker/proto/EndpointProto.md
deleted file mode 100644
index 5a3b095952..0000000000
--- a/docs/docs/en/api/faststream/broker/proto/EndpointProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.proto.EndpointProto
diff --git a/docs/docs/en/api/faststream/broker/proto/SetupAble.md b/docs/docs/en/api/faststream/broker/proto/SetupAble.md
deleted file mode 100644
index a4b487318e..0000000000
--- a/docs/docs/en/api/faststream/broker/proto/SetupAble.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.proto.SetupAble
diff --git a/docs/docs/en/api/faststream/broker/publisher/fake/FakePublisher.md b/docs/docs/en/api/faststream/broker/publisher/fake/FakePublisher.md
deleted file mode 100644
index 67b2c04f5c..0000000000
--- a/docs/docs/en/api/faststream/broker/publisher/fake/FakePublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.publisher.fake.FakePublisher
diff --git a/docs/docs/en/api/faststream/broker/publisher/proto/BasePublisherProto.md b/docs/docs/en/api/faststream/broker/publisher/proto/BasePublisherProto.md
deleted file mode 100644
index ed0944fa14..0000000000
--- a/docs/docs/en/api/faststream/broker/publisher/proto/BasePublisherProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.publisher.proto.BasePublisherProto
diff --git a/docs/docs/en/api/faststream/broker/publisher/proto/ProducerProto.md b/docs/docs/en/api/faststream/broker/publisher/proto/ProducerProto.md
deleted file mode 100644
index 8cf65d4e00..0000000000
--- a/docs/docs/en/api/faststream/broker/publisher/proto/ProducerProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.publisher.proto.ProducerProto
diff --git a/docs/docs/en/api/faststream/broker/publisher/proto/PublisherProto.md b/docs/docs/en/api/faststream/broker/publisher/proto/PublisherProto.md
deleted file mode 100644
index f86760bba6..0000000000
--- a/docs/docs/en/api/faststream/broker/publisher/proto/PublisherProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.publisher.proto.PublisherProto
diff --git a/docs/docs/en/api/faststream/broker/publisher/usecase/PublisherUsecase.md b/docs/docs/en/api/faststream/broker/publisher/usecase/PublisherUsecase.md
deleted file mode 100644
index f1de9539fe..0000000000
--- a/docs/docs/en/api/faststream/broker/publisher/usecase/PublisherUsecase.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.publisher.usecase.PublisherUsecase
diff --git a/docs/docs/en/api/faststream/broker/response/Response.md b/docs/docs/en/api/faststream/broker/response/Response.md
deleted file mode 100644
index 1163381d7b..0000000000
--- a/docs/docs/en/api/faststream/broker/response/Response.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.response.Response
diff --git a/docs/docs/en/api/faststream/broker/response/ensure_response.md b/docs/docs/en/api/faststream/broker/response/ensure_response.md
deleted file mode 100644
index b4a98bd4a4..0000000000
--- a/docs/docs/en/api/faststream/broker/response/ensure_response.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.response.ensure_response
diff --git a/docs/docs/en/api/faststream/broker/router/ArgsContainer.md b/docs/docs/en/api/faststream/broker/router/ArgsContainer.md
deleted file mode 100644
index bd82308c79..0000000000
--- a/docs/docs/en/api/faststream/broker/router/ArgsContainer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.router.ArgsContainer
diff --git a/docs/docs/en/api/faststream/broker/router/BrokerRouter.md b/docs/docs/en/api/faststream/broker/router/BrokerRouter.md
deleted file mode 100644
index d6bb82fdd2..0000000000
--- a/docs/docs/en/api/faststream/broker/router/BrokerRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.router.BrokerRouter
diff --git a/docs/docs/en/api/faststream/broker/router/SubscriberRoute.md b/docs/docs/en/api/faststream/broker/router/SubscriberRoute.md
deleted file mode 100644
index 18c3a547ec..0000000000
--- a/docs/docs/en/api/faststream/broker/router/SubscriberRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.router.SubscriberRoute
diff --git a/docs/docs/en/api/faststream/broker/schemas/NameRequired.md b/docs/docs/en/api/faststream/broker/schemas/NameRequired.md
deleted file mode 100644
index 398f70b421..0000000000
--- a/docs/docs/en/api/faststream/broker/schemas/NameRequired.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.schemas.NameRequired
diff --git a/docs/docs/en/api/faststream/broker/subscriber/call_item/HandlerItem.md b/docs/docs/en/api/faststream/broker/subscriber/call_item/HandlerItem.md
deleted file mode 100644
index e2f635512c..0000000000
--- a/docs/docs/en/api/faststream/broker/subscriber/call_item/HandlerItem.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.subscriber.call_item.HandlerItem
diff --git a/docs/docs/en/api/faststream/broker/subscriber/mixins/ConcurrentMixin.md b/docs/docs/en/api/faststream/broker/subscriber/mixins/ConcurrentMixin.md
deleted file mode 100644
index 994f224aea..0000000000
--- a/docs/docs/en/api/faststream/broker/subscriber/mixins/ConcurrentMixin.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.subscriber.mixins.ConcurrentMixin
diff --git a/docs/docs/en/api/faststream/broker/subscriber/mixins/TasksMixin.md b/docs/docs/en/api/faststream/broker/subscriber/mixins/TasksMixin.md
deleted file mode 100644
index 6d483bef85..0000000000
--- a/docs/docs/en/api/faststream/broker/subscriber/mixins/TasksMixin.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.subscriber.mixins.TasksMixin
diff --git a/docs/docs/en/api/faststream/broker/subscriber/proto/SubscriberProto.md b/docs/docs/en/api/faststream/broker/subscriber/proto/SubscriberProto.md
deleted file mode 100644
index fd887d41b9..0000000000
--- a/docs/docs/en/api/faststream/broker/subscriber/proto/SubscriberProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.subscriber.proto.SubscriberProto
diff --git a/docs/docs/en/api/faststream/broker/subscriber/usecase/SubscriberUsecase.md b/docs/docs/en/api/faststream/broker/subscriber/usecase/SubscriberUsecase.md
deleted file mode 100644
index f7e9448277..0000000000
--- a/docs/docs/en/api/faststream/broker/subscriber/usecase/SubscriberUsecase.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.subscriber.usecase.SubscriberUsecase
diff --git a/docs/docs/en/api/faststream/broker/types/PublisherMiddleware.md b/docs/docs/en/api/faststream/broker/types/PublisherMiddleware.md
deleted file mode 100644
index 2c43d2efcb..0000000000
--- a/docs/docs/en/api/faststream/broker/types/PublisherMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.types.PublisherMiddleware
diff --git a/docs/docs/en/api/faststream/broker/utils/MultiLock.md b/docs/docs/en/api/faststream/broker/utils/MultiLock.md
deleted file mode 100644
index 5f4bc6d5cb..0000000000
--- a/docs/docs/en/api/faststream/broker/utils/MultiLock.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.utils.MultiLock
diff --git a/docs/docs/en/api/faststream/broker/utils/default_filter.md b/docs/docs/en/api/faststream/broker/utils/default_filter.md
deleted file mode 100644
index 3fe25fa14a..0000000000
--- a/docs/docs/en/api/faststream/broker/utils/default_filter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.utils.default_filter
diff --git a/docs/docs/en/api/faststream/broker/utils/get_watcher_context.md b/docs/docs/en/api/faststream/broker/utils/get_watcher_context.md
deleted file mode 100644
index 883599c043..0000000000
--- a/docs/docs/en/api/faststream/broker/utils/get_watcher_context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.utils.get_watcher_context
diff --git a/docs/docs/en/api/faststream/broker/utils/process_msg.md b/docs/docs/en/api/faststream/broker/utils/process_msg.md
deleted file mode 100644
index e7ce8aaf99..0000000000
--- a/docs/docs/en/api/faststream/broker/utils/process_msg.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.utils.process_msg
diff --git a/docs/docs/en/api/faststream/broker/utils/resolve_custom_func.md b/docs/docs/en/api/faststream/broker/utils/resolve_custom_func.md
deleted file mode 100644
index f72ed3c059..0000000000
--- a/docs/docs/en/api/faststream/broker/utils/resolve_custom_func.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.utils.resolve_custom_func
diff --git a/docs/docs/en/api/faststream/broker/wrapper/call/HandlerCallWrapper.md b/docs/docs/en/api/faststream/broker/wrapper/call/HandlerCallWrapper.md
deleted file mode 100644
index 4c25733797..0000000000
--- a/docs/docs/en/api/faststream/broker/wrapper/call/HandlerCallWrapper.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.wrapper.call.HandlerCallWrapper
diff --git a/docs/docs/en/api/faststream/broker/wrapper/proto/WrapperProto.md b/docs/docs/en/api/faststream/broker/wrapper/proto/WrapperProto.md
deleted file mode 100644
index 87ffdf815b..0000000000
--- a/docs/docs/en/api/faststream/broker/wrapper/proto/WrapperProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.wrapper.proto.WrapperProto
diff --git a/docs/docs/en/api/faststream/cli/docs/app/gen.md b/docs/docs/en/api/faststream/cli/docs/app/gen.md
deleted file mode 100644
index 72af7d6688..0000000000
--- a/docs/docs/en/api/faststream/cli/docs/app/gen.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.docs.app.gen
diff --git a/docs/docs/en/api/faststream/cli/docs/app/serve.md b/docs/docs/en/api/faststream/cli/docs/app/serve.md
deleted file mode 100644
index 3d9ec139d9..0000000000
--- a/docs/docs/en/api/faststream/cli/docs/app/serve.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.docs.app.serve
diff --git a/docs/docs/en/api/faststream/cli/main/main.md b/docs/docs/en/api/faststream/cli/main/main.md
deleted file mode 100644
index c15cba484c..0000000000
--- a/docs/docs/en/api/faststream/cli/main/main.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.main.main
diff --git a/docs/docs/en/api/faststream/cli/main/publish.md b/docs/docs/en/api/faststream/cli/main/publish.md
deleted file mode 100644
index 84b490cde8..0000000000
--- a/docs/docs/en/api/faststream/cli/main/publish.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.main.publish
diff --git a/docs/docs/en/api/faststream/cli/main/publish_message.md b/docs/docs/en/api/faststream/cli/main/publish_message.md
deleted file mode 100644
index a8bb7b8efa..0000000000
--- a/docs/docs/en/api/faststream/cli/main/publish_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.main.publish_message
diff --git a/docs/docs/en/api/faststream/cli/main/run.md b/docs/docs/en/api/faststream/cli/main/run.md
deleted file mode 100644
index 6a01af3d26..0000000000
--- a/docs/docs/en/api/faststream/cli/main/run.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.main.run
diff --git a/docs/docs/en/api/faststream/cli/main/version_callback.md b/docs/docs/en/api/faststream/cli/main/version_callback.md
deleted file mode 100644
index a5467ffeb7..0000000000
--- a/docs/docs/en/api/faststream/cli/main/version_callback.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.main.version_callback
diff --git a/docs/docs/en/api/faststream/cli/supervisors/asgi_multiprocess/ASGIMultiprocess.md b/docs/docs/en/api/faststream/cli/supervisors/asgi_multiprocess/ASGIMultiprocess.md
deleted file mode 100644
index 8424b2d5fa..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/asgi_multiprocess/ASGIMultiprocess.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.asgi_multiprocess.ASGIMultiprocess
diff --git a/docs/docs/en/api/faststream/cli/supervisors/basereload/BaseReload.md b/docs/docs/en/api/faststream/cli/supervisors/basereload/BaseReload.md
deleted file mode 100644
index b378b2922a..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/basereload/BaseReload.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.basereload.BaseReload
diff --git a/docs/docs/en/api/faststream/cli/supervisors/multiprocess/Multiprocess.md b/docs/docs/en/api/faststream/cli/supervisors/multiprocess/Multiprocess.md
deleted file mode 100644
index 4cdd6d30e3..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/multiprocess/Multiprocess.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.multiprocess.Multiprocess
diff --git a/docs/docs/en/api/faststream/cli/supervisors/utils/get_subprocess.md b/docs/docs/en/api/faststream/cli/supervisors/utils/get_subprocess.md
deleted file mode 100644
index 1488078e45..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/utils/get_subprocess.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.utils.get_subprocess
diff --git a/docs/docs/en/api/faststream/cli/supervisors/utils/set_exit.md b/docs/docs/en/api/faststream/cli/supervisors/utils/set_exit.md
deleted file mode 100644
index e739d79409..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/utils/set_exit.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.utils.set_exit
diff --git a/docs/docs/en/api/faststream/cli/supervisors/utils/subprocess_started.md b/docs/docs/en/api/faststream/cli/supervisors/utils/subprocess_started.md
deleted file mode 100644
index 8840390ca8..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/utils/subprocess_started.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.utils.subprocess_started
diff --git a/docs/docs/en/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md b/docs/docs/en/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md
deleted file mode 100644
index 095c3cc2f0..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.watchfiles.ExtendedFilter
diff --git a/docs/docs/en/api/faststream/cli/supervisors/watchfiles/WatchReloader.md b/docs/docs/en/api/faststream/cli/supervisors/watchfiles/WatchReloader.md
deleted file mode 100644
index b86533f1e8..0000000000
--- a/docs/docs/en/api/faststream/cli/supervisors/watchfiles/WatchReloader.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.supervisors.watchfiles.WatchReloader
diff --git a/docs/docs/en/api/faststream/cli/utils/imports/get_app_path.md b/docs/docs/en/api/faststream/cli/utils/imports/get_app_path.md
deleted file mode 100644
index be8fcfef0c..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/imports/get_app_path.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.imports.get_app_path
diff --git a/docs/docs/en/api/faststream/cli/utils/imports/import_from_string.md b/docs/docs/en/api/faststream/cli/utils/imports/import_from_string.md
deleted file mode 100644
index 731203ac54..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/imports/import_from_string.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.imports.import_from_string
diff --git a/docs/docs/en/api/faststream/cli/utils/imports/import_object.md b/docs/docs/en/api/faststream/cli/utils/imports/import_object.md
deleted file mode 100644
index e26a3e280c..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/imports/import_object.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.imports.import_object
diff --git a/docs/docs/en/api/faststream/cli/utils/imports/try_import_app.md b/docs/docs/en/api/faststream/cli/utils/imports/try_import_app.md
deleted file mode 100644
index 0c6df90c86..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/imports/try_import_app.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.imports.try_import_app
diff --git a/docs/docs/en/api/faststream/cli/utils/logs/LogLevels.md b/docs/docs/en/api/faststream/cli/utils/logs/LogLevels.md
deleted file mode 100644
index f82e3bbb6f..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/logs/LogLevels.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.logs.LogLevels
diff --git a/docs/docs/en/api/faststream/cli/utils/logs/get_log_level.md b/docs/docs/en/api/faststream/cli/utils/logs/get_log_level.md
deleted file mode 100644
index f5e4fcaea0..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/logs/get_log_level.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.logs.get_log_level
diff --git a/docs/docs/en/api/faststream/cli/utils/logs/set_log_level.md b/docs/docs/en/api/faststream/cli/utils/logs/set_log_level.md
deleted file mode 100644
index 6db13adbb9..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/logs/set_log_level.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.logs.set_log_level
diff --git a/docs/docs/en/api/faststream/cli/utils/parser/is_bind_arg.md b/docs/docs/en/api/faststream/cli/utils/parser/is_bind_arg.md
deleted file mode 100644
index 133a1d5675..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/parser/is_bind_arg.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.parser.is_bind_arg
diff --git a/docs/docs/en/api/faststream/cli/utils/parser/parse_cli_args.md b/docs/docs/en/api/faststream/cli/utils/parser/parse_cli_args.md
deleted file mode 100644
index 9c6f03d066..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/parser/parse_cli_args.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.parser.parse_cli_args
diff --git a/docs/docs/en/api/faststream/cli/utils/parser/remove_prefix.md b/docs/docs/en/api/faststream/cli/utils/parser/remove_prefix.md
deleted file mode 100644
index 587db3677f..0000000000
--- a/docs/docs/en/api/faststream/cli/utils/parser/remove_prefix.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.cli.utils.parser.remove_prefix
diff --git a/docs/docs/en/api/faststream/confluent/KafkaBroker.md b/docs/docs/en/api/faststream/confluent/KafkaBroker.md
deleted file mode 100644
index 99fd644946..0000000000
--- a/docs/docs/en/api/faststream/confluent/KafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.KafkaBroker
diff --git a/docs/docs/en/api/faststream/confluent/KafkaPublisher.md b/docs/docs/en/api/faststream/confluent/KafkaPublisher.md
deleted file mode 100644
index 73e485fcc5..0000000000
--- a/docs/docs/en/api/faststream/confluent/KafkaPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.KafkaPublisher
diff --git a/docs/docs/en/api/faststream/confluent/KafkaResponse.md b/docs/docs/en/api/faststream/confluent/KafkaResponse.md
deleted file mode 100644
index eb0eab479c..0000000000
--- a/docs/docs/en/api/faststream/confluent/KafkaResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.KafkaResponse
diff --git a/docs/docs/en/api/faststream/confluent/KafkaRoute.md b/docs/docs/en/api/faststream/confluent/KafkaRoute.md
deleted file mode 100644
index 723012794f..0000000000
--- a/docs/docs/en/api/faststream/confluent/KafkaRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.KafkaRoute
diff --git a/docs/docs/en/api/faststream/confluent/KafkaRouter.md b/docs/docs/en/api/faststream/confluent/KafkaRouter.md
deleted file mode 100644
index b9e7b0d991..0000000000
--- a/docs/docs/en/api/faststream/confluent/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.KafkaRouter
diff --git a/docs/docs/en/api/faststream/confluent/TestApp.md b/docs/docs/en/api/faststream/confluent/TestApp.md
deleted file mode 100644
index 2468f3755c..0000000000
--- a/docs/docs/en/api/faststream/confluent/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.app.TestApp
diff --git a/docs/docs/en/api/faststream/confluent/TestKafkaBroker.md b/docs/docs/en/api/faststream/confluent/TestKafkaBroker.md
deleted file mode 100644
index 0a24384f69..0000000000
--- a/docs/docs/en/api/faststream/confluent/TestKafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.TestKafkaBroker
diff --git a/docs/docs/en/api/faststream/confluent/TopicPartition.md b/docs/docs/en/api/faststream/confluent/TopicPartition.md
deleted file mode 100644
index 9b5e09bdf9..0000000000
--- a/docs/docs/en/api/faststream/confluent/TopicPartition.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.TopicPartition
diff --git a/docs/docs/en/api/faststream/confluent/broker/KafkaBroker.md b/docs/docs/en/api/faststream/confluent/broker/KafkaBroker.md
deleted file mode 100644
index cdfdbc6ef1..0000000000
--- a/docs/docs/en/api/faststream/confluent/broker/KafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.broker.KafkaBroker
diff --git a/docs/docs/en/api/faststream/confluent/broker/broker/KafkaBroker.md b/docs/docs/en/api/faststream/confluent/broker/broker/KafkaBroker.md
deleted file mode 100644
index 05c8356f26..0000000000
--- a/docs/docs/en/api/faststream/confluent/broker/broker/KafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.broker.broker.KafkaBroker
diff --git a/docs/docs/en/api/faststream/confluent/broker/logging/KafkaLoggingBroker.md b/docs/docs/en/api/faststream/confluent/broker/logging/KafkaLoggingBroker.md
deleted file mode 100644
index ea238b6b85..0000000000
--- a/docs/docs/en/api/faststream/confluent/broker/logging/KafkaLoggingBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.broker.logging.KafkaLoggingBroker
diff --git a/docs/docs/en/api/faststream/confluent/broker/registrator/KafkaRegistrator.md b/docs/docs/en/api/faststream/confluent/broker/registrator/KafkaRegistrator.md
deleted file mode 100644
index 80068d2349..0000000000
--- a/docs/docs/en/api/faststream/confluent/broker/registrator/KafkaRegistrator.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.broker.registrator.KafkaRegistrator
diff --git a/docs/docs/en/api/faststream/confluent/client/AsyncConfluentConsumer.md b/docs/docs/en/api/faststream/confluent/client/AsyncConfluentConsumer.md
deleted file mode 100644
index 25374c405d..0000000000
--- a/docs/docs/en/api/faststream/confluent/client/AsyncConfluentConsumer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.client.AsyncConfluentConsumer
diff --git a/docs/docs/en/api/faststream/confluent/client/AsyncConfluentProducer.md b/docs/docs/en/api/faststream/confluent/client/AsyncConfluentProducer.md
deleted file mode 100644
index 29bfac283f..0000000000
--- a/docs/docs/en/api/faststream/confluent/client/AsyncConfluentProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.client.AsyncConfluentProducer
diff --git a/docs/docs/en/api/faststream/confluent/client/BatchBuilder.md b/docs/docs/en/api/faststream/confluent/client/BatchBuilder.md
deleted file mode 100644
index 232f9ecdf2..0000000000
--- a/docs/docs/en/api/faststream/confluent/client/BatchBuilder.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.client.BatchBuilder
diff --git a/docs/docs/en/api/faststream/confluent/client/check_msg_error.md b/docs/docs/en/api/faststream/confluent/client/check_msg_error.md
deleted file mode 100644
index 71ac291b6e..0000000000
--- a/docs/docs/en/api/faststream/confluent/client/check_msg_error.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.client.check_msg_error
diff --git a/docs/docs/en/api/faststream/confluent/client/create_topics.md b/docs/docs/en/api/faststream/confluent/client/create_topics.md
deleted file mode 100644
index 8efc1a80c4..0000000000
--- a/docs/docs/en/api/faststream/confluent/client/create_topics.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.client.create_topics
diff --git a/docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md b/docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md
deleted file mode 100644
index bf5cfbaca7..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.BrokerAddressFamily
diff --git a/docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md b/docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md
deleted file mode 100644
index 41e324305d..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.BuiltinFeatures
diff --git a/docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md b/docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md
deleted file mode 100644
index 15f67688f1..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.ClientDNSLookup
diff --git a/docs/docs/en/api/faststream/confluent/config/CompressionCodec.md b/docs/docs/en/api/faststream/confluent/config/CompressionCodec.md
deleted file mode 100644
index dd9640afd4..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/CompressionCodec.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.CompressionCodec
diff --git a/docs/docs/en/api/faststream/confluent/config/CompressionType.md b/docs/docs/en/api/faststream/confluent/config/CompressionType.md
deleted file mode 100644
index 8139bfcdda..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/CompressionType.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.CompressionType
diff --git a/docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md b/docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md
deleted file mode 100644
index 9ebd97c1ff..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.ConfluentConfig
diff --git a/docs/docs/en/api/faststream/confluent/config/ConfluentFastConfig.md b/docs/docs/en/api/faststream/confluent/config/ConfluentFastConfig.md
deleted file mode 100644
index 27861ffd5b..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/ConfluentFastConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.ConfluentFastConfig
diff --git a/docs/docs/en/api/faststream/confluent/config/Debug.md b/docs/docs/en/api/faststream/confluent/config/Debug.md
deleted file mode 100644
index 2036046f5d..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/Debug.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.Debug
diff --git a/docs/docs/en/api/faststream/confluent/config/GroupProtocol.md b/docs/docs/en/api/faststream/confluent/config/GroupProtocol.md
deleted file mode 100644
index a5cab4b1d9..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/GroupProtocol.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.GroupProtocol
diff --git a/docs/docs/en/api/faststream/confluent/config/IsolationLevel.md b/docs/docs/en/api/faststream/confluent/config/IsolationLevel.md
deleted file mode 100644
index d122261f0f..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/IsolationLevel.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.IsolationLevel
diff --git a/docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md b/docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md
deleted file mode 100644
index 4b203e65e9..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.OffsetStoreMethod
diff --git a/docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md b/docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md
deleted file mode 100644
index 2cb635c6b0..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.SASLOAUTHBearerMethod
diff --git a/docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md b/docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md
deleted file mode 100644
index 8415d3214e..0000000000
--- a/docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.config.SecurityProtocol
diff --git a/docs/docs/en/api/faststream/confluent/fastapi/Context.md b/docs/docs/en/api/faststream/confluent/fastapi/Context.md
deleted file mode 100644
index f4240bb0da..0000000000
--- a/docs/docs/en/api/faststream/confluent/fastapi/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.context.Context
diff --git a/docs/docs/en/api/faststream/confluent/fastapi/KafkaRouter.md b/docs/docs/en/api/faststream/confluent/fastapi/KafkaRouter.md
deleted file mode 100644
index 034203e103..0000000000
--- a/docs/docs/en/api/faststream/confluent/fastapi/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.fastapi.KafkaRouter
diff --git a/docs/docs/en/api/faststream/confluent/fastapi/fastapi/KafkaRouter.md b/docs/docs/en/api/faststream/confluent/fastapi/fastapi/KafkaRouter.md
deleted file mode 100644
index 87edcc2c3d..0000000000
--- a/docs/docs/en/api/faststream/confluent/fastapi/fastapi/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.fastapi.fastapi.KafkaRouter
diff --git a/docs/docs/en/api/faststream/confluent/message/ConsumerProtocol.md b/docs/docs/en/api/faststream/confluent/message/ConsumerProtocol.md
deleted file mode 100644
index 18971d0829..0000000000
--- a/docs/docs/en/api/faststream/confluent/message/ConsumerProtocol.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.message.ConsumerProtocol
diff --git a/docs/docs/en/api/faststream/confluent/message/FakeConsumer.md b/docs/docs/en/api/faststream/confluent/message/FakeConsumer.md
deleted file mode 100644
index 19e60bb461..0000000000
--- a/docs/docs/en/api/faststream/confluent/message/FakeConsumer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.message.FakeConsumer
diff --git a/docs/docs/en/api/faststream/confluent/message/KafkaMessage.md b/docs/docs/en/api/faststream/confluent/message/KafkaMessage.md
deleted file mode 100644
index 02004c7d37..0000000000
--- a/docs/docs/en/api/faststream/confluent/message/KafkaMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.message.KafkaMessage
diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md
deleted file mode 100644
index 743c494591..0000000000
--- a/docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.opentelemetry.KafkaTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md
deleted file mode 100644
index b34265dfbb..0000000000
--- a/docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.opentelemetry.middleware.KafkaTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md
deleted file mode 100644
index 730662fae5..0000000000
--- a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.opentelemetry.provider.BaseConfluentTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md
deleted file mode 100644
index a6db133484..0000000000
--- a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.opentelemetry.provider.BatchConfluentTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md
deleted file mode 100644
index 2c5242e6e5..0000000000
--- a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.opentelemetry.provider.ConfluentTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md
deleted file mode 100644
index 7dd0e1d0fd..0000000000
--- a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.opentelemetry.provider.telemetry_attributes_provider_factory
diff --git a/docs/docs/en/api/faststream/confluent/parser/AsyncConfluentParser.md b/docs/docs/en/api/faststream/confluent/parser/AsyncConfluentParser.md
deleted file mode 100644
index e5029a60d1..0000000000
--- a/docs/docs/en/api/faststream/confluent/parser/AsyncConfluentParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.parser.AsyncConfluentParser
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md
deleted file mode 100644
index e84e84acc3..0000000000
--- a/docs/docs/en/api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.prometheus.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md
deleted file mode 100644
index 6603893f74..0000000000
--- a/docs/docs/en/api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.prometheus.middleware.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md
deleted file mode 100644
index 27c186c098..0000000000
--- a/docs/docs/en/api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.prometheus.provider.BaseConfluentMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md
deleted file mode 100644
index f784a64e9f..0000000000
--- a/docs/docs/en/api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.prometheus.provider.BatchConfluentMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md
deleted file mode 100644
index 65f0a8348e..0000000000
--- a/docs/docs/en/api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.prometheus.provider.ConfluentMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/settings_provider_factory.md
deleted file mode 100644
index 78358f46e3..0000000000
--- a/docs/docs/en/api/faststream/confluent/prometheus/provider/settings_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIBatchPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIBatchPublisher.md
deleted file mode 100644
index 62ae234697..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIBatchPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.asyncapi.AsyncAPIBatchPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIDefaultPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIDefaultPublisher.md
deleted file mode 100644
index 32685d612d..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIDefaultPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.asyncapi.AsyncAPIDefaultPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIPublisher.md
deleted file mode 100644
index f76d27ccd0..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/asyncapi/AsyncAPIPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.asyncapi.AsyncAPIPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/producer/AsyncConfluentFastProducer.md b/docs/docs/en/api/faststream/confluent/publisher/producer/AsyncConfluentFastProducer.md
deleted file mode 100644
index fd614d1593..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/producer/AsyncConfluentFastProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.producer.AsyncConfluentFastProducer
diff --git a/docs/docs/en/api/faststream/confluent/publisher/usecase/BatchPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/usecase/BatchPublisher.md
deleted file mode 100644
index 23e8baeed9..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/usecase/BatchPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.usecase.BatchPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/usecase/DefaultPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/usecase/DefaultPublisher.md
deleted file mode 100644
index faa20eaa11..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/usecase/DefaultPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.usecase.DefaultPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/usecase/LogicPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/usecase/LogicPublisher.md
deleted file mode 100644
index d9a8594d12..0000000000
--- a/docs/docs/en/api/faststream/confluent/publisher/usecase/LogicPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.publisher.usecase.LogicPublisher
diff --git a/docs/docs/en/api/faststream/confluent/response/KafkaResponse.md b/docs/docs/en/api/faststream/confluent/response/KafkaResponse.md
deleted file mode 100644
index 7fa5542613..0000000000
--- a/docs/docs/en/api/faststream/confluent/response/KafkaResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.response.KafkaResponse
diff --git a/docs/docs/en/api/faststream/confluent/router/KafkaPublisher.md b/docs/docs/en/api/faststream/confluent/router/KafkaPublisher.md
deleted file mode 100644
index ee1c818707..0000000000
--- a/docs/docs/en/api/faststream/confluent/router/KafkaPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.router.KafkaPublisher
diff --git a/docs/docs/en/api/faststream/confluent/router/KafkaRoute.md b/docs/docs/en/api/faststream/confluent/router/KafkaRoute.md
deleted file mode 100644
index 60d7bb1c99..0000000000
--- a/docs/docs/en/api/faststream/confluent/router/KafkaRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.router.KafkaRoute
diff --git a/docs/docs/en/api/faststream/confluent/router/KafkaRouter.md b/docs/docs/en/api/faststream/confluent/router/KafkaRouter.md
deleted file mode 100644
index dac6c1a646..0000000000
--- a/docs/docs/en/api/faststream/confluent/router/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.router.KafkaRouter
diff --git a/docs/docs/en/api/faststream/confluent/schemas/TopicPartition.md b/docs/docs/en/api/faststream/confluent/schemas/TopicPartition.md
deleted file mode 100644
index 0c52345b4e..0000000000
--- a/docs/docs/en/api/faststream/confluent/schemas/TopicPartition.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.schemas.TopicPartition
diff --git a/docs/docs/en/api/faststream/confluent/schemas/params/ConsumerConnectionParams.md b/docs/docs/en/api/faststream/confluent/schemas/params/ConsumerConnectionParams.md
deleted file mode 100644
index f4ed5b2004..0000000000
--- a/docs/docs/en/api/faststream/confluent/schemas/params/ConsumerConnectionParams.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.schemas.params.ConsumerConnectionParams
diff --git a/docs/docs/en/api/faststream/confluent/schemas/partition/TopicPartition.md b/docs/docs/en/api/faststream/confluent/schemas/partition/TopicPartition.md
deleted file mode 100644
index 11e0bc2b3c..0000000000
--- a/docs/docs/en/api/faststream/confluent/schemas/partition/TopicPartition.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.schemas.partition.TopicPartition
diff --git a/docs/docs/en/api/faststream/confluent/security/parse_security.md b/docs/docs/en/api/faststream/confluent/security/parse_security.md
deleted file mode 100644
index 1eb84ceed6..0000000000
--- a/docs/docs/en/api/faststream/confluent/security/parse_security.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.security.parse_security
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIBatchSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIBatchSubscriber.md
deleted file mode 100644
index f6fc81226a..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIBatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.asyncapi.AsyncAPIBatchSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md
deleted file mode 100644
index 372b29b571..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.asyncapi.AsyncAPIConcurrentDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md
deleted file mode 100644
index 12641d32ce..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.asyncapi.AsyncAPIDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPISubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPISubscriber.md
deleted file mode 100644
index b22facc06a..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/asyncapi/AsyncAPISubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.asyncapi.AsyncAPISubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md
deleted file mode 100644
index ce811a99d9..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.factory.create_subscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/usecase/BatchSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/usecase/BatchSubscriber.md
deleted file mode 100644
index 4642abd4a8..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/usecase/BatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.usecase.BatchSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/usecase/ConcurrentDefaultSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/usecase/ConcurrentDefaultSubscriber.md
deleted file mode 100644
index 13d0f308c1..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/usecase/ConcurrentDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.usecase.ConcurrentDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md
deleted file mode 100644
index c2d7ed227e..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.usecase.DefaultSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/usecase/LogicSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/usecase/LogicSubscriber.md
deleted file mode 100644
index c47daf891f..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/usecase/LogicSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.usecase.LogicSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/testing/FakeProducer.md b/docs/docs/en/api/faststream/confluent/testing/FakeProducer.md
deleted file mode 100644
index aeaee2a2d7..0000000000
--- a/docs/docs/en/api/faststream/confluent/testing/FakeProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.testing.FakeProducer
diff --git a/docs/docs/en/api/faststream/confluent/testing/MockConfluentMessage.md b/docs/docs/en/api/faststream/confluent/testing/MockConfluentMessage.md
deleted file mode 100644
index 78791486ab..0000000000
--- a/docs/docs/en/api/faststream/confluent/testing/MockConfluentMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.testing.MockConfluentMessage
diff --git a/docs/docs/en/api/faststream/confluent/testing/TestKafkaBroker.md b/docs/docs/en/api/faststream/confluent/testing/TestKafkaBroker.md
deleted file mode 100644
index 53dfed8f24..0000000000
--- a/docs/docs/en/api/faststream/confluent/testing/TestKafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.testing.TestKafkaBroker
diff --git a/docs/docs/en/api/faststream/confluent/testing/build_message.md b/docs/docs/en/api/faststream/confluent/testing/build_message.md
deleted file mode 100644
index 75787a13b3..0000000000
--- a/docs/docs/en/api/faststream/confluent/testing/build_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.testing.build_message
diff --git a/docs/docs/en/api/faststream/constants/ContentTypes.md b/docs/docs/en/api/faststream/constants/ContentTypes.md
deleted file mode 100644
index 28d62bdcd7..0000000000
--- a/docs/docs/en/api/faststream/constants/ContentTypes.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.constants.ContentTypes
diff --git a/docs/docs/en/api/faststream/exceptions/AckMessage.md b/docs/docs/en/api/faststream/exceptions/AckMessage.md
deleted file mode 100644
index 175efc68ed..0000000000
--- a/docs/docs/en/api/faststream/exceptions/AckMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.AckMessage
diff --git a/docs/docs/en/api/faststream/exceptions/FastStreamException.md b/docs/docs/en/api/faststream/exceptions/FastStreamException.md
deleted file mode 100644
index bd988e9332..0000000000
--- a/docs/docs/en/api/faststream/exceptions/FastStreamException.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.FastStreamException
diff --git a/docs/docs/en/api/faststream/exceptions/HandlerException.md b/docs/docs/en/api/faststream/exceptions/HandlerException.md
deleted file mode 100644
index 64495519a4..0000000000
--- a/docs/docs/en/api/faststream/exceptions/HandlerException.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.HandlerException
diff --git a/docs/docs/en/api/faststream/exceptions/IgnoredException.md b/docs/docs/en/api/faststream/exceptions/IgnoredException.md
deleted file mode 100644
index 18452057c1..0000000000
--- a/docs/docs/en/api/faststream/exceptions/IgnoredException.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.IgnoredException
diff --git a/docs/docs/en/api/faststream/exceptions/NackMessage.md b/docs/docs/en/api/faststream/exceptions/NackMessage.md
deleted file mode 100644
index 05502ca14d..0000000000
--- a/docs/docs/en/api/faststream/exceptions/NackMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.NackMessage
diff --git a/docs/docs/en/api/faststream/exceptions/OperationForbiddenError.md b/docs/docs/en/api/faststream/exceptions/OperationForbiddenError.md
deleted file mode 100644
index e34e86542b..0000000000
--- a/docs/docs/en/api/faststream/exceptions/OperationForbiddenError.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.OperationForbiddenError
diff --git a/docs/docs/en/api/faststream/exceptions/RejectMessage.md b/docs/docs/en/api/faststream/exceptions/RejectMessage.md
deleted file mode 100644
index be491d89c1..0000000000
--- a/docs/docs/en/api/faststream/exceptions/RejectMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.RejectMessage
diff --git a/docs/docs/en/api/faststream/exceptions/SetupError.md b/docs/docs/en/api/faststream/exceptions/SetupError.md
deleted file mode 100644
index 588e66557f..0000000000
--- a/docs/docs/en/api/faststream/exceptions/SetupError.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.SetupError
diff --git a/docs/docs/en/api/faststream/exceptions/SkipMessage.md b/docs/docs/en/api/faststream/exceptions/SkipMessage.md
deleted file mode 100644
index e2a6ac135e..0000000000
--- a/docs/docs/en/api/faststream/exceptions/SkipMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.SkipMessage
diff --git a/docs/docs/en/api/faststream/exceptions/StopApplication.md b/docs/docs/en/api/faststream/exceptions/StopApplication.md
deleted file mode 100644
index 12059837a4..0000000000
--- a/docs/docs/en/api/faststream/exceptions/StopApplication.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.StopApplication
diff --git a/docs/docs/en/api/faststream/exceptions/StopConsume.md b/docs/docs/en/api/faststream/exceptions/StopConsume.md
deleted file mode 100644
index 9733dcc2e9..0000000000
--- a/docs/docs/en/api/faststream/exceptions/StopConsume.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.StopConsume
diff --git a/docs/docs/en/api/faststream/exceptions/SubscriberNotFound.md b/docs/docs/en/api/faststream/exceptions/SubscriberNotFound.md
deleted file mode 100644
index 89428f8251..0000000000
--- a/docs/docs/en/api/faststream/exceptions/SubscriberNotFound.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.SubscriberNotFound
diff --git a/docs/docs/en/api/faststream/exceptions/ValidationError.md b/docs/docs/en/api/faststream/exceptions/ValidationError.md
deleted file mode 100644
index 93dc0a73d1..0000000000
--- a/docs/docs/en/api/faststream/exceptions/ValidationError.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.exceptions.ValidationError
diff --git a/docs/docs/en/api/faststream/kafka/KafkaBroker.md b/docs/docs/en/api/faststream/kafka/KafkaBroker.md
deleted file mode 100644
index 7ee56a5e01..0000000000
--- a/docs/docs/en/api/faststream/kafka/KafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.KafkaBroker
diff --git a/docs/docs/en/api/faststream/kafka/KafkaPublisher.md b/docs/docs/en/api/faststream/kafka/KafkaPublisher.md
deleted file mode 100644
index c379528109..0000000000
--- a/docs/docs/en/api/faststream/kafka/KafkaPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.KafkaPublisher
diff --git a/docs/docs/en/api/faststream/kafka/KafkaResponse.md b/docs/docs/en/api/faststream/kafka/KafkaResponse.md
deleted file mode 100644
index 4aab0b965d..0000000000
--- a/docs/docs/en/api/faststream/kafka/KafkaResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.KafkaResponse
diff --git a/docs/docs/en/api/faststream/kafka/KafkaRoute.md b/docs/docs/en/api/faststream/kafka/KafkaRoute.md
deleted file mode 100644
index 89a8d8cca1..0000000000
--- a/docs/docs/en/api/faststream/kafka/KafkaRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.KafkaRoute
diff --git a/docs/docs/en/api/faststream/kafka/KafkaRouter.md b/docs/docs/en/api/faststream/kafka/KafkaRouter.md
deleted file mode 100644
index c60f3ca6f4..0000000000
--- a/docs/docs/en/api/faststream/kafka/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.KafkaRouter
diff --git a/docs/docs/en/api/faststream/kafka/TestApp.md b/docs/docs/en/api/faststream/kafka/TestApp.md
deleted file mode 100644
index 2468f3755c..0000000000
--- a/docs/docs/en/api/faststream/kafka/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.app.TestApp
diff --git a/docs/docs/en/api/faststream/kafka/TestKafkaBroker.md b/docs/docs/en/api/faststream/kafka/TestKafkaBroker.md
deleted file mode 100644
index 096df3b1d1..0000000000
--- a/docs/docs/en/api/faststream/kafka/TestKafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.TestKafkaBroker
diff --git a/docs/docs/en/api/faststream/kafka/TopicPartition.md b/docs/docs/en/api/faststream/kafka/TopicPartition.md
deleted file mode 100644
index 41fbd7f624..0000000000
--- a/docs/docs/en/api/faststream/kafka/TopicPartition.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: aiokafka.structs.TopicPartition
diff --git a/docs/docs/en/api/faststream/kafka/broker/KafkaBroker.md b/docs/docs/en/api/faststream/kafka/broker/KafkaBroker.md
deleted file mode 100644
index 2cee711d14..0000000000
--- a/docs/docs/en/api/faststream/kafka/broker/KafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.broker.KafkaBroker
diff --git a/docs/docs/en/api/faststream/kafka/broker/broker/KafkaBroker.md b/docs/docs/en/api/faststream/kafka/broker/broker/KafkaBroker.md
deleted file mode 100644
index ca32dd3865..0000000000
--- a/docs/docs/en/api/faststream/kafka/broker/broker/KafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.broker.broker.KafkaBroker
diff --git a/docs/docs/en/api/faststream/kafka/broker/logging/KafkaLoggingBroker.md b/docs/docs/en/api/faststream/kafka/broker/logging/KafkaLoggingBroker.md
deleted file mode 100644
index 1f8d5921b7..0000000000
--- a/docs/docs/en/api/faststream/kafka/broker/logging/KafkaLoggingBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.broker.logging.KafkaLoggingBroker
diff --git a/docs/docs/en/api/faststream/kafka/broker/registrator/KafkaRegistrator.md b/docs/docs/en/api/faststream/kafka/broker/registrator/KafkaRegistrator.md
deleted file mode 100644
index aa06d38f65..0000000000
--- a/docs/docs/en/api/faststream/kafka/broker/registrator/KafkaRegistrator.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.broker.registrator.KafkaRegistrator
diff --git a/docs/docs/en/api/faststream/kafka/fastapi/Context.md b/docs/docs/en/api/faststream/kafka/fastapi/Context.md
deleted file mode 100644
index f4240bb0da..0000000000
--- a/docs/docs/en/api/faststream/kafka/fastapi/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.context.Context
diff --git a/docs/docs/en/api/faststream/kafka/fastapi/KafkaRouter.md b/docs/docs/en/api/faststream/kafka/fastapi/KafkaRouter.md
deleted file mode 100644
index 2ab7254e79..0000000000
--- a/docs/docs/en/api/faststream/kafka/fastapi/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.fastapi.KafkaRouter
diff --git a/docs/docs/en/api/faststream/kafka/fastapi/fastapi/KafkaRouter.md b/docs/docs/en/api/faststream/kafka/fastapi/fastapi/KafkaRouter.md
deleted file mode 100644
index 80fc17dd4a..0000000000
--- a/docs/docs/en/api/faststream/kafka/fastapi/fastapi/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.fastapi.fastapi.KafkaRouter
diff --git a/docs/docs/en/api/faststream/kafka/message/ConsumerProtocol.md b/docs/docs/en/api/faststream/kafka/message/ConsumerProtocol.md
deleted file mode 100644
index c9fd16a983..0000000000
--- a/docs/docs/en/api/faststream/kafka/message/ConsumerProtocol.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.message.ConsumerProtocol
diff --git a/docs/docs/en/api/faststream/kafka/message/FakeConsumer.md b/docs/docs/en/api/faststream/kafka/message/FakeConsumer.md
deleted file mode 100644
index d41724b288..0000000000
--- a/docs/docs/en/api/faststream/kafka/message/FakeConsumer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.message.FakeConsumer
diff --git a/docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md b/docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md
deleted file mode 100644
index 16461be675..0000000000
--- a/docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.message.KafkaAckableMessage
diff --git a/docs/docs/en/api/faststream/kafka/message/KafkaMessage.md b/docs/docs/en/api/faststream/kafka/message/KafkaMessage.md
deleted file mode 100644
index 7a7a30bae3..0000000000
--- a/docs/docs/en/api/faststream/kafka/message/KafkaMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.message.KafkaMessage
diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md
deleted file mode 100644
index 02fb4805ac..0000000000
--- a/docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.opentelemetry.KafkaTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md
deleted file mode 100644
index aba78378f2..0000000000
--- a/docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.opentelemetry.middleware.KafkaTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md
deleted file mode 100644
index 5cb13be947..0000000000
--- a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.opentelemetry.provider.BaseKafkaTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md
deleted file mode 100644
index d3d7080509..0000000000
--- a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.opentelemetry.provider.BatchKafkaTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md
deleted file mode 100644
index 0859c0df3d..0000000000
--- a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.opentelemetry.provider.KafkaTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md
deleted file mode 100644
index 3b2a1ad394..0000000000
--- a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.opentelemetry.provider.telemetry_attributes_provider_factory
diff --git a/docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md b/docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md
deleted file mode 100644
index 25df2532c6..0000000000
--- a/docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.parser.AioKafkaBatchParser
diff --git a/docs/docs/en/api/faststream/kafka/parser/AioKafkaParser.md b/docs/docs/en/api/faststream/kafka/parser/AioKafkaParser.md
deleted file mode 100644
index e7e37cce97..0000000000
--- a/docs/docs/en/api/faststream/kafka/parser/AioKafkaParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.parser.AioKafkaParser
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md
deleted file mode 100644
index c2ffd5356a..0000000000
--- a/docs/docs/en/api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.prometheus.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md
deleted file mode 100644
index 451b7080c0..0000000000
--- a/docs/docs/en/api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.prometheus.middleware.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md
deleted file mode 100644
index 0fd044f694..0000000000
--- a/docs/docs/en/api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.prometheus.provider.BaseKafkaMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md
deleted file mode 100644
index 9bd01d5e71..0000000000
--- a/docs/docs/en/api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.prometheus.provider.BatchKafkaMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md
deleted file mode 100644
index ae7c490da8..0000000000
--- a/docs/docs/en/api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.prometheus.provider.KafkaMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/settings_provider_factory.md
deleted file mode 100644
index 1393fd9065..0000000000
--- a/docs/docs/en/api/faststream/kafka/prometheus/provider/settings_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIBatchPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIBatchPublisher.md
deleted file mode 100644
index 8d796523e6..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIBatchPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.asyncapi.AsyncAPIBatchPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIDefaultPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIDefaultPublisher.md
deleted file mode 100644
index 7e4e54d030..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIDefaultPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.asyncapi.AsyncAPIDefaultPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIPublisher.md
deleted file mode 100644
index 7d914809c2..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/asyncapi/AsyncAPIPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.asyncapi.AsyncAPIPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/producer/AioKafkaFastProducer.md b/docs/docs/en/api/faststream/kafka/publisher/producer/AioKafkaFastProducer.md
deleted file mode 100644
index 83b116989b..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/producer/AioKafkaFastProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.producer.AioKafkaFastProducer
diff --git a/docs/docs/en/api/faststream/kafka/publisher/usecase/BatchPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/usecase/BatchPublisher.md
deleted file mode 100644
index 045cfbf45f..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/usecase/BatchPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.usecase.BatchPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/usecase/DefaultPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/usecase/DefaultPublisher.md
deleted file mode 100644
index 07518c75b3..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/usecase/DefaultPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.usecase.DefaultPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/usecase/LogicPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/usecase/LogicPublisher.md
deleted file mode 100644
index 615da58f90..0000000000
--- a/docs/docs/en/api/faststream/kafka/publisher/usecase/LogicPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.publisher.usecase.LogicPublisher
diff --git a/docs/docs/en/api/faststream/kafka/response/KafkaResponse.md b/docs/docs/en/api/faststream/kafka/response/KafkaResponse.md
deleted file mode 100644
index 05ecd69c2d..0000000000
--- a/docs/docs/en/api/faststream/kafka/response/KafkaResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.response.KafkaResponse
diff --git a/docs/docs/en/api/faststream/kafka/router/KafkaPublisher.md b/docs/docs/en/api/faststream/kafka/router/KafkaPublisher.md
deleted file mode 100644
index 5027c18f20..0000000000
--- a/docs/docs/en/api/faststream/kafka/router/KafkaPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.router.KafkaPublisher
diff --git a/docs/docs/en/api/faststream/kafka/router/KafkaRoute.md b/docs/docs/en/api/faststream/kafka/router/KafkaRoute.md
deleted file mode 100644
index e7e6184deb..0000000000
--- a/docs/docs/en/api/faststream/kafka/router/KafkaRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.router.KafkaRoute
diff --git a/docs/docs/en/api/faststream/kafka/router/KafkaRouter.md b/docs/docs/en/api/faststream/kafka/router/KafkaRouter.md
deleted file mode 100644
index 5d7578bbfc..0000000000
--- a/docs/docs/en/api/faststream/kafka/router/KafkaRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.router.KafkaRouter
diff --git a/docs/docs/en/api/faststream/kafka/schemas/params/ConsumerConnectionParams.md b/docs/docs/en/api/faststream/kafka/schemas/params/ConsumerConnectionParams.md
deleted file mode 100644
index b289e61e5a..0000000000
--- a/docs/docs/en/api/faststream/kafka/schemas/params/ConsumerConnectionParams.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.schemas.params.ConsumerConnectionParams
diff --git a/docs/docs/en/api/faststream/kafka/security/parse_security.md b/docs/docs/en/api/faststream/kafka/security/parse_security.md
deleted file mode 100644
index e325a99ad8..0000000000
--- a/docs/docs/en/api/faststream/kafka/security/parse_security.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.security.parse_security
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIBatchSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIBatchSubscriber.md
deleted file mode 100644
index 3ce948d2e2..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIBatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.asyncapi.AsyncAPIBatchSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md
deleted file mode 100644
index 8ce5838961..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIConcurrentDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.asyncapi.AsyncAPIConcurrentDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md
deleted file mode 100644
index ef10b05e80..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.asyncapi.AsyncAPIDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPISubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPISubscriber.md
deleted file mode 100644
index 330a621bf5..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/asyncapi/AsyncAPISubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.asyncapi.AsyncAPISubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md
deleted file mode 100644
index d9e5fcb4a4..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.factory.create_subscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/usecase/BatchSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/usecase/BatchSubscriber.md
deleted file mode 100644
index 6f8978f38b..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/usecase/BatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.usecase.BatchSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/usecase/ConcurrentDefaultSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/usecase/ConcurrentDefaultSubscriber.md
deleted file mode 100644
index 16f09d9334..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/usecase/ConcurrentDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.usecase.ConcurrentDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md
deleted file mode 100644
index 78949c27dd..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.usecase.DefaultSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/usecase/LogicSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/usecase/LogicSubscriber.md
deleted file mode 100644
index 297013e037..0000000000
--- a/docs/docs/en/api/faststream/kafka/subscriber/usecase/LogicSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.subscriber.usecase.LogicSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/testing/FakeProducer.md b/docs/docs/en/api/faststream/kafka/testing/FakeProducer.md
deleted file mode 100644
index 63eb94c3ca..0000000000
--- a/docs/docs/en/api/faststream/kafka/testing/FakeProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.testing.FakeProducer
diff --git a/docs/docs/en/api/faststream/kafka/testing/TestKafkaBroker.md b/docs/docs/en/api/faststream/kafka/testing/TestKafkaBroker.md
deleted file mode 100644
index 96f257a15f..0000000000
--- a/docs/docs/en/api/faststream/kafka/testing/TestKafkaBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.testing.TestKafkaBroker
diff --git a/docs/docs/en/api/faststream/kafka/testing/build_message.md b/docs/docs/en/api/faststream/kafka/testing/build_message.md
deleted file mode 100644
index 354d7a82f3..0000000000
--- a/docs/docs/en/api/faststream/kafka/testing/build_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.kafka.testing.build_message
diff --git a/docs/docs/en/api/faststream/log/formatter/ColourizedFormatter.md b/docs/docs/en/api/faststream/log/formatter/ColourizedFormatter.md
deleted file mode 100644
index 6e1aec157c..0000000000
--- a/docs/docs/en/api/faststream/log/formatter/ColourizedFormatter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.log.formatter.ColourizedFormatter
diff --git a/docs/docs/en/api/faststream/log/formatter/expand_log_field.md b/docs/docs/en/api/faststream/log/formatter/expand_log_field.md
deleted file mode 100644
index ce943209af..0000000000
--- a/docs/docs/en/api/faststream/log/formatter/expand_log_field.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.log.formatter.expand_log_field
diff --git a/docs/docs/en/api/faststream/log/logging/ExtendedFilter.md b/docs/docs/en/api/faststream/log/logging/ExtendedFilter.md
deleted file mode 100644
index bd8f017947..0000000000
--- a/docs/docs/en/api/faststream/log/logging/ExtendedFilter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.log.logging.ExtendedFilter
diff --git a/docs/docs/en/api/faststream/log/logging/get_broker_logger.md b/docs/docs/en/api/faststream/log/logging/get_broker_logger.md
deleted file mode 100644
index e3433fc8dd..0000000000
--- a/docs/docs/en/api/faststream/log/logging/get_broker_logger.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.log.logging.get_broker_logger
diff --git a/docs/docs/en/api/faststream/log/logging/set_logger_fmt.md b/docs/docs/en/api/faststream/log/logging/set_logger_fmt.md
deleted file mode 100644
index a4af3d137f..0000000000
--- a/docs/docs/en/api/faststream/log/logging/set_logger_fmt.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.log.logging.set_logger_fmt
diff --git a/docs/docs/en/api/faststream/nats/AckPolicy.md b/docs/docs/en/api/faststream/nats/AckPolicy.md
deleted file mode 100644
index 308d12ac63..0000000000
--- a/docs/docs/en/api/faststream/nats/AckPolicy.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.AckPolicy
diff --git a/docs/docs/en/api/faststream/nats/ConsumerConfig.md b/docs/docs/en/api/faststream/nats/ConsumerConfig.md
deleted file mode 100644
index 56c357cc07..0000000000
--- a/docs/docs/en/api/faststream/nats/ConsumerConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.ConsumerConfig
diff --git a/docs/docs/en/api/faststream/nats/DeliverPolicy.md b/docs/docs/en/api/faststream/nats/DeliverPolicy.md
deleted file mode 100644
index ebb664d0d9..0000000000
--- a/docs/docs/en/api/faststream/nats/DeliverPolicy.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.DeliverPolicy
diff --git a/docs/docs/en/api/faststream/nats/DiscardPolicy.md b/docs/docs/en/api/faststream/nats/DiscardPolicy.md
deleted file mode 100644
index 9eacd12198..0000000000
--- a/docs/docs/en/api/faststream/nats/DiscardPolicy.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.DiscardPolicy
diff --git a/docs/docs/en/api/faststream/nats/ExternalStream.md b/docs/docs/en/api/faststream/nats/ExternalStream.md
deleted file mode 100644
index 5ea0eacbbc..0000000000
--- a/docs/docs/en/api/faststream/nats/ExternalStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.ExternalStream
diff --git a/docs/docs/en/api/faststream/nats/JStream.md b/docs/docs/en/api/faststream/nats/JStream.md
deleted file mode 100644
index 70ca7cab69..0000000000
--- a/docs/docs/en/api/faststream/nats/JStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.JStream
diff --git a/docs/docs/en/api/faststream/nats/KvWatch.md b/docs/docs/en/api/faststream/nats/KvWatch.md
deleted file mode 100644
index 1527be51fd..0000000000
--- a/docs/docs/en/api/faststream/nats/KvWatch.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.KvWatch
diff --git a/docs/docs/en/api/faststream/nats/NatsBroker.md b/docs/docs/en/api/faststream/nats/NatsBroker.md
deleted file mode 100644
index 376231c4cd..0000000000
--- a/docs/docs/en/api/faststream/nats/NatsBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.NatsBroker
diff --git a/docs/docs/en/api/faststream/nats/NatsPublisher.md b/docs/docs/en/api/faststream/nats/NatsPublisher.md
deleted file mode 100644
index 1f1ffbca2a..0000000000
--- a/docs/docs/en/api/faststream/nats/NatsPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.NatsPublisher
diff --git a/docs/docs/en/api/faststream/nats/NatsResponse.md b/docs/docs/en/api/faststream/nats/NatsResponse.md
deleted file mode 100644
index 6b967b527a..0000000000
--- a/docs/docs/en/api/faststream/nats/NatsResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.NatsResponse
diff --git a/docs/docs/en/api/faststream/nats/NatsRoute.md b/docs/docs/en/api/faststream/nats/NatsRoute.md
deleted file mode 100644
index b76a8481dc..0000000000
--- a/docs/docs/en/api/faststream/nats/NatsRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.NatsRoute
diff --git a/docs/docs/en/api/faststream/nats/NatsRouter.md b/docs/docs/en/api/faststream/nats/NatsRouter.md
deleted file mode 100644
index 89e975235b..0000000000
--- a/docs/docs/en/api/faststream/nats/NatsRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.NatsRouter
diff --git a/docs/docs/en/api/faststream/nats/ObjWatch.md b/docs/docs/en/api/faststream/nats/ObjWatch.md
deleted file mode 100644
index 50102ecf31..0000000000
--- a/docs/docs/en/api/faststream/nats/ObjWatch.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.ObjWatch
diff --git a/docs/docs/en/api/faststream/nats/Placement.md b/docs/docs/en/api/faststream/nats/Placement.md
deleted file mode 100644
index bff8b3d4b8..0000000000
--- a/docs/docs/en/api/faststream/nats/Placement.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.Placement
diff --git a/docs/docs/en/api/faststream/nats/PullSub.md b/docs/docs/en/api/faststream/nats/PullSub.md
deleted file mode 100644
index dbfaf68f54..0000000000
--- a/docs/docs/en/api/faststream/nats/PullSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.PullSub
diff --git a/docs/docs/en/api/faststream/nats/RePublish.md b/docs/docs/en/api/faststream/nats/RePublish.md
deleted file mode 100644
index 35ad498def..0000000000
--- a/docs/docs/en/api/faststream/nats/RePublish.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.RePublish
diff --git a/docs/docs/en/api/faststream/nats/ReplayPolicy.md b/docs/docs/en/api/faststream/nats/ReplayPolicy.md
deleted file mode 100644
index 6430f0a22f..0000000000
--- a/docs/docs/en/api/faststream/nats/ReplayPolicy.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.ReplayPolicy
diff --git a/docs/docs/en/api/faststream/nats/RetentionPolicy.md b/docs/docs/en/api/faststream/nats/RetentionPolicy.md
deleted file mode 100644
index 919b818c9e..0000000000
--- a/docs/docs/en/api/faststream/nats/RetentionPolicy.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.RetentionPolicy
diff --git a/docs/docs/en/api/faststream/nats/StorageType.md b/docs/docs/en/api/faststream/nats/StorageType.md
deleted file mode 100644
index 78a6bc4d8f..0000000000
--- a/docs/docs/en/api/faststream/nats/StorageType.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.StorageType
diff --git a/docs/docs/en/api/faststream/nats/StreamConfig.md b/docs/docs/en/api/faststream/nats/StreamConfig.md
deleted file mode 100644
index 3bce18f7de..0000000000
--- a/docs/docs/en/api/faststream/nats/StreamConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.StreamConfig
diff --git a/docs/docs/en/api/faststream/nats/StreamSource.md b/docs/docs/en/api/faststream/nats/StreamSource.md
deleted file mode 100644
index 4d85db37e5..0000000000
--- a/docs/docs/en/api/faststream/nats/StreamSource.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: nats.js.api.StreamSource
diff --git a/docs/docs/en/api/faststream/nats/TestApp.md b/docs/docs/en/api/faststream/nats/TestApp.md
deleted file mode 100644
index 2468f3755c..0000000000
--- a/docs/docs/en/api/faststream/nats/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.app.TestApp
diff --git a/docs/docs/en/api/faststream/nats/TestNatsBroker.md b/docs/docs/en/api/faststream/nats/TestNatsBroker.md
deleted file mode 100644
index 8557295619..0000000000
--- a/docs/docs/en/api/faststream/nats/TestNatsBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.TestNatsBroker
diff --git a/docs/docs/en/api/faststream/nats/broker/NatsBroker.md b/docs/docs/en/api/faststream/nats/broker/NatsBroker.md
deleted file mode 100644
index eeea31372b..0000000000
--- a/docs/docs/en/api/faststream/nats/broker/NatsBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.broker.NatsBroker
diff --git a/docs/docs/en/api/faststream/nats/broker/broker/NatsBroker.md b/docs/docs/en/api/faststream/nats/broker/broker/NatsBroker.md
deleted file mode 100644
index 7aed0de1ec..0000000000
--- a/docs/docs/en/api/faststream/nats/broker/broker/NatsBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.broker.broker.NatsBroker
diff --git a/docs/docs/en/api/faststream/nats/broker/logging/NatsLoggingBroker.md b/docs/docs/en/api/faststream/nats/broker/logging/NatsLoggingBroker.md
deleted file mode 100644
index cd31396a61..0000000000
--- a/docs/docs/en/api/faststream/nats/broker/logging/NatsLoggingBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.broker.logging.NatsLoggingBroker
diff --git a/docs/docs/en/api/faststream/nats/broker/registrator/NatsRegistrator.md b/docs/docs/en/api/faststream/nats/broker/registrator/NatsRegistrator.md
deleted file mode 100644
index f7f313746a..0000000000
--- a/docs/docs/en/api/faststream/nats/broker/registrator/NatsRegistrator.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.broker.registrator.NatsRegistrator
diff --git a/docs/docs/en/api/faststream/nats/fastapi/Context.md b/docs/docs/en/api/faststream/nats/fastapi/Context.md
deleted file mode 100644
index f4240bb0da..0000000000
--- a/docs/docs/en/api/faststream/nats/fastapi/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.context.Context
diff --git a/docs/docs/en/api/faststream/nats/fastapi/NatsRouter.md b/docs/docs/en/api/faststream/nats/fastapi/NatsRouter.md
deleted file mode 100644
index 53123192c2..0000000000
--- a/docs/docs/en/api/faststream/nats/fastapi/NatsRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.fastapi.NatsRouter
diff --git a/docs/docs/en/api/faststream/nats/fastapi/fastapi/NatsRouter.md b/docs/docs/en/api/faststream/nats/fastapi/fastapi/NatsRouter.md
deleted file mode 100644
index 015f730b4f..0000000000
--- a/docs/docs/en/api/faststream/nats/fastapi/fastapi/NatsRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.fastapi.fastapi.NatsRouter
diff --git a/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md
deleted file mode 100644
index b24feaada6..0000000000
--- a/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.helpers.KVBucketDeclarer
diff --git a/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md
deleted file mode 100644
index 3ee16a3f24..0000000000
--- a/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.helpers.OSBucketDeclarer
diff --git a/docs/docs/en/api/faststream/nats/helpers/StreamBuilder.md b/docs/docs/en/api/faststream/nats/helpers/StreamBuilder.md
deleted file mode 100644
index 3b2a318598..0000000000
--- a/docs/docs/en/api/faststream/nats/helpers/StreamBuilder.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.helpers.StreamBuilder
diff --git a/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md
deleted file mode 100644
index fe0eaec17f..0000000000
--- a/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.helpers.bucket_declarer.KVBucketDeclarer
diff --git a/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md
deleted file mode 100644
index b7663051c8..0000000000
--- a/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.helpers.obj_storage_declarer.OSBucketDeclarer
diff --git a/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md b/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md
deleted file mode 100644
index 024daf2d14..0000000000
--- a/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.helpers.object_builder.StreamBuilder
diff --git a/docs/docs/en/api/faststream/nats/message/NatsBatchMessage.md b/docs/docs/en/api/faststream/nats/message/NatsBatchMessage.md
deleted file mode 100644
index 83017107ff..0000000000
--- a/docs/docs/en/api/faststream/nats/message/NatsBatchMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.message.NatsBatchMessage
diff --git a/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md b/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md
deleted file mode 100644
index 5ac6ed9f41..0000000000
--- a/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.message.NatsKvMessage
diff --git a/docs/docs/en/api/faststream/nats/message/NatsMessage.md b/docs/docs/en/api/faststream/nats/message/NatsMessage.md
deleted file mode 100644
index 22d17ceb56..0000000000
--- a/docs/docs/en/api/faststream/nats/message/NatsMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.message.NatsMessage
diff --git a/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md b/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md
deleted file mode 100644
index 3671628da4..0000000000
--- a/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.message.NatsObjMessage
diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md b/docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md
deleted file mode 100644
index e72f2de8ab..0000000000
--- a/docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.opentelemetry.NatsTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md b/docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md
deleted file mode 100644
index b2bb226585..0000000000
--- a/docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.opentelemetry.middleware.NatsTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md
deleted file mode 100644
index d6626c537d..0000000000
--- a/docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.opentelemetry.provider.BaseNatsTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md
deleted file mode 100644
index 045996125a..0000000000
--- a/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.opentelemetry.provider.NatsBatchTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md
deleted file mode 100644
index b58590c4fa..0000000000
--- a/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.opentelemetry.provider.NatsTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md
deleted file mode 100644
index 200d333e0b..0000000000
--- a/docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.opentelemetry.provider.telemetry_attributes_provider_factory
diff --git a/docs/docs/en/api/faststream/nats/parser/BatchParser.md b/docs/docs/en/api/faststream/nats/parser/BatchParser.md
deleted file mode 100644
index 03ad25f549..0000000000
--- a/docs/docs/en/api/faststream/nats/parser/BatchParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.parser.BatchParser
diff --git a/docs/docs/en/api/faststream/nats/parser/JsParser.md b/docs/docs/en/api/faststream/nats/parser/JsParser.md
deleted file mode 100644
index 0cd283d36e..0000000000
--- a/docs/docs/en/api/faststream/nats/parser/JsParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.parser.JsParser
diff --git a/docs/docs/en/api/faststream/nats/parser/KvParser.md b/docs/docs/en/api/faststream/nats/parser/KvParser.md
deleted file mode 100644
index acba65e133..0000000000
--- a/docs/docs/en/api/faststream/nats/parser/KvParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.parser.KvParser
diff --git a/docs/docs/en/api/faststream/nats/parser/NatsBaseParser.md b/docs/docs/en/api/faststream/nats/parser/NatsBaseParser.md
deleted file mode 100644
index 00b038738d..0000000000
--- a/docs/docs/en/api/faststream/nats/parser/NatsBaseParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.parser.NatsBaseParser
diff --git a/docs/docs/en/api/faststream/nats/parser/NatsParser.md b/docs/docs/en/api/faststream/nats/parser/NatsParser.md
deleted file mode 100644
index ceed3d0bdf..0000000000
--- a/docs/docs/en/api/faststream/nats/parser/NatsParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.parser.NatsParser
diff --git a/docs/docs/en/api/faststream/nats/parser/ObjParser.md b/docs/docs/en/api/faststream/nats/parser/ObjParser.md
deleted file mode 100644
index 50ff5d0e18..0000000000
--- a/docs/docs/en/api/faststream/nats/parser/ObjParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.parser.ObjParser
diff --git a/docs/docs/en/api/faststream/nats/prometheus/NatsPrometheusMiddleware.md b/docs/docs/en/api/faststream/nats/prometheus/NatsPrometheusMiddleware.md
deleted file mode 100644
index d9b179b0c4..0000000000
--- a/docs/docs/en/api/faststream/nats/prometheus/NatsPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.prometheus.NatsPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md b/docs/docs/en/api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md
deleted file mode 100644
index 7202731048..0000000000
--- a/docs/docs/en/api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.prometheus.middleware.NatsPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md b/docs/docs/en/api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md
deleted file mode 100644
index 80742833bc..0000000000
--- a/docs/docs/en/api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.prometheus.provider.BaseNatsMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md b/docs/docs/en/api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md
deleted file mode 100644
index 163ebb7bc6..0000000000
--- a/docs/docs/en/api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.prometheus.provider.BatchNatsMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md b/docs/docs/en/api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md
deleted file mode 100644
index e5515a4cc5..0000000000
--- a/docs/docs/en/api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.prometheus.provider.NatsMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/nats/prometheus/provider/settings_provider_factory.md
deleted file mode 100644
index aeaa7b26e0..0000000000
--- a/docs/docs/en/api/faststream/nats/prometheus/provider/settings_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/nats/publisher/asyncapi/AsyncAPIPublisher.md b/docs/docs/en/api/faststream/nats/publisher/asyncapi/AsyncAPIPublisher.md
deleted file mode 100644
index 6ea394db59..0000000000
--- a/docs/docs/en/api/faststream/nats/publisher/asyncapi/AsyncAPIPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.publisher.asyncapi.AsyncAPIPublisher
diff --git a/docs/docs/en/api/faststream/nats/publisher/producer/NatsFastProducer.md b/docs/docs/en/api/faststream/nats/publisher/producer/NatsFastProducer.md
deleted file mode 100644
index 82ff491f16..0000000000
--- a/docs/docs/en/api/faststream/nats/publisher/producer/NatsFastProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.publisher.producer.NatsFastProducer
diff --git a/docs/docs/en/api/faststream/nats/publisher/producer/NatsJSFastProducer.md b/docs/docs/en/api/faststream/nats/publisher/producer/NatsJSFastProducer.md
deleted file mode 100644
index 9c0e046e61..0000000000
--- a/docs/docs/en/api/faststream/nats/publisher/producer/NatsJSFastProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.publisher.producer.NatsJSFastProducer
diff --git a/docs/docs/en/api/faststream/nats/publisher/usecase/LogicPublisher.md b/docs/docs/en/api/faststream/nats/publisher/usecase/LogicPublisher.md
deleted file mode 100644
index 08c7794545..0000000000
--- a/docs/docs/en/api/faststream/nats/publisher/usecase/LogicPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.publisher.usecase.LogicPublisher
diff --git a/docs/docs/en/api/faststream/nats/response/NatsResponse.md b/docs/docs/en/api/faststream/nats/response/NatsResponse.md
deleted file mode 100644
index 8a7da66982..0000000000
--- a/docs/docs/en/api/faststream/nats/response/NatsResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.response.NatsResponse
diff --git a/docs/docs/en/api/faststream/nats/router/NatsPublisher.md b/docs/docs/en/api/faststream/nats/router/NatsPublisher.md
deleted file mode 100644
index b025495e44..0000000000
--- a/docs/docs/en/api/faststream/nats/router/NatsPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.router.NatsPublisher
diff --git a/docs/docs/en/api/faststream/nats/router/NatsRoute.md b/docs/docs/en/api/faststream/nats/router/NatsRoute.md
deleted file mode 100644
index 36df33c45e..0000000000
--- a/docs/docs/en/api/faststream/nats/router/NatsRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.router.NatsRoute
diff --git a/docs/docs/en/api/faststream/nats/router/NatsRouter.md b/docs/docs/en/api/faststream/nats/router/NatsRouter.md
deleted file mode 100644
index 4b6dfaaf7d..0000000000
--- a/docs/docs/en/api/faststream/nats/router/NatsRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.router.NatsRouter
diff --git a/docs/docs/en/api/faststream/nats/schemas/JStream.md b/docs/docs/en/api/faststream/nats/schemas/JStream.md
deleted file mode 100644
index 51df9a02cc..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/JStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.JStream
diff --git a/docs/docs/en/api/faststream/nats/schemas/KvWatch.md b/docs/docs/en/api/faststream/nats/schemas/KvWatch.md
deleted file mode 100644
index ce99738043..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/KvWatch.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.KvWatch
diff --git a/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md b/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md
deleted file mode 100644
index 51c3628e5e..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.ObjWatch
diff --git a/docs/docs/en/api/faststream/nats/schemas/PullSub.md b/docs/docs/en/api/faststream/nats/schemas/PullSub.md
deleted file mode 100644
index cb7341340c..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/PullSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.PullSub
diff --git a/docs/docs/en/api/faststream/nats/schemas/js_stream/JStream.md b/docs/docs/en/api/faststream/nats/schemas/js_stream/JStream.md
deleted file mode 100644
index af375c116a..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/js_stream/JStream.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.js_stream.JStream
diff --git a/docs/docs/en/api/faststream/nats/schemas/js_stream/compile_nats_wildcard.md b/docs/docs/en/api/faststream/nats/schemas/js_stream/compile_nats_wildcard.md
deleted file mode 100644
index 910f034eff..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/js_stream/compile_nats_wildcard.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.js_stream.compile_nats_wildcard
diff --git a/docs/docs/en/api/faststream/nats/schemas/js_stream/is_subject_match_wildcard.md b/docs/docs/en/api/faststream/nats/schemas/js_stream/is_subject_match_wildcard.md
deleted file mode 100644
index f9305b94e8..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/js_stream/is_subject_match_wildcard.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.js_stream.is_subject_match_wildcard
diff --git a/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md b/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md
deleted file mode 100644
index ce07fa305d..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.kv_watch.KvWatch
diff --git a/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md b/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md
deleted file mode 100644
index 55831b8a6a..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.obj_watch.ObjWatch
diff --git a/docs/docs/en/api/faststream/nats/schemas/pull_sub/PullSub.md b/docs/docs/en/api/faststream/nats/schemas/pull_sub/PullSub.md
deleted file mode 100644
index 673a1e8ff6..0000000000
--- a/docs/docs/en/api/faststream/nats/schemas/pull_sub/PullSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.schemas.pull_sub.PullSub
diff --git a/docs/docs/en/api/faststream/nats/security/parse_security.md b/docs/docs/en/api/faststream/nats/security/parse_security.md
deleted file mode 100644
index d2fe5dd0c3..0000000000
--- a/docs/docs/en/api/faststream/nats/security/parse_security.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.security.parse_security
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md
deleted file mode 100644
index 15bceeedbc..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIBatchPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md
deleted file mode 100644
index f88e14f817..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentCoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md
deleted file mode 100644
index b5ebf86f93..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md
deleted file mode 100644
index 7bb4a6e088..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentPushStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md
deleted file mode 100644
index 8819adebab..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPICoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md
deleted file mode 100644
index b006854b0b..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIKeyValueWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md
deleted file mode 100644
index 0a9157ed55..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIObjStoreWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md
deleted file mode 100644
index e9650bef94..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md
deleted file mode 100644
index 6d448d3af5..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPIStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPISubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPISubscriber.md
deleted file mode 100644
index 4fcbab6ea6..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPISubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.asyncapi.AsyncAPISubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md
deleted file mode 100644
index 0e132c3394..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.factory.create_subscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md
deleted file mode 100644
index 455885671f..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscription.UnsubscribeAdapter
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md
deleted file mode 100644
index c94cb1b731..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscription.Unsubscriptable
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md
deleted file mode 100644
index 67638258ea..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscription.Watchable
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md
deleted file mode 100644
index dfb1c43575..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.BatchPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md
deleted file mode 100644
index e1f100c043..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.ConcurrentCoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md
deleted file mode 100644
index c1b7207285..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.ConcurrentPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md
deleted file mode 100644
index ffa2e0c37b..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.ConcurrentPushStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md
deleted file mode 100644
index 8ddb0b8c04..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.CoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md
deleted file mode 100644
index 778557ee2b..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.KeyValueWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/LogicSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/LogicSubscriber.md
deleted file mode 100644
index 100db07bbe..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/LogicSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.LogicSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md
deleted file mode 100644
index ad15f32931..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.ObjStoreWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md
deleted file mode 100644
index 30f30a893f..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.PullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md
deleted file mode 100644
index bb29bbb9c2..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.usecase.PushStreamSubscription
diff --git a/docs/docs/en/api/faststream/nats/testing/FakeProducer.md b/docs/docs/en/api/faststream/nats/testing/FakeProducer.md
deleted file mode 100644
index f2615aeb36..0000000000
--- a/docs/docs/en/api/faststream/nats/testing/FakeProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.testing.FakeProducer
diff --git a/docs/docs/en/api/faststream/nats/testing/PatchedMessage.md b/docs/docs/en/api/faststream/nats/testing/PatchedMessage.md
deleted file mode 100644
index e32802d4dd..0000000000
--- a/docs/docs/en/api/faststream/nats/testing/PatchedMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.testing.PatchedMessage
diff --git a/docs/docs/en/api/faststream/nats/testing/TestNatsBroker.md b/docs/docs/en/api/faststream/nats/testing/TestNatsBroker.md
deleted file mode 100644
index 2abcf7f06d..0000000000
--- a/docs/docs/en/api/faststream/nats/testing/TestNatsBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.testing.TestNatsBroker
diff --git a/docs/docs/en/api/faststream/nats/testing/build_message.md b/docs/docs/en/api/faststream/nats/testing/build_message.md
deleted file mode 100644
index 160977893d..0000000000
--- a/docs/docs/en/api/faststream/nats/testing/build_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.testing.build_message
diff --git a/docs/docs/en/api/faststream/opentelemetry/Baggage.md b/docs/docs/en/api/faststream/opentelemetry/Baggage.md
deleted file mode 100644
index a61cb56d97..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/Baggage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.Baggage
diff --git a/docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md
deleted file mode 100644
index 914f134e60..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.TelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md b/docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md
deleted file mode 100644
index 7ca8b2cb6d..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.TelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/opentelemetry/baggage/Baggage.md b/docs/docs/en/api/faststream/opentelemetry/baggage/Baggage.md
deleted file mode 100644
index c1c6e4efec..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/baggage/Baggage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.baggage.Baggage
diff --git a/docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md b/docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md
deleted file mode 100644
index cd58706774..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.consts.MessageAction
diff --git a/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md
deleted file mode 100644
index 64a7b4a501..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.middleware.BaseTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md
deleted file mode 100644
index f019b3ad61..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.middleware.TelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md b/docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md
deleted file mode 100644
index 0fefe1c0ef..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.provider.TelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/prometheus/BasePrometheusMiddleware.md b/docs/docs/en/api/faststream/prometheus/BasePrometheusMiddleware.md
deleted file mode 100644
index 1f5cf6a1d4..0000000000
--- a/docs/docs/en/api/faststream/prometheus/BasePrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.BasePrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/prometheus/ConsumeAttrs.md b/docs/docs/en/api/faststream/prometheus/ConsumeAttrs.md
deleted file mode 100644
index ad8e536b7a..0000000000
--- a/docs/docs/en/api/faststream/prometheus/ConsumeAttrs.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.ConsumeAttrs
diff --git a/docs/docs/en/api/faststream/prometheus/MetricsSettingsProvider.md b/docs/docs/en/api/faststream/prometheus/MetricsSettingsProvider.md
deleted file mode 100644
index 0f7405e44d..0000000000
--- a/docs/docs/en/api/faststream/prometheus/MetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.MetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/prometheus/container/MetricsContainer.md b/docs/docs/en/api/faststream/prometheus/container/MetricsContainer.md
deleted file mode 100644
index 009d88d263..0000000000
--- a/docs/docs/en/api/faststream/prometheus/container/MetricsContainer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.container.MetricsContainer
diff --git a/docs/docs/en/api/faststream/prometheus/manager/MetricsManager.md b/docs/docs/en/api/faststream/prometheus/manager/MetricsManager.md
deleted file mode 100644
index b1a897c717..0000000000
--- a/docs/docs/en/api/faststream/prometheus/manager/MetricsManager.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.manager.MetricsManager
diff --git a/docs/docs/en/api/faststream/prometheus/middleware/BasePrometheusMiddleware.md b/docs/docs/en/api/faststream/prometheus/middleware/BasePrometheusMiddleware.md
deleted file mode 100644
index 62bbd031ac..0000000000
--- a/docs/docs/en/api/faststream/prometheus/middleware/BasePrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.middleware.BasePrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/prometheus/middleware/PrometheusMiddleware.md b/docs/docs/en/api/faststream/prometheus/middleware/PrometheusMiddleware.md
deleted file mode 100644
index 2902586e38..0000000000
--- a/docs/docs/en/api/faststream/prometheus/middleware/PrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.middleware.PrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/prometheus/provider/MetricsSettingsProvider.md b/docs/docs/en/api/faststream/prometheus/provider/MetricsSettingsProvider.md
deleted file mode 100644
index 3511a21a5b..0000000000
--- a/docs/docs/en/api/faststream/prometheus/provider/MetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.provider.MetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/prometheus/types/ConsumeAttrs.md b/docs/docs/en/api/faststream/prometheus/types/ConsumeAttrs.md
deleted file mode 100644
index d9196cab8d..0000000000
--- a/docs/docs/en/api/faststream/prometheus/types/ConsumeAttrs.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.types.ConsumeAttrs
diff --git a/docs/docs/en/api/faststream/prometheus/types/ProcessingStatus.md b/docs/docs/en/api/faststream/prometheus/types/ProcessingStatus.md
deleted file mode 100644
index 98b6710bcd..0000000000
--- a/docs/docs/en/api/faststream/prometheus/types/ProcessingStatus.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.types.ProcessingStatus
diff --git a/docs/docs/en/api/faststream/prometheus/types/PublishingStatus.md b/docs/docs/en/api/faststream/prometheus/types/PublishingStatus.md
deleted file mode 100644
index 4e7435fbea..0000000000
--- a/docs/docs/en/api/faststream/prometheus/types/PublishingStatus.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.prometheus.types.PublishingStatus
diff --git a/docs/docs/en/api/faststream/rabbit/ExchangeType.md b/docs/docs/en/api/faststream/rabbit/ExchangeType.md
deleted file mode 100644
index 9b299b951d..0000000000
--- a/docs/docs/en/api/faststream/rabbit/ExchangeType.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.ExchangeType
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitBroker.md b/docs/docs/en/api/faststream/rabbit/RabbitBroker.md
deleted file mode 100644
index f48b2b5e78..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitBroker
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitExchange.md b/docs/docs/en/api/faststream/rabbit/RabbitExchange.md
deleted file mode 100644
index bbf9676e72..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitExchange.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitExchange
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitPublisher.md b/docs/docs/en/api/faststream/rabbit/RabbitPublisher.md
deleted file mode 100644
index 7e0d3f674b..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitPublisher
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitQueue.md b/docs/docs/en/api/faststream/rabbit/RabbitQueue.md
deleted file mode 100644
index 97945b6408..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitQueue.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitQueue
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitResponse.md b/docs/docs/en/api/faststream/rabbit/RabbitResponse.md
deleted file mode 100644
index 4d20d82b0e..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitResponse
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitRoute.md b/docs/docs/en/api/faststream/rabbit/RabbitRoute.md
deleted file mode 100644
index e11a9f058d..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitRoute
diff --git a/docs/docs/en/api/faststream/rabbit/RabbitRouter.md b/docs/docs/en/api/faststream/rabbit/RabbitRouter.md
deleted file mode 100644
index 133880fc50..0000000000
--- a/docs/docs/en/api/faststream/rabbit/RabbitRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.RabbitRouter
diff --git a/docs/docs/en/api/faststream/rabbit/ReplyConfig.md b/docs/docs/en/api/faststream/rabbit/ReplyConfig.md
deleted file mode 100644
index 013bd2f986..0000000000
--- a/docs/docs/en/api/faststream/rabbit/ReplyConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.ReplyConfig
diff --git a/docs/docs/en/api/faststream/rabbit/TestApp.md b/docs/docs/en/api/faststream/rabbit/TestApp.md
deleted file mode 100644
index 2468f3755c..0000000000
--- a/docs/docs/en/api/faststream/rabbit/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.app.TestApp
diff --git a/docs/docs/en/api/faststream/rabbit/TestRabbitBroker.md b/docs/docs/en/api/faststream/rabbit/TestRabbitBroker.md
deleted file mode 100644
index c4519d58b8..0000000000
--- a/docs/docs/en/api/faststream/rabbit/TestRabbitBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.TestRabbitBroker
diff --git a/docs/docs/en/api/faststream/rabbit/broker/RabbitBroker.md b/docs/docs/en/api/faststream/rabbit/broker/RabbitBroker.md
deleted file mode 100644
index ac72ec0ae9..0000000000
--- a/docs/docs/en/api/faststream/rabbit/broker/RabbitBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.broker.RabbitBroker
diff --git a/docs/docs/en/api/faststream/rabbit/broker/broker/RabbitBroker.md b/docs/docs/en/api/faststream/rabbit/broker/broker/RabbitBroker.md
deleted file mode 100644
index 9ed9170ead..0000000000
--- a/docs/docs/en/api/faststream/rabbit/broker/broker/RabbitBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.broker.broker.RabbitBroker
diff --git a/docs/docs/en/api/faststream/rabbit/broker/logging/RabbitLoggingBroker.md b/docs/docs/en/api/faststream/rabbit/broker/logging/RabbitLoggingBroker.md
deleted file mode 100644
index a3b3151d4b..0000000000
--- a/docs/docs/en/api/faststream/rabbit/broker/logging/RabbitLoggingBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.broker.logging.RabbitLoggingBroker
diff --git a/docs/docs/en/api/faststream/rabbit/broker/registrator/RabbitRegistrator.md b/docs/docs/en/api/faststream/rabbit/broker/registrator/RabbitRegistrator.md
deleted file mode 100644
index f22385f512..0000000000
--- a/docs/docs/en/api/faststream/rabbit/broker/registrator/RabbitRegistrator.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.broker.registrator.RabbitRegistrator
diff --git a/docs/docs/en/api/faststream/rabbit/fastapi/Context.md b/docs/docs/en/api/faststream/rabbit/fastapi/Context.md
deleted file mode 100644
index f4240bb0da..0000000000
--- a/docs/docs/en/api/faststream/rabbit/fastapi/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.context.Context
diff --git a/docs/docs/en/api/faststream/rabbit/fastapi/RabbitRouter.md b/docs/docs/en/api/faststream/rabbit/fastapi/RabbitRouter.md
deleted file mode 100644
index 72f0a90072..0000000000
--- a/docs/docs/en/api/faststream/rabbit/fastapi/RabbitRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.fastapi.RabbitRouter
diff --git a/docs/docs/en/api/faststream/rabbit/fastapi/router/RabbitRouter.md b/docs/docs/en/api/faststream/rabbit/fastapi/router/RabbitRouter.md
deleted file mode 100644
index 36dda03314..0000000000
--- a/docs/docs/en/api/faststream/rabbit/fastapi/router/RabbitRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.fastapi.router.RabbitRouter
diff --git a/docs/docs/en/api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md b/docs/docs/en/api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md
deleted file mode 100644
index b8fc8a0ebd..0000000000
--- a/docs/docs/en/api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.helpers.declarer.RabbitDeclarer
diff --git a/docs/docs/en/api/faststream/rabbit/message/RabbitMessage.md b/docs/docs/en/api/faststream/rabbit/message/RabbitMessage.md
deleted file mode 100644
index 598d43f818..0000000000
--- a/docs/docs/en/api/faststream/rabbit/message/RabbitMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.message.RabbitMessage
diff --git a/docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md b/docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md
deleted file mode 100644
index 7d5ef3de27..0000000000
--- a/docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.opentelemetry.RabbitTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md b/docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md
deleted file mode 100644
index e86771a8ba..0000000000
--- a/docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.opentelemetry.middleware.RabbitTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md
deleted file mode 100644
index ba6742ac90..0000000000
--- a/docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.opentelemetry.provider.RabbitTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/rabbit/parser/AioPikaParser.md b/docs/docs/en/api/faststream/rabbit/parser/AioPikaParser.md
deleted file mode 100644
index 0a02d90270..0000000000
--- a/docs/docs/en/api/faststream/rabbit/parser/AioPikaParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.parser.AioPikaParser
diff --git a/docs/docs/en/api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md b/docs/docs/en/api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md
deleted file mode 100644
index 2c4308fabd..0000000000
--- a/docs/docs/en/api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.prometheus.RabbitPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md b/docs/docs/en/api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md
deleted file mode 100644
index 45163c998a..0000000000
--- a/docs/docs/en/api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.prometheus.middleware.RabbitPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md b/docs/docs/en/api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md
deleted file mode 100644
index 6d63301b34..0000000000
--- a/docs/docs/en/api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.prometheus.provider.RabbitMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/asyncapi/AsyncAPIPublisher.md b/docs/docs/en/api/faststream/rabbit/publisher/asyncapi/AsyncAPIPublisher.md
deleted file mode 100644
index 6ece65cfed..0000000000
--- a/docs/docs/en/api/faststream/rabbit/publisher/asyncapi/AsyncAPIPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.publisher.asyncapi.AsyncAPIPublisher
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/producer/AioPikaFastProducer.md b/docs/docs/en/api/faststream/rabbit/publisher/producer/AioPikaFastProducer.md
deleted file mode 100644
index 527cc5604c..0000000000
--- a/docs/docs/en/api/faststream/rabbit/publisher/producer/AioPikaFastProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.publisher.producer.AioPikaFastProducer
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/usecase/LogicPublisher.md b/docs/docs/en/api/faststream/rabbit/publisher/usecase/LogicPublisher.md
deleted file mode 100644
index 1ef927866e..0000000000
--- a/docs/docs/en/api/faststream/rabbit/publisher/usecase/LogicPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.publisher.usecase.LogicPublisher
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/usecase/PublishKwargs.md b/docs/docs/en/api/faststream/rabbit/publisher/usecase/PublishKwargs.md
deleted file mode 100644
index 3d917891cd..0000000000
--- a/docs/docs/en/api/faststream/rabbit/publisher/usecase/PublishKwargs.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.publisher.usecase.PublishKwargs
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/usecase/RequestPublishKwargs.md b/docs/docs/en/api/faststream/rabbit/publisher/usecase/RequestPublishKwargs.md
deleted file mode 100644
index 5668633016..0000000000
--- a/docs/docs/en/api/faststream/rabbit/publisher/usecase/RequestPublishKwargs.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.publisher.usecase.RequestPublishKwargs
diff --git a/docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md b/docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md
deleted file mode 100644
index 477cfb9861..0000000000
--- a/docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.response.RabbitResponse
diff --git a/docs/docs/en/api/faststream/rabbit/router/RabbitPublisher.md b/docs/docs/en/api/faststream/rabbit/router/RabbitPublisher.md
deleted file mode 100644
index befbec9103..0000000000
--- a/docs/docs/en/api/faststream/rabbit/router/RabbitPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.router.RabbitPublisher
diff --git a/docs/docs/en/api/faststream/rabbit/router/RabbitRoute.md b/docs/docs/en/api/faststream/rabbit/router/RabbitRoute.md
deleted file mode 100644
index 8e8b0fbb6c..0000000000
--- a/docs/docs/en/api/faststream/rabbit/router/RabbitRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.router.RabbitRoute
diff --git a/docs/docs/en/api/faststream/rabbit/router/RabbitRouter.md b/docs/docs/en/api/faststream/rabbit/router/RabbitRouter.md
deleted file mode 100644
index eff5f6169a..0000000000
--- a/docs/docs/en/api/faststream/rabbit/router/RabbitRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.router.RabbitRouter
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/BaseRMQInformation.md b/docs/docs/en/api/faststream/rabbit/schemas/BaseRMQInformation.md
deleted file mode 100644
index 7ff32d2cd2..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/BaseRMQInformation.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.BaseRMQInformation
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/ExchangeType.md b/docs/docs/en/api/faststream/rabbit/schemas/ExchangeType.md
deleted file mode 100644
index c6c2ef8a28..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/ExchangeType.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.ExchangeType
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/RabbitExchange.md b/docs/docs/en/api/faststream/rabbit/schemas/RabbitExchange.md
deleted file mode 100644
index 4e60ed96f5..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/RabbitExchange.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.RabbitExchange
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/RabbitQueue.md b/docs/docs/en/api/faststream/rabbit/schemas/RabbitQueue.md
deleted file mode 100644
index 947238b788..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/RabbitQueue.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.RabbitQueue
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/ReplyConfig.md b/docs/docs/en/api/faststream/rabbit/schemas/ReplyConfig.md
deleted file mode 100644
index 239c4f9d6e..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/ReplyConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.ReplyConfig
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/constants/ExchangeType.md b/docs/docs/en/api/faststream/rabbit/schemas/constants/ExchangeType.md
deleted file mode 100644
index 11705f35ac..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/constants/ExchangeType.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.constants.ExchangeType
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/exchange/RabbitExchange.md b/docs/docs/en/api/faststream/rabbit/schemas/exchange/RabbitExchange.md
deleted file mode 100644
index ebcb211714..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/exchange/RabbitExchange.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.exchange.RabbitExchange
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/proto/BaseRMQInformation.md b/docs/docs/en/api/faststream/rabbit/schemas/proto/BaseRMQInformation.md
deleted file mode 100644
index 1eca00071b..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/proto/BaseRMQInformation.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.proto.BaseRMQInformation
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/queue/RabbitQueue.md b/docs/docs/en/api/faststream/rabbit/schemas/queue/RabbitQueue.md
deleted file mode 100644
index 83bc15e02f..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/queue/RabbitQueue.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.queue.RabbitQueue
diff --git a/docs/docs/en/api/faststream/rabbit/schemas/reply/ReplyConfig.md b/docs/docs/en/api/faststream/rabbit/schemas/reply/ReplyConfig.md
deleted file mode 100644
index 1aeb941ff5..0000000000
--- a/docs/docs/en/api/faststream/rabbit/schemas/reply/ReplyConfig.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.schemas.reply.ReplyConfig
diff --git a/docs/docs/en/api/faststream/rabbit/security/parse_security.md b/docs/docs/en/api/faststream/rabbit/security/parse_security.md
deleted file mode 100644
index 0b19ee5ee2..0000000000
--- a/docs/docs/en/api/faststream/rabbit/security/parse_security.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.security.parse_security
diff --git a/docs/docs/en/api/faststream/rabbit/subscriber/asyncapi/AsyncAPISubscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/asyncapi/AsyncAPISubscriber.md
deleted file mode 100644
index 4d11c4b8e0..0000000000
--- a/docs/docs/en/api/faststream/rabbit/subscriber/asyncapi/AsyncAPISubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.subscriber.asyncapi.AsyncAPISubscriber
diff --git a/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md
deleted file mode 100644
index 79c7082931..0000000000
--- a/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.subscriber.factory.create_subscriber
diff --git a/docs/docs/en/api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md
deleted file mode 100644
index 56ef70dd0d..0000000000
--- a/docs/docs/en/api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.subscriber.usecase.LogicSubscriber
diff --git a/docs/docs/en/api/faststream/rabbit/testing/FakeProducer.md b/docs/docs/en/api/faststream/rabbit/testing/FakeProducer.md
deleted file mode 100644
index 7fa3603f60..0000000000
--- a/docs/docs/en/api/faststream/rabbit/testing/FakeProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.testing.FakeProducer
diff --git a/docs/docs/en/api/faststream/rabbit/testing/PatchedMessage.md b/docs/docs/en/api/faststream/rabbit/testing/PatchedMessage.md
deleted file mode 100644
index f58c1140c2..0000000000
--- a/docs/docs/en/api/faststream/rabbit/testing/PatchedMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.testing.PatchedMessage
diff --git a/docs/docs/en/api/faststream/rabbit/testing/TestRabbitBroker.md b/docs/docs/en/api/faststream/rabbit/testing/TestRabbitBroker.md
deleted file mode 100644
index ab2c088b39..0000000000
--- a/docs/docs/en/api/faststream/rabbit/testing/TestRabbitBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.testing.TestRabbitBroker
diff --git a/docs/docs/en/api/faststream/rabbit/testing/apply_pattern.md b/docs/docs/en/api/faststream/rabbit/testing/apply_pattern.md
deleted file mode 100644
index 02ffd305ef..0000000000
--- a/docs/docs/en/api/faststream/rabbit/testing/apply_pattern.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.testing.apply_pattern
diff --git a/docs/docs/en/api/faststream/rabbit/testing/build_message.md b/docs/docs/en/api/faststream/rabbit/testing/build_message.md
deleted file mode 100644
index 296715e46a..0000000000
--- a/docs/docs/en/api/faststream/rabbit/testing/build_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.testing.build_message
diff --git a/docs/docs/en/api/faststream/rabbit/utils/build_url.md b/docs/docs/en/api/faststream/rabbit/utils/build_url.md
deleted file mode 100644
index ffb6555837..0000000000
--- a/docs/docs/en/api/faststream/rabbit/utils/build_url.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.utils.build_url
diff --git a/docs/docs/en/api/faststream/rabbit/utils/is_routing_exchange.md b/docs/docs/en/api/faststream/rabbit/utils/is_routing_exchange.md
deleted file mode 100644
index 4ef1481a69..0000000000
--- a/docs/docs/en/api/faststream/rabbit/utils/is_routing_exchange.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.utils.is_routing_exchange
diff --git a/docs/docs/en/api/faststream/redis/ListSub.md b/docs/docs/en/api/faststream/redis/ListSub.md
deleted file mode 100644
index 9c97a0afcd..0000000000
--- a/docs/docs/en/api/faststream/redis/ListSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.ListSub
diff --git a/docs/docs/en/api/faststream/redis/PubSub.md b/docs/docs/en/api/faststream/redis/PubSub.md
deleted file mode 100644
index d2fba00014..0000000000
--- a/docs/docs/en/api/faststream/redis/PubSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.PubSub
diff --git a/docs/docs/en/api/faststream/redis/RedisBroker.md b/docs/docs/en/api/faststream/redis/RedisBroker.md
deleted file mode 100644
index 7275bfb60a..0000000000
--- a/docs/docs/en/api/faststream/redis/RedisBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.RedisBroker
diff --git a/docs/docs/en/api/faststream/redis/RedisPublisher.md b/docs/docs/en/api/faststream/redis/RedisPublisher.md
deleted file mode 100644
index 565d857810..0000000000
--- a/docs/docs/en/api/faststream/redis/RedisPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.RedisPublisher
diff --git a/docs/docs/en/api/faststream/redis/RedisResponse.md b/docs/docs/en/api/faststream/redis/RedisResponse.md
deleted file mode 100644
index eedecf1ea3..0000000000
--- a/docs/docs/en/api/faststream/redis/RedisResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.RedisResponse
diff --git a/docs/docs/en/api/faststream/redis/RedisRoute.md b/docs/docs/en/api/faststream/redis/RedisRoute.md
deleted file mode 100644
index 14b4416ed4..0000000000
--- a/docs/docs/en/api/faststream/redis/RedisRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.RedisRoute
diff --git a/docs/docs/en/api/faststream/redis/RedisRouter.md b/docs/docs/en/api/faststream/redis/RedisRouter.md
deleted file mode 100644
index 9b7292e703..0000000000
--- a/docs/docs/en/api/faststream/redis/RedisRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.RedisRouter
diff --git a/docs/docs/en/api/faststream/redis/StreamSub.md b/docs/docs/en/api/faststream/redis/StreamSub.md
deleted file mode 100644
index d1244238b6..0000000000
--- a/docs/docs/en/api/faststream/redis/StreamSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.StreamSub
diff --git a/docs/docs/en/api/faststream/redis/TestApp.md b/docs/docs/en/api/faststream/redis/TestApp.md
deleted file mode 100644
index 2468f3755c..0000000000
--- a/docs/docs/en/api/faststream/redis/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.app.TestApp
diff --git a/docs/docs/en/api/faststream/redis/TestRedisBroker.md b/docs/docs/en/api/faststream/redis/TestRedisBroker.md
deleted file mode 100644
index 703490c302..0000000000
--- a/docs/docs/en/api/faststream/redis/TestRedisBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.TestRedisBroker
diff --git a/docs/docs/en/api/faststream/redis/broker/broker/RedisBroker.md b/docs/docs/en/api/faststream/redis/broker/broker/RedisBroker.md
deleted file mode 100644
index fdc177e868..0000000000
--- a/docs/docs/en/api/faststream/redis/broker/broker/RedisBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.broker.broker.RedisBroker
diff --git a/docs/docs/en/api/faststream/redis/broker/logging/RedisLoggingBroker.md b/docs/docs/en/api/faststream/redis/broker/logging/RedisLoggingBroker.md
deleted file mode 100644
index 58500b3c1f..0000000000
--- a/docs/docs/en/api/faststream/redis/broker/logging/RedisLoggingBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.broker.logging.RedisLoggingBroker
diff --git a/docs/docs/en/api/faststream/redis/broker/registrator/RedisRegistrator.md b/docs/docs/en/api/faststream/redis/broker/registrator/RedisRegistrator.md
deleted file mode 100644
index 8d040533d7..0000000000
--- a/docs/docs/en/api/faststream/redis/broker/registrator/RedisRegistrator.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.broker.registrator.RedisRegistrator
diff --git a/docs/docs/en/api/faststream/redis/fastapi/Context.md b/docs/docs/en/api/faststream/redis/fastapi/Context.md
deleted file mode 100644
index f4240bb0da..0000000000
--- a/docs/docs/en/api/faststream/redis/fastapi/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.broker.fastapi.context.Context
diff --git a/docs/docs/en/api/faststream/redis/fastapi/RedisRouter.md b/docs/docs/en/api/faststream/redis/fastapi/RedisRouter.md
deleted file mode 100644
index 7894f88728..0000000000
--- a/docs/docs/en/api/faststream/redis/fastapi/RedisRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.fastapi.RedisRouter
diff --git a/docs/docs/en/api/faststream/redis/fastapi/fastapi/RedisRouter.md b/docs/docs/en/api/faststream/redis/fastapi/fastapi/RedisRouter.md
deleted file mode 100644
index 858c951f61..0000000000
--- a/docs/docs/en/api/faststream/redis/fastapi/fastapi/RedisRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.fastapi.fastapi.RedisRouter
diff --git a/docs/docs/en/api/faststream/redis/message/BatchListMessage.md b/docs/docs/en/api/faststream/redis/message/BatchListMessage.md
deleted file mode 100644
index c510fa09b9..0000000000
--- a/docs/docs/en/api/faststream/redis/message/BatchListMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.BatchListMessage
diff --git a/docs/docs/en/api/faststream/redis/message/BatchStreamMessage.md b/docs/docs/en/api/faststream/redis/message/BatchStreamMessage.md
deleted file mode 100644
index 16885fd028..0000000000
--- a/docs/docs/en/api/faststream/redis/message/BatchStreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.BatchStreamMessage
diff --git a/docs/docs/en/api/faststream/redis/message/DefaultListMessage.md b/docs/docs/en/api/faststream/redis/message/DefaultListMessage.md
deleted file mode 100644
index 8f38b34cae..0000000000
--- a/docs/docs/en/api/faststream/redis/message/DefaultListMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.DefaultListMessage
diff --git a/docs/docs/en/api/faststream/redis/message/DefaultStreamMessage.md b/docs/docs/en/api/faststream/redis/message/DefaultStreamMessage.md
deleted file mode 100644
index 6016bb624e..0000000000
--- a/docs/docs/en/api/faststream/redis/message/DefaultStreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.DefaultStreamMessage
diff --git a/docs/docs/en/api/faststream/redis/message/ListMessage.md b/docs/docs/en/api/faststream/redis/message/ListMessage.md
deleted file mode 100644
index 5e81a9f727..0000000000
--- a/docs/docs/en/api/faststream/redis/message/ListMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.ListMessage
diff --git a/docs/docs/en/api/faststream/redis/message/PubSubMessage.md b/docs/docs/en/api/faststream/redis/message/PubSubMessage.md
deleted file mode 100644
index 795cecb12e..0000000000
--- a/docs/docs/en/api/faststream/redis/message/PubSubMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.PubSubMessage
diff --git a/docs/docs/en/api/faststream/redis/message/RedisBatchListMessage.md b/docs/docs/en/api/faststream/redis/message/RedisBatchListMessage.md
deleted file mode 100644
index ec7d3983bd..0000000000
--- a/docs/docs/en/api/faststream/redis/message/RedisBatchListMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.RedisBatchListMessage
diff --git a/docs/docs/en/api/faststream/redis/message/RedisBatchStreamMessage.md b/docs/docs/en/api/faststream/redis/message/RedisBatchStreamMessage.md
deleted file mode 100644
index 2c66613eb7..0000000000
--- a/docs/docs/en/api/faststream/redis/message/RedisBatchStreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.RedisBatchStreamMessage
diff --git a/docs/docs/en/api/faststream/redis/message/RedisListMessage.md b/docs/docs/en/api/faststream/redis/message/RedisListMessage.md
deleted file mode 100644
index 8c996cb7f0..0000000000
--- a/docs/docs/en/api/faststream/redis/message/RedisListMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.RedisListMessage
diff --git a/docs/docs/en/api/faststream/redis/message/RedisMessage.md b/docs/docs/en/api/faststream/redis/message/RedisMessage.md
deleted file mode 100644
index 1b0654e7ce..0000000000
--- a/docs/docs/en/api/faststream/redis/message/RedisMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.RedisMessage
diff --git a/docs/docs/en/api/faststream/redis/message/RedisStreamMessage.md b/docs/docs/en/api/faststream/redis/message/RedisStreamMessage.md
deleted file mode 100644
index c36385a141..0000000000
--- a/docs/docs/en/api/faststream/redis/message/RedisStreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.RedisStreamMessage
diff --git a/docs/docs/en/api/faststream/redis/message/StreamMessage.md b/docs/docs/en/api/faststream/redis/message/StreamMessage.md
deleted file mode 100644
index f4e6a5d57e..0000000000
--- a/docs/docs/en/api/faststream/redis/message/StreamMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.StreamMessage
diff --git a/docs/docs/en/api/faststream/redis/message/UnifyRedisDict.md b/docs/docs/en/api/faststream/redis/message/UnifyRedisDict.md
deleted file mode 100644
index 9485ca2848..0000000000
--- a/docs/docs/en/api/faststream/redis/message/UnifyRedisDict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.UnifyRedisDict
diff --git a/docs/docs/en/api/faststream/redis/message/UnifyRedisMessage.md b/docs/docs/en/api/faststream/redis/message/UnifyRedisMessage.md
deleted file mode 100644
index dee09d1657..0000000000
--- a/docs/docs/en/api/faststream/redis/message/UnifyRedisMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.message.UnifyRedisMessage
diff --git a/docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md b/docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md
deleted file mode 100644
index 537a2dc7b9..0000000000
--- a/docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.opentelemetry.RedisTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md b/docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md
deleted file mode 100644
index 4c0febf261..0000000000
--- a/docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.opentelemetry.middleware.RedisTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md
deleted file mode 100644
index 26e7859c34..0000000000
--- a/docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.opentelemetry.provider.RedisTelemetrySettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/parser/RawMessage.md b/docs/docs/en/api/faststream/redis/parser/RawMessage.md
deleted file mode 100644
index 4add7b37fd..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/RawMessage.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.RawMessage
diff --git a/docs/docs/en/api/faststream/redis/parser/RedisBatchListParser.md b/docs/docs/en/api/faststream/redis/parser/RedisBatchListParser.md
deleted file mode 100644
index e3a583eee8..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/RedisBatchListParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.RedisBatchListParser
diff --git a/docs/docs/en/api/faststream/redis/parser/RedisBatchStreamParser.md b/docs/docs/en/api/faststream/redis/parser/RedisBatchStreamParser.md
deleted file mode 100644
index 28ed437573..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/RedisBatchStreamParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.RedisBatchStreamParser
diff --git a/docs/docs/en/api/faststream/redis/parser/RedisListParser.md b/docs/docs/en/api/faststream/redis/parser/RedisListParser.md
deleted file mode 100644
index fd0cf87991..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/RedisListParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.RedisListParser
diff --git a/docs/docs/en/api/faststream/redis/parser/RedisPubSubParser.md b/docs/docs/en/api/faststream/redis/parser/RedisPubSubParser.md
deleted file mode 100644
index 93ab92cfdb..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/RedisPubSubParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.RedisPubSubParser
diff --git a/docs/docs/en/api/faststream/redis/parser/RedisStreamParser.md b/docs/docs/en/api/faststream/redis/parser/RedisStreamParser.md
deleted file mode 100644
index 79633d06ad..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/RedisStreamParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.RedisStreamParser
diff --git a/docs/docs/en/api/faststream/redis/parser/SimpleParser.md b/docs/docs/en/api/faststream/redis/parser/SimpleParser.md
deleted file mode 100644
index 239d3fda25..0000000000
--- a/docs/docs/en/api/faststream/redis/parser/SimpleParser.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.parser.SimpleParser
diff --git a/docs/docs/en/api/faststream/redis/prometheus/RedisPrometheusMiddleware.md b/docs/docs/en/api/faststream/redis/prometheus/RedisPrometheusMiddleware.md
deleted file mode 100644
index 01b23fe4f1..0000000000
--- a/docs/docs/en/api/faststream/redis/prometheus/RedisPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.prometheus.RedisPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md b/docs/docs/en/api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md
deleted file mode 100644
index c29cc91130..0000000000
--- a/docs/docs/en/api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.prometheus.middleware.RedisPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md b/docs/docs/en/api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md
deleted file mode 100644
index 243414331b..0000000000
--- a/docs/docs/en/api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.prometheus.provider.BaseRedisMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md b/docs/docs/en/api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md
deleted file mode 100644
index 33d1d2d3a1..0000000000
--- a/docs/docs/en/api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.prometheus.provider.BatchRedisMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md b/docs/docs/en/api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md
deleted file mode 100644
index a7f5f3abe8..0000000000
--- a/docs/docs/en/api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.prometheus.provider.RedisMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/redis/prometheus/provider/settings_provider_factory.md
deleted file mode 100644
index aa4812f1e2..0000000000
--- a/docs/docs/en/api/faststream/redis/prometheus/provider/settings_provider_factory.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIChannelPublisher.md b/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIChannelPublisher.md
deleted file mode 100644
index a3bef9a56c..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIChannelPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.asyncapi.AsyncAPIChannelPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIListBatchPublisher.md b/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIListBatchPublisher.md
deleted file mode 100644
index ab4361bd85..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIListBatchPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.asyncapi.AsyncAPIListBatchPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIListPublisher.md b/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIListPublisher.md
deleted file mode 100644
index 0c233cc74b..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIListPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.asyncapi.AsyncAPIListPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIPublisher.md b/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIPublisher.md
deleted file mode 100644
index 4243308fb7..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.asyncapi.AsyncAPIPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIStreamPublisher.md b/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIStreamPublisher.md
deleted file mode 100644
index 29fb6329f3..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/asyncapi/AsyncAPIStreamPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.asyncapi.AsyncAPIStreamPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/producer/RedisFastProducer.md b/docs/docs/en/api/faststream/redis/publisher/producer/RedisFastProducer.md
deleted file mode 100644
index 3bc630cc42..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/producer/RedisFastProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.producer.RedisFastProducer
diff --git a/docs/docs/en/api/faststream/redis/publisher/usecase/ChannelPublisher.md b/docs/docs/en/api/faststream/redis/publisher/usecase/ChannelPublisher.md
deleted file mode 100644
index 8aad760800..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/usecase/ChannelPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.usecase.ChannelPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/usecase/ListBatchPublisher.md b/docs/docs/en/api/faststream/redis/publisher/usecase/ListBatchPublisher.md
deleted file mode 100644
index d7a1be63e4..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/usecase/ListBatchPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.usecase.ListBatchPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/usecase/ListPublisher.md b/docs/docs/en/api/faststream/redis/publisher/usecase/ListPublisher.md
deleted file mode 100644
index 59895dc001..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/usecase/ListPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.usecase.ListPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/usecase/LogicPublisher.md b/docs/docs/en/api/faststream/redis/publisher/usecase/LogicPublisher.md
deleted file mode 100644
index c441bcc461..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/usecase/LogicPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.usecase.LogicPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/usecase/StreamPublisher.md b/docs/docs/en/api/faststream/redis/publisher/usecase/StreamPublisher.md
deleted file mode 100644
index ea56c9d699..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/usecase/StreamPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.usecase.StreamPublisher
diff --git a/docs/docs/en/api/faststream/redis/response/RedisResponse.md b/docs/docs/en/api/faststream/redis/response/RedisResponse.md
deleted file mode 100644
index dd7fbe72eb..0000000000
--- a/docs/docs/en/api/faststream/redis/response/RedisResponse.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.response.RedisResponse
diff --git a/docs/docs/en/api/faststream/redis/router/RedisPublisher.md b/docs/docs/en/api/faststream/redis/router/RedisPublisher.md
deleted file mode 100644
index fd1cad4d37..0000000000
--- a/docs/docs/en/api/faststream/redis/router/RedisPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.router.RedisPublisher
diff --git a/docs/docs/en/api/faststream/redis/router/RedisRoute.md b/docs/docs/en/api/faststream/redis/router/RedisRoute.md
deleted file mode 100644
index d6e1f525a7..0000000000
--- a/docs/docs/en/api/faststream/redis/router/RedisRoute.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.router.RedisRoute
diff --git a/docs/docs/en/api/faststream/redis/router/RedisRouter.md b/docs/docs/en/api/faststream/redis/router/RedisRouter.md
deleted file mode 100644
index 373ceea5a8..0000000000
--- a/docs/docs/en/api/faststream/redis/router/RedisRouter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.router.RedisRouter
diff --git a/docs/docs/en/api/faststream/redis/schemas/ListSub.md b/docs/docs/en/api/faststream/redis/schemas/ListSub.md
deleted file mode 100644
index 3e0b448229..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/ListSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.ListSub
diff --git a/docs/docs/en/api/faststream/redis/schemas/PubSub.md b/docs/docs/en/api/faststream/redis/schemas/PubSub.md
deleted file mode 100644
index 078a8e2d8e..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/PubSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.PubSub
diff --git a/docs/docs/en/api/faststream/redis/schemas/StreamSub.md b/docs/docs/en/api/faststream/redis/schemas/StreamSub.md
deleted file mode 100644
index 396e594c0b..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/StreamSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.StreamSub
diff --git a/docs/docs/en/api/faststream/redis/schemas/list_sub/ListSub.md b/docs/docs/en/api/faststream/redis/schemas/list_sub/ListSub.md
deleted file mode 100644
index f4b58ff4fb..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/list_sub/ListSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.list_sub.ListSub
diff --git a/docs/docs/en/api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md b/docs/docs/en/api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md
deleted file mode 100644
index 7a9d46c451..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.proto.RedisAsyncAPIProtocol
diff --git a/docs/docs/en/api/faststream/redis/schemas/proto/validate_options.md b/docs/docs/en/api/faststream/redis/schemas/proto/validate_options.md
deleted file mode 100644
index 7a5381120e..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/proto/validate_options.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.proto.validate_options
diff --git a/docs/docs/en/api/faststream/redis/schemas/pub_sub/PubSub.md b/docs/docs/en/api/faststream/redis/schemas/pub_sub/PubSub.md
deleted file mode 100644
index 08552c7b8c..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/pub_sub/PubSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.pub_sub.PubSub
diff --git a/docs/docs/en/api/faststream/redis/schemas/stream_sub/StreamSub.md b/docs/docs/en/api/faststream/redis/schemas/stream_sub/StreamSub.md
deleted file mode 100644
index e1a8d44d4e..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/stream_sub/StreamSub.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.stream_sub.StreamSub
diff --git a/docs/docs/en/api/faststream/redis/security/parse_security.md b/docs/docs/en/api/faststream/redis/security/parse_security.md
deleted file mode 100644
index d3673649db..0000000000
--- a/docs/docs/en/api/faststream/redis/security/parse_security.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.security.parse_security
diff --git a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIChannelSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIChannelSubscriber.md
deleted file mode 100644
index 7cb7260111..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIChannelSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.asyncapi.AsyncAPIChannelSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIListBatchSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIListBatchSubscriber.md
deleted file mode 100644
index 26aa621262..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIListBatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.asyncapi.AsyncAPIListBatchSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIListSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIListSubscriber.md
deleted file mode 100644
index c65ba472d5..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIListSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.asyncapi.AsyncAPIListSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamBatchSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamBatchSubscriber.md
deleted file mode 100644
index 099f0a4ff2..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamBatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.asyncapi.AsyncAPIStreamBatchSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamSubscriber.md
deleted file mode 100644
index 3d85ce9587..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.asyncapi.AsyncAPIStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPISubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPISubscriber.md
deleted file mode 100644
index c957f32688..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/asyncapi/AsyncAPISubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.asyncapi.AsyncAPISubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md
deleted file mode 100644
index d5cf7eadc8..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.factory.create_subscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchListSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchListSubscriber.md
deleted file mode 100644
index aee1b8aa9b..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchListSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.BatchListSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md
deleted file mode 100644
index 0f8e4f2e1b..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.BatchStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/ChannelSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/ChannelSubscriber.md
deleted file mode 100644
index 3ab1fc045a..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/ChannelSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.ChannelSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/ListSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/ListSubscriber.md
deleted file mode 100644
index f7c44e8be5..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/ListSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.ListSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/LogicSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/LogicSubscriber.md
deleted file mode 100644
index e3531e7dcc..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/LogicSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.LogicSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/StreamSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/StreamSubscriber.md
deleted file mode 100644
index 6e2ac31d7f..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/StreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.StreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/testing/ChannelVisitor.md b/docs/docs/en/api/faststream/redis/testing/ChannelVisitor.md
deleted file mode 100644
index f916be2ae8..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/ChannelVisitor.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.ChannelVisitor
diff --git a/docs/docs/en/api/faststream/redis/testing/FakeProducer.md b/docs/docs/en/api/faststream/redis/testing/FakeProducer.md
deleted file mode 100644
index e05efb6448..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/FakeProducer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.FakeProducer
diff --git a/docs/docs/en/api/faststream/redis/testing/ListVisitor.md b/docs/docs/en/api/faststream/redis/testing/ListVisitor.md
deleted file mode 100644
index 414b8a0400..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/ListVisitor.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.ListVisitor
diff --git a/docs/docs/en/api/faststream/redis/testing/StreamVisitor.md b/docs/docs/en/api/faststream/redis/testing/StreamVisitor.md
deleted file mode 100644
index 0b72d99109..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/StreamVisitor.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.StreamVisitor
diff --git a/docs/docs/en/api/faststream/redis/testing/TestRedisBroker.md b/docs/docs/en/api/faststream/redis/testing/TestRedisBroker.md
deleted file mode 100644
index 22946e09f8..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/TestRedisBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.TestRedisBroker
diff --git a/docs/docs/en/api/faststream/redis/testing/Visitor.md b/docs/docs/en/api/faststream/redis/testing/Visitor.md
deleted file mode 100644
index 746688710f..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/Visitor.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.Visitor
diff --git a/docs/docs/en/api/faststream/redis/testing/build_message.md b/docs/docs/en/api/faststream/redis/testing/build_message.md
deleted file mode 100644
index b2265905c6..0000000000
--- a/docs/docs/en/api/faststream/redis/testing/build_message.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.testing.build_message
diff --git a/docs/docs/en/api/faststream/security/BaseSecurity.md b/docs/docs/en/api/faststream/security/BaseSecurity.md
deleted file mode 100644
index 0e5abb09ae..0000000000
--- a/docs/docs/en/api/faststream/security/BaseSecurity.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.security.BaseSecurity
diff --git a/docs/docs/en/api/faststream/security/SASLGSSAPI.md b/docs/docs/en/api/faststream/security/SASLGSSAPI.md
deleted file mode 100644
index 8b6eec2741..0000000000
--- a/docs/docs/en/api/faststream/security/SASLGSSAPI.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.security.SASLGSSAPI
diff --git a/docs/docs/en/api/faststream/security/SASLOAuthBearer.md b/docs/docs/en/api/faststream/security/SASLOAuthBearer.md
deleted file mode 100644
index 9652a58840..0000000000
--- a/docs/docs/en/api/faststream/security/SASLOAuthBearer.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.security.SASLOAuthBearer
diff --git a/docs/docs/en/api/faststream/security/SASLPlaintext.md b/docs/docs/en/api/faststream/security/SASLPlaintext.md
deleted file mode 100644
index b4b5165f27..0000000000
--- a/docs/docs/en/api/faststream/security/SASLPlaintext.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.security.SASLPlaintext
diff --git a/docs/docs/en/api/faststream/security/SASLScram256.md b/docs/docs/en/api/faststream/security/SASLScram256.md
deleted file mode 100644
index 4d50681fa9..0000000000
--- a/docs/docs/en/api/faststream/security/SASLScram256.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.security.SASLScram256
diff --git a/docs/docs/en/api/faststream/security/SASLScram512.md b/docs/docs/en/api/faststream/security/SASLScram512.md
deleted file mode 100644
index 115645cc8c..0000000000
--- a/docs/docs/en/api/faststream/security/SASLScram512.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.security.SASLScram512
diff --git a/docs/docs/en/api/faststream/testing/TestApp.md b/docs/docs/en/api/faststream/testing/TestApp.md
deleted file mode 100644
index 3d8f650f0f..0000000000
--- a/docs/docs/en/api/faststream/testing/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.TestApp
diff --git a/docs/docs/en/api/faststream/testing/app/TestApp.md b/docs/docs/en/api/faststream/testing/app/TestApp.md
deleted file mode 100644
index 2468f3755c..0000000000
--- a/docs/docs/en/api/faststream/testing/app/TestApp.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.app.TestApp
diff --git a/docs/docs/en/api/faststream/testing/broker/TestBroker.md b/docs/docs/en/api/faststream/testing/broker/TestBroker.md
deleted file mode 100644
index 48e34a6ca3..0000000000
--- a/docs/docs/en/api/faststream/testing/broker/TestBroker.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.broker.TestBroker
diff --git a/docs/docs/en/api/faststream/testing/broker/patch_broker_calls.md b/docs/docs/en/api/faststream/testing/broker/patch_broker_calls.md
deleted file mode 100644
index 12a6431765..0000000000
--- a/docs/docs/en/api/faststream/testing/broker/patch_broker_calls.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.testing.broker.patch_broker_calls
diff --git a/docs/docs/en/api/faststream/types/LoggerProto.md b/docs/docs/en/api/faststream/types/LoggerProto.md
deleted file mode 100644
index 064320bf42..0000000000
--- a/docs/docs/en/api/faststream/types/LoggerProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.types.LoggerProto
diff --git a/docs/docs/en/api/faststream/types/StandardDataclass.md b/docs/docs/en/api/faststream/types/StandardDataclass.md
deleted file mode 100644
index 5140818794..0000000000
--- a/docs/docs/en/api/faststream/types/StandardDataclass.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.types.StandardDataclass
diff --git a/docs/docs/en/api/faststream/utils/Context.md b/docs/docs/en/api/faststream/utils/Context.md
deleted file mode 100644
index 3e4f9f17c5..0000000000
--- a/docs/docs/en/api/faststream/utils/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.Context
diff --git a/docs/docs/en/api/faststream/utils/ContextRepo.md b/docs/docs/en/api/faststream/utils/ContextRepo.md
deleted file mode 100644
index dd18ad81e4..0000000000
--- a/docs/docs/en/api/faststream/utils/ContextRepo.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.ContextRepo
diff --git a/docs/docs/en/api/faststream/utils/Depends.md b/docs/docs/en/api/faststream/utils/Depends.md
deleted file mode 100644
index c0704687e8..0000000000
--- a/docs/docs/en/api/faststream/utils/Depends.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: fast_depends.use.Depends
diff --git a/docs/docs/en/api/faststream/utils/Header.md b/docs/docs/en/api/faststream/utils/Header.md
deleted file mode 100644
index 10e6ccaec7..0000000000
--- a/docs/docs/en/api/faststream/utils/Header.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.Header
diff --git a/docs/docs/en/api/faststream/utils/NoCast.md b/docs/docs/en/api/faststream/utils/NoCast.md
deleted file mode 100644
index 606a31e563..0000000000
--- a/docs/docs/en/api/faststream/utils/NoCast.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.NoCast
diff --git a/docs/docs/en/api/faststream/utils/Path.md b/docs/docs/en/api/faststream/utils/Path.md
deleted file mode 100644
index b311930841..0000000000
--- a/docs/docs/en/api/faststream/utils/Path.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.Path
diff --git a/docs/docs/en/api/faststream/utils/apply_types.md b/docs/docs/en/api/faststream/utils/apply_types.md
deleted file mode 100644
index 9dc4603bd2..0000000000
--- a/docs/docs/en/api/faststream/utils/apply_types.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: fast_depends.use.inject
diff --git a/docs/docs/en/api/faststream/utils/ast/find_ast_node.md b/docs/docs/en/api/faststream/utils/ast/find_ast_node.md
deleted file mode 100644
index 228e6f058c..0000000000
--- a/docs/docs/en/api/faststream/utils/ast/find_ast_node.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.ast.find_ast_node
diff --git a/docs/docs/en/api/faststream/utils/ast/find_withitems.md b/docs/docs/en/api/faststream/utils/ast/find_withitems.md
deleted file mode 100644
index 123acd71e4..0000000000
--- a/docs/docs/en/api/faststream/utils/ast/find_withitems.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.ast.find_withitems
diff --git a/docs/docs/en/api/faststream/utils/ast/get_withitem_calls.md b/docs/docs/en/api/faststream/utils/ast/get_withitem_calls.md
deleted file mode 100644
index c9d68c1ed2..0000000000
--- a/docs/docs/en/api/faststream/utils/ast/get_withitem_calls.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.ast.get_withitem_calls
diff --git a/docs/docs/en/api/faststream/utils/ast/is_contains_context_name.md b/docs/docs/en/api/faststream/utils/ast/is_contains_context_name.md
deleted file mode 100644
index 61cf140ea6..0000000000
--- a/docs/docs/en/api/faststream/utils/ast/is_contains_context_name.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.ast.is_contains_context_name
diff --git a/docs/docs/en/api/faststream/utils/classes/Singleton.md b/docs/docs/en/api/faststream/utils/classes/Singleton.md
deleted file mode 100644
index c9751ee2bd..0000000000
--- a/docs/docs/en/api/faststream/utils/classes/Singleton.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.classes.Singleton
diff --git a/docs/docs/en/api/faststream/utils/context/Context.md b/docs/docs/en/api/faststream/utils/context/Context.md
deleted file mode 100644
index 5669863fee..0000000000
--- a/docs/docs/en/api/faststream/utils/context/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.Context
diff --git a/docs/docs/en/api/faststream/utils/context/ContextRepo.md b/docs/docs/en/api/faststream/utils/context/ContextRepo.md
deleted file mode 100644
index 50a7133aeb..0000000000
--- a/docs/docs/en/api/faststream/utils/context/ContextRepo.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.ContextRepo
diff --git a/docs/docs/en/api/faststream/utils/context/Header.md b/docs/docs/en/api/faststream/utils/context/Header.md
deleted file mode 100644
index 7e10284ec1..0000000000
--- a/docs/docs/en/api/faststream/utils/context/Header.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.Header
diff --git a/docs/docs/en/api/faststream/utils/context/Path.md b/docs/docs/en/api/faststream/utils/context/Path.md
deleted file mode 100644
index 92c2ef36fe..0000000000
--- a/docs/docs/en/api/faststream/utils/context/Path.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.Path
diff --git a/docs/docs/en/api/faststream/utils/context/builders/Context.md b/docs/docs/en/api/faststream/utils/context/builders/Context.md
deleted file mode 100644
index 6cdf6f36fe..0000000000
--- a/docs/docs/en/api/faststream/utils/context/builders/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.builders.Context
diff --git a/docs/docs/en/api/faststream/utils/context/builders/Header.md b/docs/docs/en/api/faststream/utils/context/builders/Header.md
deleted file mode 100644
index e3f6e41ba6..0000000000
--- a/docs/docs/en/api/faststream/utils/context/builders/Header.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.builders.Header
diff --git a/docs/docs/en/api/faststream/utils/context/builders/Path.md b/docs/docs/en/api/faststream/utils/context/builders/Path.md
deleted file mode 100644
index 5203903c45..0000000000
--- a/docs/docs/en/api/faststream/utils/context/builders/Path.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.builders.Path
diff --git a/docs/docs/en/api/faststream/utils/context/repository/ContextRepo.md b/docs/docs/en/api/faststream/utils/context/repository/ContextRepo.md
deleted file mode 100644
index ad968d8954..0000000000
--- a/docs/docs/en/api/faststream/utils/context/repository/ContextRepo.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.repository.ContextRepo
diff --git a/docs/docs/en/api/faststream/utils/context/types/Context.md b/docs/docs/en/api/faststream/utils/context/types/Context.md
deleted file mode 100644
index 3ac9c51fad..0000000000
--- a/docs/docs/en/api/faststream/utils/context/types/Context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.types.Context
diff --git a/docs/docs/en/api/faststream/utils/context/types/resolve_context_by_name.md b/docs/docs/en/api/faststream/utils/context/types/resolve_context_by_name.md
deleted file mode 100644
index 60ab9fc23c..0000000000
--- a/docs/docs/en/api/faststream/utils/context/types/resolve_context_by_name.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.context.types.resolve_context_by_name
diff --git a/docs/docs/en/api/faststream/utils/data/filter_by_dict.md b/docs/docs/en/api/faststream/utils/data/filter_by_dict.md
deleted file mode 100644
index 87d03b5288..0000000000
--- a/docs/docs/en/api/faststream/utils/data/filter_by_dict.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.data.filter_by_dict
diff --git a/docs/docs/en/api/faststream/utils/functions/call_or_await.md b/docs/docs/en/api/faststream/utils/functions/call_or_await.md
deleted file mode 100644
index 9bb63aa18c..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/call_or_await.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: fast_depends.utils.run_async
diff --git a/docs/docs/en/api/faststream/utils/functions/drop_response_type.md b/docs/docs/en/api/faststream/utils/functions/drop_response_type.md
deleted file mode 100644
index a39e8a2699..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/drop_response_type.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.functions.drop_response_type
diff --git a/docs/docs/en/api/faststream/utils/functions/fake_context.md b/docs/docs/en/api/faststream/utils/functions/fake_context.md
deleted file mode 100644
index 3943186ba4..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/fake_context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.functions.fake_context
diff --git a/docs/docs/en/api/faststream/utils/functions/return_input.md b/docs/docs/en/api/faststream/utils/functions/return_input.md
deleted file mode 100644
index d5514e013f..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/return_input.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.functions.return_input
diff --git a/docs/docs/en/api/faststream/utils/functions/sync_fake_context.md b/docs/docs/en/api/faststream/utils/functions/sync_fake_context.md
deleted file mode 100644
index 0860846843..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/sync_fake_context.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.functions.sync_fake_context
diff --git a/docs/docs/en/api/faststream/utils/functions/timeout_scope.md b/docs/docs/en/api/faststream/utils/functions/timeout_scope.md
deleted file mode 100644
index 1577a7593a..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/timeout_scope.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.functions.timeout_scope
diff --git a/docs/docs/en/api/faststream/utils/functions/to_async.md b/docs/docs/en/api/faststream/utils/functions/to_async.md
deleted file mode 100644
index 715b43d3ac..0000000000
--- a/docs/docs/en/api/faststream/utils/functions/to_async.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.functions.to_async
diff --git a/docs/docs/en/api/faststream/utils/no_cast/NoCast.md b/docs/docs/en/api/faststream/utils/no_cast/NoCast.md
deleted file mode 100644
index 4fcc6054ba..0000000000
--- a/docs/docs/en/api/faststream/utils/no_cast/NoCast.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.no_cast.NoCast
diff --git a/docs/docs/en/api/faststream/utils/nuid/NUID.md b/docs/docs/en/api/faststream/utils/nuid/NUID.md
deleted file mode 100644
index 4e43844efe..0000000000
--- a/docs/docs/en/api/faststream/utils/nuid/NUID.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.nuid.NUID
diff --git a/docs/docs/en/api/faststream/utils/path/compile_path.md b/docs/docs/en/api/faststream/utils/path/compile_path.md
deleted file mode 100644
index 136d5ab1b9..0000000000
--- a/docs/docs/en/api/faststream/utils/path/compile_path.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.utils.path.compile_path
diff --git a/docs/docs/en/faststream.md b/docs/docs/en/faststream.md
index 30389fd1e6..6f64dfd8a6 100644
--- a/docs/docs/en/faststream.md
+++ b/docs/docs/en/faststream.md
@@ -22,7 +22,7 @@ search:
-
+
@@ -30,37 +30,33 @@ search:
-
+
-
+
-
+
-
+
-
+
-
+
-
-
-
-
-
+
diff --git a/docs/docs/en/getting-started/acknowlegment.md b/docs/docs/en/getting-started/acknowlegment.md
new file mode 100644
index 0000000000..e26179e30f
--- /dev/null
+++ b/docs/docs/en/getting-started/acknowlegment.md
@@ -0,0 +1,39 @@
+# Acknowledgment
+
+Since unexpected errors may occur during message processing, **FastStream** has an `ack_policy` parameter.
+
+`AckPolicy` has 4 variants:
+
+- `ACK` means that the message will be acked regardless of the processing result.
+
+- `NACK_ON_ERROR` means that the message will be nacked if an error occurs during processing, and the consumer will receive it again.
+
+- `REJECT_ON_ERROR` means that the message will be rejected if an error occurs during processing, and the consumer will not receive it again.
+
+- `DO_NOTHING` means that **FastStream** will do nothing with the message; you must ack/nack/reject it manually (see the sketch after the example below).
+
+You can set this parameter when creating a subscriber (the subject name below is illustrative):
+
+```python linenums="1" hl_lines="5" title="main.py"
+from faststream import AckPolicy
+from faststream.nats import NatsBroker
+
+broker = NatsBroker()
+@broker.subscriber("test-subject", ack_policy=AckPolicy.REJECT_ON_ERROR)
+async def handle(body: str): ...
+```
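+
+With `AckPolicy.DO_NOTHING`, acknowledgement is fully manual. Here is a minimal sketch of acking via the message object (again, the subject name is illustrative):
+
+```python linenums="1" title="manual_ack.py"
+from faststream import AckPolicy
+from faststream.nats import NatsBroker
+from faststream.nats.annotations import NatsMessage
+
+broker = NatsBroker()
+
+@broker.subscriber("test-subject", ack_policy=AckPolicy.DO_NOTHING)
+async def handle(body: str, msg: NatsMessage):
+    # acknowledge manually; msg.nack() / msg.reject() work the same way
+    await msg.ack()
+```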
diff --git a/docs/docs/en/getting-started/dependencies/index.md b/docs/docs/en/getting-started/dependencies/index.md
index 8d88ab81d2..6f2fae0c54 100644
--- a/docs/docs/en/getting-started/dependencies/index.md
+++ b/docs/docs/en/getting-started/dependencies/index.md
@@ -21,7 +21,7 @@ By default, it applies to all event handlers, unless you disabled the same optio
!!! warning
Setting the `apply_types=False` flag not only disables type casting but also `Depends` and `Context`.
- If you want to disable only type casting, use `validate=False` instead.
+ If you want to disable only type casting, use `serializer=None` instead.
This flag can be useful if you are using **FastStream** within another framework and you need to use its native dependency system.
diff --git a/docs/docs/en/getting-started/logging.md b/docs/docs/en/getting-started/logging.md
index d893aed248..5a896666bd 100644
--- a/docs/docs/en/getting-started/logging.md
+++ b/docs/docs/en/getting-started/logging.md
@@ -192,9 +192,9 @@ app = FastStream(broker, logger=logger)
And the job is done! Now you have perfectly structured logs using **Structlog**.
```{.shell .no-copy}
-TIMESPAMP [info ] FastStream app starting... extra={}
-TIMESPAMP [debug ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group', 'message_id': ''}
-TIMESPAMP [debug ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group2', 'message_id': ''}
-TIMESPAMP [info ] FastStream app started successfully! To exit, press CTRL+C extra={'topic': '', 'group_id': '', 'message_id': ''}
+TIMESTAMP [info ] FastStream app starting... extra={}
+TIMESTAMP [debug ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group', 'message_id': ''}
+TIMESTAMP [debug ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group2', 'message_id': ''}
+TIMESTAMP [info ] FastStream app started successfully! To exit, press CTRL+C extra={'topic': '', 'group_id': '', 'message_id': ''}
```
{ data-search-exclude }
diff --git a/docs/docs/en/getting-started/subscription/index.md b/docs/docs/en/getting-started/subscription/index.md
index e1ae7ecef2..2604674830 100644
--- a/docs/docs/en/getting-started/subscription/index.md
+++ b/docs/docs/en/getting-started/subscription/index.md
@@ -41,7 +41,7 @@ This way **FastStream** still consumes `#!python json.loads` result, but without
!!! warning
Setting the `apply_types=False` flag not only disables type casting but also `Depends` and `Context`.
- If you want to disable only type casting, use `validate=False` instead.
+ If you want to disable only type casting, use `serializer=None` instead.
## Multiple Subscriptions
diff --git a/docs/docs/en/nats/jetstream/ack.md b/docs/docs/en/nats/jetstream/ack.md
index f003966493..a1dbc41168 100644
--- a/docs/docs/en/nats/jetstream/ack.md
+++ b/docs/docs/en/nats/jetstream/ack.md
@@ -16,29 +16,6 @@ In most cases, **FastStream** automatically acknowledges (*acks*) messages on yo
However, there are situations where you might want to use different acknowledgement logic.
-## Retries
-
-If you prefer to use a *nack* instead of a *reject* when there's an error in message processing, you can specify the `retry` flag in the `#!python @broker.subscriber(...)` method, which is responsible for error handling logic.
-
-By default, this flag is set to `False`, indicating that if an error occurs during message processing, the message can still be retrieved from the queue:
-
-```python
-@broker.subscriber("test", retry=False) # don't handle exceptions
-async def base_handler(body: str):
- ...
-```
-
-If this flag is set to `True`, the message will be *nack*ed and placed back in the queue each time an error occurs. In this scenario, the message can be processed by another consumer (if there are several of them) or by the same one:
-
-```python
-@broker.subscriber("test", retry=True) # try again indefinitely
-async def base_handler(body: str):
- ...
-```
-
-!!! tip
- For more complex error handling cases, you can use [tenacity](https://tenacity.readthedocs.io/en/latest/){.external-link target="_blank"}
-
## Manual Acknowledgement
If you want to acknowledge a message manually, you can get access directly to the message object via the [Context](../../getting-started/context/existed.md){.internal-link} and call the method.
diff --git a/docs/docs/en/rabbit/ack.md b/docs/docs/en/rabbit/ack.md
index d68b66a0cd..e7632742dc 100644
--- a/docs/docs/en/rabbit/ack.md
+++ b/docs/docs/en/rabbit/ack.md
@@ -16,44 +16,6 @@ In most cases, **FastStream** automatically acknowledges (*acks*) messages on yo
However, there are situations where you might want to use a different acknowledgement logic.
-## Retries
-
-If you prefer to use a *nack* instead of a *reject* when there's an error in message processing, you can specify the `retry` flag in the `#!python @broker.subscriber(...)` method, which is responsible for error handling logic.
-
-By default, this flag is set to `False`, indicating that if an error occurs during message processing, the message can still be retrieved from the queue:
-
-```python
-@broker.subscriber("test", retry=False) # don't handle exceptions
-async def base_handler(body: str):
- ...
-```
-
-If this flag is set to `True`, the message will be *nack*ed and placed back in the queue each time an error occurs. In this scenario, the message can be processed by another consumer (if there are several of them) or by the same one:
-
-```python
-@broker.subscriber("test", retry=True) # try again indefinitely
-async def base_handler(body: str):
- ...
-```
-
-If the `retry` flag is set to an `int`, the message will be placed back in the queue, and the number of retries will be limited to this number:
-
-```python
-@broker.subscriber("test", retry=3) # make up to 3 attempts
-async def base_handler(body: str):
- ...
-```
-
-!!! tip
- **FastStream** identifies the message by its `message_id`. To make this option work, you should manually set this field on the producer side (if your library doesn't set it automatically).
-
-!!! bug
- At the moment, attempts are counted only by the current consumer. If the message goes to another consumer, it will have its own counter.
- Subsequently, this logic will be reworked.
-
-!!! tip
- For more complex error handling cases, you can use [tenacity](https://tenacity.readthedocs.io/en/latest/){.external-link target="_blank"}
-
## Manual acknowledgement
If you want to acknowledge a message manually, you can get access directly to the message object via the [Context](../getting-started/context/existed.md){.internal-link} and call the method.
diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md
index cfe5149109..02d814d290 100644
--- a/docs/docs/en/release.md
+++ b/docs/docs/en/release.md
@@ -55,7 +55,7 @@ Also, thanks to [@Sehat1137](https://github.com/Sehat1137){.external-link target
Well, you (the community) made a new breathtaking release for us!
Thanks to all of this release's contributors.
-Special thanks to [@Flosckow](https://github.com/Flosckow){.external-link target="_blank"}. He promores a new perfect feature - concurrent Kafka subscriber (with autocommit mode)
+Special thanks to [@Flosckow](https://github.com/Flosckow){.external-link target="_blank"}. He contributed a great new feature - a concurrent Kafka subscriber (with autocommit mode)
```python
from faststream.kafka import KafkaBroker
diff --git a/docs/docs_src/confluent/ack/errors.py b/docs/docs_src/confluent/ack/errors.py
index 36ceb61424..72bc9e1aba 100644
--- a/docs/docs_src/confluent/ack/errors.py
+++ b/docs/docs_src/confluent/ack/errors.py
@@ -1,4 +1,4 @@
-from faststream import FastStream
+from faststream import FastStream, AckPolicy
from faststream.exceptions import AckMessage
from faststream.confluent import KafkaBroker
@@ -7,7 +7,7 @@
@broker.subscriber(
- "test-error-topic", group_id="test-error-group", auto_commit=False, auto_offset_reset="earliest"
+ "test-error-topic", group_id="test-error-group", ack_policy=AckPolicy.REJECT_ON_ERROR, auto_offset_reset="earliest"
)
async def handle(body):
smth_processing(body)
diff --git a/docs/docs_src/confluent/publisher_object/example.py b/docs/docs_src/confluent/publisher_object/example.py
index a242c6e2fe..f54f6931bc 100644
--- a/docs/docs_src/confluent/publisher_object/example.py
+++ b/docs/docs_src/confluent/publisher_object/example.py
@@ -2,7 +2,7 @@
from pydantic import BaseModel, Field, NonNegativeFloat
from faststream import FastStream, Logger
-from faststream._compat import model_to_json
+from faststream._internal._compat import model_to_json
from faststream.confluent import KafkaBroker, TestKafkaBroker
broker = KafkaBroker("localhost:9092")
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py
index 52c427af6c..1dc0c0b9e9 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py
@@ -1,5 +1,5 @@
from faststream import FastStream
-from faststream.kafka import KafkaBroker, KafkaMessage
+from faststream.kafka import KafkaBroker
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_broker.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_broker.py
index ac3a8f4234..1157ddbd6d 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_broker.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_broker.py
@@ -1,13 +1,17 @@
from faststream import FastStream
-from faststream.kafka import KafkaBroker, KafkaMessage
-from faststream.asyncapi.schema import Tag
+from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
broker = KafkaBroker(
"localhost:9092",
description="Kafka broker running locally",
- asyncapi_url="non-sensitive-url:9092",
+ specification_url="non-sensitive-url:9092",
)
app = FastStream(broker)
+docs_obj = AsyncAPI(
+ broker,
+ schema_version="2.6.0",
+)
@broker.publisher("output_data")
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_handler.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_handler.py
index b48022e133..6382cc5712 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_handler.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_handler.py
@@ -2,6 +2,7 @@
from faststream import FastStream
from faststream.kafka import KafkaBroker, KafkaMessage
+from faststream.specification.asyncapi import AsyncAPI
class DataBasic(BaseModel):
@@ -12,6 +13,10 @@ class DataBasic(BaseModel):
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
+docs_obj = AsyncAPI(
+ broker,
+ schema_version="2.6.0",
+)
@broker.publisher(
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py
index 7c284c8299..d177e86909 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py
@@ -1,18 +1,21 @@
from faststream import FastStream
-from faststream.kafka import KafkaBroker, KafkaMessage
-from faststream.asyncapi.schema import Contact, ExternalDocs, License, Tag
+from faststream.specification.asyncapi import AsyncAPI
+from faststream.specification import License, Contact
+from faststream.kafka import KafkaBroker
broker = KafkaBroker("localhost:9092")
description="""# Title of the description
This description supports **Markdown** syntax"""
-app = FastStream(
+app = FastStream(broker)
+docs_obj = AsyncAPI(
broker,
title="My App",
- version="1.0.0",
+ app_version="1.0.0",
description=description,
license=License(name="MIT", url="https://opensource.org/license/mit/"),
terms_of_service="https://my-terms.com/",
contact=Contact(name="support", url="https://help.com/"),
+ schema_version="2.6.0",
)
@broker.publisher("output_data")
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/payload_info.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/payload_info.py
index 87635921c8..541e2bdbb3 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/payload_info.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/payload_info.py
@@ -2,6 +2,7 @@
from faststream import FastStream
from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
class DataBasic(BaseModel):
@@ -12,6 +13,10 @@ class DataBasic(BaseModel):
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
+docs_obj = AsyncAPI(
+ broker,
+ schema_version="2.6.0",
+)
@broker.publisher("output_data")
diff --git a/docs/docs_src/getting_started/asyncapi/serve.py b/docs/docs_src/getting_started/asyncapi/serve.py
index 5ea752fc2c..c8b42f3118 100644
--- a/docs/docs_src/getting_started/asyncapi/serve.py
+++ b/docs/docs_src/getting_started/asyncapi/serve.py
@@ -5,22 +5,22 @@
"""
-gen_json_cmd = """
+gen_asyncapi_json_cmd = """
faststream docs gen basic:app
"""
-gen_yaml_cmd = """
+gen_asyncapi_yaml_cmd = """
faststream docs gen --yaml basic:app
"""
-serve_cmd = """
+asyncapi_serve_cmd = """
faststream docs serve basic:app
"""
-serve_json_cmd = """
+asyncapi_serve_json_cmd = """
faststream docs serve asyncapi.json
"""
-serve_yaml_cmd = """
+asyncapi_serve_yaml_cmd = """
faststream docs serve asyncapi.yaml
"""
diff --git a/docs/docs_src/getting_started/context/confluent/cast.py b/docs/docs_src/getting_started/context/confluent/cast.py
index 3d0b14c343..77000f7b5b 100644
--- a/docs/docs_src/getting_started/context/confluent/cast.py
+++ b/docs/docs_src/getting_started/context/confluent/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.confluent import KafkaBroker
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-topic")
async def handle(
diff --git a/docs/docs_src/getting_started/context/confluent/custom_local_context.py b/docs/docs_src/getting_started/context/confluent/custom_local_context.py
index e10da7f3fa..5c23081e2d 100644
--- a/docs/docs_src/getting_started/context/confluent/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/confluent/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: KafkaMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/confluent/manual_local_context.py b/docs/docs_src/getting_started/context/confluent/manual_local_context.py
index c4264548d0..d419bda9a2 100644
--- a/docs/docs_src/getting_started/context/confluent/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/confluent/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types, ContextRepo
from faststream.confluent import KafkaBroker
from faststream.confluent.annotations import KafkaMessage
@@ -10,16 +10,17 @@
async def handle(
msg: str,
message: KafkaMessage,
+ context: ContextRepo,
):
tag = context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: KafkaMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/kafka/cast.py b/docs/docs_src/getting_started/context/kafka/cast.py
index 1ef06d3595..00db482531 100644
--- a/docs/docs_src/getting_started/context/kafka/cast.py
+++ b/docs/docs_src/getting_started/context/kafka/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.kafka import KafkaBroker
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-topic")
async def handle(
diff --git a/docs/docs_src/getting_started/context/kafka/custom_local_context.py b/docs/docs_src/getting_started/context/kafka/custom_local_context.py
index e20a5a6567..e137319775 100644
--- a/docs/docs_src/getting_started/context/kafka/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/kafka/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: KafkaMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/kafka/manual_local_context.py b/docs/docs_src/getting_started/context/kafka/manual_local_context.py
index 3e39cff046..4e69f6600a 100644
--- a/docs/docs_src/getting_started/context/kafka/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/kafka/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types, ContextRepo
from faststream.kafka import KafkaBroker
from faststream.kafka.annotations import KafkaMessage
@@ -10,16 +10,17 @@
async def handle(
msg: str,
message: KafkaMessage,
+ context: ContextRepo,
):
tag = context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: KafkaMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/nats/cast.py b/docs/docs_src/getting_started/context/nats/cast.py
index 0733561043..128cb19dd8 100644
--- a/docs/docs_src/getting_started/context/nats/cast.py
+++ b/docs/docs_src/getting_started/context/nats/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.nats import NatsBroker
broker = NatsBroker("nats://localhost:4222")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-subject")
async def handle(
diff --git a/docs/docs_src/getting_started/context/nats/custom_local_context.py b/docs/docs_src/getting_started/context/nats/custom_local_context.py
index 510ec251e4..484bb9f5f8 100644
--- a/docs/docs_src/getting_started/context/nats/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/nats/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: NatsMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/nats/manual_local_context.py b/docs/docs_src/getting_started/context/nats/manual_local_context.py
index 72a3519daf..fac68e4394 100644
--- a/docs/docs_src/getting_started/context/nats/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/nats/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types
from faststream.nats import NatsBroker
from faststream.nats.annotations import NatsMessage
@@ -11,15 +11,15 @@ async def handle(
msg: str,
message: NatsMessage,
):
- tag = context.set_local("correlation_id", message.correlation_id)
+ tag = app.context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: NatsMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/nested.py b/docs/docs_src/getting_started/context/nested.py
index 6eac7ca816..362112850d 100644
--- a/docs/docs_src/getting_started/context/nested.py
+++ b/docs/docs_src/getting_started/context/nested.py
@@ -11,6 +11,6 @@ async def handler(body):
nested_func(body)
-@apply_types
+@apply_types(context__=broker.context)
def nested_func(body, logger=Context()):
logger.info(body)
diff --git a/docs/docs_src/getting_started/context/rabbit/cast.py b/docs/docs_src/getting_started/context/rabbit/cast.py
index 24cf1bf72e..47ce2b4525 100644
--- a/docs/docs_src/getting_started/context/rabbit/cast.py
+++ b/docs/docs_src/getting_started/context/rabbit/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.rabbit import RabbitBroker
broker = RabbitBroker("amqp://guest:guest@localhost:5672/")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-queue")
async def handle(
diff --git a/docs/docs_src/getting_started/context/rabbit/custom_local_context.py b/docs/docs_src/getting_started/context/rabbit/custom_local_context.py
index 6ee9866967..9a3f922073 100644
--- a/docs/docs_src/getting_started/context/rabbit/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/rabbit/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: RabbitMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/rabbit/manual_local_context.py b/docs/docs_src/getting_started/context/rabbit/manual_local_context.py
index 426abe88bb..c6859ff184 100644
--- a/docs/docs_src/getting_started/context/rabbit/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/rabbit/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types
from faststream.rabbit import RabbitBroker
from faststream.rabbit.annotations import RabbitMessage
@@ -11,15 +11,15 @@ async def handle(
msg: str,
message: RabbitMessage,
):
- tag = context.set_local("correlation_id", message.correlation_id)
+ tag = app.context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: RabbitMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/redis/cast.py b/docs/docs_src/getting_started/context/redis/cast.py
index fbd5eaeb3b..203daafb30 100644
--- a/docs/docs_src/getting_started/context/redis/cast.py
+++ b/docs/docs_src/getting_started/context/redis/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.redis import RedisBroker
broker = RedisBroker("redis://localhost:6379")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-channel")
async def handle(
diff --git a/docs/docs_src/getting_started/context/redis/custom_local_context.py b/docs/docs_src/getting_started/context/redis/custom_local_context.py
index 4feb1eb438..9e06b3ea93 100644
--- a/docs/docs_src/getting_started/context/redis/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/redis/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: RedisMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/redis/manual_local_context.py b/docs/docs_src/getting_started/context/redis/manual_local_context.py
index f52af02782..74a5ced413 100644
--- a/docs/docs_src/getting_started/context/redis/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/redis/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types
from faststream.redis import RedisBroker
from faststream.redis.annotations import RedisMessage
@@ -11,15 +11,15 @@ async def handle(
msg: str,
message: RedisMessage,
):
- tag = context.set_local("correlation_id", message.correlation_id)
+ tag = app.context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: RedisMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/lifespan/multiple.py b/docs/docs_src/getting_started/lifespan/multiple.py
index f0280d4da4..d1d6fd75f6 100644
--- a/docs/docs_src/getting_started/lifespan/multiple.py
+++ b/docs/docs_src/getting_started/lifespan/multiple.py
@@ -1,6 +1,8 @@
+from unittest.mock import AsyncMock
+
from faststream import Context, ContextRepo, FastStream
-app = FastStream()
+app = FastStream(AsyncMock())
@app.on_startup
diff --git a/docs/docs_src/getting_started/subscription/confluent/real_testing.py b/docs/docs_src/getting_started/subscription/confluent/real_testing.py
index 43973935b9..fcbd09f7e4 100644
--- a/docs/docs_src/getting_started/subscription/confluent/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/confluent/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.confluent import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/confluent/testing.py b/docs/docs_src/getting_started/subscription/confluent/testing.py
index 57ed6acaaa..dfb2bf964d 100644
--- a/docs/docs_src/getting_started/subscription/confluent/testing.py
+++ b/docs/docs_src/getting_started/subscription/confluent/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.confluent import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/kafka/real_testing.py b/docs/docs_src/getting_started/subscription/kafka/real_testing.py
index 0cf374b233..5eb6fd7817 100644
--- a/docs/docs_src/getting_started/subscription/kafka/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/kafka/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.kafka import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/kafka/testing.py b/docs/docs_src/getting_started/subscription/kafka/testing.py
index e1f6241276..cf834ff802 100644
--- a/docs/docs_src/getting_started/subscription/kafka/testing.py
+++ b/docs/docs_src/getting_started/subscription/kafka/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.kafka import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/nats/real_testing.py b/docs/docs_src/getting_started/subscription/nats/real_testing.py
index 5e9d6e4567..c14123218c 100644
--- a/docs/docs_src/getting_started/subscription/nats/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/nats/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.nats import TestNatsBroker
diff --git a/docs/docs_src/getting_started/subscription/nats/testing.py b/docs/docs_src/getting_started/subscription/nats/testing.py
index 0f7560e043..4d66a744c0 100644
--- a/docs/docs_src/getting_started/subscription/nats/testing.py
+++ b/docs/docs_src/getting_started/subscription/nats/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.nats import TestNatsBroker
diff --git a/docs/docs_src/getting_started/subscription/rabbit/real_testing.py b/docs/docs_src/getting_started/subscription/rabbit/real_testing.py
index 900b6046e7..7cf61a2df5 100644
--- a/docs/docs_src/getting_started/subscription/rabbit/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/rabbit/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.rabbit import TestRabbitBroker
diff --git a/docs/docs_src/getting_started/subscription/rabbit/testing.py b/docs/docs_src/getting_started/subscription/rabbit/testing.py
index 78425924da..f49be05c7a 100644
--- a/docs/docs_src/getting_started/subscription/rabbit/testing.py
+++ b/docs/docs_src/getting_started/subscription/rabbit/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.rabbit import TestRabbitBroker
diff --git a/docs/docs_src/getting_started/subscription/redis/real_testing.py b/docs/docs_src/getting_started/subscription/redis/real_testing.py
index b2c05c203e..6514d66902 100644
--- a/docs/docs_src/getting_started/subscription/redis/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/redis/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.redis import TestRedisBroker
diff --git a/docs/docs_src/getting_started/subscription/redis/testing.py b/docs/docs_src/getting_started/subscription/redis/testing.py
index 4934366f75..bb38ffd5fe 100644
--- a/docs/docs_src/getting_started/subscription/redis/testing.py
+++ b/docs/docs_src/getting_started/subscription/redis/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.redis import TestRedisBroker
diff --git a/docs/docs_src/index/confluent/test.py b/docs/docs_src/index/confluent/test.py
index 1cc613d157..b569184a81 100644
--- a/docs/docs_src/index/confluent/test.py
+++ b/docs/docs_src/index/confluent/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.confluent import TestKafkaBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestKafkaBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-topic")
diff --git a/docs/docs_src/index/kafka/test.py b/docs/docs_src/index/kafka/test.py
index bfd740312c..67b57e6f12 100644
--- a/docs/docs_src/index/kafka/test.py
+++ b/docs/docs_src/index/kafka/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.kafka import TestKafkaBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestKafkaBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-topic")
diff --git a/docs/docs_src/index/nats/test.py b/docs/docs_src/index/nats/test.py
index 85b2e6de76..ca2e71e7b9 100644
--- a/docs/docs_src/index/nats/test.py
+++ b/docs/docs_src/index/nats/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.nats import TestNatsBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestNatsBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-subject")
diff --git a/docs/docs_src/index/rabbit/test.py b/docs/docs_src/index/rabbit/test.py
index a193db35b2..7b67df49dc 100644
--- a/docs/docs_src/index/rabbit/test.py
+++ b/docs/docs_src/index/rabbit/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.rabbit import TestRabbitBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestRabbitBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-queue")
diff --git a/docs/docs_src/index/redis/test.py b/docs/docs_src/index/redis/test.py
index 9a14ba4190..411e032edb 100644
--- a/docs/docs_src/index/redis/test.py
+++ b/docs/docs_src/index/redis/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.redis import TestRedisBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestRedisBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-channel")
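All of the testing snippets above swap pydantic's ValidationError for the one raised by fast_depends, since argument validation now happens inside the DI layer. A self-contained test in the same shape; the `in-topic` handler here is an assumed stand-in for the docs' pydantic example:

    import pytest
    from fast_depends.exceptions import ValidationError

    from faststream.kafka import KafkaBroker, TestKafkaBroker

    broker = KafkaBroker("localhost:9092")

    @broker.subscriber("in-topic")
    async def handle(user: str, user_id: int) -> None: ...

    @pytest.mark.asyncio
    async def test_invalid():
        async with TestKafkaBroker(broker) as br:
            # a payload that cannot be coerced now raises the fast_depends error
            with pytest.raises(ValidationError):
                await br.publish("wrong message", "in-topic")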
diff --git a/docs/docs_src/kafka/ack/errors.py b/docs/docs_src/kafka/ack/errors.py
index 19d333976d..6f293ab681 100644
--- a/docs/docs_src/kafka/ack/errors.py
+++ b/docs/docs_src/kafka/ack/errors.py
@@ -1,4 +1,4 @@
-from faststream import FastStream
+from faststream import FastStream, AckPolicy
from faststream.exceptions import AckMessage
from faststream.kafka import KafkaBroker
@@ -7,7 +7,7 @@
@broker.subscriber(
- "test-topic", group_id="test-group", auto_commit=False
+ "test-topic", group_id="test-group", ack_policy=AckPolicy.REJECT_ON_ERROR,
)
async def handle(body):
smth_processing(body)
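The `auto_commit=False` flag gives way to the `AckPolicy` enum, now exported from the package root. A short sketch of a manually acknowledged subscriber under the new policy, keeping the `AckMessage` semantics from the example above:

    from faststream import AckPolicy, FastStream
    from faststream.exceptions import AckMessage
    from faststream.kafka import KafkaBroker

    broker = KafkaBroker("localhost:9092")
    app = FastStream(broker)

    @broker.subscriber(
        "test-topic",
        group_id="test-group",
        ack_policy=AckPolicy.REJECT_ON_ERROR,  # replaces auto_commit=False
    )
    async def handle(body):
        if body == "confirm":
            raise AckMessage  # ack explicitly and stop further processing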
diff --git a/docs/docs_src/kafka/basic/basic.py b/docs/docs_src/kafka/basic/basic.py
index 58d1666a1b..29b14a63f7 100644
--- a/docs/docs_src/kafka/basic/basic.py
+++ b/docs/docs_src/kafka/basic/basic.py
@@ -2,6 +2,7 @@
from faststream import FastStream, Logger
from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
class DataBasic(BaseModel):
@@ -12,6 +13,7 @@ class DataBasic(BaseModel):
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
+asyncapi = AsyncAPI(broker, schema_version="3.0.0")
@broker.publisher("output_data")
diff --git a/docs/docs_src/kafka/publisher_object/example.py b/docs/docs_src/kafka/publisher_object/example.py
index 4bb6c5a957..0cf213a64b 100644
--- a/docs/docs_src/kafka/publisher_object/example.py
+++ b/docs/docs_src/kafka/publisher_object/example.py
@@ -2,7 +2,7 @@
from pydantic import BaseModel, Field, NonNegativeFloat
from faststream import FastStream, Logger
-from faststream._compat import model_to_json
+from faststream._internal._compat import model_to_json
from faststream.kafka import KafkaBroker, TestKafkaBroker
broker = KafkaBroker("localhost:9092")
diff --git a/docs/docs_src/nats/js/main.py b/docs/docs_src/nats/js/main.py
index a65749b082..715e04fd4b 100644
--- a/docs/docs_src/nats/js/main.py
+++ b/docs/docs_src/nats/js/main.py
@@ -1,5 +1,6 @@
from faststream import FastStream, Logger
from faststream.nats import JStream, NatsBroker
+from nats.js.api import DeliverPolicy
broker = NatsBroker()
app = FastStream(broker)
@@ -9,7 +10,7 @@
@broker.subscriber(
"js-subject",
stream=stream,
- deliver_policy="new",
+ deliver_policy=DeliverPolicy.NEW,
)
async def handler(msg: str, logger: Logger):
logger.info(msg)
diff --git a/docs/expand_markdown.py b/docs/expand_markdown.py
index 4cc40e83c0..1d06c6fb06 100644
--- a/docs/expand_markdown.py
+++ b/docs/expand_markdown.py
@@ -68,9 +68,12 @@ def expand_markdown(
input_markdown_path: Path = typer.Argument(...),
output_markdown_path: Path = typer.Argument(...),
):
- with input_markdown_path.open() as input_file, output_markdown_path.open(
- "w"
- ) as output_file:
+ with (
+ input_markdown_path.open() as input_file,
+ output_markdown_path.open(
+ "w",
+ ) as output_file,
+ ):
for line in input_file:
# Check if the line does not contain the "{!>" pattern
if "{!>" not in line:
diff --git a/docs/update_releases.py b/docs/update_releases.py
index 79a6ca0d6b..7e7e82ac41 100644
--- a/docs/update_releases.py
+++ b/docs/update_releases.py
@@ -35,7 +35,9 @@ def convert_links_and_usernames(text):
if "](" not in text:
# Convert HTTP/HTTPS links
text = re.sub(
- r"(https?://.*\/(.*))", r'[#\2](\1){.external-link target="_blank"}', text
+ r"(https?://.*\/(.*))",
+ r'[#\2](\1){.external-link target="_blank"}',
+ text,
)
# Convert GitHub usernames to links
@@ -83,7 +85,7 @@ def update_release_notes(realease_notes_path: Path):
+ "\n" # adding an addition newline after the header results in one empty file being added every time we run the script
+ changelog
+ "\n"
- ).replace("\r", "")
+ ).replace("\r", ""),
)
diff --git a/examples/e04_msg_filter.py b/examples/e04_msg_filter.py
index cacdae63de..852c9442ed 100644
--- a/examples/e04_msg_filter.py
+++ b/examples/e04_msg_filter.py
@@ -5,13 +5,15 @@
broker = RabbitBroker("amqp://guest:guest@localhost:5672/")
app = FastStream(broker)
+subscriber = broker.subscriber("test-queue")
-@broker.subscriber("test-queue", filter=lambda m: m.content_type == "application/json")
+
+@subscriber(filter=lambda m: m.content_type == "application/json")
async def handle_json(msg, logger: Logger):
logger.info(f"JSON message: {msg}")
-@broker.subscriber("test-queue")
+@subscriber
async def handle_other_messages(msg, logger: Logger):
logger.info(f"Default message: {msg}")
diff --git a/examples/e10_middlewares.py b/examples/e10_middlewares.py
index 03a0519d79..31a2a257c9 100644
--- a/examples/e10_middlewares.py
+++ b/examples/e10_middlewares.py
@@ -25,7 +25,7 @@ async def subscriber_middleware(
msg: RabbitMessage,
) -> Any:
print(f"call handler middleware with body: {msg}")
- msg._decoded_body = "fake message"
+ msg.body = b"fake message"
result = await call_next(msg)
print("handler middleware out")
return result
diff --git a/examples/kafka/ack_after_process.py b/examples/kafka/ack_after_process.py
index 7a00b7fac7..97550fdb87 100644
--- a/examples/kafka/ack_after_process.py
+++ b/examples/kafka/ack_after_process.py
@@ -1,14 +1,13 @@
-from faststream import FastStream, Logger
+from faststream import FastStream, Logger, AckPolicy
from faststream.kafka import KafkaBroker
broker = KafkaBroker()
app = FastStream(broker)
-
@broker.subscriber(
"test",
group_id="group",
- auto_commit=False,
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
async def handler(msg: str, logger: Logger):
logger.info(msg)
diff --git a/faststream/__init__.py b/faststream/__init__.py
index b4267f7895..cad7e628bf 100644
--- a/faststream/__init__.py
+++ b/faststream/__init__.py
@@ -1,15 +1,24 @@
"""A Python framework for building services interacting with Apache Kafka, RabbitMQ, NATS and Redis."""
-from faststream.annotations import ContextRepo, Logger, NoCast
+from faststream._internal.testing.app import TestApp
+from faststream._internal.utils import apply_types
+from faststream.annotations import ContextRepo, Logger
from faststream.app import FastStream
-from faststream.broker.middlewares import BaseMiddleware, ExceptionMiddleware
-from faststream.broker.response import Response
-from faststream.testing.app import TestApp
-from faststream.utils import Context, Depends, Header, Path, apply_types, context
+from faststream.middlewares import AckPolicy, BaseMiddleware, ExceptionMiddleware
+from faststream.params import (
+ Context,
+ Depends,
+ Header,
+ NoCast,
+ Path,
+)
+from faststream.response import Response
__all__ = (
# middlewares
+ "AckPolicy",
"BaseMiddleware",
+ # params
"Context",
"ContextRepo",
"Depends",
@@ -26,5 +35,4 @@
"TestApp",
# utils
"apply_types",
- "context",
)
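After this re-export shuffle, user code keeps importing from the package root; only the removed process-global `context` requires migration (to `app.context` or an injected `ContextRepo`). What the new top level exposes, per the imports and `__all__` above:

    from faststream import (
        AckPolicy,            # new: replaces auto_commit-style flags
        BaseMiddleware,
        Context,
        ContextRepo,
        Depends,
        ExceptionMiddleware,
        FastStream,
        Header,
        NoCast,               # moved from annotations to params
        Path,
        Response,
        TestApp,
        apply_types,
    )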
diff --git a/faststream/__main__.py b/faststream/__main__.py
index ba92a618a6..1b1a44657f 100644
--- a/faststream/__main__.py
+++ b/faststream/__main__.py
@@ -2,20 +2,20 @@
import warnings
-try:
- from faststream.cli.main import cli
-except ImportError:
- has_typer = False
-else:
- has_typer = True
+from faststream._internal._compat import HAS_TYPER
-if not has_typer:
- raise ImportError(
+if not HAS_TYPER:
+ msg = (
"\n\nYou're trying to use the FastStream CLI, "
"\nbut you haven't installed the required dependencies."
"\nPlease install them using the following command: "
'\npip install "faststream[cli]"'
)
+ raise ImportError(
+ msg,
+ )
+
+from faststream._internal.cli.main import cli
warnings.filterwarnings("default", category=ImportWarning, module="faststream")
diff --git a/faststream/_compat.py b/faststream/_compat.py
deleted file mode 100644
index 6cbb5b4b89..0000000000
--- a/faststream/_compat.py
+++ /dev/null
@@ -1,168 +0,0 @@
-import json
-import os
-import sys
-from importlib.metadata import version as get_version
-from typing import Any, Callable, Dict, Mapping, Optional, Type, TypeVar, Union
-
-from pydantic import BaseModel as BaseModel
-from pydantic.version import VERSION as PYDANTIC_VERSION
-
-from faststream.types import AnyDict
-
-IS_WINDOWS = (
- sys.platform == "win32" or sys.platform == "cygwin" or sys.platform == "msys"
-)
-
-
-ModelVar = TypeVar("ModelVar", bound=BaseModel)
-
-
-def is_test_env() -> bool:
- return bool(os.getenv("PYTEST_CURRENT_TEST"))
-
-
-json_dumps: Callable[..., bytes]
-orjson: Any
-ujson: Any
-
-try:
- import orjson # type: ignore[no-redef]
-except ImportError:
- orjson = None
-
-try:
- import ujson
-except ImportError:
- ujson = None
-
-if orjson:
- json_loads = orjson.loads
- json_dumps = orjson.dumps
-
-elif ujson:
- json_loads = ujson.loads
-
- def json_dumps(*a: Any, **kw: Any) -> bytes:
- return ujson.dumps(*a, **kw).encode() # type: ignore
-
-else:
- json_loads = json.loads
-
- def json_dumps(*a: Any, **kw: Any) -> bytes:
- return json.dumps(*a, **kw).encode()
-
-
-JsonSchemaValue = Mapping[str, Any]
-
-major, minor, *_ = PYDANTIC_VERSION.split(".")
-_PYDANTCI_MAJOR, _PYDANTIC_MINOR = int(major), int(minor)
-
-PYDANTIC_V2 = _PYDANTCI_MAJOR >= 2
-
-if PYDANTIC_V2:
- if _PYDANTIC_MINOR >= 4:
- from pydantic.annotated_handlers import (
- GetJsonSchemaHandler as GetJsonSchemaHandler,
- )
- from pydantic_core.core_schema import (
- with_info_plain_validator_function as with_info_plain_validator_function,
- )
- else:
- from pydantic._internal._annotated_handlers import ( # type: ignore[no-redef]
- GetJsonSchemaHandler as GetJsonSchemaHandler,
- )
- from pydantic_core.core_schema import (
- general_plain_validator_function as with_info_plain_validator_function,
- )
-
- from pydantic.fields import FieldInfo as FieldInfo
- from pydantic_core import CoreSchema as CoreSchema
- from pydantic_core import PydanticUndefined as PydanticUndefined
- from pydantic_core import to_jsonable_python
-
- SCHEMA_FIELD = "json_schema_extra"
- DEF_KEY = "$defs"
-
- def model_to_jsonable(
- model: BaseModel,
- **kwargs: Any,
- ) -> Any:
- return to_jsonable_python(model, **kwargs)
-
- def dump_json(data: Any) -> bytes:
- return json_dumps(model_to_jsonable(data))
-
- def get_model_fields(model: Type[BaseModel]) -> Dict[str, Any]:
- return model.model_fields
-
- def model_to_json(model: BaseModel, **kwargs: Any) -> str:
- return model.model_dump_json(**kwargs)
-
- def model_parse(
- model: Type[ModelVar], data: Union[str, bytes], **kwargs: Any
- ) -> ModelVar:
- return model.model_validate_json(data, **kwargs)
-
- def model_schema(model: Type[BaseModel], **kwargs: Any) -> AnyDict:
- return model.model_json_schema(**kwargs)
-
-else:
- from pydantic.fields import FieldInfo as FieldInfo
- from pydantic.json import pydantic_encoder
-
- GetJsonSchemaHandler = Any # type: ignore[assignment,misc]
- CoreSchema = Any # type: ignore[assignment,misc]
-
- SCHEMA_FIELD = "schema_extra"
- DEF_KEY = "definitions"
-
- PydanticUndefined = Ellipsis # type: ignore[assignment]
-
- def dump_json(data: Any) -> bytes:
- return json_dumps(data, default=pydantic_encoder)
-
- def get_model_fields(model: Type[BaseModel]) -> Dict[str, Any]:
- return model.__fields__ # type: ignore[return-value]
-
- def model_to_json(model: BaseModel, **kwargs: Any) -> str:
- return model.json(**kwargs)
-
- def model_parse(
- model: Type[ModelVar], data: Union[str, bytes], **kwargs: Any
- ) -> ModelVar:
- return model.parse_raw(data, **kwargs)
-
- def model_schema(model: Type[BaseModel], **kwargs: Any) -> AnyDict:
- return model.schema(**kwargs)
-
- def model_to_jsonable(
- model: BaseModel,
- **kwargs: Any,
- ) -> Any:
- return json_loads(model.json(**kwargs))
-
- # TODO: pydantic types misc
- def with_info_plain_validator_function( # type: ignore[misc]
- function: Callable[..., Any],
- *,
- ref: Optional[str] = None,
- metadata: Any = None,
- serialization: Any = None,
- ) -> JsonSchemaValue:
- return {}
-
-
-major, *_ = get_version("anyio").split(".")
-_ANYIO_MAJOR = int(major)
-ANYIO_V3 = _ANYIO_MAJOR == 3
-
-
-if ANYIO_V3:
- from anyio import ExceptionGroup as ExceptionGroup # type: ignore[attr-defined]
-else:
- if sys.version_info < (3, 11):
- from exceptiongroup import (
- ExceptionGroup as ExceptionGroup,
- )
- else:
- ExceptionGroup = ExceptionGroup
diff --git a/faststream/_internal/_compat.py b/faststream/_internal/_compat.py
new file mode 100644
index 0000000000..b515aaface
--- /dev/null
+++ b/faststream/_internal/_compat.py
@@ -0,0 +1,257 @@
+import json
+import sys
+import warnings
+from collections import UserString
+from collections.abc import Iterable, Mapping
+from importlib.metadata import version as get_version
+from importlib.util import find_spec
+from typing import (
+ Any,
+ Callable,
+ Optional,
+ TypeVar,
+ Union,
+)
+
+from pydantic import BaseModel
+from pydantic.version import VERSION as PYDANTIC_VERSION
+
+from faststream._internal.basic_types import AnyDict
+
+IS_WINDOWS = (
+ sys.platform == "win32" or sys.platform == "cygwin" or sys.platform == "msys"
+)
+
+__all__ = (
+ "HAS_TYPER",
+ "PYDANTIC_V2",
+ "BaseModel",
+ "CoreSchema",
+ "EmailStr",
+ "ExceptionGroup",
+ "GetJsonSchemaHandler",
+ "PydanticUndefined",
+ "json_dumps",
+ "json_loads",
+ "with_info_plain_validator_function",
+)
+
+try:
+ HAS_TYPER = find_spec("typer") is not None
+except ImportError:
+ HAS_TYPER = False
+
+
+json_dumps: Callable[..., bytes]
+orjson: Any
+ujson: Any
+
+try:
+ import orjson # type: ignore[no-redef]
+except ImportError:
+ orjson = None
+
+try:
+ import ujson
+except ImportError:
+ ujson = None
+
+if orjson:
+ json_loads = orjson.loads
+ json_dumps = orjson.dumps
+
+elif ujson:
+ json_loads = ujson.loads
+
+ def json_dumps(*a: Any, **kw: Any) -> bytes:
+ return ujson.dumps(*a, **kw).encode() # type: ignore[no-any-return]
+
+else:
+ json_loads = json.loads
+
+ def json_dumps(*a: Any, **kw: Any) -> bytes:
+ return json.dumps(*a, **kw).encode()
+
+
+ModelVar = TypeVar("ModelVar", bound=BaseModel)
+
+JsonSchemaValue = Mapping[str, Any]
+major, minor, *_ = PYDANTIC_VERSION.split(".")
+_PYDANTIC_MAJOR, _PYDANTIC_MINOR = int(major), int(minor)
+
+PYDANTIC_V2 = _PYDANTIC_MAJOR >= 2
+
+if PYDANTIC_V2:
+ if _PYDANTIC_MINOR >= 4:
+ from pydantic.annotated_handlers import (
+ GetJsonSchemaHandler,
+ )
+ from pydantic_core.core_schema import (
+ with_info_plain_validator_function,
+ )
+ else:
+ from pydantic._internal._annotated_handlers import ( # type: ignore[no-redef]
+ GetJsonSchemaHandler,
+ )
+ from pydantic_core.core_schema import (
+ general_plain_validator_function as with_info_plain_validator_function,
+ )
+
+ from pydantic_core import CoreSchema, PydanticUndefined, to_jsonable_python
+
+ SCHEMA_FIELD = "json_schema_extra"
+ DEF_KEY = "$defs"
+
+ def model_to_jsonable(
+ model: BaseModel,
+ **kwargs: Any,
+ ) -> Any:
+ return to_jsonable_python(model, **kwargs)
+
+ def dump_json(data: Any) -> bytes:
+ return json_dumps(model_to_jsonable(data))
+
+ def get_model_fields(model: type[BaseModel]) -> AnyDict:
+ return model.model_fields # type: ignore[return-value]
+
+ def model_to_json(model: BaseModel, **kwargs: Any) -> str:
+ return model.model_dump_json(**kwargs)
+
+ def model_parse(
+ model: type[ModelVar],
+ data: Union[str, bytes],
+ **kwargs: Any,
+ ) -> ModelVar:
+ return model.model_validate_json(data, **kwargs)
+
+ def model_schema(model: type[BaseModel], **kwargs: Any) -> AnyDict:
+ return model.model_json_schema(**kwargs)
+
+else:
+ from pydantic.json import pydantic_encoder
+
+ GetJsonSchemaHandler = Any # type: ignore[assignment,misc]
+ CoreSchema = Any # type: ignore[assignment,misc]
+
+ SCHEMA_FIELD = "schema_extra"
+ DEF_KEY = "definitions"
+
+ PydanticUndefined = Ellipsis # type: ignore[assignment]
+
+ def dump_json(data: Any) -> bytes:
+ return json_dumps(data, default=pydantic_encoder)
+
+ def get_model_fields(model: type[BaseModel]) -> AnyDict:
+ return model.__fields__ # type: ignore[return-value]
+
+ def model_to_json(model: BaseModel, **kwargs: Any) -> str:
+ return model.json(**kwargs)
+
+ def model_parse(
+ model: type[ModelVar],
+ data: Union[str, bytes],
+ **kwargs: Any,
+ ) -> ModelVar:
+ return model.parse_raw(data, **kwargs)
+
+ def model_schema(model: type[BaseModel], **kwargs: Any) -> AnyDict:
+ return model.schema(**kwargs)
+
+ def model_to_jsonable(
+ model: BaseModel,
+ **kwargs: Any,
+ ) -> Any:
+ return json_loads(model.json(**kwargs))
+
+ # TODO: pydantic types misc
+ def with_info_plain_validator_function( # type: ignore[misc]
+ function: Callable[..., Any],
+ *,
+ ref: Optional[str] = None,
+ metadata: Any = None,
+ serialization: Any = None,
+ ) -> JsonSchemaValue:
+ return {}
+
+
+major, *_ = get_version("anyio").split(".")
+_ANYIO_MAJOR = int(major)
+ANYIO_V3 = _ANYIO_MAJOR == 3
+
+
+if ANYIO_V3:
+ from anyio import ExceptionGroup # type: ignore[attr-defined]
+elif sys.version_info >= (3, 11):
+ ExceptionGroup = ExceptionGroup # noqa: PLW0127
+else:
+ from exceptiongroup import ExceptionGroup
+
+try:
+ import email_validator
+
+ if email_validator is None:
+ raise ImportError
+ from pydantic import EmailStr
+except ImportError: # pragma: no cover
+ # NOTE: the EmailStr mock was copied from FastAPI
+ # https://github.com/tiangolo/fastapi/blob/master/fastapi/openapi/models.py#24
+ class EmailStr(UserString): # type: ignore[no-redef]
+ """EmailStr is a string that should be an email.
+
+ Note: EmailStr mock was copied from the FastAPI:
+ https://github.com/tiangolo/fastapi/blob/master/fastapi/openapi/models.py#24
+ """
+
+ @classmethod
+ def __get_validators__(cls) -> Iterable[Callable[..., Any]]:
+ """Returns the validators for the EmailStr class."""
+ yield cls.validate
+
+ @classmethod
+ def validate(cls, v: Any) -> str:
+ """Validates the EmailStr class."""
+ warnings.warn(
+ "email-validator bot installed, email fields will be treated as str.\n"
+ "To install, run: pip install email-validator",
+ category=RuntimeWarning,
+ stacklevel=1,
+ )
+ return str(v)
+
+ @classmethod
+ def _validate(cls, __input_value: Any, _: Any) -> str:
+ warnings.warn(
+ "email-validator bot installed, email fields will be treated as str.\n"
+ "To install, run: pip install email-validator",
+ category=RuntimeWarning,
+ stacklevel=1,
+ )
+ return str(__input_value)
+
+ @classmethod
+ def __get_pydantic_json_schema__(
+ cls,
+ core_schema: CoreSchema,
+ handler: GetJsonSchemaHandler,
+ ) -> JsonSchemaValue:
+ """Returns the JSON schema for the EmailStr class.
+
+ Args:
+ core_schema : the core schema
+ handler : the handler
+ """
+ return {"type": "string", "format": "email"}
+
+ @classmethod
+ def __get_pydantic_core_schema__(
+ cls,
+ source: type[Any],
+ handler: Callable[[Any], CoreSchema],
+ ) -> JsonSchemaValue:
+ """Returns the core schema for the EmailStr class.
+
+ Args:
+ source : the source
+ handler : the handler
+ """
+ return with_info_plain_validator_function(cls._validate)
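The compat module keeps a single call surface over pydantic v1 and v2 (and over orjson/ujson/stdlib json). A hedged usage sketch of the helpers defined above; this is an internal module, so the import path should be treated as unstable:

    from pydantic import BaseModel

    from faststream._internal._compat import (
        dump_json,      # Any -> bytes via the fastest available json backend
        model_parse,    # model_validate_json on v2, parse_raw on v1
        model_to_json,  # model_dump_json on v2, .json() on v1
    )

    class Data(BaseModel):
        name: str

    data = model_parse(Data, '{"name": "demo"}')
    raw: str = model_to_json(data)
    payload: bytes = dump_json({"wrapped": data.name})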
diff --git a/faststream/_internal/application.py b/faststream/_internal/application.py
index ff2e39fac4..185dcf0878 100644
--- a/faststream/_internal/application.py
+++ b/faststream/_internal/application.py
@@ -1,211 +1,355 @@
import logging
-from abc import ABC, abstractmethod
+from abc import abstractmethod
+from collections.abc import AsyncIterator, Sequence
+from contextlib import asynccontextmanager
from typing import (
TYPE_CHECKING,
Any,
Callable,
- Dict,
- List,
Optional,
- Sequence,
TypeVar,
- Union,
)
-import anyio
+from fast_depends import Provider
from typing_extensions import ParamSpec
-from faststream.asyncapi.proto import AsyncAPIApplication
-from faststream.log.logging import logger
-from faststream.utils import apply_types, context
-from faststream.utils.functions import drop_response_type, fake_context, to_async
-
-P_HookParams = ParamSpec("P_HookParams")
-T_HookReturn = TypeVar("T_HookReturn")
+from faststream._internal.constants import EMPTY
+from faststream._internal.context import ContextRepo
+from faststream._internal.log import logger
+from faststream._internal.state import DIState
+from faststream._internal.state.application import (
+ ApplicationState,
+ BasicApplicationState,
+ RunningApplicationState,
+)
+from faststream._internal.state.broker import OuterBrokerState
+from faststream._internal.utils import apply_types
+from faststream._internal.utils.functions import (
+ drop_response_type,
+ fake_context,
+ to_async,
+)
+from faststream.exceptions import SetupError
if TYPE_CHECKING:
- from faststream.asyncapi.schema import (
- Contact,
- ContactDict,
- ExternalDocs,
- ExternalDocsDict,
- License,
- LicenseDict,
- Tag,
- TagDict,
- )
- from faststream.broker.core.usecase import BrokerUsecase
- from faststream.types import (
- AnyDict,
- AnyHttpUrl,
+ from fast_depends.library.serializer import SerializerProto
+
+ from faststream._internal.basic_types import (
+ AnyCallable,
AsyncFunc,
Lifespan,
LoggerProto,
SettingField,
)
+ from faststream._internal.broker.broker import BrokerUsecase
+
+
+try:
+ from pydantic import ValidationError as PValidation
+
+ from faststream.exceptions import StartupValidationError
+ @asynccontextmanager
+ async def catch_startup_validation_error() -> AsyncIterator[None]:
+ try:
+ yield
+ except PValidation as e:
+ missed_fields = []
+ invalid_fields = []
+ for x in e.errors():
+ location = str(x["loc"][0])
+ if x["type"] == "missing":
+ missed_fields.append(location)
+ else:
+ invalid_fields.append(location)
-class Application(ABC, AsyncAPIApplication):
+ raise StartupValidationError(
+ missed_fields=missed_fields,
+ invalid_fields=invalid_fields,
+ ) from e
+
+except ImportError:
+ catch_startup_validation_error = fake_context
+
+
+P_HookParams = ParamSpec("P_HookParams")
+T_HookReturn = TypeVar("T_HookReturn")
+
+
+class StartAbleApplication:
def __init__(
self,
broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ /,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
+ ) -> None:
+ self._init_setupable_(
+ broker,
+ provider=provider,
+ serializer=serializer,
+ )
+
+ def _init_setupable_( # noqa: PLW3201
+ self,
+ broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ /,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
+ ) -> None:
+ self.context = ContextRepo()
+ self.provider = provider or Provider()
+
+ if serializer is EMPTY:
+ from fast_depends.pydantic.serializer import PydanticSerializer
+
+ serializer = PydanticSerializer()
+
+ self._state: ApplicationState = BasicApplicationState(
+ di_state=DIState(
+ use_fastdepends=True,
+ get_dependent=None,
+ call_decorators=(),
+ serializer=serializer,
+ provider=self.provider,
+ context=self.context,
+ )
+ )
+
+ self.brokers = [broker] if broker else []
+
+ self._setup()
+
+ def _setup(self) -> None:
+ for broker in self.brokers:
+ broker._setup(OuterBrokerState(di_state=self._state.di_state))
+
+ async def _start_broker(self) -> None:
+ assert self.broker, "You should set up a broker"
+ await self.broker.start()
+
+ @property
+ def broker(self) -> Optional["BrokerUsecase[Any, Any]"]:
+ return self.brokers[0] if self.brokers else None
+
+ def set_broker(self, broker: "BrokerUsecase[Any, Any]") -> None:
+ """Set already existed App object broker.
+
+ Useful when you create/init the broker in an `on_startup` hook.
+ """
+ if self.brokers:
+ msg = f"`{self}` already has a broker. You can't use multiple brokers until 1.0.0 release."
+ raise SetupError(msg)
+
+ self.brokers.append(broker)
+ self._setup()
+
+
+class Application(StartAbleApplication):
+ def __init__(
+ self,
+ broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ /,
logger: Optional["LoggerProto"] = logger,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
lifespan: Optional["Lifespan"] = None,
- # AsyncAPI args,
- title: str = "FastStream",
- version: str = "0.1.0",
- description: str = "",
- terms_of_service: Optional["AnyHttpUrl"] = None,
- license: Optional[Union["License", "LicenseDict", "AnyDict"]] = None,
- contact: Optional[Union["Contact", "ContactDict", "AnyDict"]] = None,
- tags: Optional[Sequence[Union["Tag", "TagDict", "AnyDict"]]] = None,
- external_docs: Optional[
- Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]
- ] = None,
- identifier: Optional[str] = None,
- on_startup: Sequence[Callable[P_HookParams, T_HookReturn]] = (),
- after_startup: Sequence[Callable[P_HookParams, T_HookReturn]] = (),
- on_shutdown: Sequence[Callable[P_HookParams, T_HookReturn]] = (),
- after_shutdown: Sequence[Callable[P_HookParams, T_HookReturn]] = (),
+ on_startup: Sequence["AnyCallable"] = (),
+ after_startup: Sequence["AnyCallable"] = (),
+ on_shutdown: Sequence["AnyCallable"] = (),
+ after_shutdown: Sequence["AnyCallable"] = (),
) -> None:
- context.set_global("app", self)
+ super().__init__(
+ broker,
+ provider=provider,
+ serializer=serializer,
+ )
+
+ self.context.set_global("app", self)
- self._should_exit = False
- self.broker = broker
self.logger = logger
- self.context = context
- self._on_startup_calling: List[AsyncFunc] = [
- apply_types(to_async(x)) for x in on_startup
+ self._on_startup_calling: list[AsyncFunc] = [
+ apply_types(to_async(x), context__=self.context) for x in on_startup
]
- self._after_startup_calling: List[AsyncFunc] = [
- apply_types(to_async(x)) for x in after_startup
+ self._after_startup_calling: list[AsyncFunc] = [
+ apply_types(to_async(x), context__=self.context) for x in after_startup
]
- self._on_shutdown_calling: List[AsyncFunc] = [
- apply_types(to_async(x)) for x in on_shutdown
+ self._on_shutdown_calling: list[AsyncFunc] = [
+ apply_types(to_async(x), context__=self.context) for x in on_shutdown
]
- self._after_shutdown_calling: List[AsyncFunc] = [
- apply_types(to_async(x)) for x in after_shutdown
+ self._after_shutdown_calling: list[AsyncFunc] = [
+ apply_types(to_async(x), context__=self.context) for x in after_shutdown
]
if lifespan is not None:
self.lifespan_context = apply_types(
- func=lifespan, wrap_model=drop_response_type
+ func=lifespan,
+ wrap_model=drop_response_type,
+ context__=self.context,
)
else:
self.lifespan_context = fake_context
- # AsyncAPI information
- self.title = title
- self.version = version
- self.description = description
- self.terms_of_service = terms_of_service
- self.license = license
- self.contact = contact
- self.identifier = identifier
- self.asyncapi_tags = tags
- self.external_docs = external_docs
+ @property
+ def running(self) -> bool:
+ return self._state.running
+
+ @abstractmethod
+ def exit(self) -> None:
+ """Stop application manually."""
+ ...
@abstractmethod
async def run(
self,
log_level: int,
- run_extra_options: Optional[Dict[str, "SettingField"]] = None,
- sleep_time: float = 0.1,
+ run_extra_options: Optional[dict[str, "SettingField"]] = None,
) -> None: ...
- def set_broker(self, broker: "BrokerUsecase[Any, Any]") -> None:
- """Set already existed App object broker.
+ # Startup
- Useful then you create/init broker in `on_startup` hook.
- """
- self.broker = broker
-
- def on_startup(
+ async def _startup(
self,
- func: Callable[P_HookParams, T_HookReturn],
- ) -> Callable[P_HookParams, T_HookReturn]:
- """Add hook running BEFORE broker connected.
+ log_level: int = logging.INFO,
+ run_extra_options: Optional[dict[str, "SettingField"]] = None,
+ ) -> None:
+ """Private method calls `start` with logging."""
+ async with self._startup_logging(log_level=log_level):
+ await self.start(**(run_extra_options or {}))
- This hook also takes an extra CLI options as a kwargs.
- """
- self._on_startup_calling.append(apply_types(to_async(func)))
- return func
+ self._state = RunningApplicationState(di_state=self._state.di_state)
- def on_shutdown(
+ async def start(
self,
- func: Callable[P_HookParams, T_HookReturn],
- ) -> Callable[P_HookParams, T_HookReturn]:
- """Add hook running BEFORE broker disconnected."""
- self._on_shutdown_calling.append(apply_types(to_async(func)))
- return func
+ **run_extra_options: "SettingField",
+ ) -> None:
+ """Executes startup hooks and start broker."""
+ async with self._start_hooks_context(**run_extra_options):
+ await self._start_broker()
- def after_startup(
+ @asynccontextmanager
+ async def _start_hooks_context(
self,
- func: Callable[P_HookParams, T_HookReturn],
- ) -> Callable[P_HookParams, T_HookReturn]:
- """Add hook running AFTER broker connected."""
- self._after_startup_calling.append(apply_types(to_async(func)))
- return func
+ **run_extra_options: "SettingField",
+ ) -> AsyncIterator[None]:
+ async with catch_startup_validation_error():
+ for func in self._on_startup_calling:
+ await func(**run_extra_options)
- def after_shutdown(
+ yield
+
+ for func in self._after_startup_calling:
+ await func()
+
+ @asynccontextmanager
+ async def _startup_logging(
self,
- func: Callable[P_HookParams, T_HookReturn],
- ) -> Callable[P_HookParams, T_HookReturn]:
- """Add hook running AFTER broker disconnected."""
- self._after_shutdown_calling.append(apply_types(to_async(func)))
- return func
+ log_level: int = logging.INFO,
+ ) -> AsyncIterator[None]:
+ """Separated startup logging."""
+ self._log(
+ log_level,
+ "FastStream app starting...",
+ )
- def exit(self) -> None:
- """Stop application manually."""
- self._should_exit = True
+ yield
- async def _main_loop(self, sleep_time: float) -> None:
- """Run loop till exit signal."""
- while not self._should_exit: # noqa: ASYNC110 (requested by creator)
- await anyio.sleep(sleep_time)
+ self._log(
+ log_level,
+ "FastStream app started successfully! To exit, press CTRL+C",
+ )
- async def start(
- self,
- **run_extra_options: "SettingField",
- ) -> None:
- """Executes startup hooks and start broker."""
- for func in self._on_startup_calling:
- await func(**run_extra_options)
+ # Shutdown
- if self.broker is not None:
- await self.broker.start()
+ async def _shutdown(self, log_level: int = logging.INFO) -> None:
+ """Private method calls `stop` with logging."""
+ async with self._shutdown_logging(log_level=log_level):
+ await self.stop()
- for func in self._after_startup_calling:
- await func()
+ self._state = BasicApplicationState(di_state=self._state.di_state)
async def stop(self) -> None:
"""Executes shutdown hooks and stop broker."""
+ async with self._shutdown_hooks_context():
+ for broker in self.brokers:
+ await broker.close()
+
+ @asynccontextmanager
+ async def _shutdown_hooks_context(self) -> AsyncIterator[None]:
for func in self._on_shutdown_calling:
await func()
- if self.broker is not None:
- await self.broker.close()
+ yield
for func in self._after_shutdown_calling:
await func()
- async def _startup(
+ @asynccontextmanager
+ async def _shutdown_logging(
self,
log_level: int = logging.INFO,
- run_extra_options: Optional[Dict[str, "SettingField"]] = None,
- ) -> None:
- self._log(log_level, "FastStream app starting...")
- await self.start(**(run_extra_options or {}))
+ ) -> AsyncIterator[None]:
+ """Separated startup logging."""
self._log(
- log_level, "FastStream app started successfully! To exit, press CTRL+C"
+ log_level,
+ "FastStream app shutting down...",
)
- async def _shutdown(self, log_level: int = logging.INFO) -> None:
- self._log(log_level, "FastStream app shutting down...")
- await self.stop()
- self._log(log_level, "FastStream app shut down gracefully.")
+ yield
+
+ self._log(
+ log_level,
+ "FastStream app shut down gracefully.",
+ )
+
+ # Service methods
def _log(self, level: int, message: str) -> None:
if self.logger is not None:
self.logger.log(level, message)
+
+ # Hooks
+
+ def on_startup(
+ self,
+ func: Callable[P_HookParams, T_HookReturn],
+ ) -> Callable[P_HookParams, T_HookReturn]:
+ """Add hook running BEFORE broker connected.
+
+ This hook also receives extra CLI options as kwargs.
+ """
+ self._on_startup_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
+ return func
+
+ def on_shutdown(
+ self,
+ func: Callable[P_HookParams, T_HookReturn],
+ ) -> Callable[P_HookParams, T_HookReturn]:
+ """Add hook running BEFORE broker disconnected."""
+ self._on_shutdown_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
+ return func
+
+ def after_startup(
+ self,
+ func: Callable[P_HookParams, T_HookReturn],
+ ) -> Callable[P_HookParams, T_HookReturn]:
+ """Add hook running AFTER broker connected."""
+ self._after_startup_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
+ return func
+
+ def after_shutdown(
+ self,
+ func: Callable[P_HookParams, T_HookReturn],
+ ) -> Callable[P_HookParams, T_HookReturn]:
+ """Add hook running AFTER broker disconnected."""
+ self._after_shutdown_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
+ return func
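The rewrite splits startup and shutdown into paired async context managers (`_start_hooks_context` plus `_startup_logging`, and their shutdown twins) and threads the app-local `ContextRepo` into every hook through `apply_types(..., context__=self.context)`. The hook API visible to users stays the same; a minimal sketch:

    from faststream import ContextRepo, FastStream
    from faststream.kafka import KafkaBroker

    broker = KafkaBroker("localhost:9092")
    app = FastStream(broker)

    @app.on_startup
    async def setup(context: ContextRepo, env: str = "dev"):
        # on_startup hooks still receive extra CLI options as kwargs
        context.set_global("env", env)

    @app.after_shutdown
    async def report() -> None:
        print("app shut down gracefully")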
diff --git a/faststream/_internal/basic_types.py b/faststream/_internal/basic_types.py
new file mode 100644
index 0000000000..e844171150
--- /dev/null
+++ b/faststream/_internal/basic_types.py
@@ -0,0 +1,105 @@
+from collections.abc import Awaitable, Mapping, Sequence
+from contextlib import AbstractAsyncContextManager
+from datetime import datetime
+from decimal import Decimal
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Optional,
+ Protocol,
+ TypeVar,
+ Union,
+)
+
+from typing_extensions import ParamSpec, TypeAlias
+
+AnyDict: TypeAlias = dict[str, Any]
+AnyHttpUrl: TypeAlias = str
+
+F_Return = TypeVar("F_Return")
+F_Spec = ParamSpec("F_Spec")
+
+AnyCallable: TypeAlias = Callable[..., Any]
+NoneCallable: TypeAlias = Callable[..., None]
+AsyncFunc: TypeAlias = Callable[..., Awaitable[Any]]
+AsyncFuncAny: TypeAlias = Callable[[Any], Awaitable[Any]]
+
+DecoratedCallable: TypeAlias = AnyCallable
+DecoratedCallableNone: TypeAlias = NoneCallable
+
+Decorator: TypeAlias = Callable[[AnyCallable], AnyCallable]
+
+JsonArray: TypeAlias = Sequence["DecodedMessage"]
+
+JsonTable: TypeAlias = dict[str, "DecodedMessage"]
+
+JsonDecodable: TypeAlias = Union[
+ bool,
+ bytes,
+ bytearray,
+ float,
+ int,
+ str,
+ None,
+]
+
+DecodedMessage: TypeAlias = Union[
+ JsonDecodable,
+ JsonArray,
+ JsonTable,
+]
+
+SendableArray: TypeAlias = Sequence["BaseSendableMessage"]
+
+SendableTable: TypeAlias = dict[str, "BaseSendableMessage"]
+
+
+class StandardDataclass(Protocol):
+ """Protocol to check type is dataclass."""
+
+ __dataclass_fields__: ClassVar[AnyDict]
+
+
+BaseSendableMessage: TypeAlias = Union[
+ JsonDecodable,
+ Decimal,
+ datetime,
+ StandardDataclass,
+ SendableTable,
+ SendableArray,
+ None,
+]
+
+try:
+ from faststream._internal._compat import BaseModel
+
+ SendableMessage: TypeAlias = Union[
+ BaseModel,
+ BaseSendableMessage,
+ ]
+
+except ImportError:
+ SendableMessage: TypeAlias = BaseSendableMessage # type: ignore[no-redef,misc]
+
+SettingField: TypeAlias = Union[
+ bool,
+ str,
+ list[Union[bool, str]],
+ list[str],
+ list[bool],
+]
+
+Lifespan: TypeAlias = Callable[..., AbstractAsyncContextManager[None]]
+
+
+class LoggerProto(Protocol):
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None: ...
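`LoggerProto` is a structural Protocol: any object exposing a positional-only `log(level, msg, /, *, exc_info=None, extra=None)` satisfies it without inheritance, so stdlib loggers and custom sinks plug in alike. A minimal sketch:

    import logging
    from collections.abc import Mapping
    from typing import Any, Optional

    from faststream._internal.basic_types import LoggerProto

    class PrintLogger:
        def log(
            self,
            level: int,
            msg: Any,
            /,
            *,
            exc_info: Any = None,
            extra: Optional[Mapping[str, Any]] = None,
        ) -> None:
            print(logging.getLevelName(level), msg)

    logger: LoggerProto = PrintLogger()  # accepted structurally, no subclassing
    logger.log(logging.INFO, "hello")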
diff --git a/faststream/broker/__init__.py b/faststream/_internal/broker/__init__.py
similarity index 100%
rename from faststream/broker/__init__.py
rename to faststream/_internal/broker/__init__.py
diff --git a/faststream/_internal/broker/abc_broker.py b/faststream/_internal/broker/abc_broker.py
new file mode 100644
index 0000000000..bd3a993e7a
--- /dev/null
+++ b/faststream/_internal/broker/abc_broker.py
@@ -0,0 +1,176 @@
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Generic,
+ Optional,
+)
+
+from faststream._internal.publisher.proto import PublisherProto
+from faststream._internal.state import BrokerState, Pointer
+from faststream._internal.subscriber.proto import SubscriberProto
+from faststream._internal.types import BrokerMiddleware, CustomCallable, MsgType
+from faststream.specification.proto import EndpointSpecification
+from faststream.specification.schema import PublisherSpec, SubscriberSpec
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+
+class FinalSubscriber(
+ EndpointSpecification[MsgType, SubscriberSpec],
+ SubscriberProto[MsgType],
+):
+ @property
+ @abstractmethod
+ def call_name(self) -> str:
+ raise NotImplementedError
+
+
+class FinalPublisher(
+ EndpointSpecification[MsgType, PublisherSpec],
+ PublisherProto[MsgType],
+):
+ pass
+
+
+class ABCBroker(Generic[MsgType]):
+ _subscribers: list[FinalSubscriber[MsgType]]
+ _publishers: list[FinalPublisher[MsgType]]
+
+ def __init__(
+ self,
+ *,
+ prefix: str,
+ dependencies: Iterable["Dependant"],
+ middlewares: Sequence["BrokerMiddleware[MsgType]"],
+ parser: Optional["CustomCallable"],
+ decoder: Optional["CustomCallable"],
+ include_in_schema: Optional[bool],
+ state: "BrokerState",
+ routers: Sequence["ABCBroker[MsgType]"],
+ ) -> None:
+ self.prefix = prefix
+ self.include_in_schema = include_in_schema
+
+ self._subscribers = []
+ self._publishers = []
+
+ self._dependencies = dependencies
+ self.middlewares = middlewares
+ self._parser = parser
+ self._decoder = decoder
+
+ self._state = Pointer(state)
+
+ self.include_routers(*routers)
+
+ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
+ """Append BrokerMiddleware to the end of middlewares list.
+
+ Current middleware will be used as a most inner of already existed ones.
+ """
+ self.middlewares = (*self.middlewares, middleware)
+
+ for sub in self._subscribers:
+ sub.add_middleware(middleware)
+
+ for pub in self._publishers:
+ pub.add_middleware(middleware)
+
+ @abstractmethod
+ def subscriber(
+ self,
+ subscriber: "FinalSubscriber[MsgType]",
+ is_running: bool = False,
+ ) -> "FinalSubscriber[MsgType]":
+ subscriber.add_prefix(self.prefix)
+ if not is_running:
+ self._subscribers.append(subscriber)
+ return subscriber
+
+ @abstractmethod
+ def publisher(
+ self,
+ publisher: "FinalPublisher[MsgType]",
+ is_running: bool = False,
+ ) -> "FinalPublisher[MsgType]":
+ publisher.add_prefix(self.prefix)
+ if not is_running:
+ self._publishers.append(publisher)
+ return publisher
+
+ def setup_publisher(
+ self,
+ publisher: "FinalPublisher[MsgType]",
+ **kwargs: Any,
+ ) -> None:
+ """Setup the Publisher to prepare it to starting."""
+ publisher._setup(**kwargs, state=self._state)
+
+ def _setup(self, state: Optional["BrokerState"]) -> None:
+ if state is not None:
+ self._state.set(state)
+
+ def include_router(
+ self,
+ router: "ABCBroker[Any]",
+ *,
+ prefix: str = "",
+ dependencies: Iterable["Dependant"] = (),
+ middlewares: Iterable["BrokerMiddleware[MsgType]"] = (),
+ include_in_schema: Optional[bool] = None,
+ ) -> None:
+ """Includes a router in the current object."""
+ router._setup(self._state.get())
+
+ for h in router._subscribers:
+ h.add_prefix(f"{self.prefix}{prefix}")
+
+ if include_in_schema is None:
+ h.include_in_schema = self._solve_include_in_schema(h.include_in_schema)
+ else:
+ h.include_in_schema = include_in_schema
+
+ h._broker_middlewares = (
+ *self.middlewares,
+ *middlewares,
+ *h._broker_middlewares,
+ )
+ h._broker_dependencies = (
+ *self._dependencies,
+ *dependencies,
+ *h._broker_dependencies,
+ )
+ self._subscribers.append(h)
+
+ for p in router._publishers:
+ p.add_prefix(self.prefix)
+
+ if include_in_schema is None:
+ p.include_in_schema = self._solve_include_in_schema(p.include_in_schema)
+ else:
+ p.include_in_schema = include_in_schema
+
+ p._broker_middlewares = (
+ *self.middlewares,
+ *middlewares,
+ *p._broker_middlewares,
+ )
+ self._publishers.append(p)
+
+ def include_routers(
+ self,
+ *routers: "ABCBroker[MsgType]",
+ ) -> None:
+ """Includes routers in the object."""
+ for r in routers:
+ self.include_router(r)
+
+ def _solve_include_in_schema(self, include_in_schema: bool) -> bool:
+ # should be `is False` to pass `None` case
+ if self.include_in_schema is False:
+ return False
+
+ return include_in_schema
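`include_router` stamps the parent's prefix, middlewares, and dependencies onto each of the router's subscribers and publishers at inclusion time, and lets `include_in_schema` be overridden per inclusion. Caller-side usage keeps the familiar shape; a sketch assuming the public KafkaRouter:

    from faststream import FastStream
    from faststream.kafka import KafkaBroker, KafkaRouter

    router = KafkaRouter()

    @router.subscriber("events")
    async def on_event(msg: str) -> None: ...

    broker = KafkaBroker("localhost:9092")
    # the router's topics are re-registered as "service.events"
    broker.include_router(router, prefix="service.")

    app = FastStream(broker)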
diff --git a/faststream/_internal/broker/broker.py b/faststream/_internal/broker/broker.py
new file mode 100644
index 0000000000..c68ae0aa82
--- /dev/null
+++ b/faststream/_internal/broker/broker.py
@@ -0,0 +1,335 @@
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Any,
+ Callable,
+ Generic,
+ Optional,
+ Union,
+ cast,
+)
+
+from fast_depends import Provider
+from typing_extensions import Doc, Self
+
+from faststream._internal.constants import EMPTY
+from faststream._internal.context.repository import ContextRepo
+from faststream._internal.state import (
+ DIState,
+ LoggerState,
+ SetupAble,
+)
+from faststream._internal.state.broker import (
+ BrokerState,
+ InitialBrokerState,
+)
+from faststream._internal.state.producer import ProducerUnset
+from faststream._internal.subscriber.proto import SubscriberProto
+from faststream._internal.types import (
+ AsyncCustomCallable,
+ BrokerMiddleware,
+ ConnectionType,
+ CustomCallable,
+ MsgType,
+)
+from faststream._internal.utils.functions import to_async
+from faststream.specification.proto import ServerSpecification
+
+from .abc_broker import ABCBroker, FinalPublisher
+from .pub_base import BrokerPublishMixin
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
+
+ from faststream._internal.basic_types import AnyDict, Decorator
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.security import BaseSecurity
+ from faststream.specification.schema.extra import Tag, TagDict
+
+
+class BrokerUsecase(
+ ABCBroker[MsgType],
+ SetupAble,
+ ServerSpecification,
+ BrokerPublishMixin[MsgType],
+ Generic[MsgType, ConnectionType],
+):
+ """A class representing a broker async use case."""
+
+ url: Union[str, list[str]]
+ _connection: Optional[ConnectionType]
+ middlewares: Sequence["BrokerMiddleware[MsgType]"]
+
+ def __init__(
+ self,
+ *,
+ decoder: Annotated[
+ Optional["CustomCallable"],
+ Doc("Custom decoder object."),
+ ],
+ parser: Annotated[
+ Optional["CustomCallable"],
+ Doc("Custom parser object."),
+ ],
+ dependencies: Annotated[
+ Iterable["Dependant"],
+ Doc("Dependencies to apply to all broker subscribers."),
+ ],
+ middlewares: Annotated[
+ Sequence["BrokerMiddleware[MsgType]"],
+ Doc("Middlewares to apply to all broker publishers/subscribers."),
+ ],
+ graceful_timeout: Annotated[
+ Optional[float],
+ Doc(
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
+ ),
+ ],
+ routers: Annotated[
+ Sequence["ABCBroker[MsgType]"],
+ Doc("Routers to apply to broker."),
+ ],
+ # Logging args
+ logger_state: LoggerState,
+ # FastDepends args
+ apply_types: Annotated[
+ bool,
+ Doc("Whether to use FastDepends or not."),
+ ],
+ serializer: Optional["SerializerProto"] = EMPTY,
+ _get_dependant: Annotated[
+ Optional[Callable[..., Any]],
+ Doc("Custom library dependant generator callback."),
+ ],
+ _call_decorators: Annotated[
+ Sequence["Decorator"],
+ Doc("Any custom decorator to apply to wrapped functions."),
+ ],
+ # AsyncAPI kwargs
+ protocol: Annotated[
+ Optional[str],
+ Doc("AsyncAPI server protocol."),
+ ],
+ protocol_version: Annotated[
+ Optional[str],
+ Doc("AsyncAPI server protocol version."),
+ ],
+ description: Annotated[
+ Optional[str],
+ Doc("AsyncAPI server description."),
+ ],
+ tags: Annotated[
+ Iterable[Union["Tag", "TagDict"]],
+ Doc("AsyncAPI server tags."),
+ ],
+ specification_url: Annotated[
+ Union[str, list[str]],
+ Doc("AsyncAPI hardcoded server addresses."),
+ ],
+ security: Annotated[
+ Optional["BaseSecurity"],
+ Doc(
+ "Security options to connect broker and generate AsyncAPI server security.",
+ ),
+ ],
+ **connection_kwargs: Any,
+ ) -> None:
+ state = InitialBrokerState(
+ di_state=DIState(
+ use_fastdepends=apply_types,
+ get_dependent=_get_dependant,
+ call_decorators=_call_decorators,
+ serializer=serializer,
+ provider=Provider(),
+ context=ContextRepo(),
+ ),
+ logger_state=logger_state,
+ graceful_timeout=graceful_timeout,
+ producer=ProducerUnset(),
+ )
+
+ super().__init__(
+ middlewares=middlewares,
+ dependencies=dependencies,
+ decoder=cast(
+ "Optional[AsyncCustomCallable]",
+ to_async(decoder) if decoder else None,
+ ),
+ parser=cast(
+ "Optional[AsyncCustomCallable]",
+ to_async(parser) if parser else None,
+ ),
+ routers=routers,
+ # Broker is a root router
+ include_in_schema=True,
+ prefix="",
+ state=state,
+ )
+
+ self.running = False
+
+ self._connection_kwargs = connection_kwargs
+ self._connection = None
+
+ # AsyncAPI information
+ self.url = specification_url
+ self.protocol = protocol
+ self.protocol_version = protocol_version
+ self.description = description
+ self.tags = tags
+ self.security = security
+
+ @property
+ def _producer(self) -> "ProducerProto":
+ return self._state.get().producer
+
+ @property
+ def context(self) -> "ContextRepo":
+ return self._state.get().di_state.context
+
+ @property
+ def provider(self) -> Provider:
+ return self._state.get().di_state.provider
+
+ async def __aenter__(self) -> "Self":
+ await self.connect()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional["TracebackType"],
+ ) -> None:
+ await self.close(exc_type, exc_val, exc_tb)
+
+ @abstractmethod
+ async def start(self) -> None:
+ """Start the broker async use case."""
+ # TODO: filter by already running handlers after TestClient refactor
+ state = self._state.get()
+
+ for subscriber in self._subscribers:
+ log_context = subscriber.get_log_context(None)
+ log_context.pop("message_id", None)
+ state.logger_state.params_storage.setup_log_contest(log_context)
+
+ state._setup_logger_state()
+
+ for subscriber in self._subscribers:
+ state.logger_state.log(
+ f"`{subscriber.call_name}` waiting for messages",
+ extra=subscriber.get_log_context(None),
+ )
+ await subscriber.start()
+
+ if not self.running:
+ self.running = True
+
+ async def connect(self, **kwargs: Any) -> ConnectionType:
+ """Connect to a remote server."""
+ if self._connection is None:
+ connection_kwargs = self._connection_kwargs.copy()
+ connection_kwargs.update(kwargs)
+ self._connection = await self._connect(**connection_kwargs)
+
+ return self._connection
+
+ @abstractmethod
+ async def _connect(self) -> ConnectionType:
+ """Connect to a resource."""
+ raise NotImplementedError
+
+ def _setup(self, state: Optional["BrokerState"] = None) -> None:
+ """Prepare all Broker entities to startup.
+
+ Method should be idempotent due could be called twice
+ """
+ broker_state = self._state.get()
+ current_di_state = broker_state.di_state
+ broker_serializer = current_di_state.serializer
+
+ if state is not None:
+ di_state = state.di_state
+
+ if broker_serializer is EMPTY:
+ broker_serializer = di_state.serializer
+
+ current_di_state.update(
+ serializer=broker_serializer,
+ provider=di_state.provider,
+ context=di_state.context,
+ )
+
+ else:
+            # Fall back to the default state if there is no
+            # parent container such as a FastStream object
+ if broker_serializer is EMPTY:
+ from fast_depends.pydantic import PydanticSerializer
+
+ broker_serializer = PydanticSerializer()
+
+ current_di_state.update(
+ serializer=broker_serializer,
+ )
+
+ broker_state._setup()
+
+ # TODO: move setup to object creation
+ for h in self._subscribers:
+ self.setup_subscriber(h)
+
+ for p in self._publishers:
+ self.setup_publisher(p)
+
+ def setup_subscriber(
+ self,
+ subscriber: SubscriberProto[MsgType],
+ **kwargs: Any,
+ ) -> None:
+ """Setup the Subscriber to prepare it to starting."""
+ data = self._subscriber_setup_extra.copy()
+ data.update(kwargs)
+ subscriber._setup(**data, state=self._state)
+
+ @property
+ def _subscriber_setup_extra(self) -> "AnyDict":
+ return {
+ "extra_context": {
+ "broker": self,
+ },
+ # broker options
+ "broker_parser": self._parser,
+ "broker_decoder": self._decoder,
+ }
+
+ def publisher(
+ self,
+ publisher: "FinalPublisher[MsgType]",
+ is_running: bool = False,
+ ) -> "FinalPublisher[MsgType]":
+ pub = super().publisher(publisher, is_running=self.running)
+ self.setup_publisher(pub)
+ return pub
+
+ async def close(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> None:
+ """Closes the object."""
+ for h in self._subscribers:
+ await h.close()
+
+ self.running = False
+
+ @abstractmethod
+ async def ping(self, timeout: Optional[float]) -> bool:
+ """Check connection alive."""
+ raise NotImplementedError
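The `__aenter__`/`__aexit__` pair above lets any concrete broker act as an async context manager: entering connects, exiting closes, and `connect()` stays idempotent. A standalone sketch of that contract (toy class, not the real BrokerUsecase):

    import asyncio

    class ToyBroker:
        def __init__(self):
            self._connection = None

        async def connect(self):
            if self._connection is None:  # idempotent, like connect() above
                self._connection = object()
            return self._connection

        async def close(self, *exc_info):
            self._connection = None

        async def __aenter__(self):
            await self.connect()
            return self

        async def __aexit__(self, exc_type, exc_val, exc_tb):
            await self.close(exc_type, exc_val, exc_tb)

    async def main():
        async with ToyBroker() as broker:
            assert broker._connection is not None

    asyncio.run(main())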
diff --git a/faststream/_internal/broker/pub_base.py b/faststream/_internal/broker/pub_base.py
new file mode 100644
index 0000000000..d4d8563f57
--- /dev/null
+++ b/faststream/_internal/broker/pub_base.py
@@ -0,0 +1,101 @@
+from abc import abstractmethod
+from collections.abc import Sequence
+from functools import partial
+from typing import TYPE_CHECKING, Any, Generic
+
+from faststream._internal.subscriber.utils import process_msg
+from faststream._internal.types import MsgType
+from faststream.message.source_type import SourceType
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.context import ContextRepo
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.response import PublishCommand
+
+
+class BrokerPublishMixin(Generic[MsgType]):
+ middlewares: Sequence["BrokerMiddleware[MsgType]"]
+
+ @property
+ @abstractmethod
+ def context(self) -> "ContextRepo":
+ raise NotImplementedError
+
+ @abstractmethod
+ async def publish(
+ self,
+ message: "SendableMessage",
+ queue: str,
+ /,
+ ) -> Any:
+ raise NotImplementedError
+
+ async def _basic_publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ producer: "ProducerProto",
+ ) -> Any:
+ publish = producer.publish
+ context = self.context # caches property
+
+ for m in self.middlewares[::-1]:
+ publish = partial(m(None, context=context).publish_scope, publish)
+
+ return await publish(cmd)
+
+ async def publish_batch(
+ self,
+ *messages: "SendableMessage",
+ queue: str,
+ ) -> Any:
+ raise NotImplementedError
+
+ async def _basic_publish_batch(
+ self,
+ cmd: "PublishCommand",
+ *,
+ producer: "ProducerProto",
+ ) -> Any:
+ publish = producer.publish_batch
+ context = self.context # caches property
+
+ for m in self.middlewares[::-1]:
+ publish = partial(m(None, context=context).publish_scope, publish)
+
+ return await publish(cmd)
+
+ @abstractmethod
+ async def request(
+ self,
+ message: "SendableMessage",
+ queue: str,
+ /,
+ timeout: float = 0.5,
+ ) -> Any:
+ raise NotImplementedError
+
+ async def _basic_request(
+ self,
+ cmd: "PublishCommand",
+ *,
+ producer: "ProducerProto",
+ ) -> Any:
+ request = producer.request
+ context = self.context # caches property
+
+ for m in self.middlewares[::-1]:
+ request = partial(m(None, context=context).publish_scope, request)
+
+ published_msg = await request(cmd)
+
+ response_msg: Any = await process_msg(
+ msg=published_msg,
+ middlewares=(m(published_msg, context=context) for m in self.middlewares),
+ parser=producer._parser,
+ decoder=producer._decoder,
+ source_type=SourceType.RESPONSE,
+ )
+ return response_msg
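The wrapping loops above all share one property: iterating `self.middlewares[::-1]` with functools.partial makes the first middleware in the list the outermost wrapper around the producer call. A standalone sketch (plain coroutines instead of FastStream's middleware protocol):

    import asyncio
    from functools import partial

    def make_middleware(name):
        async def publish_scope(call_next, cmd):
            print(f"enter {name}")
            try:
                return await call_next(cmd)
            finally:
                print(f"exit {name}")
        return publish_scope

    async def producer_publish(cmd):
        print(f"publish {cmd!r}")

    async def main():
        publish = producer_publish
        for m in [make_middleware("first"), make_middleware("second")][::-1]:
            publish = partial(m, publish)
        await publish("hello")
        # enter first -> enter second -> publish 'hello' -> exit second -> exit first

    asyncio.run(main())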
diff --git a/faststream/_internal/broker/router.py b/faststream/_internal/broker/router.py
new file mode 100644
index 0000000000..7c0c8c2410
--- /dev/null
+++ b/faststream/_internal/broker/router.py
@@ -0,0 +1,96 @@
+from collections.abc import Iterable, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+)
+
+from faststream._internal.state.broker import EmptyBrokerState
+from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ MsgType,
+)
+
+from .abc_broker import ABCBroker
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AnyDict
+
+
+class ArgsContainer:
+ """Class to store any arguments."""
+
+ __slots__ = ("args", "kwargs")
+
+ args: Iterable[Any]
+ kwargs: "AnyDict"
+
+ def __init__(
+ self,
+ *args: Any,
+ **kwargs: Any,
+ ) -> None:
+ self.args = args
+ self.kwargs = kwargs
+
+
+class SubscriberRoute(ArgsContainer):
+ """A generic class to represent a broker route."""
+
+ __slots__ = ("args", "call", "kwargs", "publishers")
+
+ call: Callable[..., Any]
+ publishers: Iterable[Any]
+
+ def __init__(
+ self,
+ call: Callable[..., Any],
+ *args: Any,
+ publishers: Iterable[ArgsContainer] = (),
+ **kwargs: Any,
+ ) -> None:
+ """Initialize a callable object with arguments and keyword arguments."""
+ self.call = call
+ self.publishers = publishers
+
+ super().__init__(*args, **kwargs)
+
+
+class BrokerRouter(ABCBroker[MsgType]):
+ """A generic class representing a broker router."""
+
+ def __init__(
+ self,
+ *,
+ handlers: Iterable[SubscriberRoute],
+ # base options
+ prefix: str,
+ dependencies: Iterable["Dependant"],
+ middlewares: Sequence["BrokerMiddleware[MsgType]"],
+ parser: Optional["CustomCallable"],
+ decoder: Optional["CustomCallable"],
+ include_in_schema: Optional[bool],
+ routers: Sequence["ABCBroker[MsgType]"],
+ ) -> None:
+ super().__init__(
+ prefix=prefix,
+ dependencies=dependencies,
+ middlewares=middlewares,
+ parser=parser,
+ decoder=decoder,
+ include_in_schema=include_in_schema,
+            state=EmptyBrokerState("You should include the router in a broker."),
+ routers=routers,
+ )
+
+ for h in handlers:
+ call = h.call
+
+ for p in h.publishers:
+ call = self.publisher(*p.args, **p.kwargs)(call)
+
+ self.subscriber(*h.args, **h.kwargs)(call)
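Concrete routers build on this loop: each SubscriberRoute bundles a callable with subscriber arguments, and any attached publishers are applied as decorators before the subscriber is registered. A hedged usage sketch against the public API (assumes the Kafka extra is installed; KafkaRoute/KafkaRouter mirror SubscriberRoute/BrokerRouter):

    from faststream.kafka import KafkaBroker, KafkaRoute, KafkaRouter

    async def handle_user(msg: str) -> None:
        print(msg)

    router = KafkaRouter(
        prefix="app.",
        handlers=(KafkaRoute(handle_user, "users"),),  # subscribes to "app.users"
    )

    broker = KafkaBroker()
    broker.include_router(router)  # the EmptyBrokerState is replaced here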
diff --git a/faststream/broker/core/__init__.py b/faststream/_internal/cli/__init__.py
similarity index 100%
rename from faststream/broker/core/__init__.py
rename to faststream/_internal/cli/__init__.py
diff --git a/faststream/_internal/cli/docs/__init__.py b/faststream/_internal/cli/docs/__init__.py
new file mode 100644
index 0000000000..fd1ad7c661
--- /dev/null
+++ b/faststream/_internal/cli/docs/__init__.py
@@ -0,0 +1,3 @@
+from .app import docs_app
+
+__all__ = ("docs_app",)
diff --git a/faststream/_internal/cli/docs/app.py b/faststream/_internal/cli/docs/app.py
new file mode 100644
index 0000000000..bbefed8f99
--- /dev/null
+++ b/faststream/_internal/cli/docs/app.py
@@ -0,0 +1,212 @@
+import json
+import sys
+import warnings
+from contextlib import suppress
+from pathlib import Path
+from typing import TYPE_CHECKING, Optional
+
+import typer
+from pydantic import ValidationError
+
+from faststream._internal._compat import json_dumps, model_parse
+from faststream._internal.cli.utils.imports import import_from_string
+from faststream.exceptions import INSTALL_WATCHFILES, INSTALL_YAML, SCHEMA_NOT_SUPPORTED
+from faststream.specification.asyncapi.site import serve_app
+from faststream.specification.asyncapi.v2_6_0.schema import (
+ ApplicationSchema as SchemaV2_6,
+)
+from faststream.specification.asyncapi.v3_0_0.schema import (
+ ApplicationSchema as SchemaV3,
+)
+from faststream.specification.base.specification import Specification
+
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+
+docs_app = typer.Typer(pretty_exceptions_short=True)
+
+
+@docs_app.command(name="serve")
+def serve(
+ docs: str = typer.Argument(
+ ...,
+ help="[python_module:Specification] or [asyncapi.yaml/.json] - path to your application or documentation.",
+ ),
+ host: str = typer.Option(
+ "localhost",
+ help="Documentation hosting address.",
+ ),
+ port: int = typer.Option(
+ 8000,
+ help="Documentation hosting port.",
+ ),
+ reload: bool = typer.Option(
+ False,
+ "--reload",
+ is_flag=True,
+ help="Restart documentation at directory files changes.",
+ ),
+ app_dir: str = typer.Option(
+ ".",
+ "--app-dir",
+ help=(
+ "Look for APP in the specified directory, by adding this to the PYTHONPATH."
+ " Defaults to the current working directory."
+ ),
+ ),
+ is_factory: bool = typer.Option(
+ False,
+ "--factory",
+ is_flag=True,
+ help="Treat APP as an application factory.",
+ ),
+) -> None:
+ """Serve project AsyncAPI schema."""
+ if ":" in docs:
+ if app_dir: # pragma: no branch
+ sys.path.insert(0, app_dir)
+
+ module, _ = import_from_string(docs, is_factory=is_factory)
+
+ module_parent = module.parent
+ extra_extensions: Sequence[str] = ()
+
+ else:
+ module_parent = Path.cwd()
+ schema_filepath = module_parent / docs
+ extra_extensions = (schema_filepath.suffix,)
+
+ if reload:
+ try:
+ from faststream._internal.cli.supervisors.watchfiles import WatchReloader
+
+ except ImportError:
+ warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1)
+ _parse_and_serve(docs, host, port, is_factory)
+
+ else:
+ WatchReloader(
+ target=_parse_and_serve,
+ args=(docs, host, port, is_factory),
+ reload_dirs=(str(module_parent),),
+ extra_extensions=extra_extensions,
+ ).run()
+
+ else:
+ _parse_and_serve(docs, host, port, is_factory)
+
+
+@docs_app.command(name="gen")
+def gen(
+ asyncapi: str = typer.Argument(
+ ...,
+ help="[python_module:Specification] - path to your AsyncAPI object.",
+ ),
+ yaml: bool = typer.Option(
+ False,
+ "--yaml",
+ is_flag=True,
+ help="Generate `asyncapi.yaml` schema.",
+ ),
+ out: Optional[str] = typer.Option(
+ None,
+ help="Output filename.",
+ ),
+ app_dir: str = typer.Option(
+ ".",
+ "--app-dir",
+ help=(
+ "Look for APP in the specified directory, by adding this to the PYTHONPATH."
+ " Defaults to the current working directory."
+ ),
+ ),
+ is_factory: bool = typer.Option(
+ False,
+ "--factory",
+ is_flag=True,
+ help="Treat APP as an application factory.",
+ ),
+ asyncapi_version: str = typer.Option(
+ "3.0.0",
+ "--version",
+ help="Version of asyncapi schema. Currently supported only 3.0.0 and 2.6.0",
+ ),
+) -> None:
+ """Generate project AsyncAPI schema."""
+ if app_dir: # pragma: no branch
+ sys.path.insert(0, app_dir)
+
+ _, asyncapi_obj = import_from_string(asyncapi, is_factory=is_factory)
+
+ assert isinstance(asyncapi_obj, Specification) # nosec B101
+
+ raw_schema = asyncapi_obj.schema
+
+ if yaml:
+ try:
+ schema = raw_schema.to_yaml()
+ except ImportError as e: # pragma: no cover
+ typer.echo(INSTALL_YAML, err=True)
+ raise typer.Exit(1) from e
+
+ name = out or "asyncapi.yaml"
+
+ with Path(name).open("w", encoding="utf-8") as f:
+ f.write(schema)
+
+ else:
+ schema = raw_schema.to_jsonable()
+ name = out or "asyncapi.json"
+
+ with Path(name).open("w", encoding="utf-8") as f:
+ json.dump(schema, f, indent=2)
+
+ typer.echo(f"Your project AsyncAPI scheme was placed to `{name}`")
+
+
+def _parse_and_serve(
+ docs: str,
+ host: str = "localhost",
+ port: int = 8000,
+ is_factory: bool = False,
+) -> None:
+ if ":" in docs:
+ _, docs_obj = import_from_string(docs, is_factory=is_factory)
+
+ assert isinstance(docs_obj, Specification) # nosec B101
+
+ raw_schema = docs_obj
+
+ else:
+ schema_filepath = Path.cwd() / docs
+
+ if schema_filepath.suffix == ".json":
+ data = schema_filepath.read_bytes()
+
+ elif schema_filepath.suffix in {".yaml", ".yml"}:
+ try:
+ import yaml
+ except ImportError as e: # pragma: no cover
+ typer.echo(INSTALL_YAML, err=True)
+ raise typer.Exit(1) from e
+
+ with schema_filepath.open("r") as f:
+ schema = yaml.safe_load(f)
+
+ data = json_dumps(schema)
+
+ else:
+ msg = f"Unknown extension given - {docs}; Please provide app in format [python_module:Specification] or [asyncapi.yaml/.json] - path to your application or documentation"
+ raise ValueError(
+ msg,
+ )
+
+ for schema in (SchemaV3, SchemaV2_6):
+ with suppress(ValidationError):
+ raw_schema = model_parse(schema, data)
+ break
+ else:
+ typer.echo(SCHEMA_NOT_SUPPORTED.format(schema_filename=docs), err=True)
+ raise typer.Exit(1)
+
+ serve_app(raw_schema, host, port)
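These commands can be exercised without a shell through Typer's test runner; a hedged sketch ("main:app" is a placeholder for your own [python_module:Specification] path and must be importable for the command to succeed):

    from typer.testing import CliRunner

    from faststream._internal.cli.docs import docs_app

    runner = CliRunner()

    # Equivalent to `faststream docs gen --yaml main:app`.
    result = runner.invoke(docs_app, ["gen", "--yaml", "main:app"])
    print(result.exit_code, result.output)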
diff --git a/faststream/_internal/cli/main.py b/faststream/_internal/cli/main.py
new file mode 100644
index 0000000000..e9efba45ea
--- /dev/null
+++ b/faststream/_internal/cli/main.py
@@ -0,0 +1,316 @@
+import logging
+import sys
+import warnings
+from contextlib import suppress
+from typing import TYPE_CHECKING, Any, Optional, cast
+
+import anyio
+import typer
+
+from faststream import FastStream
+from faststream.__about__ import __version__
+from faststream._internal._compat import json_loads
+from faststream._internal.application import Application
+from faststream.asgi import AsgiFastStream
+from faststream.exceptions import INSTALL_WATCHFILES, SetupError, StartupValidationError
+
+from .docs import docs_app
+from .utils.imports import import_from_string
+from .utils.logs import LogLevels, get_log_level, set_log_level
+from .utils.parser import parse_cli_args
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, SettingField
+ from faststream._internal.broker.broker import BrokerUsecase
+
+cli = typer.Typer(pretty_exceptions_short=True)
+cli.add_typer(docs_app, name="docs", help="Documentation commands")
+
+
+def version_callback(version: bool) -> None:
+ """Callback function for displaying version information."""
+ if version:
+ import platform
+
+ typer.echo(
+ f"Running FastStream {__version__} with {platform.python_implementation()} "
+ f"{platform.python_version()} on {platform.system()}",
+ )
+
+ raise typer.Exit
+
+
+@cli.callback()
+def main(
+ version: Optional[bool] = typer.Option(
+ False,
+ "-v",
+ "--version",
+ callback=version_callback,
+ is_eager=True,
+ help="Show current platform, python and FastStream version.",
+ ),
+) -> None:
+ """Generate, run and manage FastStream apps to greater development experience."""
+
+
+@cli.command(
+ context_settings={"allow_extra_args": True, "ignore_unknown_options": True},
+)
+def run(
+ ctx: typer.Context,
+ app: str = typer.Argument(
+ ...,
+ help="[python_module:FastStream] - path to your application.",
+ ),
+ workers: int = typer.Option(
+ 1,
+ "-w",
+ "--workers",
+ show_default=False,
+ help="Run [workers] applications with process spawning.",
+ envvar="FASTSTREAM_WORKERS",
+ ),
+ log_level: LogLevels = typer.Option(
+ LogLevels.notset,
+ "-l",
+ "--log-level",
+ case_sensitive=False,
+ help="Set selected level for FastStream and brokers logger objects.",
+ envvar="FASTSTREAM_LOG_LEVEL",
+ ),
+ reload: bool = typer.Option(
+ False,
+ "-r",
+ "--reload",
+ is_flag=True,
+ help="Restart app at directory files changes.",
+ ),
+ watch_extensions: list[str] = typer.Option(
+ (),
+ "--extension",
+ "--ext",
+ "--reload-extension",
+ "--reload-ext",
+ help="List of file extensions to watch by.",
+ ),
+ app_dir: str = typer.Option(
+ ".",
+ "--app-dir",
+ help=(
+ "Look for APP in the specified directory, by adding this to the PYTHONPATH."
+ " Defaults to the current working directory."
+ ),
+ envvar="FASTSTREAM_APP_DIR",
+ ),
+ is_factory: bool = typer.Option(
+ False,
+ "-f",
+ "--factory",
+ help="Treat APP as an application factory.",
+ ),
+) -> None:
+ """Run [MODULE:APP] FastStream application."""
+ if watch_extensions and not reload:
+ typer.echo(
+ "Extra reload extensions has no effect without `--reload` flag."
+ "\nProbably, you forgot it?",
+ )
+
+ app, extra = parse_cli_args(app, *ctx.args)
+ casted_log_level = get_log_level(log_level)
+
+ if app_dir: # pragma: no branch
+ sys.path.insert(0, app_dir)
+
+ # Should be imported after sys.path changes
+ module_path, app_obj = import_from_string(app, is_factory=is_factory)
+ app_obj = cast("Application", app_obj)
+
+ args = (app, extra, is_factory, casted_log_level)
+
+ if reload and workers > 1:
+ msg = "You can't use reload option with multiprocessing"
+ raise SetupError(msg)
+
+ if reload:
+ try:
+ from faststream._internal.cli.supervisors.watchfiles import WatchReloader
+ except ImportError:
+ warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1)
+ _run(*args)
+
+ else:
+ reload_dirs = []
+ if module_path:
+ reload_dirs.append(str(module_path))
+ if app_dir != ".":
+ reload_dirs.append(app_dir)
+
+ WatchReloader(
+ target=_run,
+ args=args,
+ reload_dirs=reload_dirs,
+ extra_extensions=watch_extensions,
+ ).run()
+
+ elif workers > 1:
+ if isinstance(app_obj, FastStream):
+ from faststream._internal.cli.supervisors.multiprocess import Multiprocess
+
+ Multiprocess(
+ target=_run,
+ args=(*args, logging.DEBUG),
+ workers=workers,
+ ).run()
+
+ elif isinstance(app_obj, AsgiFastStream):
+ from faststream._internal.cli.supervisors.asgi_multiprocess import (
+ ASGIMultiprocess,
+ )
+
+ ASGIMultiprocess(
+ target=app,
+ args=args,
+ workers=workers,
+ ).run()
+
+ else:
+ msg = f"Unexpected app type, expected FastStream or AsgiFastStream, got: {type(app_obj)}."
+ raise typer.BadParameter(msg)
+
+ else:
+ _run_imported_app(
+ app_obj,
+ extra_options=extra,
+ log_level=casted_log_level,
+ )
+
+
+def _run(
+    # NOTE: we should pass a `str` because FastStream is not picklable
+ app: str,
+ extra_options: dict[str, "SettingField"],
+ is_factory: bool,
+ log_level: int = logging.NOTSET,
+ app_level: int = logging.INFO, # option for reloader only
+) -> None:
+ """Runs the specified application."""
+ _, app_obj = import_from_string(app, is_factory=is_factory)
+ app_obj = cast("Application", app_obj)
+ _run_imported_app(
+ app_obj,
+ extra_options=extra_options,
+ log_level=log_level,
+ app_level=app_level,
+ )
+
+
+def _run_imported_app(
+ app_obj: "Application",
+ extra_options: dict[str, "SettingField"],
+ log_level: int = logging.NOTSET,
+ app_level: int = logging.INFO, # option for reloader only
+) -> None:
+ if not isinstance(app_obj, Application):
+ msg = f'Imported object "{app_obj}" must be "Application" type.'
+ raise typer.BadParameter(
+ msg,
+ )
+
+ if log_level > 0:
+ set_log_level(log_level, app_obj)
+
+ if sys.platform not in {"win32", "cygwin", "cli"}: # pragma: no cover
+ with suppress(ImportError):
+ import uvloop
+
+ uvloop.install()
+
+ try:
+ anyio.run(
+ app_obj.run,
+ app_level,
+ extra_options,
+ )
+
+ except StartupValidationError as startup_exc:
+ from faststream._internal.cli.utils.errors import draw_startup_errors
+
+ draw_startup_errors(startup_exc)
+ sys.exit(1)
+
+
+@cli.command(
+ context_settings={"allow_extra_args": True, "ignore_unknown_options": True},
+)
+def publish(
+ ctx: typer.Context,
+ app: str = typer.Argument(
+ ...,
+ help="FastStream app instance, e.g., main:app.",
+ ),
+ message: str = typer.Argument(
+ ...,
+ help="JSON Message string to publish.",
+ ),
+ rpc: bool = typer.Option(
+ False,
+ help="Enable RPC mode and system output.",
+ ),
+ is_factory: bool = typer.Option(
+ False,
+ "--factory",
+ help="Treat APP as an application factory.",
+ ),
+) -> None:
+ """Publish a message using the specified broker in a FastStream application.
+
+ This command publishes a message to a broker configured in a FastStream app instance.
+ It supports various brokers and can handle extra arguments specific to each broker type.
+ These are parsed and passed to the broker's publish method.
+ """
+ app, extra = parse_cli_args(app, *ctx.args)
+
+ publish_extra: AnyDict = extra.copy()
+ if "timeout" in publish_extra:
+ publish_extra["timeout"] = float(publish_extra["timeout"])
+
+ try:
+ _, app_obj = import_from_string(app, is_factory=is_factory)
+
+ assert isinstance(app_obj, FastStream), app_obj # nosec B101
+
+ if not app_obj.broker:
+ msg = "Broker instance not found in the app."
+ raise ValueError(msg)
+
+ app_obj._setup()
+ result = anyio.run(publish_message, app_obj.broker, rpc, message, publish_extra)
+
+ if rpc:
+ typer.echo(result)
+
+ except Exception as e:
+ typer.echo(f"Publish error: {e}")
+ sys.exit(1)
+
+
+async def publish_message(
+ broker: "BrokerUsecase[Any, Any]",
+ rpc: bool,
+ message: str,
+ extra: "AnyDict",
+) -> Any:
+ with suppress(Exception):
+ message = json_loads(message)
+
+ try:
+ async with broker:
+ if rpc:
+ return await broker.request(message, **extra) # type: ignore[call-arg]
+ return await broker.publish(message, **extra) # type: ignore[call-arg]
+
+ except Exception as e:
+ typer.echo(f"Error when broker was publishing: {e!r}")
+ sys.exit(1)
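The root `cli` app can be smoke-tested the same way; the eager `--version` callback prints the platform line and exits before any command runs:

    from typer.testing import CliRunner

    from faststream._internal.cli.main import cli

    runner = CliRunner()
    result = runner.invoke(cli, ["--version"])
    print(result.output)  # e.g. "Running FastStream ... with CPython ... on Linux"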
diff --git a/faststream/broker/publisher/__init__.py b/faststream/_internal/cli/supervisors/__init__.py
similarity index 100%
rename from faststream/broker/publisher/__init__.py
rename to faststream/_internal/cli/supervisors/__init__.py
diff --git a/faststream/_internal/cli/supervisors/asgi_multiprocess.py b/faststream/_internal/cli/supervisors/asgi_multiprocess.py
new file mode 100644
index 0000000000..bcfba99010
--- /dev/null
+++ b/faststream/_internal/cli/supervisors/asgi_multiprocess.py
@@ -0,0 +1,43 @@
+import inspect
+from typing import TYPE_CHECKING
+
+from faststream.asgi.app import cast_uvicorn_params
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import SettingField
+
+
+class ASGIMultiprocess:
+ def __init__(
+ self,
+ target: str,
+ args: tuple[str, dict[str, "SettingField"], bool, int],
+ workers: int,
+ ) -> None:
+ _, uvicorn_kwargs, is_factory, log_level = args
+ self._target = target
+ self._uvicorn_kwargs = cast_uvicorn_params(uvicorn_kwargs or {})
+ self._workers = workers
+ self._is_factory = is_factory
+ self._log_level = log_level
+
+ def run(self) -> None:
+ try:
+ import uvicorn
+ except ImportError as e:
+ error_msg = "You need uvicorn to run FastStream ASGI App via CLI.\npip install uvicorn"
+ raise ImportError(error_msg) from e
+
+ uvicorn_params = set(inspect.signature(uvicorn.run).parameters.keys())
+
+ uvicorn.run(
+ self._target,
+ factory=self._is_factory,
+ workers=self._workers,
+ log_level=self._log_level,
+ **{
+ key: v
+ for key, v in self._uvicorn_kwargs.items()
+ if key in uvicorn_params
+ },
+ )
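The kwargs filtering above - keeping only keys that `uvicorn.run` actually accepts - is a general trick for forwarding loosely typed CLI options to a third-party callable. Standalone sketch:

    import inspect

    def run_server(host="127.0.0.1", port=8000, log_level="info"):
        print(host, port, log_level)

    cli_kwargs = {"host": "0.0.0.0", "port": 9000, "reload_dirs": ["src"]}

    accepted = set(inspect.signature(run_server).parameters)
    run_server(**{k: v for k, v in cli_kwargs.items() if k in accepted})
    # -> 0.0.0.0 9000 info  (the unknown "reload_dirs" key is silently dropped)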
diff --git a/faststream/cli/supervisors/basereload.py b/faststream/_internal/cli/supervisors/basereload.py
similarity index 80%
rename from faststream/cli/supervisors/basereload.py
rename to faststream/_internal/cli/supervisors/basereload.py
index d0906cb56f..0b9f788897 100644
--- a/faststream/cli/supervisors/basereload.py
+++ b/faststream/_internal/cli/supervisors/basereload.py
@@ -1,13 +1,13 @@
import os
import threading
from multiprocessing.context import SpawnProcess
-from typing import TYPE_CHECKING, Any, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Optional
-from faststream.cli.supervisors.utils import get_subprocess, set_exit
-from faststream.log import logger
+from faststream._internal.cli.supervisors.utils import get_subprocess, set_exit
+from faststream._internal.log import logger
if TYPE_CHECKING:
- from faststream.types import DecoratedCallable
+ from faststream._internal.basic_types import DecoratedCallable
class BaseReload:
@@ -15,7 +15,7 @@ class BaseReload:
_process: SpawnProcess
_target: "DecoratedCallable"
- _args: Tuple[Any, ...]
+ _args: tuple[Any, ...]
reload_delay: Optional[float]
should_exit: threading.Event
@@ -25,7 +25,7 @@ class BaseReload:
def __init__(
self,
target: "DecoratedCallable",
- args: Tuple[Any, ...],
+ args: tuple[Any, ...],
reload_delay: Optional[float] = 0.5,
) -> None:
self._target = target
@@ -67,4 +67,5 @@ def _start_process(self) -> SpawnProcess:
return process
def should_restart(self) -> bool:
- raise NotImplementedError("Reload strategies should override should_restart()")
+ msg = "Reload strategies should override should_restart()"
+ raise NotImplementedError(msg)
diff --git a/faststream/cli/supervisors/multiprocess.py b/faststream/_internal/cli/supervisors/multiprocess.py
similarity index 85%
rename from faststream/cli/supervisors/multiprocess.py
rename to faststream/_internal/cli/supervisors/multiprocess.py
index a08fc5f273..e84f98cf7a 100644
--- a/faststream/cli/supervisors/multiprocess.py
+++ b/faststream/_internal/cli/supervisors/multiprocess.py
@@ -1,13 +1,13 @@
import signal
-from typing import TYPE_CHECKING, Any, List, Tuple
+from typing import TYPE_CHECKING, Any
-from faststream.cli.supervisors.basereload import BaseReload
-from faststream.log import logger
+from faststream._internal.cli.supervisors.basereload import BaseReload
+from faststream._internal.log import logger
if TYPE_CHECKING:
from multiprocessing.context import SpawnProcess
- from faststream.types import DecoratedCallable
+ from faststream._internal.basic_types import DecoratedCallable
class Multiprocess(BaseReload):
@@ -16,14 +16,14 @@ class Multiprocess(BaseReload):
def __init__(
self,
target: "DecoratedCallable",
- args: Tuple[Any, ...],
+ args: tuple[Any, ...],
workers: int,
reload_delay: float = 0.5,
) -> None:
super().__init__(target, args, reload_delay)
self.workers = workers
- self.processes: List[SpawnProcess] = []
+ self.processes: list[SpawnProcess] = []
def startup(self) -> None:
logger.info(f"Started parent process [{self.pid}]")
diff --git a/faststream/_internal/cli/supervisors/utils.py b/faststream/_internal/cli/supervisors/utils.py
new file mode 100644
index 0000000000..58e02b9a86
--- /dev/null
+++ b/faststream/_internal/cli/supervisors/utils.py
@@ -0,0 +1,75 @@
+import asyncio
+import multiprocessing
+import os
+import signal
+import sys
+from contextlib import suppress
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+if TYPE_CHECKING:
+ from multiprocessing.context import SpawnProcess
+ from types import FrameType
+
+ from faststream._internal.basic_types import DecoratedCallableNone
+
+multiprocessing.allow_connection_pickling()
+spawn = multiprocessing.get_context("spawn")
+
+
+HANDLED_SIGNALS = (
+ signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
+    signal.SIGTERM,  # Unix signal 15. Sent by `kill <pid>`.
+)
+if sys.platform == "win32": # pragma: py-not-win32
+ HANDLED_SIGNALS += (signal.SIGBREAK,) # Windows signal 21. Sent by Ctrl+Break.
+
+
+def set_exit(
+ func: Callable[[int, Optional["FrameType"]], Any],
+ *,
+ sync: bool = False,
+) -> None:
+ """Set exit handler for signals.
+
+ Args:
+ func: A callable object that takes an integer and an optional frame type as arguments and returns any value.
+        sync: register the callback via signal.signal instead of the event loop.
+ """
+ if not sync:
+ with suppress(NotImplementedError):
+ loop = asyncio.get_event_loop()
+
+ for sig in HANDLED_SIGNALS:
+ loop.add_signal_handler(sig, func, sig, None)
+
+ return
+
+ # Windows or sync mode
+ for sig in HANDLED_SIGNALS:
+ signal.signal(sig, func)
+
+
+def get_subprocess(target: "DecoratedCallableNone", args: Any) -> "SpawnProcess":
+ """Spawn a subprocess."""
+ stdin_fileno: Optional[int]
+ try:
+ stdin_fileno = sys.stdin.fileno()
+ except OSError:
+ stdin_fileno = None
+
+ return spawn.Process(
+ target=subprocess_started,
+ args=args,
+ kwargs={"t": target, "stdin_fileno": stdin_fileno},
+ )
+
+
+def subprocess_started(
+ *args: Any,
+ t: "DecoratedCallableNone",
+ stdin_fileno: Optional[int],
+) -> None:
+ """Start a subprocess."""
+ if stdin_fileno is not None: # pragma: no cover
+ sys.stdin = os.fdopen(stdin_fileno)
+ t(*args)
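The sync fallback in set_exit() boils down to plain signal.signal registration; a minimal standalone sketch (POSIX signals shown):

    import signal

    def handle_exit(signum, frame):
        print(f"shutting down on signal {signum}")

    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, handle_exit)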
diff --git a/faststream/cli/supervisors/watchfiles.py b/faststream/_internal/cli/supervisors/watchfiles.py
similarity index 87%
rename from faststream/cli/supervisors/watchfiles.py
rename to faststream/_internal/cli/supervisors/watchfiles.py
index 9b70dbff5b..d83e670fb3 100644
--- a/faststream/cli/supervisors/watchfiles.py
+++ b/faststream/_internal/cli/supervisors/watchfiles.py
@@ -1,13 +1,14 @@
+from collections.abc import Sequence
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
import watchfiles
-from faststream.cli.supervisors.basereload import BaseReload
-from faststream.log import logger
+from faststream._internal.cli.supervisors.basereload import BaseReload
+from faststream._internal.log import logger
if TYPE_CHECKING:
- from faststream.types import DecoratedCallable
+ from faststream._internal.basic_types import DecoratedCallable
class ExtendedFilter(watchfiles.PythonFilter):
@@ -38,7 +39,7 @@ class WatchReloader(BaseReload):
def __init__(
self,
target: "DecoratedCallable",
- args: Tuple[Any, ...],
+ args: tuple[Any, ...],
reload_dirs: Sequence[Union[Path, str]],
reload_delay: float = 0.3,
extra_extensions: Sequence[str] = (),
diff --git a/faststream/broker/subscriber/__init__.py b/faststream/_internal/cli/utils/__init__.py
similarity index 100%
rename from faststream/broker/subscriber/__init__.py
rename to faststream/_internal/cli/utils/__init__.py
diff --git a/faststream/_internal/cli/utils/errors.py b/faststream/_internal/cli/utils/errors.py
new file mode 100644
index 0000000000..c9d1eb0721
--- /dev/null
+++ b/faststream/_internal/cli/utils/errors.py
@@ -0,0 +1,38 @@
+from faststream.exceptions import StartupValidationError
+
+
+def draw_startup_errors(startup_exc: StartupValidationError) -> None:
+ from click.exceptions import BadParameter, MissingParameter
+ from typer.core import TyperOption
+
+ def draw_error(click_exc: BadParameter) -> None:
+ try:
+ from typer import rich_utils
+
+ rich_utils.rich_format_error(click_exc)
+ except ImportError:
+ click_exc.show()
+
+ for field in startup_exc.invalid_fields:
+ draw_error(
+ BadParameter(
+ message=(
+ "extra option in your application "
+ "`lifespan/on_startup` hook has a wrong type."
+ ),
+ param=TyperOption(param_decls=[f"--{field}"]),
+ ),
+ )
+
+ if startup_exc.missed_fields:
+ draw_error(
+ MissingParameter(
+ message=(
+ "You registered extra options in your application "
+ "`lifespan/on_startup` hook, but does not set in CLI."
+ ),
+ param=TyperOption(
+ param_decls=[f"--{x}" for x in startup_exc.missed_fields],
+ ),
+ ),
+ )
diff --git a/faststream/_internal/cli/utils/imports.py b/faststream/_internal/cli/utils/imports.py
new file mode 100644
index 0000000000..860b69a42a
--- /dev/null
+++ b/faststream/_internal/cli/utils/imports.py
@@ -0,0 +1,129 @@
+import importlib
+from importlib.util import module_from_spec, spec_from_file_location
+from pathlib import Path
+
+import typer
+
+from faststream.exceptions import SetupError
+
+
+def import_from_string(
+ import_str: str,
+ *,
+ is_factory: bool = False,
+) -> tuple[Path, object]:
+ module_path, instance = _import_object_or_factory(import_str)
+
+ if is_factory:
+ if callable(instance):
+ instance = instance()
+ else:
+ msg = f'"{instance}" is not a factory.'
+ raise typer.BadParameter(msg)
+
+ return module_path, instance
+
+
+def _import_object_or_factory(import_str: str) -> tuple[Path, object]:
+ """Import FastStream application from module specified by a string."""
+ if not isinstance(import_str, str):
+ msg = "Given value is not of type string"
+ raise typer.BadParameter(msg)
+
+ module_str, _, attrs_str = import_str.partition(":")
+ if not module_str or not attrs_str:
+        msg = f'Import string "{import_str}" must be in format "<module>:<attribute>"'
+ raise typer.BadParameter(
+ msg,
+ )
+
+ try:
+ module = importlib.import_module( # nosemgrep: python.lang.security.audit.non-literal-import.non-literal-import
+ module_str,
+ )
+
+ except ModuleNotFoundError:
+ module_path, import_obj_name = _get_obj_path(import_str)
+ instance = _try_import_app(module_path, import_obj_name)
+
+ else:
+ attr = module
+ try:
+ for attr_str in attrs_str.split("."):
+ attr = getattr(attr, attr_str)
+ instance = attr
+
+ except AttributeError as e:
+ typer.echo(e, err=True)
+ msg = f'Attribute "{attrs_str}" not found in module "{module_str}".'
+ raise typer.BadParameter(
+ msg,
+ ) from e
+
+ if module.__file__:
+ module_path = Path(module.__file__).resolve().parent
+ else:
+ module_path = Path.cwd()
+
+ return module_path, instance
+
+
+def _try_import_app(module: Path, app: str) -> object:
+ """Tries to import a FastStream app from a module."""
+ try:
+ app_object = _import_object(module, app)
+
+ except FileNotFoundError as e:
+ typer.echo(e, err=True)
+ msg = (
+ "Please, input module like [python_file:docs_object] or [module:attribute]"
+ )
+ raise typer.BadParameter(
+ msg,
+ ) from e
+
+ else:
+ return app_object
+
+
+def _import_object(module: Path, app: str) -> object:
+ """Import an object from a module."""
+ spec = spec_from_file_location(
+ "mode",
+ f"{module}.py",
+ submodule_search_locations=[str(module.parent.absolute())],
+ )
+
+ if spec is None: # pragma: no cover
+ raise FileNotFoundError(module)
+
+ mod = module_from_spec(spec)
+ loader = spec.loader
+
+ if loader is None: # pragma: no cover
+ msg = f"{spec} has no loader"
+ raise SetupError(msg)
+
+ loader.exec_module(mod)
+
+ try:
+ obj = getattr(mod, app)
+ except AttributeError as e:
+ raise FileNotFoundError(module) from e
+
+ return obj
+
+
+def _get_obj_path(obj_path: str) -> tuple[Path, str]:
+ """Get the application path."""
+ if ":" not in obj_path:
+ msg = f"`{obj_path}` is not a path to object"
+ raise SetupError(msg)
+
+ module, app_name = obj_path.split(":", 2)
+
+ mod_path = Path.cwd()
+ for i in module.split("."):
+ mod_path /= i
+
+ return mod_path, app_name
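A hedged usage sketch of import_from_string (the `app.py` module and its `app`/`create_app` attributes are hypothetical and must exist in the working directory):

    from faststream._internal.cli.utils.imports import import_from_string

    # Plain attribute lookup: returns the module's directory and the object.
    module_path, app_obj = import_from_string("app:app")

    # Factory mode: the imported object is called to produce the application.
    module_path, app_obj = import_from_string("app:create_app", is_factory=True)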
diff --git a/faststream/_internal/cli/utils/logs.py b/faststream/_internal/cli/utils/logs.py
new file mode 100644
index 0000000000..7233378201
--- /dev/null
+++ b/faststream/_internal/cli/utils/logs.py
@@ -0,0 +1,72 @@
+import logging
+from collections import defaultdict
+from enum import Enum
+from typing import TYPE_CHECKING, Union
+
+if TYPE_CHECKING:
+ from faststream._internal.application import Application
+
+
+class LogLevels(str, Enum):
+ """A class to represent log levels.
+
+ Attributes:
+ critical : critical log level
+ error : error log level
+ warning : warning log level
+ info : info log level
+ debug : debug log level
+ """
+
+ critical = "critical"
+ fatal = "fatal"
+ error = "error"
+ warning = "warning"
+ warn = "warn"
+ info = "info"
+ debug = "debug"
+ notset = "notset"
+
+
+LOG_LEVELS: defaultdict[str, int] = defaultdict(
+ lambda: logging.INFO,
+ critical=logging.CRITICAL,
+ fatal=logging.FATAL,
+ error=logging.ERROR,
+ warning=logging.WARNING,
+ warn=logging.WARNING,
+ info=logging.INFO,
+ debug=logging.DEBUG,
+ notset=logging.NOTSET,
+)
+
+
+def get_log_level(level: Union[LogLevels, str, int]) -> int:
+ """Get the log level.
+
+ Args:
+ level: The log level to get. Can be an integer, a LogLevels enum value, or a string.
+
+ Returns:
+ The log level as an integer.
+
+ """
+ if isinstance(level, int):
+ return level
+
+ if isinstance(level, LogLevels):
+ return LOG_LEVELS[level.value]
+
+ if isinstance(level, str): # pragma: no branch
+ return LOG_LEVELS[level.lower()]
+
+ return None
+
+
+def set_log_level(level: int, app: "Application") -> None:
+ """Sets the log level for an application."""
+ if app.logger and getattr(app.logger, "setLevel", None):
+ app.logger.setLevel(level) # type: ignore[attr-defined]
+
+ for broker in app.brokers:
+ broker._state.get().logger_state.set_level(level)
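The mapping behaves as follows: ints pass through unchanged, strings are lower-cased, and unknown strings fall back to INFO via the defaultdict.

    import logging

    from faststream._internal.cli.utils.logs import LogLevels, get_log_level

    assert get_log_level("WARNING") == logging.WARNING  # strings are lower-cased
    assert get_log_level(LogLevels.debug) == logging.DEBUG
    assert get_log_level(15) == 15                      # ints pass through as-is
    assert get_log_level("unknown") == logging.INFO     # defaultdict fallback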
diff --git a/faststream/_internal/cli/utils/parser.py b/faststream/_internal/cli/utils/parser.py
new file mode 100644
index 0000000000..72ecff5251
--- /dev/null
+++ b/faststream/_internal/cli/utils/parser.py
@@ -0,0 +1,65 @@
+import re
+from functools import reduce
+from typing import TYPE_CHECKING, cast
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import SettingField
+
+
+def is_bind_arg(arg: str) -> bool:
+ """Determine whether the received argument refers to --bind.
+
+ bind arguments are like: 0.0.0.0:8000, [::]:8000, fd://2, /tmp/socket.sock
+
+ """
+ bind_regex = re.compile(r":\d+$|:/+\d|:/[a-zA-Z0-9._-]+/[a-zA-Z0-9._-]+")
+ return bool(bind_regex.search(arg))
+
+
+def parse_cli_args(*args: str) -> tuple[str, dict[str, "SettingField"]]:
+ """Parses command line arguments."""
+ extra_kwargs: dict[str, SettingField] = {}
+
+ k: str = ""
+ v: SettingField
+
+ field_args: list[str] = []
+ app = ""
+ for item in [
+ *reduce(
+ lambda acc, x: acc + x.split("="),
+ args,
+ cast("list[str]", []),
+ ),
+ "-",
+ ]:
+ if ":" in item and not is_bind_arg(item):
+ app = item
+
+ elif "-" in item:
+ if k:
+ k = k.strip().lstrip("-").replace("-", "_")
+
+ if len(field_args) == 0:
+ v = not k.startswith("no_")
+ elif len(field_args) == 1:
+ v = field_args[0]
+ else:
+ v = field_args
+
+ key = k.removeprefix("no_")
+ if (exists := extra_kwargs.get(key)) is not None:
+ v = [
+ *(exists if isinstance(exists, list) else [exists]),
+ *(v if isinstance(v, list) else [v]),
+ ]
+
+ extra_kwargs[key] = v
+ field_args = []
+
+ k = item
+
+ else:
+ field_args.append(item)
+
+ return app, extra_kwargs
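A worked example of the parser: values stay strings here (casting, e.g. the `timeout` float in `publish`, happens at the call site), `--no-` prefixed flags become False, and repeated options accumulate into a list:

    from faststream._internal.cli.utils.parser import parse_cli_args

    app, extra = parse_cli_args(
        "main:app", "--workers", "4", "--no-reload", "--tag", "a", "--tag", "b"
    )
    assert app == "main:app"
    assert extra == {"workers": "4", "reload": False, "tag": ["a", "b"]}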
diff --git a/faststream/_internal/constants.py b/faststream/_internal/constants.py
new file mode 100644
index 0000000000..c81916ed95
--- /dev/null
+++ b/faststream/_internal/constants.py
@@ -0,0 +1,25 @@
+from enum import Enum
+from typing import Any
+
+ContentType = str
+
+
+class ContentTypes(str, Enum):
+ """A class to represent content types."""
+
+ TEXT = "text/plain"
+ JSON = "application/json"
+
+
+class EmptyPlaceholder:
+ def __repr__(self) -> str:
+ return "EMPTY"
+
+ def __bool__(self) -> bool:
+ return False
+
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, EmptyPlaceholder)
+
+
+EMPTY: Any = EmptyPlaceholder()
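EMPTY is a falsy sentinel that lets APIs distinguish "argument omitted" from an explicit None once None itself is a meaningful value:

    from faststream._internal.constants import EMPTY

    def configure(serializer=EMPTY):
        if serializer is EMPTY:
            return "inherit serializer from the parent state"
        if serializer is None:
            return "serialization disabled explicitly"
        return f"use {serializer!r}"

    assert configure().startswith("inherit")
    assert configure(None).startswith("serialization disabled")
    assert not EMPTY  # also behaves like a missing value in truth tests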
diff --git a/faststream/_internal/context/__init__.py b/faststream/_internal/context/__init__.py
new file mode 100644
index 0000000000..f0a0b1d1cb
--- /dev/null
+++ b/faststream/_internal/context/__init__.py
@@ -0,0 +1,7 @@
+from .context_type import Context
+from .repository import ContextRepo
+
+__all__ = (
+ "Context",
+ "ContextRepo",
+)
diff --git a/faststream/_internal/context/context_type.py b/faststream/_internal/context/context_type.py
new file mode 100644
index 0000000000..b50e859066
--- /dev/null
+++ b/faststream/_internal/context/context_type.py
@@ -0,0 +1,78 @@
+from typing import Any, Callable, Optional
+
+from fast_depends.library import CustomField
+
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.constants import EMPTY
+
+from .resolve import resolve_context_by_name
+
+
+class Context(CustomField):
+ """A class to represent a context.
+
+ Attributes:
+ param_name : name of the parameter
+
+ Methods:
+ __init__ : constructor method
+ use : method to use the context
+ """
+
+ param_name: str
+
+ def __init__(
+ self,
+ real_name: str = "",
+ *,
+ default: Any = EMPTY,
+ initial: Optional[Callable[..., Any]] = None,
+ cast: bool = False,
+ prefix: str = "",
+ ) -> None:
+ """Initialize the object.
+
+ Args:
+ real_name: The real name of the object.
+ default: The default value of the object.
+ initial: The initial value builder.
+ cast: Whether to cast the object.
+ prefix: The prefix to be added to the name of the object.
+
+ Raises:
+ TypeError: If the default value is not provided.
+ """
+ self.name = real_name
+ self.default = default
+ self.prefix = prefix
+ self.initial = initial
+ super().__init__(
+ cast=cast,
+ required=(default is EMPTY),
+ )
+
+ def use(self, /, **kwargs: Any) -> AnyDict:
+ """Use the given keyword arguments.
+
+ Args:
+ **kwargs: Keyword arguments to be used
+
+ Returns:
+ A dictionary containing the updated keyword arguments
+ """
+ name = f"{self.prefix}{self.name or self.param_name}"
+
+ if EMPTY != ( # noqa: SIM300
+ v := resolve_context_by_name(
+ name=name,
+ default=self.default,
+ initial=self.initial,
+ context=kwargs["context__"],
+ )
+ ):
+ kwargs[self.param_name] = v
+
+ else:
+ kwargs.pop(self.param_name, None)
+
+ return kwargs
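A hedged usage sketch of the field in handler signatures (public re-exports shown; the dotted `message.correlation_id` path relies on the resolution implemented in ContextRepo.resolve):

    from faststream import Context, Logger

    async def handler(
        body: str,
        logger: Logger,
        correlation_id: str = Context("message.correlation_id"),
    ) -> None:
        logger.info("got %s (%s)", body, correlation_id)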
diff --git a/faststream/utils/context/repository.py b/faststream/_internal/context/repository.py
similarity index 89%
rename from faststream/utils/context/repository.py
rename to faststream/_internal/context/repository.py
index 034d2a504c..eab1763088 100644
--- a/faststream/utils/context/repository.py
+++ b/faststream/_internal/context/repository.py
@@ -1,18 +1,18 @@
+from collections.abc import Iterator, Mapping
from contextlib import contextmanager
from contextvars import ContextVar, Token
-from typing import Any, Dict, Iterator, Mapping
+from typing import Any
-from faststream.types import EMPTY, AnyDict
-from faststream.utils.classes import Singleton
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.constants import EMPTY
+from faststream.exceptions import ContextError
-__all__ = ("ContextRepo", "context")
-
-class ContextRepo(Singleton):
+class ContextRepo:
"""A class to represent a context repository."""
_global_context: AnyDict
- _scope_context: Dict[str, ContextVar[Any]]
+ _scope_context: dict[str, ContextVar[Any]]
def __init__(self) -> None:
"""Initialize the class.
@@ -131,19 +131,18 @@ def get(self, key: str, default: Any = None) -> Any:
"""
if (glob := self._global_context.get(key, EMPTY)) is EMPTY:
return self.get_local(key, default)
- else:
- return glob
+ return glob
- def __getattr__(self, __name: str) -> Any:
+ def __getattr__(self, name: str, /) -> Any:
"""This is a function that is part of a class. It is used to get an attribute value using the `__getattr__` method.
Args:
- __name: The name of the attribute to get.
+ name: The name of the attribute to get.
Returns:
The value of the attribute.
"""
- return self.get(__name)
+ return self.get(name)
def resolve(self, argument: str) -> Any:
"""Resolve the context of an argument.
@@ -160,7 +159,7 @@ def resolve(self, argument: str) -> Any:
first, *keys = argument.split(".")
if (v := self.get(first, EMPTY)) is EMPTY:
- raise KeyError(f"`{self.context}` does not contains `{first}` key")
+ raise ContextError(self.context, first)
for i in keys:
v = v[i] if isinstance(v, Mapping) else getattr(v, i)
@@ -170,6 +169,3 @@ def resolve(self, argument: str) -> Any:
def clear(self) -> None:
self._global_context = {"context": self}
self._scope_context.clear()
-
-
-context = ContextRepo()
diff --git a/faststream/_internal/context/resolve.py b/faststream/_internal/context/resolve.py
new file mode 100644
index 0000000000..854229175e
--- /dev/null
+++ b/faststream/_internal/context/resolve.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+from faststream._internal.constants import EMPTY
+
+if TYPE_CHECKING:
+ from .repository import ContextRepo
+
+
+def resolve_context_by_name(
+ name: str,
+ default: Any,
+ initial: Optional[Callable[..., Any]],
+ context: "ContextRepo",
+) -> Any:
+ value: Any = EMPTY
+
+ try:
+ value = context.resolve(name)
+
+ except (KeyError, AttributeError):
+ if EMPTY != default: # noqa: SIM300
+ value = default
+
+ elif initial is not None:
+ value = initial()
+ context.set_global(name, value)
+
+ return value
diff --git a/faststream/_internal/fastapi/__init__.py b/faststream/_internal/fastapi/__init__.py
new file mode 100644
index 0000000000..011777a593
--- /dev/null
+++ b/faststream/_internal/fastapi/__init__.py
@@ -0,0 +1,7 @@
+from faststream._internal.fastapi.route import StreamMessage
+from faststream._internal.fastapi.router import StreamRouter
+
+__all__ = (
+ "StreamMessage",
+ "StreamRouter",
+)
diff --git a/faststream/_internal/fastapi/_compat.py b/faststream/_internal/fastapi/_compat.py
new file mode 100644
index 0000000000..864492373f
--- /dev/null
+++ b/faststream/_internal/fastapi/_compat.py
@@ -0,0 +1,137 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Optional
+
+from fastapi import __version__ as FASTAPI_VERSION # noqa: N812
+from fastapi.dependencies.utils import solve_dependencies
+from starlette.background import BackgroundTasks
+from typing_extensions import Never
+
+from faststream._internal.basic_types import AnyDict
+
+if TYPE_CHECKING:
+ from fastapi.dependencies.models import Dependant
+ from fastapi.requests import Request
+
+major, minor, patch, *_ = FASTAPI_VERSION.split(".")
+
+_FASTAPI_MAJOR, _FASTAPI_MINOR = int(major), int(minor)
+
+FASTAPI_V2 = _FASTAPI_MAJOR > 0 or _FASTAPI_MINOR > 100
+FASTAPI_V106 = _FASTAPI_MAJOR > 0 or _FASTAPI_MINOR >= 106
+
+try:
+ _FASTAPI_PATCH = int(patch)
+except ValueError:
+ FASTAPI_v102_3 = True
+ FASTAPI_v102_4 = True
+else:
+ FASTAPI_v102_3 = (
+ _FASTAPI_MAJOR > 0
+ or _FASTAPI_MINOR > 112
+ or (_FASTAPI_MINOR == 112 and _FASTAPI_PATCH > 2)
+ )
+ FASTAPI_v102_4 = (
+ _FASTAPI_MAJOR > 0
+ or _FASTAPI_MINOR > 112
+ or (_FASTAPI_MINOR == 112 and _FASTAPI_PATCH > 3)
+ )
+
+__all__ = (
+ "RequestValidationError",
+ "create_response_field",
+ "raise_fastapi_validation_error",
+ "solve_faststream_dependency",
+)
+
+
+@dataclass
+class SolvedDependency:
+ values: AnyDict
+ errors: list[Any]
+ background_tasks: Optional[BackgroundTasks]
+
+
+if FASTAPI_V2:
+ from fastapi._compat import _normalize_errors
+ from fastapi.exceptions import RequestValidationError
+
+ def raise_fastapi_validation_error(errors: list[Any], body: AnyDict) -> Never:
+ raise RequestValidationError(_normalize_errors(errors), body=body)
+
+else:
+ from pydantic import ( # type: ignore[assignment]
+ ValidationError as RequestValidationError,
+ create_model,
+ )
+
+ ROUTER_VALIDATION_ERROR_MODEL = create_model("StreamRoute")
+
+ def raise_fastapi_validation_error(errors: list[Any], body: AnyDict) -> Never:
+ raise RequestValidationError(errors, ROUTER_VALIDATION_ERROR_MODEL) # type: ignore[misc]
+
+
+if FASTAPI_v102_3:
+ from fastapi.utils import (
+ create_model_field as create_response_field,
+ )
+
+ extra = {"embed_body_fields": False} if FASTAPI_v102_4 else {}
+
+ async def solve_faststream_dependency(
+ request: "Request",
+ dependant: "Dependant",
+ dependency_overrides_provider: Optional[Any],
+ **kwargs: Any,
+ ) -> SolvedDependency:
+ solved_result = await solve_dependencies(
+ request=request,
+ body=request._body, # type: ignore[arg-type]
+ dependant=dependant,
+ dependency_overrides_provider=dependency_overrides_provider,
+ **extra, # type: ignore[arg-type]
+ **kwargs,
+ )
+ values, errors, background = (
+ solved_result.values,
+ solved_result.errors,
+ solved_result.background_tasks,
+ )
+
+ return SolvedDependency(
+ values=values,
+ errors=errors,
+ background_tasks=background,
+ )
+
+else:
+ from fastapi.utils import ( # type: ignore[attr-defined,no-redef]
+ create_response_field,
+ )
+
+ async def solve_faststream_dependency(
+ request: "Request",
+ dependant: "Dependant",
+ dependency_overrides_provider: Optional[Any],
+ **kwargs: Any,
+ ) -> SolvedDependency:
+ solved_result = await solve_dependencies(
+ request=request,
+ body=request._body, # type: ignore[arg-type]
+ dependant=dependant,
+ dependency_overrides_provider=dependency_overrides_provider,
+ **kwargs,
+ )
+
+ (
+ values,
+ errors,
+ background,
+ _response,
+ _dependency_cache,
+ ) = solved_result # type: ignore[misc]
+
+ return SolvedDependency(
+ values=values, # type: ignore[has-type]
+ errors=errors, # type: ignore[has-type]
+ background_tasks=background, # type: ignore[has-type]
+ )
diff --git a/faststream/_internal/fastapi/context.py b/faststream/_internal/fastapi/context.py
new file mode 100644
index 0000000000..78d8dd26a7
--- /dev/null
+++ b/faststream/_internal/fastapi/context.py
@@ -0,0 +1,33 @@
+import logging
+from typing import Annotated, Any, Callable, Optional
+
+from fastapi import params
+
+from faststream._internal.constants import EMPTY
+from faststream._internal.context import ContextRepo as CR
+from faststream._internal.context.resolve import resolve_context_by_name
+
+
+def Context( # noqa: N802
+ name: str,
+ *,
+ default: Any = EMPTY,
+ initial: Optional[Callable[..., Any]] = None,
+) -> Any:
+ """Get access to objects of the Context."""
+
+ def solve_context(
+ context: Annotated[Any, params.Header(alias="context__")],
+ ) -> Any:
+ return resolve_context_by_name(
+ name=name,
+ default=default,
+ initial=initial,
+ context=context,
+ )
+
+ return params.Depends(solve_context, use_cache=True)
+
+
+Logger = Annotated[logging.Logger, Context("logger")]
+ContextRepo = Annotated[CR, Context("context")]
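With these aliases, FastAPI-integrated handlers can pull FastStream context objects through plain parameter annotations; a minimal hedged sketch (the handler is assumed to be registered on a StreamRouter):

    from faststream._internal.fastapi.context import ContextRepo, Logger

    async def on_message(logger: Logger, context: ContextRepo) -> None:
        logger.info("broker is %s", context.get("broker"))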
diff --git a/faststream/broker/fastapi/get_dependant.py b/faststream/_internal/fastapi/get_dependant.py
similarity index 84%
rename from faststream/broker/fastapi/get_dependant.py
rename to faststream/_internal/fastapi/get_dependant.py
index 1f43659e1a..a53f33affd 100644
--- a/faststream/broker/fastapi/get_dependant.py
+++ b/faststream/_internal/fastapi/get_dependant.py
@@ -1,8 +1,10 @@
-from typing import TYPE_CHECKING, Any, Callable, Iterable, cast
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Any, Callable, cast
+from fast_depends.library.serializer import OptionItem
from fastapi.dependencies.utils import get_dependant, get_parameterless_sub_dependant
-from faststream._compat import PYDANTIC_V2
+from faststream._internal._compat import PYDANTIC_V2
if TYPE_CHECKING:
from fastapi import params
@@ -19,9 +21,7 @@ def get_fastapi_dependant(
dependencies=dependencies,
)
- dependent = _patch_fastapi_dependent(dependent)
-
- return dependent
+ return _patch_fastapi_dependent(dependent)
def get_fastapi_native_dependant(
@@ -47,7 +47,7 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
"""Patch FastAPI by adding fields for AsyncAPI schema generation."""
from pydantic import Field, create_model # FastAPI always has pydantic
- from faststream._compat import PydanticUndefined
+ from faststream._internal._compat import PydanticUndefined
params = dependant.query_params + dependant.body_params
@@ -68,7 +68,7 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
if PYDANTIC_V2:
from pydantic.fields import FieldInfo
- info = cast(FieldInfo, info)
+ info = cast("FieldInfo", info)
field_data.update(
{
@@ -81,7 +81,7 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
"examples": info.examples,
"exclude": info.exclude,
"json_schema_extra": info.json_schema_extra,
- }
+ },
)
f = next(
@@ -95,7 +95,7 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
else:
from pydantic.fields import ModelField # type: ignore[attr-defined]
- info = cast(ModelField, info)
+ info = cast("ModelField", info)
field_data.update(
{
@@ -107,7 +107,7 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
"ge": info.field_info.ge,
"lt": info.field_info.lt,
"le": info.field_info.le,
- }
+ },
)
f = Field(**field_data) # type: ignore[pydantic-field,unused-ignore]
@@ -117,10 +117,13 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
)
dependant.model = create_model( # type: ignore[attr-defined]
- getattr(dependant.call, "__name__", type(dependant.call).__name__)
+ getattr(dependant.call, "__name__", type(dependant.call).__name__),
)
dependant.custom_fields = {} # type: ignore[attr-defined]
- dependant.flat_params = params_unique # type: ignore[attr-defined]
+ dependant.flat_params = [ # type: ignore[attr-defined]
+ OptionItem(field_name=name, field_type=type_, default_value=default)
+ for name, (type_, default) in params_unique.items()
+ ]
return dependant
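The behavioral change in this hunk is that `flat_params` becomes a list of fast_depends `OptionItem` objects rather than a plain mapping. A standalone sketch of that conversion with made-up parameter data; the attribute names are assumed to mirror the constructor keywords used in the patch:

```python
# Standalone illustration of the flat_params conversion above.
from fast_depends.library.serializer import OptionItem

# {name: (annotation, default)} -- the shape assumed for params_unique here
params_unique = {"user_id": (int, 0), "name": (str, "guest")}

flat_params = [
    OptionItem(field_name=name, field_type=type_, default_value=default)
    for name, (type_, default) in params_unique.items()
]
assert flat_params[0].field_name == "user_id"
```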
diff --git a/faststream/broker/fastapi/route.py b/faststream/_internal/fastapi/route.py
similarity index 86%
rename from faststream/broker/fastapi/route.py
rename to faststream/_internal/fastapi/route.py
index 03c983cae2..70573fc818 100644
--- a/faststream/broker/fastapi/route.py
+++ b/faststream/_internal/fastapi/route.py
@@ -1,15 +1,13 @@
import asyncio
import inspect
+from collections.abc import Awaitable, Iterable
from contextlib import AsyncExitStack
from functools import wraps
from itertools import dropwhile
from typing import (
TYPE_CHECKING,
Any,
- Awaitable,
Callable,
- Iterable,
- List,
Optional,
Union,
)
@@ -17,9 +15,11 @@
from fastapi.routing import run_endpoint_function, serialize_response
from starlette.requests import Request
-from faststream.broker.fastapi.get_dependant import get_fastapi_native_dependant
-from faststream.broker.response import Response, ensure_response
-from faststream.broker.types import P_HandlerParams, T_HandlerReturn
+from faststream._internal.fastapi.get_dependant import (
+ get_fastapi_native_dependant,
+)
+from faststream._internal.types import P_HandlerParams, T_HandlerReturn
+from faststream.response import Response, ensure_response
from ._compat import (
FASTAPI_V106,
@@ -34,8 +34,9 @@
from fastapi.dependencies.models import Dependant
from fastapi.types import IncEx
- from faststream.broker.message import StreamMessage as NativeMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.state import DIState
+ from faststream.message import StreamMessage as NativeMessage
class StreamMessage(Request):
@@ -43,14 +44,14 @@ class StreamMessage(Request):
scope: "AnyDict"
_cookies: "AnyDict"
- _headers: "AnyDict" # type: ignore
- _body: Union["AnyDict", List[Any]] # type: ignore
- _query_params: "AnyDict" # type: ignore
+ _headers: "AnyDict" # type: ignore[assignment]
+ _body: Union["AnyDict", list[Any]] # type: ignore[assignment]
+ _query_params: "AnyDict" # type: ignore[assignment]
def __init__(
self,
*,
- body: Union["AnyDict", List[Any]],
+ body: Union["AnyDict", list[Any]],
headers: "AnyDict",
path: "AnyDict",
) -> None:
@@ -75,10 +76,11 @@ def wrap_callable_to_fastapi_compatible(
response_model_exclude_unset: bool,
response_model_exclude_defaults: bool,
response_model_exclude_none: bool,
+ state: "DIState",
) -> Callable[["NativeMessage[Any]"], Awaitable[Any]]:
- __magic_attr = "__faststream_consumer__"
+ magic_attr = "__faststream_consumer__"
- if getattr(user_callable, __magic_attr, False):
+ if getattr(user_callable, magic_attr, False):
return user_callable # type: ignore[return-value]
if response_model:
@@ -100,9 +102,10 @@ def wrap_callable_to_fastapi_compatible(
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
+ state=state,
)
- setattr(parsed_callable, __magic_attr, True)
+ setattr(parsed_callable, magic_attr, True)
return wraps(user_callable)(parsed_callable)
@@ -117,6 +120,7 @@ def build_faststream_to_fastapi_parser(
response_model_exclude_unset: bool,
response_model_exclude_defaults: bool,
response_model_exclude_none: bool,
+ state: "DIState",
) -> Callable[["NativeMessage[Any]"], Awaitable[Any]]:
"""Creates a session for handling requests."""
assert dependent.call # nosec B101
@@ -147,7 +151,7 @@ async def parsed_consumer(message: "NativeMessage[Any]") -> Any:
"""Wrapper, that parser FastStream message to FastAPI compatible one."""
body = await message.decode()
- fastapi_body: Union[AnyDict, List[Any]]
+ fastapi_body: Union[AnyDict, list[Any]]
if first_arg is not None:
if isinstance(body, dict):
path = fastapi_body = body or {}
@@ -158,14 +162,14 @@ async def parsed_consumer(message: "NativeMessage[Any]") -> Any:
stream_message = StreamMessage(
body=fastapi_body,
- headers=message.headers,
+ headers={"context__": state.context, **message.headers},
path={**path, **message.path},
)
else:
stream_message = StreamMessage(
body={},
- headers={},
+ headers={"context__": state.context},
path={},
)
@@ -238,6 +242,7 @@ async def app(
return response
- raise AssertionError("unreachable")
+ msg = "unreachable"
+ raise AssertionError(msg)
return app
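The key mechanism in this file is the `context__` pseudo-header: the parser injects the broker's `ContextRepo` into the fake request headers so that the `Context()` dependency (context.py above) can read it back via `Header(alias="context__")`. A toy model of that hand-off, with plain objects standing in for the real request and repo:

```python
# Toy model of the "context__" hand-off; StubContextRepo only implements the
# get_local call the real ContextRepo would serve.
class StubContextRepo:
    def __init__(self) -> None:
        self._scope = {"logger": "per-message-logger"}

    def get_local(self, name, default=None):
        return self._scope.get(name, default)


state_context = StubContextRepo()

# producer side (build_faststream_to_fastapi_parser):
headers = {"context__": state_context, "content-type": "application/json"}

# consumer side (the Context() dependency reading the aliased header):
repo = headers["context__"]
assert repo.get_local("logger") == "per-message-logger"
```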
diff --git a/faststream/_internal/fastapi/router.py b/faststream/_internal/fastapi/router.py
new file mode 100644
index 0000000000..9451503045
--- /dev/null
+++ b/faststream/_internal/fastapi/router.py
@@ -0,0 +1,544 @@
+import json
+import warnings
+from abc import abstractmethod
+from collections.abc import AsyncIterator, Awaitable, Iterable, Mapping, Sequence
+from contextlib import asynccontextmanager
+from enum import Enum
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Generic,
+ Optional,
+ Union,
+ cast,
+ overload,
+)
+from weakref import WeakSet
+
+from fastapi.datastructures import Default
+from fastapi.responses import HTMLResponse
+from fastapi.routing import APIRoute, APIRouter
+from fastapi.utils import generate_unique_id
+from starlette.responses import JSONResponse, Response
+from starlette.routing import BaseRoute, _DefaultLifespan
+
+from faststream._internal.application import StartAbleApplication
+from faststream._internal.broker.router import BrokerRouter
+from faststream._internal.fastapi.get_dependant import get_fastapi_dependant
+from faststream._internal.fastapi.route import (
+ wrap_callable_to_fastapi_compatible,
+)
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+from faststream._internal.utils.functions import fake_context, to_async
+from faststream.middlewares import BaseMiddleware
+from faststream.specification.asyncapi.site import get_asyncapi_html
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from fastapi import FastAPI, params
+ from fastapi.background import BackgroundTasks
+ from fastapi.types import IncEx
+ from starlette import routing
+ from starlette.types import ASGIApp, AppType, Lifespan
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream._internal.proto import NameRequired
+ from faststream._internal.publisher.proto import PublisherProto
+ from faststream._internal.subscriber.call_wrapper import HandlerCallWrapper
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.message import StreamMessage
+ from faststream.specification.base.specification import Specification
+ from faststream.specification.schema.extra import Tag, TagDict
+
+
+class _BackgroundMiddleware(BaseMiddleware):
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ if not exc_type and (
+ background := cast(
+ "Optional[BackgroundTasks]",
+ getattr(self.context.get_local("message"), "background", None),
+ )
+ ):
+ await background()
+
+ return await super().after_processed(exc_type, exc_val, exc_tb)
+
+
+class StreamRouter(
+ APIRouter,
+ StartAbleApplication,
+ Generic[MsgType],
+):
+ """A class to route streams."""
+
+ broker_class: type["BrokerUsecase[MsgType, Any]"]
+ broker: "BrokerUsecase[MsgType, Any]"
+ docs_router: Optional[APIRouter]
+ _after_startup_hooks: list[Callable[[Any], Awaitable[Optional[Mapping[str, Any]]]]]
+ _on_shutdown_hooks: list[Callable[[Any], Awaitable[None]]]
+ schema: Optional["Specification"]
+
+ title: str
+ description: str
+ version: str
+ license: Optional["AnyDict"]
+ contact: Optional["AnyDict"]
+
+ def __init__(
+ self,
+ *connection_args: Any,
+ middlewares: Sequence["BrokerMiddleware[MsgType]"] = (),
+ prefix: str = "",
+ tags: Optional[list[Union[str, Enum]]] = None,
+ dependencies: Optional[Sequence["params.Depends"]] = None,
+ default_response_class: type["Response"] = Default(JSONResponse),
+ responses: Optional[dict[Union[int, str], "AnyDict"]] = None,
+ callbacks: Optional[list["routing.BaseRoute"]] = None,
+ routes: Optional[list["routing.BaseRoute"]] = None,
+ redirect_slashes: bool = True,
+ default: Optional["ASGIApp"] = None,
+ dependency_overrides_provider: Optional[Any] = None,
+ route_class: type["APIRoute"] = APIRoute,
+ on_startup: Optional[Sequence[Callable[[], Any]]] = None,
+ on_shutdown: Optional[Sequence[Callable[[], Any]]] = None,
+ deprecated: Optional[bool] = None,
+ include_in_schema: bool = True,
+ setup_state: bool = True,
+ lifespan: Optional["Lifespan[Any]"] = None,
+ generate_unique_id_function: Callable[["APIRoute"], str] = Default(
+ generate_unique_id,
+ ),
+ # Specification information
+ specification_tags: Iterable[Union["Tag", "TagDict"]] = (),
+ schema_url: Optional[str] = "/asyncapi",
+ **connection_kwars: Any,
+ ) -> None:
+ assert ( # nosec B101
+ self.broker_class
+ ), "You should specify `broker_class` at your implementation"
+
+ broker = self.broker_class(
+ *connection_args,
+ middlewares=(
+ *middlewares,
+                # lets user middlewares catch background-task exceptions
+ _BackgroundMiddleware,
+ ),
+ _get_dependant=get_fastapi_dependant,
+ tags=specification_tags,
+ apply_types=False,
+ **connection_kwars,
+ )
+
+ self._init_setupable_(
+ broker,
+ provider=None,
+ )
+
+ self.setup_state = setup_state
+
+ # Specification information
+ # Empty
+ self.terms_of_service = None
+ self.identifier = None
+ self.specification_tags = None
+ self.external_docs = None
+ # parse from FastAPI app on startup
+ self.title = ""
+ self.version = ""
+ self.description = ""
+ self.license = None
+ self.contact = None
+
+ self.schema = None
+
+ super().__init__(
+ prefix=prefix,
+ tags=tags,
+ dependencies=dependencies,
+ default_response_class=default_response_class,
+ responses=responses,
+ callbacks=callbacks,
+ routes=routes,
+ redirect_slashes=redirect_slashes,
+ default=default,
+ dependency_overrides_provider=dependency_overrides_provider,
+ route_class=route_class,
+ deprecated=deprecated,
+ include_in_schema=include_in_schema,
+ generate_unique_id_function=generate_unique_id_function,
+ lifespan=self._wrap_lifespan(lifespan),
+ on_startup=on_startup,
+ on_shutdown=on_shutdown,
+ )
+
+ self.weak_dependencies_provider: WeakSet[Any] = WeakSet()
+ if dependency_overrides_provider is not None:
+ self.weak_dependencies_provider.add(dependency_overrides_provider)
+
+ if self.include_in_schema:
+ self.docs_router = self._asyncapi_router(schema_url)
+ else:
+ self.docs_router = None
+
+ self._after_startup_hooks = []
+ self._on_shutdown_hooks = []
+
+ self._lifespan_started = False
+
+ def _get_dependencies_overides_provider(self) -> Optional[Any]:
+ """Dependency provider WeakRef resolver."""
+ if self.dependency_overrides_provider is not None:
+ return self.dependency_overrides_provider
+ return next(iter(self.weak_dependencies_provider), None)
+
+ def _add_api_mq_route(
+ self,
+ dependencies: Iterable["params.Depends"],
+ response_model: Any,
+ response_model_include: Optional["IncEx"],
+ response_model_exclude: Optional["IncEx"],
+ response_model_by_alias: bool,
+ response_model_exclude_unset: bool,
+ response_model_exclude_defaults: bool,
+ response_model_exclude_none: bool,
+ ) -> Callable[
+ [Callable[..., Any]],
+ Callable[["StreamMessage[Any]"], Awaitable[Any]],
+ ]:
+        """Decorator applied before `broker.subscriber` that wraps the endpoint into a FastAPI-compatible function."""
+
+ def wrapper(
+ endpoint: Callable[..., Any],
+ ) -> Callable[["StreamMessage[Any]"], Awaitable[Any]]:
+ """Patch user function to make it FastAPI-compatible."""
+ return wrap_callable_to_fastapi_compatible(
+ user_callable=endpoint,
+ dependencies=dependencies,
+ response_model=response_model,
+ response_model_include=response_model_include,
+ response_model_exclude=response_model_exclude,
+ response_model_by_alias=response_model_by_alias,
+ response_model_exclude_unset=response_model_exclude_unset,
+ response_model_exclude_defaults=response_model_exclude_defaults,
+ response_model_exclude_none=response_model_exclude_none,
+ provider_factory=self._get_dependencies_overides_provider,
+ state=self._state.di_state,
+ )
+
+ return wrapper
+
+ def subscriber(
+ self,
+ *extra: Union["NameRequired", str],
+ dependencies: Iterable["params.Depends"],
+ response_model: Any,
+ response_model_include: Optional["IncEx"],
+ response_model_exclude: Optional["IncEx"],
+ response_model_by_alias: bool,
+ response_model_exclude_unset: bool,
+ response_model_exclude_defaults: bool,
+ response_model_exclude_none: bool,
+ **broker_kwargs: Any,
+ ) -> Callable[
+ [Callable[P_HandlerParams, T_HandlerReturn]],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ]:
+ """A function decorator for subscribing to a message queue."""
+ dependencies = (*self.dependencies, *dependencies)
+
+ sub = self.broker.subscriber( # type: ignore[call-arg]
+ *extra, # type: ignore[arg-type]
+ dependencies=dependencies,
+ **broker_kwargs,
+ )
+
+ sub._call_decorators = ( # type: ignore[attr-defined]
+ self._add_api_mq_route(
+ dependencies=dependencies,
+ response_model=response_model,
+ response_model_include=response_model_include,
+ response_model_exclude=response_model_exclude,
+ response_model_by_alias=response_model_by_alias,
+ response_model_exclude_unset=response_model_exclude_unset,
+ response_model_exclude_defaults=response_model_exclude_defaults,
+ response_model_exclude_none=response_model_exclude_none,
+ ),
+ )
+
+ return sub
+
+ def _wrap_lifespan(
+ self,
+ lifespan: Optional["Lifespan[Any]"] = None,
+ ) -> "Lifespan[Any]":
+ lifespan_context = lifespan if lifespan is not None else _DefaultLifespan(self)
+
+ @asynccontextmanager
+ async def start_broker_lifespan(
+ app: "FastAPI",
+ ) -> AsyncIterator[Optional[Mapping[str, Any]]]:
+ """Starts the lifespan of a broker."""
+ if not len(self.weak_dependencies_provider):
+ self.weak_dependencies_provider.add(app)
+
+ if self.docs_router:
+ self.title = app.title
+ self.description = app.description
+ self.version = app.version
+ self.contact = app.contact
+ self.license = app.license_info
+
+ from faststream.specification.asyncapi import AsyncAPI
+
+ self.schema = AsyncAPI(
+ self.broker,
+ title=self.title,
+ description=self.description,
+ app_version=self.version,
+ contact=self.contact,
+ license=self.license,
+ schema_version="3.0.0",
+ )
+
+ app.include_router(self.docs_router)
+
+ if not len(self.weak_dependencies_provider):
+ self.weak_dependencies_provider.add(app)
+
+ async with lifespan_context(app) as maybe_context:
+ lifespan_extra = {"broker": self.broker, **(maybe_context or {})}
+
+ if not self._lifespan_started:
+ await self._start_broker()
+ self._lifespan_started = True
+ else:
+ warnings.warn(
+ "Specifying 'lifespan_context' manually is no longer necessary with FastAPI >= 0.112.2.",
+ category=RuntimeWarning,
+ stacklevel=2,
+ )
+
+ for h in self._after_startup_hooks:
+ lifespan_extra.update(await h(app) or {})
+
+ try:
+ if self.setup_state:
+ yield lifespan_extra
+ else:
+                    # NOTE: compatibility with old ASGI versions
+ yield None
+
+ for h in self._on_shutdown_hooks:
+ await h(app)
+
+ finally:
+ await self.broker.close()
+
+ return start_broker_lifespan # type: ignore[return-value]
+
+ @overload
+ def after_startup(
+ self,
+ func: Callable[["AppType"], Mapping[str, Any]],
+ ) -> Callable[["AppType"], Mapping[str, Any]]: ...
+
+ @overload
+ def after_startup(
+ self,
+ func: Callable[["AppType"], Awaitable[Mapping[str, Any]]],
+ ) -> Callable[["AppType"], Awaitable[Mapping[str, Any]]]: ...
+
+ @overload
+ def after_startup(
+ self,
+ func: Callable[["AppType"], None],
+ ) -> Callable[["AppType"], None]: ...
+
+ @overload
+ def after_startup(
+ self,
+ func: Callable[["AppType"], Awaitable[None]],
+ ) -> Callable[["AppType"], Awaitable[None]]: ...
+
+ def after_startup(
+ self,
+ func: Union[
+ Callable[["AppType"], Mapping[str, Any]],
+ Callable[["AppType"], Awaitable[Mapping[str, Any]]],
+ Callable[["AppType"], None],
+ Callable[["AppType"], Awaitable[None]],
+ ],
+ ) -> Union[
+ Callable[["AppType"], Mapping[str, Any]],
+ Callable[["AppType"], Awaitable[Mapping[str, Any]]],
+ Callable[["AppType"], None],
+ Callable[["AppType"], Awaitable[None]],
+ ]:
+ """Register a function to be executed after startup."""
+ self._after_startup_hooks.append(to_async(func))
+ return func
+
+ @overload
+ def on_broker_shutdown(
+ self,
+ func: Callable[["AppType"], None],
+ ) -> Callable[["AppType"], None]: ...
+
+ @overload
+ def on_broker_shutdown(
+ self,
+ func: Callable[["AppType"], Awaitable[None]],
+ ) -> Callable[["AppType"], Awaitable[None]]: ...
+
+ def on_broker_shutdown(
+ self,
+ func: Union[
+ Callable[["AppType"], None],
+ Callable[["AppType"], Awaitable[None]],
+ ],
+ ) -> Union[
+ Callable[["AppType"], None],
+ Callable[["AppType"], Awaitable[None]],
+ ]:
+        """Register a function to be executed before the broker stops."""
+ self._on_shutdown_hooks.append(to_async(func))
+ return func
+
+ @abstractmethod
+ def publisher(self) -> "PublisherProto[MsgType]":
+ """Create Publisher object."""
+ raise NotImplementedError
+
+ def _asyncapi_router(self, schema_url: Optional[str]) -> Optional[APIRouter]:
+ """Creates an API router for serving AsyncAPI documentation."""
+ if not self.include_in_schema or not schema_url:
+ return None
+
+ def download_app_json_schema() -> Response:
+ assert ( # nosec B101
+ self.schema
+            ), "You need to run the application lifespan first"
+
+ return Response(
+ content=json.dumps(self.schema.to_jsonable(), indent=2),
+ headers={"Content-Type": "application/octet-stream"},
+ )
+
+ def download_app_yaml_schema() -> Response:
+ assert ( # nosec B101
+ self.schema
+            ), "You need to run the application lifespan first"
+
+ return Response(
+ content=self.schema.to_yaml(),
+ headers={
+ "Content-Type": "application/octet-stream",
+ },
+ )
+
+ def serve_asyncapi_schema(
+ sidebar: bool = True,
+ info: bool = True,
+ servers: bool = True,
+ operations: bool = True,
+ messages: bool = True,
+ schemas: bool = True,
+ errors: bool = True,
+ expandMessageExamples: bool = True,
+ ) -> HTMLResponse:
+ """Serve the AsyncAPI schema as an HTML response."""
+ assert ( # nosec B101
+ self.schema
+            ), "You need to run the application lifespan first"
+
+ return HTMLResponse(
+ content=get_asyncapi_html(
+ self.schema,
+ sidebar=sidebar,
+ info=info,
+ servers=servers,
+ operations=operations,
+ messages=messages,
+ schemas=schemas,
+ errors=errors,
+ expand_message_examples=expandMessageExamples,
+ ),
+ )
+
+ docs_router = APIRouter(
+ prefix=self.prefix,
+ tags=["asyncapi"],
+ redirect_slashes=self.redirect_slashes,
+ default=self.default,
+ deprecated=self.deprecated,
+ )
+ docs_router.get(schema_url)(serve_asyncapi_schema)
+ docs_router.get(f"{schema_url}.json")(download_app_json_schema)
+ docs_router.get(f"{schema_url}.yaml")(download_app_yaml_schema)
+ return docs_router
+
+ def include_router( # type: ignore[override]
+ self,
+ router: Union["StreamRouter[MsgType]", "BrokerRouter[MsgType]"],
+ *,
+ prefix: str = "",
+ tags: Optional[list[Union[str, Enum]]] = None,
+ dependencies: Optional[Sequence["params.Depends"]] = None,
+ default_response_class: type[Response] = Default(JSONResponse),
+ responses: Optional[dict[Union[int, str], "AnyDict"]] = None,
+ callbacks: Optional[list["BaseRoute"]] = None,
+ deprecated: Optional[bool] = None,
+ include_in_schema: bool = True,
+ generate_unique_id_function: Callable[["APIRoute"], str] = Default(
+ generate_unique_id,
+ ),
+ ) -> None:
+ """Includes a router in the API."""
+ if isinstance(router, BrokerRouter):
+ for sub in router._subscribers:
+ sub._call_decorators = ( # type: ignore[attr-defined]
+ self._add_api_mq_route(
+ dependencies=(),
+ response_model=Default(None),
+ response_model_include=None,
+ response_model_exclude=None,
+ response_model_by_alias=True,
+ response_model_exclude_unset=False,
+ response_model_exclude_defaults=False,
+ response_model_exclude_none=False,
+ ),
+ )
+
+ self.broker.include_router(router)
+ return
+
+ if isinstance(router, StreamRouter): # pragma: no branch
+ router.lifespan_context = fake_context
+ self.broker.include_router(router.broker)
+ router.weak_dependencies_provider = self.weak_dependencies_provider
+
+ super().include_router(
+ router=router,
+ prefix=prefix,
+ tags=tags,
+ dependencies=dependencies,
+ default_response_class=default_response_class,
+ responses=responses,
+ callbacks=callbacks,
+ deprecated=deprecated,
+ include_in_schema=include_in_schema,
+ generate_unique_id_function=generate_unique_id_function,
+ )
diff --git a/faststream/_internal/log/__init__.py b/faststream/_internal/log/__init__.py
new file mode 100644
index 0000000000..c9139b092b
--- /dev/null
+++ b/faststream/_internal/log/__init__.py
@@ -0,0 +1,3 @@
+from faststream._internal.log.logging import logger
+
+__all__ = ("logger",)
diff --git a/faststream/log/formatter.py b/faststream/_internal/log/formatter.py
similarity index 98%
rename from faststream/log/formatter.py
rename to faststream/_internal/log/formatter.py
index fe1aa3a267..0e8608051c 100644
--- a/faststream/log/formatter.py
+++ b/faststream/_internal/log/formatter.py
@@ -37,7 +37,7 @@ def __init__(
use one of %-formatting, :meth:`str.format` (``{}``) formatting or
:class:`string.Template` formatting in your format string.
"""
- if use_colors in (True, False):
+ if use_colors in {True, False}:
self.use_colors = use_colors
else:
self.use_colors = sys.stdout.isatty()
diff --git a/faststream/_internal/log/logging.py b/faststream/_internal/log/logging.py
new file mode 100644
index 0000000000..2fa346ca8a
--- /dev/null
+++ b/faststream/_internal/log/logging.py
@@ -0,0 +1,75 @@
+import logging
+import sys
+from collections.abc import Mapping
+from logging import LogRecord
+from typing import TYPE_CHECKING
+
+from faststream._internal.log.formatter import ColourizedFormatter
+
+if TYPE_CHECKING:
+ from faststream._internal.context.repository import ContextRepo
+
+
+logger = logging.getLogger("faststream")
+logger.setLevel(logging.INFO)
+logger.propagate = False
+main_handler = logging.StreamHandler(stream=sys.stderr)
+main_handler.setFormatter(
+ ColourizedFormatter(
+ fmt="%(asctime)s %(levelname)8s - %(message)s",
+ use_colors=True,
+ ),
+)
+logger.addHandler(main_handler)
+
+
+class ExtendedFilter(logging.Filter):
+ def __init__(
+ self,
+ default_context: Mapping[str, str],
+ message_id_ln: int,
+ context: "ContextRepo",
+ name: str = "",
+ ) -> None:
+ self.default_context = default_context
+ self.message_id_ln = message_id_ln
+ self.context = context
+ super().__init__(name)
+
+ def filter(self, record: LogRecord) -> bool:
+ if is_suitable := super().filter(record):
+ log_context: Mapping[str, str] = self.context.get_local(
+ "log_context",
+ self.default_context,
+ )
+
+ for k, v in log_context.items():
+ value = getattr(record, k, v)
+ setattr(record, k, value)
+
+ record.message_id = getattr(record, "message_id", "")[: self.message_id_ln]
+
+ return is_suitable
+
+
+def get_broker_logger(
+ name: str,
+ default_context: Mapping[str, str],
+ message_id_ln: int,
+ fmt: str,
+ context: "ContextRepo",
+ log_level: int,
+) -> logging.Logger:
+ logger = logging.getLogger(f"faststream.access.{name}")
+ logger.setLevel(log_level)
+ logger.propagate = False
+ logger.addFilter(ExtendedFilter(default_context, message_id_ln, context=context))
+ handler = logging.StreamHandler(stream=sys.stdout)
+ handler.setFormatter(
+ ColourizedFormatter(
+ fmt=fmt,
+ use_colors=True,
+ ),
+ )
+ logger.addHandler(handler)
+ return logger
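A minimal sketch of wiring `get_broker_logger`, using a stub context object that satisfies the only call `ExtendedFilter` makes (`get_local`); the stub stands in for the real `ContextRepo`:

```python
# Sketch, assuming this module's import path; StubContext is illustrative.
import logging

from faststream._internal.log.logging import get_broker_logger


class StubContext:
    def get_local(self, key, default=None):
        return {"queue": "orders"}  # pretend per-message log context


log = get_broker_logger(
    name="demo",
    default_context={"queue": ""},
    message_id_ln=10,
    fmt="%(asctime)s %(levelname)s - %(queue)s - %(message_id)s - %(message)s",
    context=StubContext(),  # stub standing in for the real ContextRepo
    log_level=logging.INFO,
)
# ExtendedFilter copies "queue" onto the record and truncates message_id to 10 chars.
log.info("consumed", extra={"message_id": "abcdef0123456789"})
```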
diff --git a/faststream/_internal/middlewares.py b/faststream/_internal/middlewares.py
new file mode 100644
index 0000000000..3177dd45a0
--- /dev/null
+++ b/faststream/_internal/middlewares.py
@@ -0,0 +1,117 @@
+from collections.abc import Awaitable
+from typing import TYPE_CHECKING, Any, Callable, Generic, Optional
+
+from typing_extensions import Self
+
+from faststream._internal.types import AnyMsg, PublishCommandType
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream.message import StreamMessage
+
+
+class BaseMiddleware(Generic[PublishCommandType, AnyMsg]):
+ """A base middleware class."""
+
+ def __init__(
+ self,
+ msg: Optional[AnyMsg],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> None:
+ self.msg = msg
+ self.context = context
+
+ async def on_receive(self) -> None:
+ """Hook to call on message receive."""
+
+ async def after_processed(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ """Asynchronously called after processing."""
+ return False
+
+ async def __aenter__(self) -> Self:
+ await self.on_receive()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ """Exit the asynchronous context manager."""
+ return await self.after_processed(exc_type, exc_val, exc_tb)
+
+ async def on_consume(
+ self,
+ msg: "StreamMessage[AnyMsg]",
+ ) -> "StreamMessage[AnyMsg]":
+        """This method is deprecated and will be removed in 0.7.0. Please use `consume_scope` instead."""
+ return msg
+
+ async def after_consume(self, err: Optional[Exception]) -> None:
+        """This method is deprecated and will be removed in 0.7.0. Please use `consume_scope` instead."""
+ if err is not None:
+ raise err
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[AnyMsg]",
+ ) -> Any:
+        """Wrap message consumption in the middleware scope."""
+ err: Optional[Exception] = None
+ try:
+ result = await call_next(await self.on_consume(msg))
+
+ except Exception as e:
+ err = e
+
+ else:
+ return result
+
+ finally:
+ await self.after_consume(err)
+
+ async def on_publish(
+ self,
+ msg: PublishCommandType,
+ ) -> PublishCommandType:
+        """This method is deprecated and will be removed in 0.7.0. Please use `publish_scope` instead."""
+ return msg
+
+ async def after_publish(
+ self,
+ err: Optional[Exception],
+ ) -> None:
+        """This method is deprecated and will be removed in 0.7.0. Please use `publish_scope` instead."""
+ if err is not None:
+ raise err
+
+ async def publish_scope(
+ self,
+ call_next: Callable[[PublishCommandType], Awaitable[Any]],
+ cmd: PublishCommandType,
+ ) -> Any:
+        """Wrap message publishing in the middleware scope."""
+ err: Optional[Exception] = None
+ try:
+ result = await call_next(await self.on_publish(cmd))
+
+ except Exception as e:
+ err = e
+
+ else:
+ return result
+
+ finally:
+ await self.after_publish(err)
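A minimal sketch of a user middleware on this base class, overriding `consume_scope` (the documented replacement for the deprecated hooks) to time each handler call; the import uses the public re-export path also seen in `router.py` above:

```python
# Minimal user-middleware sketch: time every message handled.
import time
from typing import Any

from faststream.middlewares import BaseMiddleware


class TimingMiddleware(BaseMiddleware):
    async def consume_scope(self, call_next, msg) -> Any:
        started = time.monotonic()
        try:
            # call_next runs the rest of the consumption pipeline + handler
            return await call_next(msg)
        finally:
            print(f"handled in {time.monotonic() - started:.3f}s")
```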
diff --git a/faststream/_internal/proto.py b/faststream/_internal/proto.py
new file mode 100644
index 0000000000..9eb8ed33aa
--- /dev/null
+++ b/faststream/_internal/proto.py
@@ -0,0 +1,78 @@
+from abc import abstractmethod
+from typing import Any, Callable, Optional, Protocol, TypeVar, Union, overload
+
+from faststream._internal.subscriber.call_wrapper import (
+ HandlerCallWrapper,
+ ensure_call_wrapper,
+)
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+
+
+class EndpointWrapper(Protocol[MsgType]):
+ def __call__(
+ self,
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn],
+ ],
+ ) -> HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]:
+ handler: HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn] = (
+ ensure_call_wrapper(func)
+ )
+ return handler
+
+
+class Endpoint(EndpointWrapper[MsgType]):
+ @abstractmethod
+ def add_prefix(self, prefix: str) -> None: ...
+
+
+NameRequiredCls = TypeVar("NameRequiredCls", bound="NameRequired")
+
+
+class NameRequired:
+    """Base class for options that require a `name` value."""
+
+ def __eq__(self, value: object, /) -> bool:
+ """Compares the current object with another object for equality."""
+ if value is None:
+ return False
+
+ if not isinstance(value, NameRequired):
+ return NotImplemented
+
+ return self.name == value.name
+
+ def __init__(self, name: str) -> None:
+ self.name = name
+
+ @overload
+ @classmethod
+ def validate(
+ cls: type[NameRequiredCls],
+ value: Union[str, NameRequiredCls],
+ **kwargs: Any,
+ ) -> NameRequiredCls: ...
+
+ @overload
+ @classmethod
+ def validate(
+ cls: type[NameRequiredCls],
+ value: None,
+ **kwargs: Any,
+ ) -> None: ...
+
+ @classmethod
+ def validate(
+ cls: type[NameRequiredCls],
+ value: Union[str, NameRequiredCls, None],
+ **kwargs: Any,
+ ) -> Optional[NameRequiredCls]:
+        """Factory creating an instance from a string; existing instances and None pass through."""
+ if value is not None and isinstance(value, str):
+ value = cls(value, **kwargs)
+ return value
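A short illustration of the `NameRequired.validate` factory with a hypothetical subclass:

```python
# Illustration only; Queue is a made-up NameRequired subclass.
from faststream._internal.proto import NameRequired


class Queue(NameRequired):
    """Hypothetical named option."""


assert Queue.validate("orders") == Queue("orders")          # str is promoted
assert Queue.validate(Queue("orders")) == Queue("orders")   # passthrough
assert Queue.validate(None) is None                         # None is preserved
```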
diff --git a/faststream/broker/wrapper/__init__.py b/faststream/_internal/publisher/__init__.py
similarity index 100%
rename from faststream/broker/wrapper/__init__.py
rename to faststream/_internal/publisher/__init__.py
diff --git a/faststream/_internal/publisher/fake.py b/faststream/_internal/publisher/fake.py
new file mode 100644
index 0000000000..fc69628816
--- /dev/null
+++ b/faststream/_internal/publisher/fake.py
@@ -0,0 +1,72 @@
+from abc import abstractmethod
+from collections.abc import Iterable
+from functools import partial
+from typing import TYPE_CHECKING, Any, Optional
+
+from faststream._internal.basic_types import SendableMessage
+from faststream._internal.publisher.proto import BasePublisherProto
+from faststream.response.publish_type import PublishType
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AsyncFunc
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream._internal.types import PublisherMiddleware
+ from faststream.response.response import PublishCommand
+
+
+class FakePublisher(BasePublisherProto):
+    """Publisher interface implementation used to answer RPC or `reply-to` messages."""
+
+ def __init__(
+ self,
+ *,
+ producer: "ProducerProto",
+ ) -> None:
+ """Initialize an object."""
+ self._producer = producer
+
+ @abstractmethod
+ def patch_command(self, cmd: "PublishCommand") -> "PublishCommand":
+ cmd.publish_type = PublishType.REPLY
+ return cmd
+
+ async def _publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> Any:
+        """This method should be called in the subscriber flow only."""
+ cmd = self.patch_command(cmd)
+
+ call: AsyncFunc = self._producer.publish
+ for m in _extra_middlewares:
+ call = partial(m, call)
+
+ return await call(cmd)
+
+ async def publish(
+ self,
+ message: SendableMessage,
+ /,
+ *,
+ correlation_id: Optional[str] = None,
+ ) -> Optional[Any]:
+ msg = (
+ f"`{self.__class__.__name__}` can be used only to publish "
+ "a response for `reply-to` or `RPC` messages."
+ )
+ raise NotImplementedError(msg)
+
+ async def request(
+ self,
+ message: "SendableMessage",
+ /,
+ *,
+ correlation_id: Optional[str] = None,
+ ) -> Any:
+ msg = (
+ f"`{self.__class__.__name__}` can be used only to publish "
+ "a response for `reply-to` or `RPC` messages."
+ )
+ raise NotImplementedError(msg)
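A sketch of a concrete reply publisher built on `FakePublisher`; the `destination` field is hypothetical, standing in for whatever the broker-specific `PublishCommand` actually carries:

```python
# Sketch of a broker-specific reply publisher (names illustrative).
from faststream._internal.publisher.fake import FakePublisher


class ReplyToPublisher(FakePublisher):
    def __init__(self, *, producer, reply_to: str) -> None:
        super().__init__(producer=producer)
        self.reply_to = reply_to

    def patch_command(self, cmd):
        cmd = super().patch_command(cmd)  # marks the command as PublishType.REPLY
        cmd.destination = self.reply_to   # hypothetical broker-specific field
        return cmd
```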
diff --git a/faststream/_internal/publisher/proto.py b/faststream/_internal/publisher/proto.py
new file mode 100644
index 0000000000..ab3dabbfb9
--- /dev/null
+++ b/faststream/_internal/publisher/proto.py
@@ -0,0 +1,113 @@
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+ Protocol,
+)
+
+from faststream._internal.proto import Endpoint
+from faststream._internal.types import (
+ MsgType,
+)
+from faststream.response.response import PublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.types import (
+ AsyncCallable,
+ BrokerMiddleware,
+ PublisherMiddleware,
+ )
+ from faststream.response.response import PublishCommand
+
+
+class ProducerProto(Protocol):
+ _parser: "AsyncCallable"
+ _decoder: "AsyncCallable"
+
+ @abstractmethod
+ async def publish(self, cmd: "PublishCommand") -> Optional[Any]:
+ """Publishes a message asynchronously."""
+ ...
+
+ @abstractmethod
+ async def request(self, cmd: "PublishCommand") -> Any:
+        """Sends a request message and waits for the response."""
+ ...
+
+ @abstractmethod
+ async def publish_batch(self, cmd: "PublishCommand") -> Any:
+        """Publishes a batch of messages asynchronously."""
+ ...
+
+
+class ProducerFactory(Protocol):
+ def __call__(
+ self, parser: "AsyncCallable", decoder: "AsyncCallable"
+ ) -> ProducerProto: ...
+
+
+class BasePublisherProto(Protocol):
+ @abstractmethod
+ async def publish(
+ self,
+ message: "SendableMessage",
+ /,
+ *,
+ correlation_id: Optional[str] = None,
+ ) -> Optional[Any]:
+ """Public method to publish a message.
+
+        Should only be called by the user via `broker.publisher(...).publish(...)`.
+ """
+ ...
+
+ @abstractmethod
+ async def _publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """Private method to publish a message.
+
+        Should be called inside the `publish` method or as a step of the `consume` scope.
+ """
+ ...
+
+ @abstractmethod
+ async def request(
+ self,
+ message: "SendableMessage",
+ /,
+ *,
+ correlation_id: Optional[str] = None,
+ ) -> Optional[Any]:
+        """Sends a request message and waits for the response."""
+ ...
+
+
+class PublisherProto(
+ Endpoint[MsgType],
+ BasePublisherProto,
+):
+ _broker_middlewares: Sequence["BrokerMiddleware[MsgType]"]
+ _middlewares: Sequence["PublisherMiddleware"]
+
+ @property
+ @abstractmethod
+ def _producer(self) -> "ProducerProto": ...
+
+ @abstractmethod
+ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ...
+
+ @abstractmethod
+ def _setup(
+ self,
+ *,
+ state: "Pointer[BrokerState]",
+ producer: "ProducerProto",
+ ) -> None: ...
diff --git a/faststream/_internal/publisher/specified.py b/faststream/_internal/publisher/specified.py
new file mode 100644
index 0000000000..db9c64a974
--- /dev/null
+++ b/faststream/_internal/publisher/specified.py
@@ -0,0 +1,92 @@
+from inspect import Parameter, unwrap
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+
+from fast_depends.core import build_call_model
+from fast_depends.pydantic._compat import create_model, get_config_base
+
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+from faststream.specification.asyncapi.message import get_model_schema
+from faststream.specification.asyncapi.utils import to_camelcase
+from faststream.specification.proto import EndpointSpecification
+from faststream.specification.schema import PublisherSpec
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyCallable, AnyDict
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.subscriber.call_wrapper import HandlerCallWrapper
+
+
+class SpecificationPublisher(EndpointSpecification[MsgType, PublisherSpec]):
+    """A base class generating the AsyncAPI specification for publishers."""
+
+ _state: "Pointer[BrokerState]" # should be set in next parent
+
+ def __init__(
+ self,
+ *args: Any,
+ schema_: Optional[Any],
+ **kwargs: Any,
+ ) -> None:
+ self.calls: list[AnyCallable] = []
+
+ self.schema_ = schema_
+
+ super().__init__(*args, **kwargs)
+
+ def __call__(
+ self,
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ],
+ ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
+ handler = super().__call__(func)
+ self.calls.append(handler._original_call)
+ return handler
+
+ def get_payloads(self) -> list[tuple["AnyDict", str]]:
+ payloads: list[tuple[AnyDict, str]] = []
+
+ if self.schema_:
+ body = get_model_schema(
+ call=create_model(
+ "",
+ __config__=get_config_base(),
+ response__=(self.schema_, ...),
+ ),
+ prefix=f"{self.name}:Message",
+ )
+
+ if body: # pragma: no branch
+ payloads.append((body, ""))
+
+ else:
+ di_state = self._state.get().di_state
+
+ for call in self.calls:
+ call_model = build_call_model(
+ call,
+ dependency_provider=di_state.provider,
+ serializer_cls=di_state.serializer,
+ )
+
+ response_type = next(
+ iter(call_model.serializer.response_option.values())
+ ).field_type
+ if response_type is not None and response_type is not Parameter.empty:
+ body = get_model_schema(
+ create_model(
+ "",
+ __config__=get_config_base(),
+ response__=(response_type, ...),
+ ),
+ prefix=f"{self.name}:Message",
+ )
+ if body:
+ payloads.append((body, to_camelcase(unwrap(call).__name__)))
+
+ return payloads
diff --git a/faststream/_internal/publisher/usecase.py b/faststream/_internal/publisher/usecase.py
new file mode 100644
index 0000000000..662430fbbb
--- /dev/null
+++ b/faststream/_internal/publisher/usecase.py
@@ -0,0 +1,180 @@
+from collections.abc import Awaitable, Iterable, Sequence
+from functools import partial
+from itertools import chain
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+ Union,
+)
+from unittest.mock import MagicMock
+
+from typing_extensions import override
+
+from faststream._internal.publisher.proto import PublisherProto
+from faststream._internal.state import BrokerState, EmptyBrokerState, Pointer
+from faststream._internal.state.producer import ProducerUnset
+from faststream._internal.subscriber.call_wrapper import (
+ HandlerCallWrapper,
+)
+from faststream._internal.subscriber.utils import process_msg
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+from faststream.message.source_type import SourceType
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ PublisherMiddleware,
+ )
+ from faststream.response.response import PublishCommand
+
+
+class PublisherUsecase(PublisherProto[MsgType]):
+ """A base class for publishers in an asynchronous API."""
+
+ def __init__(
+ self,
+ *,
+ broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ ) -> None:
+ self.middlewares = middlewares
+ self._broker_middlewares = broker_middlewares
+
+ self.__producer: Optional[ProducerProto] = ProducerUnset()
+
+ self._fake_handler = False
+ self.mock: Optional[MagicMock] = None
+
+ self._state: Pointer[BrokerState] = Pointer(
+            EmptyBrokerState("You should include the publisher in a broker."),
+ )
+
+ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
+ self._broker_middlewares = (*self._broker_middlewares, middleware)
+
+ @property
+ def _producer(self) -> "ProducerProto":
+ return self.__producer or self._state.get().producer
+
+ @override
+ def _setup(
+ self,
+ *,
+ state: "Pointer[BrokerState]",
+ producer: Optional["ProducerProto"] = None,
+ ) -> None:
+ self._state = state
+ self.__producer = producer
+
+ def set_test(
+ self,
+ *,
+ mock: MagicMock,
+ with_fake: bool,
+ ) -> None:
+ """Turn publisher to testing mode."""
+ self.mock = mock
+ self._fake_handler = with_fake
+
+ def reset_test(self) -> None:
+ """Turn off publisher's testing mode."""
+ self._fake_handler = False
+ self.mock = None
+
+ def __call__(
+ self,
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn],
+ ],
+ ) -> HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]:
+ """Decorate user's function by current publisher."""
+ handler = super().__call__(func)
+ handler._publishers.append(self)
+ return handler
+
+ async def _basic_publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> Any:
+ pub: Callable[..., Awaitable[Any]] = self._producer.publish
+
+ context = self._state.get().di_state.context
+
+ for pub_m in chain(
+ self.middlewares[::-1],
+ (
+ _extra_middlewares
+ or (
+ m(None, context=context).publish_scope
+ for m in self._broker_middlewares[::-1]
+ )
+ ),
+ ):
+ pub = partial(pub_m, pub)
+
+ return await pub(cmd)
+
+ async def _basic_request(
+ self,
+ cmd: "PublishCommand",
+ ) -> Optional[Any]:
+ request = self._producer.request
+
+ context = self._state.get().di_state.context
+
+ for pub_m in chain(
+ self.middlewares[::-1],
+ (
+ m(None, context=context).publish_scope
+ for m in self._broker_middlewares[::-1]
+ ),
+ ):
+ request = partial(pub_m, request)
+
+ published_msg = await request(cmd)
+
+ response_msg: Any = await process_msg(
+ msg=published_msg,
+ middlewares=(
+ m(published_msg, context=context)
+ for m in self._broker_middlewares[::-1]
+ ),
+ parser=self._producer._parser,
+ decoder=self._producer._decoder,
+ source_type=SourceType.RESPONSE,
+ )
+ return response_msg
+
+ async def _basic_publish_batch(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> Any:
+ pub = self._producer.publish_batch
+
+ context = self._state.get().di_state.context
+
+ for pub_m in chain(
+ self.middlewares[::-1],
+ (
+ _extra_middlewares
+ or (
+ m(None, context=context).publish_scope
+ for m in self._broker_middlewares[::-1]
+ )
+ ),
+ ):
+ pub = partial(pub_m, pub)
+
+ return await pub(cmd)
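A standalone demonstration of the `functools.partial` folding used by `_basic_publish` and friends: middlewares are iterated in reverse, so the first middleware in the original sequence ends up outermost:

```python
# Self-contained demo of the partial-wrapping pattern above.
import asyncio
from functools import partial


async def send(cmd: str) -> str:
    return f"sent:{cmd}"

async def mw_a(call_next, cmd: str) -> str:
    return await call_next(cmd + ".a")

async def mw_b(call_next, cmd: str) -> str:
    return await call_next(cmd + ".b")

pub = send
for m in (mw_a, mw_b)[::-1]:  # reversed, like self.middlewares[::-1]
    pub = partial(m, pub)

# mw_a wraps mw_b, which wraps send -> "sent:cmd.a.b"
print(asyncio.run(pub("cmd")))
```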
diff --git a/faststream/_internal/state/__init__.py b/faststream/_internal/state/__init__.py
new file mode 100644
index 0000000000..47975c762b
--- /dev/null
+++ b/faststream/_internal/state/__init__.py
@@ -0,0 +1,18 @@
+from .application import BasicApplicationState, RunningApplicationState
+from .broker import BrokerState, EmptyBrokerState
+from .fast_depends import DIState
+from .logger import LoggerParamsStorage, LoggerState
+from .pointer import Pointer
+from .proto import SetupAble
+
+__all__ = (
+    "BasicApplicationState",
+    "BrokerState",
+    "DIState",
+    "EmptyBrokerState",
+    "LoggerParamsStorage",
+    "LoggerState",
+    "Pointer",
+    "RunningApplicationState",
+    "SetupAble",
+)
diff --git a/faststream/_internal/state/application.py b/faststream/_internal/state/application.py
new file mode 100644
index 0000000000..e63aa3be29
--- /dev/null
+++ b/faststream/_internal/state/application.py
@@ -0,0 +1,28 @@
+from abc import ABC, abstractmethod
+
+from faststream._internal.state.fast_depends import DIState
+
+
+class ApplicationState(ABC):
+ def __init__(self, di_state: DIState) -> None:
+ self._di_state = di_state
+
+ @property
+ @abstractmethod
+ def running(self) -> bool: ...
+
+ @property
+ def di_state(self) -> DIState:
+ return self._di_state
+
+
+class BasicApplicationState(ApplicationState):
+ @property
+ def running(self) -> bool:
+ return False
+
+
+class RunningApplicationState(ApplicationState):
+ @property
+ def running(self) -> bool:
+ return True
diff --git a/faststream/_internal/state/broker.py b/faststream/_internal/state/broker.py
new file mode 100644
index 0000000000..c108beb037
--- /dev/null
+++ b/faststream/_internal/state/broker.py
@@ -0,0 +1,102 @@
+from typing import TYPE_CHECKING, Optional, Protocol
+
+from faststream.exceptions import IncorrectState
+
+from .producer import ProducerUnset
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+
+ from .fast_depends import DIState
+ from .logger import LoggerState
+
+
+class BrokerState(Protocol):
+ di_state: "DIState"
+ logger_state: "LoggerState"
+ producer: "ProducerProto"
+
+ # Persistent variables
+ graceful_timeout: Optional[float]
+
+ def _setup(self) -> None: ...
+
+ def _setup_logger_state(self) -> None: ...
+
+ def __bool__(self) -> bool: ...
+
+
+class _EmptyBrokerState(BrokerState):
+ def __init__(self, error_msg: str) -> None:
+ self.error_msg = error_msg
+ self.producer = ProducerUnset()
+
+ @property
+ def logger_state(self) -> "LoggerState":
+ raise IncorrectState(self.error_msg)
+
+ @logger_state.setter
+ def logger_state(self, value: "LoggerState", /) -> None:
+ raise IncorrectState(self.error_msg)
+
+ @property
+ def graceful_timeout(self) -> Optional[float]:
+ raise IncorrectState(self.error_msg)
+
+ @graceful_timeout.setter
+ def graceful_timeout(self, value: Optional[float], /) -> None:
+ raise IncorrectState(self.error_msg)
+
+ def _setup(self) -> None:
+ pass
+
+ def _setup_logger_state(self) -> None:
+ pass
+
+ def __bool__(self) -> bool:
+ return False
+
+
+class EmptyBrokerState(_EmptyBrokerState):
+ @property
+ def di_state(self) -> "DIState":
+ raise IncorrectState(self.error_msg)
+
+ @di_state.setter
+ def di_state(self, value: "DIState", /) -> None:
+ raise IncorrectState(self.error_msg)
+
+
+class OuterBrokerState(_EmptyBrokerState):
+ def __init__(self, *, di_state: "DIState") -> None:
+ self.di_state = di_state
+
+ def __bool__(self) -> bool:
+ return True
+
+
+class InitialBrokerState(BrokerState):
+ def __init__(
+ self,
+ *,
+ di_state: "DIState",
+ logger_state: "LoggerState",
+ graceful_timeout: Optional[float],
+ producer: "ProducerProto",
+ ) -> None:
+ self.di_state = di_state
+ self.logger_state = logger_state
+
+ self.graceful_timeout = graceful_timeout
+ self.producer = producer
+
+ self.setupped = False
+
+ def _setup(self) -> None:
+ self.setupped = True
+
+ def _setup_logger_state(self) -> None:
+ self.logger_state._setup(context=self.di_state.context)
+
+ def __bool__(self) -> bool:
+ return self.setupped
diff --git a/faststream/_internal/state/fast_depends.py b/faststream/_internal/state/fast_depends.py
new file mode 100644
index 0000000000..a5e7a098ad
--- /dev/null
+++ b/faststream/_internal/state/fast_depends.py
@@ -0,0 +1,38 @@
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+from faststream._internal.constants import EMPTY
+
+if TYPE_CHECKING:
+ from fast_depends import Provider
+ from fast_depends.library.serializer import SerializerProto
+
+ from faststream._internal.basic_types import Decorator
+ from faststream._internal.context import ContextRepo
+
+
+@dataclass
+class DIState:
+ use_fastdepends: bool
+ get_dependent: Optional[Callable[..., Any]]
+ call_decorators: Sequence["Decorator"]
+ provider: "Provider"
+ serializer: Optional["SerializerProto"]
+ context: "ContextRepo"
+
+ def update(
+ self,
+ *,
+ provider: "Provider" = EMPTY,
+ serializer: Optional["SerializerProto"] = EMPTY,
+ context: "ContextRepo" = EMPTY,
+ ) -> None:
+ if provider is not EMPTY:
+ self.provider = provider
+
+ if serializer is not EMPTY:
+ self.serializer = serializer
+
+ if context is not EMPTY:
+ self.context = context
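A standalone sketch of the `EMPTY`-sentinel pattern `DIState.update` relies on, which distinguishes "argument not passed" from an explicit `None`:

```python
# Illustration only; EMPTY stands in for faststream._internal.constants.EMPTY.
EMPTY = object()


class Config:
    def __init__(self) -> None:
        self.serializer = "json"

    def update(self, *, serializer=EMPTY) -> None:
        if serializer is not EMPTY:  # None is a valid, explicit choice
            self.serializer = serializer


cfg = Config()
cfg.update()                 # nothing passed -> keeps "json"
cfg.update(serializer=None)  # explicit None -> disables the serializer
assert cfg.serializer is None
```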
diff --git a/faststream/_internal/state/logger/__init__.py b/faststream/_internal/state/logger/__init__.py
new file mode 100644
index 0000000000..466e24c689
--- /dev/null
+++ b/faststream/_internal/state/logger/__init__.py
@@ -0,0 +1,9 @@
+from .params_storage import DefaultLoggerStorage, LoggerParamsStorage
+from .state import LoggerState, make_logger_state
+
+__all__ = (
+ "DefaultLoggerStorage",
+ "LoggerParamsStorage",
+ "LoggerState",
+ "make_logger_state",
+)
diff --git a/faststream/_internal/state/logger/logger_proxy.py b/faststream/_internal/state/logger/logger_proxy.py
new file mode 100644
index 0000000000..0693fd184e
--- /dev/null
+++ b/faststream/_internal/state/logger/logger_proxy.py
@@ -0,0 +1,103 @@
+from abc import abstractmethod
+from collections.abc import Mapping
+from typing import Any, Optional
+
+from faststream._internal.basic_types import LoggerProto
+from faststream.exceptions import IncorrectState
+
+
+class LoggerObject(LoggerProto):
+ logger: Optional["LoggerProto"]
+
+ @abstractmethod
+ def __bool__(self) -> bool: ...
+
+
+class NotSetLoggerObject(LoggerObject):
+ """Default logger proxy for state.
+
+    Raises an error if the user tries to log anything before the state is set up.
+ """
+
+ def __init__(self) -> None:
+ self.logger = None
+
+ def __bool__(self) -> bool:
+ return False
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}()"
+
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None:
+        err_msg = "Logger object is not set. Please call `_setup_logger_state` on the parent broker state."
+ raise IncorrectState(err_msg)
+
+
+class EmptyLoggerObject(LoggerObject):
+ """Empty logger proxy for state.
+
+    Used when the user sets `logger=None`.
+ """
+
+ def __init__(self) -> None:
+ self.logger = None
+
+ def __bool__(self) -> bool:
+ return True
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}()"
+
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None:
+ pass
+
+
+class RealLoggerObject(LoggerObject):
+    """Real logger proxy for state.
+
+    Used when the user supplies a custom `logger` (.params_storage.ManualLoggerStorage)
+    or in the default logger case (.params_storage.DefaultLoggerStorage).
+ """
+
+ logger: "LoggerProto"
+
+ def __init__(self, logger: "LoggerProto") -> None:
+ self.logger = logger
+
+ def __bool__(self) -> bool:
+ return True
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(logger={self.logger})"
+
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None:
+ self.logger.log(
+ level,
+ msg,
+ extra=extra,
+ exc_info=exc_info,
+ )
diff --git a/faststream/_internal/state/logger/params_storage.py b/faststream/_internal/state/logger/params_storage.py
new file mode 100644
index 0000000000..c63e0b8e99
--- /dev/null
+++ b/faststream/_internal/state/logger/params_storage.py
@@ -0,0 +1,73 @@
+import warnings
+from abc import abstractmethod
+from typing import TYPE_CHECKING, Optional, Protocol
+
+from faststream._internal.constants import EMPTY
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
+
+
+def make_logger_storage(
+ logger: Optional["LoggerProto"],
+ log_fmt: Optional[str],
+ default_storage_cls: type["DefaultLoggerStorage"],
+) -> "LoggerParamsStorage":
+ if logger is EMPTY:
+ return default_storage_cls(log_fmt)
+
+ if log_fmt:
+ warnings.warn(
+            message="You can't set both a custom `logger` and `log_fmt`.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ return EmptyLoggerStorage() if logger is None else ManualLoggerStorage(logger)
+
+
+class LoggerParamsStorage(Protocol):
+ def setup_log_contest(self, params: "AnyDict") -> None: ...
+
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]: ...
+
+ def set_level(self, level: int) -> None: ...
+
+
+class EmptyLoggerStorage(LoggerParamsStorage):
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ pass
+
+ def get_logger(self, *, context: "ContextRepo") -> None:
+ return None
+
+ def set_level(self, level: int) -> None:
+ pass
+
+
+class ManualLoggerStorage(LoggerParamsStorage):
+ def __init__(self, logger: "LoggerProto") -> None:
+ self.__logger = logger
+
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ pass
+
+ def get_logger(self, *, context: "ContextRepo") -> "LoggerProto":
+ return self.__logger
+
+ def set_level(self, level: int) -> None:
+ if getattr(self.__logger, "setLevel", None):
+ self.__logger.setLevel(level) # type: ignore[attr-defined]
+
+
+class DefaultLoggerStorage(LoggerParamsStorage):
+ def __init__(self, log_fmt: Optional[str]) -> None:
+ self._log_fmt = log_fmt
+
+ @abstractmethod
+ def get_logger(self, *, context: "ContextRepo") -> "LoggerProto":
+ raise NotImplementedError
+
+ def set_level(self, level: int) -> None:
+ raise NotImplementedError
diff --git a/faststream/_internal/state/logger/state.py b/faststream/_internal/state/logger/state.py
new file mode 100644
index 0000000000..7fd495e020
--- /dev/null
+++ b/faststream/_internal/state/logger/state.py
@@ -0,0 +1,80 @@
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.state.proto import SetupAble
+
+from .logger_proxy import (
+ EmptyLoggerObject,
+ LoggerObject,
+ NotSetLoggerObject,
+ RealLoggerObject,
+)
+from .params_storage import (
+ DefaultLoggerStorage,
+ LoggerParamsStorage,
+ make_logger_storage,
+)
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
+
+
+def make_logger_state(
+ logger: Optional["LoggerProto"],
+ log_level: int,
+ log_fmt: Optional[str],
+ default_storage_cls: type["DefaultLoggerStorage"],
+) -> "LoggerState":
+ storage = make_logger_storage(
+ logger=logger,
+ log_fmt=log_fmt,
+ default_storage_cls=default_storage_cls,
+ )
+
+ return LoggerState(
+ log_level=log_level,
+ storage=storage,
+ )
+
+
+class LoggerState(SetupAble):
+ def __init__(
+ self,
+ log_level: int,
+ storage: LoggerParamsStorage,
+ ) -> None:
+ self.log_level = log_level
+ self.params_storage = storage
+
+ self.logger: LoggerObject = NotSetLoggerObject()
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(log_level={self.log_level}, logger={self.logger})"
+
+ def set_level(self, level: int) -> None:
+ self.params_storage.set_level(level)
+
+ def log(
+ self,
+ message: str,
+ log_level: Optional[int] = None,
+ extra: Optional["AnyDict"] = None,
+ exc_info: Optional[BaseException] = None,
+ ) -> None:
+ self.logger.log(
+ (log_level or self.log_level),
+ message,
+ extra=extra,
+ exc_info=exc_info,
+ )
+
+ def _setup( # type: ignore[override]
+ self,
+ *,
+ context: "ContextRepo",
+ ) -> None:
+ if not self.logger:
+ if logger := self.params_storage.get_logger(context=context):
+ self.logger = RealLoggerObject(logger)
+ else:
+ self.logger = EmptyLoggerObject()
diff --git a/faststream/_internal/state/pointer.py b/faststream/_internal/state/pointer.py
new file mode 100644
index 0000000000..dbe927d5f9
--- /dev/null
+++ b/faststream/_internal/state/pointer.py
@@ -0,0 +1,26 @@
+from typing import TYPE_CHECKING, Generic, TypeVar
+
+from typing_extensions import Self
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+
+T = TypeVar("T")
+
+
+class Pointer(Generic[T]):
+ __slots__ = ("__value",)
+
+ def __init__(self, value: T) -> None:
+ self.__value = value
+
+ def set(self, new_value: T) -> "Self":
+ self.__value = new_value
+ return self
+
+ def get(self) -> T:
+ return self.__value
+
+ def patch_value(self, **kwargs: "AnyDict") -> None:
+ for k, v in kwargs.items():
+ setattr(self.__value, k, v)
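A quick demonstration of `Pointer` as a shared, swappable indirection: references handed out earlier observe later state replacement:

```python
# Demo of the Pointer indirection introduced above.
from faststream._internal.state.pointer import Pointer

state = Pointer("initial")
alias = state            # handed out to publishers/subscribers early
state.set("running")     # swap the underlying value later
assert alias.get() == "running"  # every holder sees the new value
```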
diff --git a/faststream/_internal/state/producer.py b/faststream/_internal/state/producer.py
new file mode 100644
index 0000000000..30e6e6cf12
--- /dev/null
+++ b/faststream/_internal/state/producer.py
@@ -0,0 +1,37 @@
+from typing import TYPE_CHECKING, Any, Optional
+
+from faststream._internal.publisher.proto import ProducerProto
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from faststream._internal.types import AsyncCallable
+ from faststream.response import PublishCommand
+
+
+class ProducerUnset(ProducerProto):
+    msg = "Producer is not set yet. You should set the producer in the broker's initialization method."
+
+ @property
+ def _decoder(self) -> "AsyncCallable":
+ raise IncorrectState(self.msg)
+
+ @_decoder.setter
+ def _decoder(self, value: "AsyncCallable", /) -> None:
+ raise IncorrectState(self.msg)
+
+ @property
+ def _parser(self) -> "AsyncCallable":
+ raise IncorrectState(self.msg)
+
+ @_parser.setter
+ def _parser(self, value: "AsyncCallable", /) -> None:
+ raise IncorrectState(self.msg)
+
+ async def publish(self, cmd: "PublishCommand") -> Optional[Any]:
+ raise IncorrectState(self.msg)
+
+ async def request(self, cmd: "PublishCommand") -> Any:
+ raise IncorrectState(self.msg)
+
+ async def publish_batch(self, cmd: "PublishCommand") -> None:
+ raise IncorrectState(self.msg)
diff --git a/faststream/_internal/state/proto.py b/faststream/_internal/state/proto.py
new file mode 100644
index 0000000000..21b5eda882
--- /dev/null
+++ b/faststream/_internal/state/proto.py
@@ -0,0 +1,7 @@
+from abc import abstractmethod
+from typing import Protocol
+
+
+class SetupAble(Protocol):
+ @abstractmethod
+ def _setup(self) -> None: ...
diff --git a/faststream/cli/__init__.py b/faststream/_internal/subscriber/__init__.py
similarity index 100%
rename from faststream/cli/__init__.py
rename to faststream/_internal/subscriber/__init__.py
diff --git a/faststream/_internal/subscriber/call_item.py b/faststream/_internal/subscriber/call_item.py
new file mode 100644
index 0000000000..9858e833cd
--- /dev/null
+++ b/faststream/_internal/subscriber/call_item.py
@@ -0,0 +1,172 @@
+from collections.abc import Iterable, Sequence
+from functools import partial
+from inspect import unwrap
+from itertools import chain
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Generic,
+ Optional,
+ cast,
+)
+
+from typing_extensions import override
+
+from faststream._internal.state import SetupAble
+from faststream._internal.types import MsgType
+from faststream.exceptions import IgnoredException, SetupError
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AsyncFuncAny, Decorator
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.subscriber.call_wrapper import HandlerCallWrapper
+ from faststream._internal.types import (
+ AsyncCallable,
+ AsyncFilter,
+ CustomCallable,
+ SubscriberMiddleware,
+ )
+ from faststream.message import StreamMessage
+
+
+class HandlerItem(SetupAble, Generic[MsgType]):
+ """A class representing handler overloaded item."""
+
+ __slots__ = (
+ "dependant",
+ "dependencies",
+ "filter",
+ "handler",
+ "item_decoder",
+ "item_middlewares",
+ "item_parser",
+ )
+
+ dependant: Optional[Any]
+
+ def __init__(
+ self,
+ *,
+ handler: "HandlerCallWrapper[MsgType, ..., Any]",
+ filter: "AsyncFilter[StreamMessage[MsgType]]",
+ item_parser: Optional["CustomCallable"],
+ item_decoder: Optional["CustomCallable"],
+ item_middlewares: Sequence["SubscriberMiddleware[StreamMessage[MsgType]]"],
+ dependencies: Iterable["Dependant"],
+ ) -> None:
+ self.handler = handler
+ self.filter = filter
+ self.item_parser = item_parser
+ self.item_decoder = item_decoder
+ self.item_middlewares = item_middlewares
+ self.dependencies = dependencies
+ self.dependant = None
+
+ def __repr__(self) -> str:
+ filter_call = unwrap(self.filter)
+ filter_name = getattr(filter_call, "__name__", str(filter_call))
+ return f"<'{self.call_name}': filter='{filter_name}'>"
+
+ @override
+ def _setup( # type: ignore[override]
+ self,
+ *,
+ parser: "AsyncCallable",
+ decoder: "AsyncCallable",
+ state: "Pointer[BrokerState]",
+ broker_dependencies: Iterable["Dependant"],
+ _call_decorators: Iterable["Decorator"],
+ ) -> None:
+ if self.dependant is None:
+ di_state = state.get().di_state
+
+ self.item_parser = parser
+ self.item_decoder = decoder
+
+ dependencies = (*broker_dependencies, *self.dependencies)
+
+ dependant = self.handler.set_wrapped(
+ dependencies=dependencies,
+ _call_decorators=(*_call_decorators, *di_state.call_decorators),
+ state=di_state,
+ )
+
+ if di_state.get_dependent is None:
+ self.dependant = dependant
+ else:
+ self.dependant = di_state.get_dependent(
+ self.handler._original_call,
+ dependencies,
+ )
+
+ @property
+ def call_name(self) -> str:
+ """Returns the name of the original call."""
+ if self.handler is None:
+ return ""
+
+ caller = unwrap(self.handler._original_call)
+ return getattr(caller, "__name__", str(caller))
+
+ @property
+ def description(self) -> Optional[str]:
+ """Returns the description of original call."""
+ if self.handler is None:
+ return None
+
+ caller = unwrap(self.handler._original_call)
+ return getattr(caller, "__doc__", None)
+
+ async def is_suitable(
+ self,
+ msg: MsgType,
+ cache: dict[Any, Any],
+ ) -> Optional["StreamMessage[MsgType]"]:
+ """Check is message suite for current filter."""
+ if not (parser := cast("Optional[AsyncCallable]", self.item_parser)) or not (
+ decoder := cast("Optional[AsyncCallable]", self.item_decoder)
+ ):
+ error_msg = "You should setup `HandlerItem` at first."
+ raise SetupError(error_msg)
+
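+        # Parse each raw message at most once per parser: sibling handler
+        # items sharing a parser reuse the cached StreamMessage.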
+ message = cache[parser] = cast(
+ "StreamMessage[MsgType]",
+ cache.get(parser) or await parser(msg),
+ )
+
+        # NOTE: the final decoder is set on the message that passes the filter
+ message.set_decoder(decoder)
+
+ if await self.filter(message):
+ return message
+
+ return None
+
+ async def call(
+ self,
+ /,
+ message: "StreamMessage[MsgType]",
+ _extra_middlewares: Iterable["SubscriberMiddleware[Any]"],
+ ) -> Any:
+ """Execute wrapped handler with consume middlewares."""
+ call: AsyncFuncAny = self.handler.call_wrapped
+
+ for middleware in chain(self.item_middlewares[::-1], _extra_middlewares):
+ call = partial(middleware, call)
+
+ try:
+ result = await call(message)
+
+ except (IgnoredException, SystemExit):
+ self.handler.trigger()
+ raise
+
+ except Exception as e:
+ self.handler.trigger(error=e)
+ raise
+
+ else:
+ self.handler.trigger(result=result)
+ return result
diff --git a/faststream/_internal/subscriber/call_wrapper.py b/faststream/_internal/subscriber/call_wrapper.py
new file mode 100644
index 0000000000..dfe1b45dad
--- /dev/null
+++ b/faststream/_internal/subscriber/call_wrapper.py
@@ -0,0 +1,204 @@
+import asyncio
+from collections.abc import Awaitable, Iterable, Mapping, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Generic,
+ Optional,
+ Union,
+)
+from unittest.mock import MagicMock
+
+import anyio
+from fast_depends import inject
+from fast_depends.core import CallModel, build_call_model
+
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+from faststream._internal.utils.functions import to_async
+from faststream.exceptions import SetupError
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import Decorator
+ from faststream._internal.publisher.proto import PublisherProto
+ from faststream._internal.state.fast_depends import DIState
+ from faststream.message import StreamMessage
+
+
+def ensure_call_wrapper(
+ call: Union[
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ Callable[P_HandlerParams, T_HandlerReturn],
+ ],
+) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
+ if isinstance(call, HandlerCallWrapper):
+ return call
+
+ return HandlerCallWrapper(call)
+
+
+class HandlerCallWrapper(Generic[MsgType, P_HandlerParams, T_HandlerReturn]):
+ """A generic class to wrap handler calls."""
+
+ mock: Optional[MagicMock]
+ future: Optional["asyncio.Future[Any]"]
+ is_test: bool
+
+ _wrapped_call: Optional[Callable[..., Awaitable[Any]]]
+ _original_call: Callable[P_HandlerParams, T_HandlerReturn]
+ _publishers: list["PublisherProto[MsgType]"]
+
+ __slots__ = (
+ "_original_call",
+ "_publishers",
+ "_wrapped_call",
+ "future",
+ "is_test",
+ "mock",
+ )
+
+ def __init__(
+ self,
+ call: Callable[P_HandlerParams, T_HandlerReturn],
+ ) -> None:
+ """Initialize a handler."""
+ self._original_call = call
+ self._wrapped_call = None
+ self._publishers = []
+
+ self.mock = None
+ self.future = None
+ self.is_test = False
+
+ def __call__(
+ self,
+ *args: P_HandlerParams.args,
+ **kwargs: P_HandlerParams.kwargs,
+ ) -> T_HandlerReturn:
+ """Calls the object as a function."""
+ return self._original_call(*args, **kwargs)
+
+ async def call_wrapped(
+ self,
+ message: "StreamMessage[MsgType]",
+ ) -> Any:
+ """Calls the wrapped function with the given message."""
+ assert self._wrapped_call, "You should use `set_wrapped` first" # nosec B101
+ if self.is_test:
+ assert self.mock # nosec B101
+ self.mock(await message.decode())
+ return await self._wrapped_call(message)
+
+ async def wait_call(self, timeout: Optional[float] = None) -> None:
+ """Waits for a call with an optional timeout."""
+ assert ( # nosec B101
+ self.future is not None
+ ), "You can use this method only with TestClient"
+ with anyio.fail_after(timeout):
+ await self.future
+
+ def set_test(self) -> None:
+ self.is_test = True
+ if self.mock is None:
+ self.mock = MagicMock()
+ self.refresh(with_mock=True)
+
+ def reset_test(self) -> None:
+ self.is_test = False
+ self.mock = None
+ self.future = None
+
+ def trigger(
+ self,
+ result: Any = None,
+ error: Optional[BaseException] = None,
+ ) -> None:
+ if not self.is_test:
+ return
+
+ if self.future is None:
+ msg = "You can use this method only with TestClient"
+ raise SetupError(msg)
+
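+        # A resolved Future can't be reused, so replace it before recording
+        # the next result or exception.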
+ if self.future.done():
+ self.future = asyncio.Future()
+
+ if error:
+ self.future.set_exception(error)
+ else:
+ self.future.set_result(result)
+
+ def refresh(self, with_mock: bool = False) -> None:
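+        # `_get_running_loop()` returns None instead of raising outside a
+        # loop, so the Future is only (re)created when a loop is active.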
+ if asyncio.events._get_running_loop() is not None:
+ self.future = asyncio.Future()
+
+ if with_mock and self.mock is not None:
+ self.mock.reset_mock()
+
+ def set_wrapped(
+ self,
+ *,
+ dependencies: Sequence["Dependant"],
+ _call_decorators: Iterable["Decorator"],
+ state: "DIState",
+ ) -> Optional["CallModel"]:
+ call = self._original_call
+ for decor in _call_decorators:
+ call = decor(call)
+ self._original_call = call
+
+ f: Callable[..., Awaitable[Any]] = to_async(call)
+
+ dependent: Optional[CallModel] = None
+ if state.get_dependent is None:
+ dependent = build_call_model(
+ f,
+ extra_dependencies=dependencies,
+ dependency_provider=state.provider,
+ serializer_cls=state.serializer,
+ )
+
+ if state.use_fastdepends:
+ wrapper = inject(
+ func=None,
+ context__=state.context,
+ )
+ f = wrapper(func=f, model=dependent)
+
+ f = _wrap_decode_message(
+ func=f,
+ params_ln=len(dependent.flat_params),
+ )
+
+ self._wrapped_call = f
+ return dependent
+
+
+def _wrap_decode_message(
+ func: Callable[..., Awaitable[T_HandlerReturn]],
+ params_ln: int,
+) -> Callable[["StreamMessage[MsgType]"], Awaitable[T_HandlerReturn]]:
+ """Wraps a function to decode a message and pass it as an argument to the wrapped function."""
+
+ async def decode_wrapper(message: "StreamMessage[MsgType]") -> T_HandlerReturn:
+ """A wrapper function to decode and handle a message."""
+ msg = await message.decode()
+
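+        # With several handler params, spread a decoded Mapping as kwargs and
+        # a Sequence as positional args; a single param receives the object as-is.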
+ if params_ln > 1:
+ if isinstance(msg, Mapping):
+ return await func(**msg)
+ if isinstance(msg, Sequence):
+ return await func(*msg)
+ else:
+ return await func(msg)
+
+ msg = "unreachable"
+ raise AssertionError(msg)
+
+ return decode_wrapper
diff --git a/faststream/broker/subscriber/mixins.py b/faststream/_internal/subscriber/mixins.py
similarity index 87%
rename from faststream/broker/subscriber/mixins.py
rename to faststream/_internal/subscriber/mixins.py
index 2043d8b1ae..19d7d9a5d4 100644
--- a/faststream/broker/subscriber/mixins.py
+++ b/faststream/_internal/subscriber/mixins.py
@@ -1,15 +1,10 @@
import asyncio
-from typing import (
- TYPE_CHECKING,
- Any,
- Coroutine,
- Generic,
- List,
-)
+from collections.abc import Coroutine
+from typing import TYPE_CHECKING, Any, Generic
import anyio
-from faststream.broker.types import MsgType
+from faststream._internal.types import MsgType
from .usecase import SubscriberUsecase
@@ -20,7 +15,7 @@
class TasksMixin(SubscriberUsecase[Any]):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
- self.tasks: List[asyncio.Task[Any]] = []
+ self.tasks: list[asyncio.Task[Any]] = []
def add_task(self, coro: Coroutine[Any, Any, Any]) -> None:
self.tasks.append(asyncio.create_task(coro))
@@ -33,7 +28,7 @@ async def close(self) -> None:
if not task.done():
task.cancel()
- self.tasks = []
+ self.tasks.clear()
class ConcurrentMixin(TasksMixin, Generic[MsgType]):
@@ -69,10 +64,7 @@ async def _serve_consume_queue(
async for msg in self.receive_stream:
tg.start_soon(self._consume_msg, msg)
- async def _consume_msg(
- self,
- msg: "MsgType",
- ) -> None:
+ async def _consume_msg(self, msg: "MsgType") -> None:
"""Proxy method to call `self.consume` with semaphore block."""
async with self.limiter:
await self.consume(msg)
diff --git a/faststream/_internal/subscriber/proto.py b/faststream/_internal/subscriber/proto.py
new file mode 100644
index 0000000000..cb24b32295
--- /dev/null
+++ b/faststream/_internal/subscriber/proto.py
@@ -0,0 +1,93 @@
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Any, Optional
+
+from typing_extensions import Self
+
+from faststream._internal.proto import Endpoint
+from faststream._internal.types import MsgType
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import (
+ BasePublisherProto,
+ ProducerProto,
+ )
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ SubscriberMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.response import Response
+
+ from .call_item import HandlerItem
+
+
+class SubscriberProto(Endpoint[MsgType]):
+ calls: list["HandlerItem[MsgType]"]
+ running: bool
+
+ _broker_dependencies: Iterable["Dependant"]
+ _broker_middlewares: Sequence["BrokerMiddleware[MsgType]"]
+ _producer: Optional["ProducerProto"]
+
+ @abstractmethod
+ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ...
+
+ @abstractmethod
+ def get_log_context(
+ self,
+ msg: Optional["StreamMessage[MsgType]"],
+ /,
+ ) -> dict[str, str]: ...
+
+ @abstractmethod
+ def _setup(
+ self,
+ *,
+ extra_context: "AnyDict",
+ # broker options
+ broker_parser: Optional["CustomCallable"],
+ broker_decoder: Optional["CustomCallable"],
+ # dependant args
+ state: "Pointer[BrokerState]",
+ ) -> None: ...
+
+ @abstractmethod
+ def _make_response_publisher(
+ self,
+ message: "StreamMessage[MsgType]",
+ ) -> Iterable["BasePublisherProto"]: ...
+
+ @abstractmethod
+ async def start(self) -> None: ...
+
+ @abstractmethod
+ async def close(self) -> None: ...
+
+ @abstractmethod
+ async def consume(self, msg: MsgType) -> Any: ...
+
+ @abstractmethod
+ async def process_message(self, msg: MsgType) -> "Response": ...
+
+ @abstractmethod
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5.0,
+ ) -> "Optional[StreamMessage[MsgType]]": ...
+
+ @abstractmethod
+ def add_call(
+ self,
+ *,
+ parser_: "CustomCallable",
+ decoder_: "CustomCallable",
+ middlewares_: Sequence["SubscriberMiddleware[Any]"],
+ dependencies_: Iterable["Dependant"],
+ ) -> Self: ...
diff --git a/faststream/_internal/subscriber/specified.py b/faststream/_internal/subscriber/specified.py
new file mode 100644
index 0000000000..50c36efbf6
--- /dev/null
+++ b/faststream/_internal/subscriber/specified.py
@@ -0,0 +1,74 @@
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+from faststream._internal.types import MsgType
+from faststream.exceptions import SetupError
+from faststream.specification.asyncapi.message import parse_handler_params
+from faststream.specification.asyncapi.utils import to_camelcase
+from faststream.specification.proto import EndpointSpecification
+from faststream.specification.schema import SubscriberSpec
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+
+ from .call_item import HandlerItem
+
+
+class SpecificationSubscriber(EndpointSpecification[MsgType, SubscriberSpec]):
+ calls: list["HandlerItem[MsgType]"]
+
+ def __init__(
+ self,
+ *args: Any,
+ **kwargs: Any,
+ ) -> None:
+ self.calls = []
+
+        # Call the next parent __init__ in the MRO
+ super().__init__(*args, **kwargs)
+
+ @property
+ def call_name(self) -> str:
+ """Returns the name of the handler call."""
+ if not self.calls:
+ return "Subscriber"
+
+ return to_camelcase(self.calls[0].call_name)
+
+ def get_default_description(self) -> Optional[str]:
+ """Returns the description of the handler."""
+ if not self.calls:
+ return None
+
+ return self.calls[0].description
+
+ def get_payloads(self) -> list[tuple["AnyDict", str]]:
+ """Get the payloads of the handler."""
+ payloads: list[tuple[AnyDict, str]] = []
+
+ for h in self.calls:
+ if h.dependant is None:
+ msg = "You should setup `Handler` at first."
+ raise SetupError(msg)
+
+ body = parse_handler_params(
+ h.dependant,
+ prefix=f"{self.title_ or self.call_name}:Message",
+ )
+
+ payloads.append((body, to_camelcase(h.call_name)))
+
+ if not self.calls:
+ payloads.append(
+ (
+ {
+ "title": f"{self.title_ or self.call_name}:Message:Payload",
+ },
+ to_camelcase(self.call_name),
+ ),
+ )
+
+ return payloads
diff --git a/faststream/_internal/subscriber/usecase.py b/faststream/_internal/subscriber/usecase.py
new file mode 100644
index 0000000000..70e7f22525
--- /dev/null
+++ b/faststream/_internal/subscriber/usecase.py
@@ -0,0 +1,447 @@
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
+from contextlib import AbstractContextManager, AsyncExitStack
+from itertools import chain
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Any,
+ Callable,
+ Optional,
+ Union,
+)
+
+from typing_extensions import Self, deprecated, overload, override
+
+from faststream._internal.subscriber.call_item import HandlerItem
+from faststream._internal.subscriber.proto import SubscriberProto
+from faststream._internal.subscriber.utils import (
+ MultiLock,
+ default_filter,
+ resolve_custom_func,
+)
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+from faststream._internal.utils.functions import sync_fake_context, to_async
+from faststream.exceptions import SetupError, StopConsume, SubscriberNotFound
+from faststream.middlewares import AckPolicy, AcknowledgementMiddleware
+from faststream.middlewares.logging import CriticalLogMiddleware
+from faststream.response import ensure_response
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AnyDict, Decorator
+ from faststream._internal.context.repository import ContextRepo
+ from faststream._internal.publisher.proto import (
+ BasePublisherProto,
+ )
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.subscriber.call_wrapper import HandlerCallWrapper
+ from faststream._internal.types import (
+ AsyncCallable,
+ AsyncFilter,
+ BrokerMiddleware,
+ CustomCallable,
+ Filter,
+ SubscriberMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.middlewares import BaseMiddleware
+ from faststream.response import Response
+
+
+class _CallOptions:
+ __slots__ = (
+ "decoder",
+ "dependencies",
+ "middlewares",
+ "parser",
+ )
+
+ def __init__(
+ self,
+ *,
+ parser: Optional["CustomCallable"],
+ decoder: Optional["CustomCallable"],
+ middlewares: Sequence["SubscriberMiddleware[Any]"],
+ dependencies: Iterable["Dependant"],
+ ) -> None:
+ self.parser = parser
+ self.decoder = decoder
+ self.middlewares = middlewares
+ self.dependencies = dependencies
+
+
+class SubscriberUsecase(SubscriberProto[MsgType]):
+ """A class representing an asynchronous handler."""
+
+ lock: "AbstractContextManager[Any]"
+ extra_watcher_options: "AnyDict"
+ extra_context: "AnyDict"
+ graceful_timeout: Optional[float]
+
+ _broker_dependencies: Iterable["Dependant"]
+ _call_options: Optional["_CallOptions"]
+ _call_decorators: Iterable["Decorator"]
+
+ def __init__(
+ self,
+ *,
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ ack_policy: AckPolicy,
+ ) -> None:
+ """Initialize a new instance of the class."""
+ self.calls = []
+
+ self._parser = default_parser
+ self._decoder = default_decoder
+ self._no_reply = no_reply
+ self.ack_policy = ack_policy
+
+ self._call_options = None
+ self._call_decorators = ()
+
+ self.running = False
+ self.lock = sync_fake_context()
+
+        # Set up during broker/router include
+ self._broker_dependencies = broker_dependencies
+ self._broker_middlewares = broker_middlewares
+
+        # registered later, during `_setup`
+ self.extra_context = {}
+ self.extra_watcher_options = {}
+
+ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
+ self._broker_middlewares = (*self._broker_middlewares, middleware)
+
+ @override
+ def _setup(
+ self,
+ *,
+ extra_context: "AnyDict",
+ # broker options
+ broker_parser: Optional["CustomCallable"],
+ broker_decoder: Optional["CustomCallable"],
+ # dependant args
+ state: "Pointer[BrokerState]",
+ ) -> None:
+ # TODO: add EmptyBrokerState to init
+ self._state = state
+
+ self.extra_context = extra_context
+
+ for call in self.calls:
+ if parser := call.item_parser or broker_parser:
+ async_parser = resolve_custom_func(to_async(parser), self._parser)
+ else:
+ async_parser = self._parser
+
+ if decoder := call.item_decoder or broker_decoder:
+ async_decoder = resolve_custom_func(to_async(decoder), self._decoder)
+ else:
+ async_decoder = self._decoder
+
+ self._parser = async_parser
+ self._decoder = async_decoder
+
+ call._setup(
+ parser=async_parser,
+ decoder=async_decoder,
+ state=state,
+ broker_dependencies=self._broker_dependencies,
+ _call_decorators=self._call_decorators,
+ )
+
+ call.handler.refresh(with_mock=False)
+
+ @abstractmethod
+ async def start(self) -> None:
+ """Start the handler."""
+ self.lock = MultiLock()
+
+ self.running = True
+
+ @abstractmethod
+ async def close(self) -> None:
+ """Close the handler.
+
+        Blocks the event loop for up to `graceful_timeout` seconds.
+ """
+ self.running = False
+ if isinstance(self.lock, MultiLock):
+ await self.lock.wait_release(self._state.get().graceful_timeout)
+
+ def add_call(
+ self,
+ *,
+ parser_: Optional["CustomCallable"],
+ decoder_: Optional["CustomCallable"],
+ middlewares_: Sequence["SubscriberMiddleware[Any]"],
+ dependencies_: Iterable["Dependant"],
+ ) -> Self:
+ self._call_options = _CallOptions(
+ parser=parser_,
+ decoder=decoder_,
+ middlewares=middlewares_,
+ dependencies=dependencies_,
+ )
+ return self
+
+ @overload
+ def __call__(
+ self,
+ func: None = None,
+ *,
+ filter: "Filter[StreamMessage[MsgType]]" = default_filter,
+ parser: Optional["CustomCallable"] = None,
+ decoder: Optional["CustomCallable"] = None,
+ middlewares: Annotated[
+ Sequence["SubscriberMiddleware[Any]"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = (),
+ dependencies: Iterable["Dependant"] = (),
+ ) -> Callable[
+ [Callable[P_HandlerParams, T_HandlerReturn]],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ]: ...
+
+ @overload
+ def __call__(
+ self,
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ],
+ *,
+ filter: "Filter[StreamMessage[MsgType]]" = default_filter,
+ parser: Optional["CustomCallable"] = None,
+ decoder: Optional["CustomCallable"] = None,
+ middlewares: Annotated[
+ Sequence["SubscriberMiddleware[Any]"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = (),
+ dependencies: Iterable["Dependant"] = (),
+ ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]": ...
+
+ @override
+ def __call__(
+ self,
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ None,
+ ] = None,
+ *,
+ filter: "Filter[StreamMessage[MsgType]]" = default_filter,
+ parser: Optional["CustomCallable"] = None,
+ decoder: Optional["CustomCallable"] = None,
+ middlewares: Annotated[
+ Sequence["SubscriberMiddleware[Any]"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = (),
+ dependencies: Iterable["Dependant"] = (),
+ ) -> Union[
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ Callable[
+ [Callable[P_HandlerParams, T_HandlerReturn]],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ],
+ ]:
+ if (options := self._call_options) is None:
+ msg = (
+ "You can't create subscriber directly. Please, use `add_call` at first."
+ )
+ raise SetupError(msg)
+
+ total_deps = (*options.dependencies, *dependencies)
+ total_middlewares = (*options.middlewares, *middlewares)
+ async_filter: AsyncFilter[StreamMessage[MsgType]] = to_async(filter)
+
+ def real_wrapper(
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ],
+ ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
+ handler = super(SubscriberUsecase, self).__call__(func)
+
+ self.calls.append(
+ HandlerItem[MsgType](
+ handler=handler,
+ filter=async_filter,
+ item_parser=parser or options.parser,
+ item_decoder=decoder or options.decoder,
+ item_middlewares=total_middlewares,
+ dependencies=total_deps,
+ ),
+ )
+
+ return handler
+
+ if func is None:
+ return real_wrapper
+
+ return real_wrapper(func)
+
+ async def consume(self, msg: MsgType) -> Any:
+ """Consume a message asynchronously."""
+ if not self.running:
+ return None
+
+ try:
+ return await self.process_message(msg)
+
+ except StopConsume:
+ # Stop handler at StopConsume exception
+ await self.close()
+
+ except SystemExit:
+ # Stop handler at `exit()` call
+ await self.close()
+
+ if app := self._state.get().di_state.context.get("app"):
+ app.exit()
+
+ except Exception: # nosec B110
+ # All other exceptions were logged by CriticalLogMiddleware
+ pass
+
+ async def process_message(self, msg: MsgType) -> "Response":
+ """Execute all message processing stages."""
+ broker_state = self._state.get()
+ context: ContextRepo = broker_state.di_state.context
+ logger_state = broker_state.logger_state
+
+ async with AsyncExitStack() as stack:
+ stack.enter_context(self.lock)
+
+ # Enter context before middlewares
+ stack.enter_context(context.scope("logger", logger_state.logger.logger))
+ for k, v in self.extra_context.items():
+ stack.enter_context(context.scope(k, v))
+
+ # enter all middlewares
+ middlewares: list[BaseMiddleware] = []
+ for base_m in self.__build__middlewares_stack():
+ middleware = base_m(msg, context=context)
+ middlewares.append(middleware)
+ await middleware.__aenter__()
+
+ cache: dict[Any, Any] = {}
+ parsing_error: Optional[Exception] = None
+ for h in self.calls:
+ try:
+ message = await h.is_suitable(msg, cache)
+ except Exception as e:
+ parsing_error = e
+ break
+
+ if message is not None:
+ stack.enter_context(
+ context.scope("log_context", self.get_log_context(message)),
+ )
+ stack.enter_context(context.scope("message", message))
+
+ # Middlewares should be exited before scope release
+ for m in middlewares:
+ stack.push_async_exit(m.__aexit__)
+
+ result_msg = ensure_response(
+ await h.call(
+ message=message,
+ # consumer middlewares
+ _extra_middlewares=(
+ m.consume_scope for m in middlewares[::-1]
+ ),
+ ),
+ )
+
+ if not result_msg.correlation_id:
+ result_msg.correlation_id = message.correlation_id
+
+ for p in chain(
+ self.__get_response_publisher(message),
+ h.handler._publishers,
+ ):
+ await p._publish(
+ result_msg.as_publish_command(),
+ _extra_middlewares=(
+ m.publish_scope for m in middlewares[::-1]
+ ),
+ )
+
+ # Return data for tests
+ return result_msg
+
+            # No suitable handler was found, or a
+            # parsing/decoding exception occurred
+ for m in middlewares:
+ stack.push_async_exit(m.__aexit__)
+
+ # Reraise it to catch in tests
+ if parsing_error:
+ raise parsing_error
+
+ error_msg = f"There is no suitable handler for {msg=}"
+ raise SubscriberNotFound(error_msg)
+
+ # An error was raised and processed by some middleware
+ return ensure_response(None)
+
+ def __build__middlewares_stack(self) -> tuple["BrokerMiddleware[MsgType]", ...]:
+ logger_state = self._state.get().logger_state
+
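+        # The first middleware in the tuple is entered first and thus sits
+        # outermost: ack handling (when enabled) wraps critical logging,
+        # which wraps the user middlewares.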
+ if self.ack_policy is AckPolicy.DO_NOTHING:
+ broker_middlewares = (
+ CriticalLogMiddleware(logger_state),
+ *self._broker_middlewares,
+ )
+
+ else:
+ broker_middlewares = (
+ AcknowledgementMiddleware(
+ logger=logger_state,
+ ack_policy=self.ack_policy,
+ extra_options=self.extra_watcher_options,
+ ),
+ CriticalLogMiddleware(logger_state),
+ *self._broker_middlewares,
+ )
+
+ return broker_middlewares
+
+ def __get_response_publisher(
+ self,
+ message: "StreamMessage[MsgType]",
+ ) -> Iterable["BasePublisherProto"]:
+ if not message.reply_to or self._no_reply:
+ return ()
+
+ return self._make_response_publisher(message)
+
+ def get_log_context(
+ self,
+ message: Optional["StreamMessage[MsgType]"],
+ ) -> dict[str, str]:
+ """Generate log context."""
+ return {
+ "message_id": getattr(message, "message_id", ""),
+ }
diff --git a/faststream/_internal/subscriber/utils.py b/faststream/_internal/subscriber/utils.py
new file mode 100644
index 0000000000..67c22f793d
--- /dev/null
+++ b/faststream/_internal/subscriber/utils.py
@@ -0,0 +1,137 @@
+import asyncio
+import inspect
+from collections.abc import Awaitable, Iterable
+from contextlib import AsyncExitStack, suppress
+from functools import partial
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+ Union,
+ cast,
+)
+
+import anyio
+from typing_extensions import Self
+
+from faststream._internal.types import MsgType
+from faststream._internal.utils.functions import return_input, to_async
+from faststream.message.source_type import SourceType
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.types import (
+ AsyncCallable,
+ CustomCallable,
+ SyncCallable,
+ )
+ from faststream.message import StreamMessage
+ from faststream.middlewares import BaseMiddleware
+
+
+async def process_msg(
+ msg: Optional[MsgType],
+ middlewares: Iterable["BaseMiddleware"],
+ parser: Callable[[MsgType], Awaitable["StreamMessage[MsgType]"]],
+ decoder: Callable[["StreamMessage[MsgType]"], "Any"],
+ source_type: SourceType = SourceType.CONSUME,
+) -> Optional["StreamMessage[MsgType]"]:
+ if msg is None:
+ return None
+
+ async with AsyncExitStack() as stack:
+ return_msg: Callable[
+ [StreamMessage[MsgType]],
+ Awaitable[StreamMessage[MsgType]],
+ ] = return_input
+
+ for m in middlewares:
+ await stack.enter_async_context(m)
+ return_msg = partial(m.consume_scope, return_msg)
+
+ parsed_msg = await parser(msg)
+ parsed_msg._source_type = source_type
+ parsed_msg.set_decoder(decoder)
+ return await return_msg(parsed_msg)
+
+ error_msg = "unreachable"
+ raise AssertionError(error_msg)
+
+
+async def default_filter(msg: "StreamMessage[Any]") -> bool:
+ """A function to filter stream messages."""
+ return not msg.processed
+
+
+class MultiLock:
+ """A class representing a multi lock.
+
+ This lock can be acquired multiple times.
+ `wait_release` method waits for all locks will be released.
+ """
+
+ def __init__(self) -> None:
+ """Initialize a new instance of the class."""
+ self.queue: asyncio.Queue[None] = asyncio.Queue()
+
+ def __enter__(self) -> Self:
+ """Enter the context."""
+ self.acquire()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional["TracebackType"],
+ ) -> None:
+ """Exit the context."""
+ self.release()
+
+ def acquire(self) -> None:
+ """Acquire lock."""
+ self.queue.put_nowait(None)
+
+ def release(self) -> None:
+ """Release lock."""
+ with suppress(asyncio.QueueEmpty, ValueError):
+ self.queue.get_nowait()
+ self.queue.task_done()
+
+ @property
+ def qsize(self) -> int:
+ """Return the size of the queue."""
+ return self.queue.qsize()
+
+ @property
+ def empty(self) -> bool:
+ """Return whether the queue is empty."""
+ return self.queue.empty()
+
+ async def wait_release(self, timeout: Optional[float] = None) -> None:
+ """Wait for the queue to be released.
+
+ Using for graceful shutdown.
+ """
+ if timeout:
+ with anyio.move_on_after(timeout):
+ await self.queue.join()
+
+
+def resolve_custom_func(
+ custom_func: Optional["CustomCallable"],
+ default_func: "AsyncCallable",
+) -> "AsyncCallable":
+ """Resolve a custom parser/decoder with default one."""
+ if custom_func is None:
+ return default_func
+
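+    # A one-argument custom function replaces the default outright; a
+    # two-argument one gets the default bound to its second parameter so it
+    # can delegate to it.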
+ original_params = inspect.signature(custom_func).parameters
+
+ if len(original_params) == 1:
+ return to_async(cast("Union[SyncCallable, AsyncCallable]", custom_func))
+
+ name = tuple(original_params.items())[1][0]
+ return partial(to_async(custom_func), **{name: default_func})
diff --git a/faststream/_internal/testing/__init__.py b/faststream/_internal/testing/__init__.py
new file mode 100644
index 0000000000..b4f7ac676d
--- /dev/null
+++ b/faststream/_internal/testing/__init__.py
@@ -0,0 +1,3 @@
+from faststream._internal.testing.app import TestApp
+
+__all__ = ("TestApp",)
diff --git a/faststream/_internal/testing/app.py b/faststream/_internal/testing/app.py
new file mode 100644
index 0000000000..325ebc194a
--- /dev/null
+++ b/faststream/_internal/testing/app.py
@@ -0,0 +1,68 @@
+from contextlib import ExitStack
+from functools import partial
+from typing import TYPE_CHECKING, Optional
+
+from anyio.from_thread import start_blocking_portal
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.application import Application
+ from faststream._internal.basic_types import SettingField
+
+
+class TestApp:
+ """A class to represent a test application."""
+
+ __test__ = False
+
+ app: "Application"
+ _extra_options: dict[str, "SettingField"]
+
+ def __init__(
+ self,
+ app: "Application",
+ run_extra_options: Optional[dict[str, "SettingField"]] = None,
+ ) -> None:
+ self.app = app
+ self._extra_options = run_extra_options or {}
+
+ def __enter__(self) -> "Application":
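+        # Drive the async lifespan and startup from sync code via an AnyIO
+        # blocking portal (an event loop running in a background thread).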
+ with ExitStack() as stack:
+ portal = stack.enter_context(start_blocking_portal())
+
+ lifespan_context = self.app.lifespan_context(**self._extra_options)
+ stack.enter_context(portal.wrap_async_context_manager(lifespan_context))
+ portal.call(partial(self.app.start, **self._extra_options))
+
+ @stack.callback
+ def wait_shutdown() -> None:
+ portal.call(self.app.stop)
+
+ self.exit_stack = stack.pop_all()
+
+ return self.app
+
+ def __exit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> None:
+ self.exit_stack.close()
+
+ async def __aenter__(self) -> "Application":
+ self.lifespan_scope = self.app.lifespan_context(**self._extra_options)
+ await self.lifespan_scope.__aenter__()
+ await self.app.start(**self._extra_options)
+ return self.app
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> None:
+ """Exit the asynchronous context manager."""
+ await self.app.stop()
+ await self.lifespan_scope.__aexit__(exc_type, exc_val, exc_tb)
diff --git a/faststream/_internal/testing/ast.py b/faststream/_internal/testing/ast.py
new file mode 100644
index 0000000000..880e895a66
--- /dev/null
+++ b/faststream/_internal/testing/ast.py
@@ -0,0 +1,54 @@
+import ast
+import traceback
+from collections.abc import Iterator
+from functools import lru_cache
+from pathlib import Path
+from typing import Optional, Union, cast
+
+
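+# Inspect the caller's source AST to see which context managers appear in the
+# enclosing `with` statement (e.g. `TestBroker` combined with `TestApp`).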
+def is_contains_context_name(scip_name: str, name: str) -> bool:
+ stack = traceback.extract_stack()[-3]
+ tree = _read_source_ast(stack.filename)
+ node = cast("Union[ast.With, ast.AsyncWith]", _find_ast_node(tree, stack.lineno))
+ context_calls = _get_withitem_calls(node)
+
+ try:
+ pos = context_calls.index(scip_name)
+ except ValueError:
+ pos = 1
+
+ return name in context_calls[pos:]
+
+
+@lru_cache
+def _read_source_ast(filename: str) -> ast.Module:
+ return ast.parse(Path(filename).read_text(encoding="utf-8"))
+
+
+def _find_ast_node(module: ast.Module, lineno: Optional[int]) -> Optional[ast.AST]:
+ if lineno is not None: # pragma: no branch
+ for i in getattr(module, "body", ()):
+ if i.lineno == lineno:
+ return cast("ast.AST", i)
+
+ r = _find_ast_node(i, lineno)
+ if r is not None:
+ return r
+
+ return None
+
+
+def _find_withitems(node: Union[ast.With, ast.AsyncWith]) -> Iterator[ast.withitem]:
+ if isinstance(node, (ast.With, ast.AsyncWith)):
+ yield from node.items
+
+ for i in getattr(node, "body", ()):
+ yield from _find_withitems(i)
+
+
+def _get_withitem_calls(node: Union[ast.With, ast.AsyncWith]) -> list[str]:
+ return [
+ id
+ for i in _find_withitems(node)
+ if (id := getattr(i.context_expr.func, "id", None)) # type: ignore[attr-defined]
+ ]
diff --git a/faststream/_internal/testing/broker.py b/faststream/_internal/testing/broker.py
new file mode 100644
index 0000000000..9994274f68
--- /dev/null
+++ b/faststream/_internal/testing/broker.py
@@ -0,0 +1,227 @@
+import warnings
+from abc import abstractmethod
+from collections.abc import AsyncGenerator, Generator, Iterator
+from contextlib import asynccontextmanager, contextmanager
+from functools import partial
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Generic,
+ Optional,
+ TypeVar,
+)
+from unittest import mock
+from unittest.mock import MagicMock
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.state.logger.logger_proxy import RealLoggerObject
+from faststream._internal.subscriber.utils import MultiLock
+from faststream._internal.testing.app import TestApp
+from faststream._internal.testing.ast import is_contains_context_name
+from faststream._internal.utils.functions import sync_fake_context
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.subscriber.proto import SubscriberProto
+
+
+Broker = TypeVar("Broker", bound=BrokerUsecase[Any, Any])
+
+
+class TestBroker(Generic[Broker]):
+ """A class to represent a test broker."""
+
+ # This is set so pytest ignores this class
+ __test__ = False
+
+ def __init__(
+ self,
+ broker: Broker,
+ with_real: bool = False,
+ connect_only: Optional[bool] = None,
+ ) -> None:
+ self.with_real = with_real
+ self.broker = broker
+
+ if connect_only is None:
+ try:
+ connect_only = is_contains_context_name(
+ self.__class__.__name__,
+ TestApp.__name__,
+ )
+            except Exception as e:  # pragma: no cover
+                warnings.warn(
+                    (
+                        f"\nError `{e!r}` occurred at `{self.__class__.__name__}` AST parsing."
+                        "\n`connect_only` is set to `False` by default."
+                    ),
+                    category=RuntimeWarning,
+                    stacklevel=1,
+                )
+
+ connect_only = False
+
+ self.connect_only = connect_only
+ self._fake_subscribers: list[SubscriberProto[Any]] = []
+
+ async def __aenter__(self) -> Broker:
+ self._ctx = self._create_ctx()
+ return await self._ctx.__aenter__()
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> None:
+ await self._ctx.__aexit__(exc_type, exc_val, exc_tb)
+
+ @asynccontextmanager
+ async def _create_ctx(self) -> AsyncGenerator[Broker, None]:
+ if self.with_real:
+ self._fake_start(self.broker)
+ context = sync_fake_context()
+ else:
+ context = self._patch_broker(self.broker)
+
+ with context:
+ async with self.broker:
+ try:
+ if not self.connect_only:
+ await self.broker.start()
+ yield self.broker
+ finally:
+ self._fake_close(self.broker)
+
+ @contextmanager
+ def _patch_producer(self, broker: Broker) -> Iterator[None]:
+ raise NotImplementedError
+
+ @contextmanager
+ def _patch_logger(self, broker: Broker) -> Iterator[None]:
+ state = broker._state.get()
+ state._setup_logger_state()
+
+ logger_state = state.logger_state
+ old_log_object = logger_state.logger
+
+ logger_state.logger = RealLoggerObject(MagicMock())
+ try:
+ yield
+ finally:
+ logger_state.logger = old_log_object
+
+ @contextmanager
+ def _patch_broker(self, broker: Broker) -> Generator[None, None, None]:
+ with (
+ mock.patch.object(
+ broker,
+ "start",
+ wraps=partial(self._fake_start, broker),
+ ),
+ mock.patch.object(
+ broker,
+ "_connect",
+ wraps=partial(self._fake_connect, broker),
+ ),
+ mock.patch.object(
+ broker,
+ "close",
+ ),
+ mock.patch.object(
+ broker,
+ "_connection",
+ new=None,
+ ),
+ self._patch_producer(broker),
+ self._patch_logger(broker),
+ mock.patch.object(
+ broker,
+ "ping",
+ return_value=True,
+ ),
+ ):
+ broker._setup()
+ yield
+
+ def _fake_start(self, broker: Broker, *args: Any, **kwargs: Any) -> None:
+ patch_broker_calls(broker)
+
+ for p in broker._publishers:
+ if getattr(p, "_fake_handler", None):
+ continue
+
+ sub, is_real = self.create_publisher_fake_subscriber(broker, p)
+
+ if not is_real:
+ self._fake_subscribers.append(sub)
+
+ if not sub.calls:
+
+ @sub
+ async def publisher_response_subscriber(msg: Any) -> None:
+ pass
+
+ broker.setup_subscriber(sub)
+
+ if is_real:
+ mock = MagicMock()
+ p.set_test(mock=mock, with_fake=False) # type: ignore[attr-defined]
+ for h in sub.calls:
+ h.handler.set_test()
+ assert h.handler.mock # nosec B101
+ h.handler.mock.side_effect = mock
+
+ else:
+ handler = sub.calls[0].handler
+ handler.set_test()
+ assert handler.mock # nosec B101
+ p.set_test(mock=handler.mock, with_fake=True) # type: ignore[attr-defined]
+
+ for subscriber in broker._subscribers:
+ subscriber.running = True
+ subscriber.lock = MultiLock() # type: ignore[attr-defined]
+
+ def _fake_close(
+ self,
+ broker: Broker,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> None:
+ for p in broker._publishers:
+ if getattr(p, "_fake_handler", None):
+ p.reset_test() # type: ignore[attr-defined]
+
+ self.broker._subscribers = [
+ sub for sub in self.broker._subscribers if sub not in self._fake_subscribers
+ ]
+ self._fake_subscribers.clear()
+
+ for h in broker._subscribers:
+ h.running = False
+ for call in h.calls:
+ call.handler.reset_test()
+
+ @staticmethod
+ @abstractmethod
+ def create_publisher_fake_subscriber(
+ broker: Broker,
+ publisher: Any,
+ ) -> tuple["SubscriberProto[Any]", bool]:
+ raise NotImplementedError
+
+ @staticmethod
+ @abstractmethod
+ async def _fake_connect(broker: Broker, *args: Any, **kwargs: Any) -> None:
+ raise NotImplementedError
+
+
+def patch_broker_calls(broker: "BrokerUsecase[Any, Any]") -> None:
+ """Patch broker calls."""
+ broker._setup()
+
+ for handler in broker._subscribers:
+ for h in handler.calls:
+ h.handler.set_test()
diff --git a/faststream/_internal/types.py b/faststream/_internal/types.py
new file mode 100644
index 0000000000..f23112b06e
--- /dev/null
+++ b/faststream/_internal/types.py
@@ -0,0 +1,106 @@
+from collections.abc import Awaitable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+ Protocol,
+ TypeVar,
+ Union,
+)
+
+from typing_extensions import (
+ ParamSpec,
+ TypeAlias,
+ TypeVar as TypeVar313,
+)
+
+from faststream._internal.basic_types import AsyncFuncAny
+from faststream._internal.context.repository import ContextRepo
+from faststream.message import StreamMessage
+from faststream.response.response import PublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.middlewares import BaseMiddleware
+
+
+AnyMsg = TypeVar313("AnyMsg", default=Any)
+AnyMsg_contra = TypeVar313("AnyMsg_contra", default=Any, contravariant=True)
+MsgType = TypeVar("MsgType")
+Msg_contra = TypeVar("Msg_contra", contravariant=True)
+StreamMsg = TypeVar("StreamMsg", bound=StreamMessage[Any])
+ConnectionType = TypeVar("ConnectionType")
+PublishCommandType = TypeVar313(
+ "PublishCommandType",
+ bound=PublishCommand,
+ default=Any,
+)
+
+SyncFilter: TypeAlias = Callable[[StreamMsg], bool]
+AsyncFilter: TypeAlias = Callable[[StreamMsg], Awaitable[bool]]
+Filter: TypeAlias = Union[
+ SyncFilter[StreamMsg],
+ AsyncFilter[StreamMsg],
+]
+
+SyncCallable: TypeAlias = Callable[
+ [Any],
+ Any,
+]
+AsyncCallable: TypeAlias = AsyncFuncAny
+AsyncCustomCallable: TypeAlias = Union[
+ AsyncFuncAny,
+ Callable[
+ [Any, AsyncFuncAny],
+ Awaitable[Any],
+ ],
+]
+CustomCallable: TypeAlias = Union[
+ AsyncCustomCallable,
+ SyncCallable,
+]
+
+P_HandlerParams = ParamSpec("P_HandlerParams")
+T_HandlerReturn = TypeVar("T_HandlerReturn")
+
+
+AsyncWrappedHandlerCall: TypeAlias = Callable[
+ [StreamMessage[MsgType]],
+ Awaitable[Optional[T_HandlerReturn]],
+]
+SyncWrappedHandlerCall: TypeAlias = Callable[
+ [StreamMessage[MsgType]],
+ Optional[T_HandlerReturn],
+]
+WrappedHandlerCall: TypeAlias = Union[
+ AsyncWrappedHandlerCall[MsgType, T_HandlerReturn],
+ SyncWrappedHandlerCall[MsgType, T_HandlerReturn],
+]
+
+
+class BrokerMiddleware(Protocol[AnyMsg_contra, PublishCommandType]):
+ """Middleware builder interface."""
+
+ def __call__(
+ self,
+ msg: Optional[AnyMsg_contra],
+ /,
+ *,
+ context: ContextRepo,
+ ) -> "BaseMiddleware[PublishCommandType]": ...
+
+
+SubscriberMiddleware: TypeAlias = Callable[
+ [AsyncFuncAny, MsgType],
+ MsgType,
+]
+
+
+class PublisherMiddleware(Protocol):
+ """Publisher middleware interface."""
+
+ def __call__(
+ self,
+ call_next: Callable[[PublishCommand], Awaitable[Any]],
+ cmd: PublishCommand,
+ ) -> Any: ...
diff --git a/faststream/_internal/utils/__init__.py b/faststream/_internal/utils/__init__.py
new file mode 100644
index 0000000000..58684b7fdc
--- /dev/null
+++ b/faststream/_internal/utils/__init__.py
@@ -0,0 +1,3 @@
+from fast_depends import inject as apply_types
+
+__all__ = ("apply_types",)
diff --git a/faststream/_internal/utils/data.py b/faststream/_internal/utils/data.py
new file mode 100644
index 0000000000..8f8a133636
--- /dev/null
+++ b/faststream/_internal/utils/data.py
@@ -0,0 +1,26 @@
+from typing import TypeVar
+
+from faststream._internal.basic_types import AnyDict
+
+TypedDictCls = TypeVar("TypedDictCls")
+
+
+def filter_by_dict(
+ typed_dict: type[TypedDictCls],
+ data: AnyDict,
+) -> tuple[TypedDictCls, AnyDict]:
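+    """Split `data` into keys declared by `typed_dict` and the leftovers."""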
+ annotations = typed_dict.__annotations__
+
+ out_data = {}
+ extra_data = {}
+
+ for k, v in data.items():
+ if k in annotations:
+ out_data[k] = v
+ else:
+ extra_data[k] = v
+
+ return (
+ typed_dict(out_data), # type: ignore[call-arg]
+ extra_data,
+ )
diff --git a/faststream/_internal/utils/functions.py b/faststream/_internal/utils/functions.py
new file mode 100644
index 0000000000..b824955efd
--- /dev/null
+++ b/faststream/_internal/utils/functions.py
@@ -0,0 +1,95 @@
+import asyncio
+from collections.abc import AsyncIterator, Awaitable, Iterator
+from concurrent.futures import Executor
+from contextlib import asynccontextmanager, contextmanager
+from functools import partial, wraps
+from typing import (
+ Any,
+ Callable,
+ Optional,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
+from fast_depends.core import CallModel
+from fast_depends.utils import (
+ is_coroutine_callable,
+ run_async as call_or_await,
+ run_in_threadpool,
+)
+from typing_extensions import ParamSpec
+
+from faststream._internal.basic_types import F_Return, F_Spec
+
+__all__ = (
+ "call_or_await",
+ "drop_response_type",
+ "fake_context",
+ "to_async",
+)
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+@overload
+def to_async(
+ func: Callable[F_Spec, Awaitable[F_Return]],
+) -> Callable[F_Spec, Awaitable[F_Return]]: ...
+
+
+@overload
+def to_async(
+ func: Callable[F_Spec, F_Return],
+) -> Callable[F_Spec, Awaitable[F_Return]]: ...
+
+
+def to_async(
+ func: Union[
+ Callable[F_Spec, F_Return],
+ Callable[F_Spec, Awaitable[F_Return]],
+ ],
+) -> Callable[F_Spec, Awaitable[F_Return]]:
+ """Converts a synchronous function to an asynchronous function."""
+ if is_coroutine_callable(func):
+ return cast("Callable[F_Spec, Awaitable[F_Return]]", func)
+
+ func = cast("Callable[F_Spec, F_Return]", func)
+
+ @wraps(func)
+ async def to_async_wrapper(*args: F_Spec.args, **kwargs: F_Spec.kwargs) -> F_Return:
+ """Wraps a function to make it asynchronous."""
+ return await run_in_threadpool(func, *args, **kwargs)
+
+ return to_async_wrapper
+
+
+@asynccontextmanager
+async def fake_context(*args: Any, **kwargs: Any) -> AsyncIterator[None]:
+ yield None
+
+
+@contextmanager
+def sync_fake_context(*args: Any, **kwargs: Any) -> Iterator[None]:
+ yield None
+
+
+def drop_response_type(model: CallModel) -> CallModel:
+ model.serializer.response_callback = None
+ return model
+
+
+async def return_input(x: Any) -> Any:
+ return x
+
+
+async def run_in_executor(
+ executor: Optional[Executor],
+ func: Callable[P, T],
+ *args: P.args,
+ **kwargs: P.kwargs,
+) -> T:
+ loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(executor, partial(func, *args, **kwargs))
diff --git a/faststream/utils/nuid.py b/faststream/_internal/utils/nuid.py
similarity index 96%
rename from faststream/utils/nuid.py
rename to faststream/_internal/utils/nuid.py
index a61aa08a8f..b15dc92ac1 100644
--- a/faststream/utils/nuid.py
+++ b/faststream/_internal/utils/nuid.py
@@ -36,7 +36,7 @@ class NUID:
"""
def __init__(self) -> None:
- self._prand = Random(randbelow(max_int)) # nosec B311
+ self._prand = Random(randbelow(max_int)) # nosec B311 # noqa: S311
self._seq = self._prand.randint(0, MAX_SEQ)
self._inc = MIN_INC + self._prand.randint(BASE + 1, INC)
self._prefix = bytearray()
diff --git a/faststream/utils/path.py b/faststream/_internal/utils/path.py
similarity index 81%
rename from faststream/utils/path.py
rename to faststream/_internal/utils/path.py
index c3c059bf71..4f4dc24635 100644
--- a/faststream/utils/path.py
+++ b/faststream/_internal/utils/path.py
@@ -1,16 +1,17 @@
import re
-from typing import Callable, Optional, Pattern, Tuple
+from re import Pattern
+from typing import Callable, Optional
from faststream.exceptions import SetupError
-PARAM_REGEX = re.compile("{([a-zA-Z0-9_]+)}")
+PARAM_REGEX = re.compile(r"{([a-zA-Z0-9_]+)}")
def compile_path(
path: str,
replace_symbol: str,
patch_regex: Callable[[str], str] = lambda x: x,
-) -> Tuple[Optional[Pattern[str]], str]:
+) -> tuple[Optional[Pattern[str]], str]:
path_regex = "^.*?"
original_path = ""
@@ -36,7 +37,8 @@ def compile_path(
if duplicated_params:
names = ", ".join(sorted(duplicated_params))
ending = "s" if len(duplicated_params) > 1 else ""
- raise SetupError(f"Duplicated param name{ending} {names} at path {path}")
+ msg = f"Duplicated param name{ending} {names} at path {path}"
+ raise SetupError(msg)
if idx == 0:
regex = None
diff --git a/faststream/annotations.py b/faststream/annotations.py
index 5532daf9f5..a845df0471 100644
--- a/faststream/annotations.py
+++ b/faststream/annotations.py
@@ -1,16 +1,10 @@
import logging
-from typing import TypeVar
-
-from typing_extensions import Annotated
+from typing import Annotated
+from faststream._internal.context import ContextRepo as CR
from faststream.app import FastStream as FS
-from faststream.utils.context import Context
-from faststream.utils.context import ContextRepo as CR
-from faststream.utils.no_cast import NoCast as NC
-
-_NoCastType = TypeVar("_NoCastType")
+from faststream.params import Context
Logger = Annotated[logging.Logger, Context("logger")]
ContextRepo = Annotated[CR, Context("context")]
-NoCast = Annotated[_NoCastType, NC()]
FastStream = Annotated[FS, Context("app")]
diff --git a/faststream/app.py b/faststream/app.py
index fc18797f1e..75692147d4 100644
--- a/faststream/app.py
+++ b/faststream/app.py
@@ -1,81 +1,100 @@
import logging
+from collections.abc import Sequence
from typing import (
TYPE_CHECKING,
- AsyncIterator,
- Dict,
+ Any,
Optional,
- Sequence,
- Tuple,
TypeVar,
)
import anyio
from typing_extensions import ParamSpec
-from faststream._compat import ExceptionGroup
+from faststream._internal._compat import ExceptionGroup
from faststream._internal.application import Application
+from faststream._internal.basic_types import Lifespan, LoggerProto
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.cli.supervisors.utils import set_exit
+from faststream._internal.constants import EMPTY
+from faststream._internal.log import logger
from faststream.asgi.app import AsgiFastStream
-from faststream.cli.supervisors.utils import set_exit
-from faststream.exceptions import ValidationError
-
-P_HookParams = ParamSpec("P_HookParams")
-T_HookReturn = TypeVar("T_HookReturn")
-
if TYPE_CHECKING:
+ from fast_depends import Provider
+ from fast_depends.library.serializer import SerializerProto
+
+ from faststream._internal.basic_types import (
+ AnyCallable,
+ Lifespan,
+ LoggerProto,
+ SettingField,
+ )
+ from faststream._internal.broker.broker import BrokerUsecase
from faststream.asgi.types import ASGIApp
- from faststream.types import SettingField
+
+P_HookParams = ParamSpec("P_HookParams")
+T_HookReturn = TypeVar("T_HookReturn")
class FastStream(Application):
"""A class representing a FastStream application."""
+ def __init__(
+ self,
+ broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ /,
+ # regular broker args
+ logger: Optional["LoggerProto"] = logger,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
+ lifespan: Optional["Lifespan"] = None,
+ on_startup: Sequence["AnyCallable"] = (),
+ after_startup: Sequence["AnyCallable"] = (),
+ on_shutdown: Sequence["AnyCallable"] = (),
+ after_shutdown: Sequence["AnyCallable"] = (),
+ ) -> None:
+ super().__init__(
+ broker,
+ logger=logger,
+ provider=provider,
+ serializer=serializer,
+ lifespan=lifespan,
+ on_startup=on_startup,
+ after_startup=after_startup,
+ on_shutdown=on_shutdown,
+ after_shutdown=after_shutdown,
+ )
+ self._should_exit = False
+
async def run(
self,
log_level: int = logging.INFO,
- run_extra_options: Optional[Dict[str, "SettingField"]] = None,
+ run_extra_options: Optional[dict[str, "SettingField"]] = None,
sleep_time: float = 0.1,
) -> None:
"""Run FastStream Application."""
- assert self.broker, "You should setup a broker" # nosec B101
-
set_exit(lambda *_: self.exit(), sync=False)
- async with catch_startup_validation_error(), self.lifespan_context(
- **(run_extra_options or {})
- ):
+ async with self.lifespan_context(**(run_extra_options or {})):
try:
async with anyio.create_task_group() as tg:
tg.start_soon(self._startup, log_level, run_extra_options)
- await self._main_loop(sleep_time)
+
+ while not self._should_exit: # noqa: ASYNC110 (requested by creator)
+ await anyio.sleep(sleep_time)
+
await self._shutdown(log_level)
tg.cancel_scope.cancel()
except ExceptionGroup as e:
for ex in e.exceptions:
raise ex from None
+ def exit(self) -> None:
+ """Stop application manually."""
+ self._should_exit = True
+
def as_asgi(
self,
- asgi_routes: Sequence[Tuple[str, "ASGIApp"]] = (),
- asyncapi_path: Optional[str] = None,
+ asgi_routes: Sequence[tuple[str, "ASGIApp"]] = (),
) -> AsgiFastStream:
- return AsgiFastStream.from_app(self, asgi_routes, asyncapi_path)
-
-
-try:
- from contextlib import asynccontextmanager
-
- from pydantic import ValidationError as PValidation
-
- @asynccontextmanager
- async def catch_startup_validation_error() -> AsyncIterator[None]:
- try:
- yield
- except PValidation as e:
- fields = [str(x["loc"][0]) for x in e.errors()]
- raise ValidationError(fields=fields) from e
-
-except ImportError:
- from faststream.utils.functions import fake_context
-
- catch_startup_validation_error = fake_context
+ return AsgiFastStream.from_app(self, asgi_routes)
diff --git a/faststream/asgi/app.py b/faststream/asgi/app.py
index 9f662ff1b9..559e149b0f 100644
--- a/faststream/asgi/app.py
+++ b/faststream/asgi/app.py
@@ -1,50 +1,75 @@
import inspect
import logging
import traceback
+from abc import abstractmethod
+from collections.abc import AsyncIterator, Sequence
from contextlib import asynccontextmanager
-from typing import (
- TYPE_CHECKING,
- Any,
- AsyncIterator,
- Dict,
- Optional,
- Sequence,
- Tuple,
- Union,
-)
+from typing import TYPE_CHECKING, Any, Optional, Protocol
import anyio
+from faststream._internal._compat import HAS_TYPER, ExceptionGroup
from faststream._internal.application import Application
-from faststream.asgi.factories import make_asyncapi_asgi
+from faststream._internal.constants import EMPTY
+from faststream._internal.log import logger
from faststream.asgi.response import AsgiResponse
from faststream.asgi.websocket import WebSocketClose
-from faststream.log.logging import logger
+from faststream.exceptions import StartupValidationError
if TYPE_CHECKING:
- from faststream.asgi.types import ASGIApp, Receive, Scope, Send
- from faststream.asyncapi.schema import (
- Contact,
- ContactDict,
- ExternalDocs,
- ExternalDocsDict,
- License,
- LicenseDict,
- Tag,
- TagDict,
- )
- from faststream.broker.core.usecase import BrokerUsecase
- from faststream.types import (
+ from types import FrameType
+
+ from anyio.abc import TaskStatus
+ from fast_depends import Provider
+ from fast_depends.library.serializer import SerializerProto
+
+ from faststream._internal.basic_types import (
AnyCallable,
AnyDict,
- AnyHttpUrl,
Lifespan,
LoggerProto,
SettingField,
)
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream.asgi.types import ASGIApp, Receive, Scope, Send
+
+ class UvicornServerProtocol(Protocol):
+ should_exit: bool
+ force_exit: bool
+
+ def handle_exit(self, sig: int, frame: Optional[FrameType]) -> None: ...
+
+
+class ServerState(Protocol):
+ extra_options: dict[str, "SettingField"]
+
+ @abstractmethod
+ def stop(self) -> None: ...
+
+class OuterRunState(ServerState):
+ def __init__(self) -> None:
+ self.extra_options = {}
+
+ def stop(self) -> None:
+ # TODO: resend signal to outer uvicorn
+ pass
+
+
+class CliRunState(ServerState):
+ def __init__(
+ self,
+ server: "UvicornServerProtocol",
+ extra_options: dict[str, "SettingField"],
+ ) -> None:
+ self.server = server
+ self.extra_options = extra_options
+
+ def stop(self) -> None:
+ self.server.should_exit = True
+
+
-def cast_uvicorn_params(params: Dict[str, Any]) -> Dict[str, Any]:
+def cast_uvicorn_params(params: "AnyDict") -> "AnyDict":
if port := params.get("port"):
params["port"] = int(port)
if fd := params.get("fd"):
@@ -53,45 +78,30 @@ def cast_uvicorn_params(params: Dict[str, Any]) -> Dict[str, Any]:
class AsgiFastStream(Application):
+ _server: ServerState
+
def __init__(
self,
broker: Optional["BrokerUsecase[Any, Any]"] = None,
/,
- asgi_routes: Sequence[Tuple[str, "ASGIApp"]] = (),
- asyncapi_path: Optional[str] = None,
+ asgi_routes: Sequence[tuple[str, "ASGIApp"]] = (),
# regular broker args
logger: Optional["LoggerProto"] = logger,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
lifespan: Optional["Lifespan"] = None,
- # AsyncAPI args,
- title: str = "FastStream",
- version: str = "0.1.0",
- description: str = "",
- terms_of_service: Optional["AnyHttpUrl"] = None,
- license: Optional[Union["License", "LicenseDict", "AnyDict"]] = None,
- contact: Optional[Union["Contact", "ContactDict", "AnyDict"]] = None,
- tags: Optional[Sequence[Union["Tag", "TagDict", "AnyDict"]]] = None,
- external_docs: Optional[
- Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]
- ] = None,
- identifier: Optional[str] = None,
+ # hooks
on_startup: Sequence["AnyCallable"] = (),
after_startup: Sequence["AnyCallable"] = (),
on_shutdown: Sequence["AnyCallable"] = (),
after_shutdown: Sequence["AnyCallable"] = (),
) -> None:
super().__init__(
- broker=broker,
+ broker,
logger=logger,
+ provider=provider,
+ serializer=serializer,
lifespan=lifespan,
- title=title,
- version=version,
- description=description,
- terms_of_service=terms_of_service,
- license=license,
- contact=contact,
- tags=tags,
- external_docs=external_docs,
- identifier=identifier,
on_startup=on_startup,
after_startup=after_startup,
on_shutdown=on_shutdown,
@@ -99,31 +109,20 @@ def __init__(
)
self.routes = list(asgi_routes)
- if asyncapi_path:
- self.mount(asyncapi_path, make_asyncapi_asgi(self))
+
+ self._server = OuterRunState()
@classmethod
def from_app(
cls,
app: Application,
- asgi_routes: Sequence[Tuple[str, "ASGIApp"]],
- asyncapi_path: Optional[str] = None,
+ asgi_routes: Sequence[tuple[str, "ASGIApp"]],
) -> "AsgiFastStream":
asgi_app = cls(
app.broker,
asgi_routes=asgi_routes,
- asyncapi_path=asyncapi_path,
logger=app.logger,
lifespan=None,
- title=app.title,
- version=app.version,
- description=app.description,
- terms_of_service=app.terms_of_service,
- license=app.license,
- contact=app.contact,
- tags=app.asyncapi_tags,
- external_docs=app.external_docs,
- identifier=app.identifier,
)
asgi_app.lifespan_context = app.lifespan_context
asgi_app._on_startup_calling = app._on_startup_calling
@@ -135,7 +134,13 @@ def from_app(
def mount(self, path: str, route: "ASGIApp") -> None:
self.routes.append((path, route))
- async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None:
+ async def __call__(
+ self,
+ scope: "Scope",
+ receive: "Receive",
+ send: "Send",
+ ) -> None:
+ """ASGI implementation."""
if scope["type"] == "lifespan":
await self.lifespan(scope, receive, send)
return
@@ -152,15 +157,13 @@ async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> No
async def run(
self,
log_level: int = logging.INFO,
- run_extra_options: Optional[Dict[str, "SettingField"]] = None,
- sleep_time: float = 0.1,
+ run_extra_options: Optional[dict[str, "SettingField"]] = None,
) -> None:
try:
import uvicorn
except ImportError as e:
- raise RuntimeError(
- "You need uvicorn to run FastStream ASGI App via CLI. pip install uvicorn"
- ) from e
+ error_msg = "You need uvicorn to run FastStream ASGI App via CLI.\npip install uvicorn"
+ raise ImportError(error_msg) from e
run_extra_options = cast_uvicorn_params(run_extra_options or {})
@@ -169,7 +172,7 @@ async def run(
config = uvicorn.Config(
app=self,
log_level=log_level,
- **{
+ **{ # type: ignore[arg-type]
key: v
for key, v in run_extra_options.items()
if key in uvicorn_config_params
@@ -179,41 +182,88 @@ async def run(
server = uvicorn.Server(config)
await server.serve()
+ def exit(self) -> None:
+ """Manual stop method."""
+ self._server.stop()
+
@asynccontextmanager
- async def start_lifespan_context(self) -> AsyncIterator[None]:
- async with anyio.create_task_group() as tg, self.lifespan_context():
- tg.start_soon(self._startup)
+ async def start_lifespan_context(
+ self,
+ run_extra_options: Optional[dict[str, "SettingField"]] = None,
+ ) -> AsyncIterator[None]:
+ run_extra_options = run_extra_options or self._server.extra_options
+ async with self.lifespan_context(**run_extra_options):
try:
- yield
- finally:
- await self._shutdown()
- tg.cancel_scope.cancel()
+ async with anyio.create_task_group() as tg:
+ await tg.start(self.__start, logging.INFO, run_extra_options)
+
+ try:
+ yield
+ finally:
+ await self._shutdown()
+ tg.cancel_scope.cancel()
+
+ except ExceptionGroup as e:
+ for ex in e.exceptions:
+ raise ex from None
+
+ async def __start(
+ self,
+ log_level: int,
+ run_extra_options: dict[str, "SettingField"],
+ *,
+ task_status: "TaskStatus[None]" = anyio.TASK_STATUS_IGNORED,
+ ) -> None:
+ """Redefenition of `_startup` method.
+
+ Waits for hooks run before broker start.
+ """
+ async with (
+ self._startup_logging(log_level=log_level),
+ self._start_hooks_context(**run_extra_options),
+ ):
+ task_status.started()
+ await self._start_broker()
async def lifespan(self, scope: "Scope", receive: "Receive", send: "Send") -> None:
"""Handle ASGI lifespan messages to start and shutdown the app."""
started = False
await receive() # handle `lifespan.startup` event
+ async def process_exception(ex: BaseException) -> None:
+ exc_text = traceback.format_exc()
+ if started:
+ await send({"type": "lifespan.shutdown.failed", "message": exc_text})
+ else:
+ await send({"type": "lifespan.startup.failed", "message": exc_text})
+ raise ex
+
try:
async with self.start_lifespan_context():
await send({"type": "lifespan.startup.complete"})
started = True
await receive() # handle `lifespan.shutdown` event
- except BaseException:
- exc_text = traceback.format_exc()
- if started:
- await send({"type": "lifespan.shutdown.failed", "message": exc_text})
+ except StartupValidationError as startup_exc:
+ # Report extra options missed by the `on_startup` and `lifespan` hooks
+ if HAS_TYPER:
+ from faststream._internal.cli.utils.errors import draw_startup_errors
+
+ draw_startup_errors(startup_exc)
+ await send({"type": "lifespan.startup.failed", "message": ""})
+
else:
- await send({"type": "lifespan.startup.failed", "message": exc_text})
- raise
+ await process_exception(startup_exc)
+
+ except BaseException as base_exc:
+ await process_exception(base_exc)
else:
await send({"type": "lifespan.shutdown.complete"})
async def not_found(self, scope: "Scope", receive: "Receive", send: "Send") -> None:
- not_found_msg = "App doesn't support regular HTTP protocol."
+ not_found_msg = "Application doesn't support regular HTTP protocol."
if scope["type"] == "websocket":
websocket_close = WebSocketClose(
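
The reworked `lifespan` handler still speaks the standard ASGI lifespan protocol; only the error reporting changed. A hand-rolled driver (a sketch of what a real server such as uvicorn does for you) makes the message flow visible:

    import asyncio

    async def drive_lifespan(app) -> None:
        # `app` is any AsgiFastStream instance; feed it the two lifespan
        # events and print what it sends back.
        inbox: asyncio.Queue = asyncio.Queue()
        await inbox.put({"type": "lifespan.startup"})
        await inbox.put({"type": "lifespan.shutdown"})

        async def receive():
            return await inbox.get()

        async def send(message) -> None:
            # Expected: "lifespan.startup.complete", then
            # "lifespan.shutdown.complete"; failures arrive as
            # "lifespan.startup.failed" / "lifespan.shutdown.failed"
            # with the traceback in message["message"].
            print(message["type"])

        await app({"type": "lifespan"}, receive, send)
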
diff --git a/faststream/asgi/factories.py b/faststream/asgi/factories.py
index 54c6723e88..171bc4a414 100644
--- a/faststream/asgi/factories.py
+++ b/faststream/asgi/factories.py
@@ -6,17 +6,16 @@
from faststream.asgi.handlers import get
from faststream.asgi.response import AsgiResponse
-from faststream.asyncapi import get_app_schema
-from faststream.asyncapi.site import (
+from faststream.specification.asyncapi.site import (
ASYNCAPI_CSS_DEFAULT_URL,
ASYNCAPI_JS_DEFAULT_URL,
get_asyncapi_html,
)
if TYPE_CHECKING:
+ from faststream._internal.broker.broker import BrokerUsecase
from faststream.asgi.types import ASGIApp, Scope
- from faststream.asyncapi.proto import AsyncAPIApplication
- from faststream.broker.core.usecase import BrokerUsecase
+ from faststream.specification.base.specification import Specification
def make_ping_asgi(
@@ -31,14 +30,13 @@ def make_ping_asgi(
async def ping(scope: "Scope") -> AsgiResponse:
if await broker.ping(timeout):
return healthy_response
- else:
- return unhealthy_response
+ return unhealthy_response
return ping
def make_asyncapi_asgi(
- app: "AsyncAPIApplication",
+ schema: "Specification",
sidebar: bool = True,
info: bool = True,
servers: bool = True,
@@ -47,13 +45,12 @@ def make_asyncapi_asgi(
schemas: bool = True,
errors: bool = True,
expand_message_examples: bool = True,
- title: str = "FastStream",
asyncapi_js_url: str = ASYNCAPI_JS_DEFAULT_URL,
asyncapi_css_url: str = ASYNCAPI_CSS_DEFAULT_URL,
) -> "ASGIApp":
return AsgiResponse(
get_asyncapi_html(
- get_app_schema(app),
+ schema,
sidebar=sidebar,
info=info,
servers=servers,
@@ -62,7 +59,6 @@ def make_asyncapi_asgi(
schemas=schemas,
errors=errors,
expand_message_examples=expand_message_examples,
- title=title,
asyncapi_js_url=asyncapi_js_url,
asyncapi_css_url=asyncapi_css_url,
).encode("utf-8"),
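
Since `make_asyncapi_asgi` now accepts a ready `Specification` rather than the application, docs routes are wired from a schema object. A sketch under that assumption (the `AsyncAPI` factory is presumed to live in the new `faststream.specification` package, which is outside this diff):

    from faststream.asgi import AsgiFastStream, make_asyncapi_asgi, make_ping_asgi
    from faststream.nats import NatsBroker
    from faststream.specification import AsyncAPI  # assumed import path

    broker = NatsBroker()

    app = AsgiFastStream(
        broker,
        asgi_routes=[
            ("/health", make_ping_asgi(broker, timeout=5.0)),
            ("/docs/asyncapi", make_asyncapi_asgi(AsyncAPI(broker))),
        ],
    )
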
diff --git a/faststream/asgi/handlers.py b/faststream/asgi/handlers.py
index e14234cdf6..0fb681e853 100644
--- a/faststream/asgi/handlers.py
+++ b/faststream/asgi/handlers.py
@@ -1,7 +1,7 @@
+from collections.abc import Sequence
from functools import wraps
from typing import (
TYPE_CHECKING,
- Sequence,
)
from faststream.asgi.response import AsgiResponse
@@ -32,7 +32,6 @@ async def asgi_wrapper(
response = error_response
await response(scope, receive, send)
- return
return asgi_wrapper
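
The `get` decorator (whose wrapper loses a redundant trailing `return` here) turns a coroutine returning `AsgiResponse` into a method-filtered ASGI callable. A short sketch:

    from faststream.asgi import AsgiResponse, get

    @get
    async def liveness(scope) -> AsgiResponse:
        # Non-GET requests are answered by the wrapper's error response.
        return AsgiResponse(b"OK", status_code=200)

    # `liveness` is now a plain ASGI app, mountable via
    # app.mount("/live", liveness) or the asgi_routes argument.
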
diff --git a/faststream/asgi/response.py b/faststream/asgi/response.py
index cfc9d37d59..48e19598ff 100644
--- a/faststream/asgi/response.py
+++ b/faststream/asgi/response.py
@@ -1,4 +1,5 @@
-from typing import TYPE_CHECKING, List, Mapping, Optional, Tuple
+from collections.abc import Mapping
+from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from faststream.asgi.types import Receive, Scope, Send
@@ -22,13 +23,13 @@ async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> No
"type": f"{prefix}http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
- }
+ },
)
await send(
{
"type": f"{prefix}http.response.body",
"body": self.body,
- }
+ },
)
@@ -36,9 +37,9 @@ def _get_response_headers(
body: bytes,
headers: Optional[Mapping[str, str]],
status_code: int,
-) -> List[Tuple[bytes, bytes]]:
+) -> list[tuple[bytes, bytes]]:
if headers is None:
- raw_headers: List[Tuple[bytes, bytes]] = []
+ raw_headers: list[tuple[bytes, bytes]] = []
populate_content_length = True
else:
@@ -52,7 +53,7 @@ def _get_response_headers(
if (
body
and populate_content_length
- and not (status_code < 200 or status_code in (204, 304))
+ and not (status_code < 200 or status_code in {204, 304})
):
content_length = str(len(body))
raw_headers.append((b"content-length", content_length.encode("latin-1")))
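
Behavior is unchanged by the builtin-generics and set-literal cleanups: `content-length` is auto-populated only when the caller supplied no headers and the status permits a body. A quick sketch:

    from faststream.asgi import AsgiResponse

    ok = AsgiResponse(b"pong", status_code=200)
    # ok.raw_headers == [(b"content-length", b"4")]

    no_content = AsgiResponse(b"", status_code=204)
    # nothing injected: 1xx, 204 and 304 responses never get content-length

    custom = AsgiResponse(b"pong", status_code=200, headers={"content-length": "4"})
    # caller-provided headers are used as-is, with no duplicate added
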
diff --git a/faststream/asgi/types.py b/faststream/asgi/types.py
index df9d96b098..62910a1a2c 100644
--- a/faststream/asgi/types.py
+++ b/faststream/asgi/types.py
@@ -1,4 +1,5 @@
-from typing import Any, Awaitable, Callable, MutableMapping
+from collections.abc import Awaitable, MutableMapping
+from typing import Any, Callable
Scope = MutableMapping[str, Any]
Message = MutableMapping[str, Any]
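
The aliases still describe plain ASGI callables, now sourcing their ABCs from `collections.abc`. A minimal app that satisfies them:

    from faststream.asgi.types import Receive, Scope, Send

    async def hello(scope: Scope, receive: Receive, send: Send) -> None:
        # The smallest possible HTTP ASGI app matching these aliases.
        await send({"type": "http.response.start", "status": 200, "headers": []})
        await send({"type": "http.response.body", "body": b"hello"})
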
diff --git a/faststream/asgi/websocket.py b/faststream/asgi/websocket.py
index 4a7fdf45de..e28397e2d4 100644
--- a/faststream/asgi/websocket.py
+++ b/faststream/asgi/websocket.py
@@ -15,5 +15,5 @@ def __init__(
async def __call__(self, scope: "Scope", receive: "Receive", send: "Send") -> None:
await send(
- {"type": "websocket.close", "code": self.code, "reason": self.reason}
+ {"type": "websocket.close", "code": self.code, "reason": self.reason},
)
diff --git a/faststream/asyncapi/__init__.py b/faststream/asyncapi/__init__.py
deleted file mode 100644
index be11a98029..0000000000
--- a/faststream/asyncapi/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""AsyncAPI related functions."""
-
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.site import get_asyncapi_html
-
-__all__ = (
- "get_app_schema",
- "get_asyncapi_html",
-)
diff --git a/faststream/asyncapi/abc.py b/faststream/asyncapi/abc.py
deleted file mode 100644
index 64dd0fccde..0000000000
--- a/faststream/asyncapi/abc.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from abc import abstractmethod
-from typing import Any, Dict, Optional
-
-from faststream.asyncapi.proto import AsyncAPIProto
-from faststream.asyncapi.schema.channels import Channel
-
-
-class AsyncAPIOperation(AsyncAPIProto):
- """A class representing an asynchronous API operation."""
-
- @property
- def name(self) -> str:
- """Returns the name of the API operation."""
- return self.title_ or self.get_name()
-
- @abstractmethod
- def get_name(self) -> str:
- """Name property fallback."""
- raise NotImplementedError()
-
- @property
- def description(self) -> Optional[str]:
- """Returns the description of the API operation."""
- return self.description_ or self.get_description()
-
- def get_description(self) -> Optional[str]:
- """Description property fallback."""
- return None
-
- def schema(self) -> Dict[str, Channel]:
- """Returns the schema of the API operation as a dictionary of channel names and channel objects."""
- if self.include_in_schema:
- return self.get_schema()
- else:
- return {}
-
- @abstractmethod
- def get_schema(self) -> Dict[str, Channel]:
- """Generate AsyncAPI schema."""
- raise NotImplementedError()
-
- @abstractmethod
- def get_payloads(self) -> Any:
- """Generate AsyncAPI payloads."""
- raise NotImplementedError()
diff --git a/faststream/asyncapi/generate.py b/faststream/asyncapi/generate.py
deleted file mode 100644
index 61968fa3ba..0000000000
--- a/faststream/asyncapi/generate.py
+++ /dev/null
@@ -1,221 +0,0 @@
-from typing import TYPE_CHECKING, Any, Dict, List
-
-from faststream._compat import DEF_KEY
-from faststream.asyncapi.schema import (
- Channel,
- Components,
- Info,
- Message,
- Reference,
- Schema,
- Server,
-)
-from faststream.constants import ContentTypes
-
-if TYPE_CHECKING:
- from faststream.asyncapi.proto import AsyncAPIApplication
- from faststream.broker.core.usecase import BrokerUsecase
- from faststream.broker.types import ConnectionType, MsgType
-
-
-def get_app_schema(app: "AsyncAPIApplication") -> Schema:
- """Get the application schema."""
- broker = app.broker
- if broker is None: # pragma: no cover
- raise RuntimeError()
- broker.setup()
-
- servers = get_broker_server(broker)
- channels = get_broker_channels(broker)
-
- messages: Dict[str, Message] = {}
- payloads: Dict[str, Dict[str, Any]] = {}
- for channel_name, ch in channels.items():
- ch.servers = list(servers.keys())
-
- if ch.subscribe is not None:
- m = ch.subscribe.message
-
- if isinstance(m, Message): # pragma: no branch
- ch.subscribe.message = _resolve_msg_payloads(
- m,
- channel_name,
- payloads,
- messages,
- )
-
- if ch.publish is not None:
- m = ch.publish.message
-
- if isinstance(m, Message): # pragma: no branch
- ch.publish.message = _resolve_msg_payloads(
- m,
- channel_name,
- payloads,
- messages,
- )
- schema = Schema(
- info=Info(
- title=app.title,
- version=app.version,
- description=app.description,
- termsOfService=app.terms_of_service,
- contact=app.contact,
- license=app.license,
- ),
- defaultContentType=ContentTypes.json.value,
- id=app.identifier,
- tags=list(app.asyncapi_tags) if app.asyncapi_tags else None,
- externalDocs=app.external_docs,
- servers=servers,
- channels=channels,
- components=Components(
- messages=messages,
- schemas=payloads,
- securitySchemes=None
- if broker.security is None
- else broker.security.get_schema(),
- ),
- )
- return schema
-
-
-def get_broker_server(
- broker: "BrokerUsecase[MsgType, ConnectionType]",
-) -> Dict[str, Server]:
- """Get the broker server for an application."""
- servers = {}
-
- broker_meta: Dict[str, Any] = {
- "protocol": broker.protocol,
- "protocolVersion": broker.protocol_version,
- "description": broker.description,
- "tags": broker.tags,
- # TODO
- # "variables": "",
- # "bindings": "",
- }
-
- if broker.security is not None:
- broker_meta["security"] = broker.security.get_requirement()
-
- if isinstance(broker.url, str):
- servers["development"] = Server(
- url=broker.url,
- **broker_meta,
- )
-
- elif len(broker.url) == 1:
- servers["development"] = Server(
- url=broker.url[0],
- **broker_meta,
- )
-
- else:
- for i, url in enumerate(broker.url, 1):
- servers[f"Server{i}"] = Server(
- url=url,
- **broker_meta,
- )
-
- return servers
-
-
-def get_broker_channels(
- broker: "BrokerUsecase[MsgType, ConnectionType]",
-) -> Dict[str, Channel]:
- """Get the broker channels for an application."""
- channels = {}
-
- for h in broker._subscribers.values():
- channels.update(h.schema())
-
- for p in broker._publishers.values():
- channels.update(p.schema())
-
- return channels
-
-
-def _resolve_msg_payloads(
- m: Message,
- channel_name: str,
- payloads: Dict[str, Any],
- messages: Dict[str, Any],
-) -> Reference:
- """Replace message payload by reference and normalize payloads.
-
- Payloads and messages are editable dicts to store schemas for reference in AsyncAPI.
- """
- one_of_list: List[Reference] = []
- m.payload = _move_pydantic_refs(m.payload, DEF_KEY)
-
- if DEF_KEY in m.payload:
- payloads.update(m.payload.pop(DEF_KEY))
-
- one_of = m.payload.get("oneOf")
- if isinstance(one_of, dict):
- for p_title, p in one_of.items():
- p_title = p_title.replace("/", ".")
- payloads.update(p.pop(DEF_KEY, {}))
- if p_title not in payloads:
- payloads[p_title] = p
- one_of_list.append(Reference(**{"$ref": f"#/components/schemas/{p_title}"}))
-
- elif one_of is not None:
- # Descriminator case
- for p in one_of:
- p_value = next(iter(p.values()))
- p_title = p_value.split("/")[-1]
- p_title = p_title.replace("/", ".")
- if p_title not in payloads:
- payloads[p_title] = p
- one_of_list.append(Reference(**{"$ref": f"#/components/schemas/{p_title}"}))
-
- if not one_of_list:
- payloads.update(m.payload.pop(DEF_KEY, {}))
- p_title = m.payload.get("title", f"{channel_name}Payload")
- p_title = p_title.replace("/", ".")
- if p_title not in payloads:
- payloads[p_title] = m.payload
- m.payload = {"$ref": f"#/components/schemas/{p_title}"}
-
- else:
- m.payload["oneOf"] = one_of_list
-
- assert m.title # nosec B101
- m.title = m.title.replace("/", ".")
- messages[m.title] = m
- return Reference(**{"$ref": f"#/components/messages/{m.title}"})
-
-
-def _move_pydantic_refs(
- original: Any,
- key: str,
-) -> Any:
- """Remove pydantic references and replacem them by real schemas."""
- if not isinstance(original, Dict):
- return original
-
- data = original.copy()
-
- for k in data:
- item = data[k]
-
- if isinstance(item, str):
- if key in item:
- data[k] = data[k].replace(key, "components/schemas")
-
- elif isinstance(item, dict):
- data[k] = _move_pydantic_refs(data[k], key)
-
- elif isinstance(item, List):
- for i in range(len(data[k])):
- data[k][i] = _move_pydantic_refs(item[i], key)
-
- if (
- isinstance(desciminator := data.get("discriminator"), dict)
- and "propertyName" in desciminator
- ):
- data["discriminator"] = desciminator["propertyName"]
-
- return data
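
The removed `_move_pydantic_refs` rewrote pydantic definition references into AsyncAPI component references before payloads were collected. A standalone sketch of that transformation, assuming pydantic v2's "$defs" key (what the removed `DEF_KEY` import resolved to):

    from typing import Any

    DEF_KEY = "$defs"  # pydantic v2; the removed code took this via _compat

    def move_refs(schema: Any) -> Any:
        # Recursively rewrite "#/$defs/X" into "#/components/schemas/X".
        if not isinstance(schema, dict):
            return schema
        result = {}
        for key, value in schema.items():
            if isinstance(value, str) and DEF_KEY in value:
                result[key] = value.replace(DEF_KEY, "components/schemas")
            elif isinstance(value, dict):
                result[key] = move_refs(value)
            elif isinstance(value, list):
                result[key] = [move_refs(item) for item in value]
            else:
                result[key] = value
        return result

    print(move_refs({"$ref": "#/$defs/UserPayload"}))
    # {'$ref': '#/components/schemas/UserPayload'}
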
diff --git a/faststream/asyncapi/message.py b/faststream/asyncapi/message.py
deleted file mode 100644
index b37415dedc..0000000000
--- a/faststream/asyncapi/message.py
+++ /dev/null
@@ -1,135 +0,0 @@
-from inspect import isclass
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Type, overload
-
-from pydantic import BaseModel, create_model
-
-from faststream._compat import DEF_KEY, PYDANTIC_V2, get_model_fields, model_schema
-
-if TYPE_CHECKING:
- from fast_depends.core import CallModel
-
-
-def parse_handler_params(
- call: "CallModel[Any, Any]", prefix: str = ""
-) -> Dict[str, Any]:
- """Parses the handler parameters."""
- model = call.model
- assert model # nosec B101
-
- body = get_model_schema(
- create_model( # type: ignore[call-overload]
- model.__name__,
- **call.flat_params,
- ),
- prefix=prefix,
- exclude=tuple(call.custom_fields.keys()),
- )
-
- if body is None:
- return {"title": "EmptyPayload", "type": "null"}
-
- return body
-
-
-@overload
-def get_response_schema(call: None, prefix: str = "") -> None: ...
-
-
-@overload
-def get_response_schema(
- call: "CallModel[Any, Any]", prefix: str = ""
-) -> Dict[str, Any]: ...
-
-
-def get_response_schema(
- call: Optional["CallModel[Any, Any]"],
- prefix: str = "",
-) -> Optional[Dict[str, Any]]:
- """Get the response schema for a given call."""
- return get_model_schema(
- getattr(
- call, "response_model", None
- ), # NOTE: FastAPI Dependant object compatibility
- prefix=prefix,
- )
-
-
-@overload
-def get_model_schema(
- call: None,
- prefix: str = "",
- exclude: Sequence[str] = (),
-) -> None: ...
-
-
-@overload
-def get_model_schema(
- call: Type[BaseModel],
- prefix: str = "",
- exclude: Sequence[str] = (),
-) -> Dict[str, Any]: ...
-
-
-def get_model_schema(
- call: Optional[Type[BaseModel]],
- prefix: str = "",
- exclude: Sequence[str] = (),
-) -> Optional[Dict[str, Any]]:
- """Get the schema of a model."""
- if call is None:
- return None
-
- params = {k: v for k, v in get_model_fields(call).items() if k not in exclude}
- params_number = len(params)
-
- if params_number == 0:
- return None
-
- model = None
- use_original_model = False
- if params_number == 1:
- name, param = next(iter(params.items()))
- if (
- param.annotation
- and isclass(param.annotation)
- and issubclass(param.annotation, BaseModel) # NOTE: 3.7-3.10 compatibility
- ):
- model = param.annotation
- use_original_model = True
-
- if model is None:
- model = call
-
- body: Dict[str, Any] = model_schema(model)
- body["properties"] = body.get("properties", {})
- for i in exclude:
- body["properties"].pop(i, None)
- if required := body.get("required"):
- body["required"] = list(filter(lambda x: x not in exclude, required))
-
- if params_number == 1 and not use_original_model:
- param_body: Dict[str, Any] = body.get("properties", {})
- param_body = param_body[name]
-
- if defs := body.get(DEF_KEY):
- # single argument with useless reference
- if param_body.get("$ref"):
- ref_obj: Dict[str, Any] = next(iter(defs.values()))
- return ref_obj
- else:
- param_body[DEF_KEY] = defs
-
- original_title = param.title if PYDANTIC_V2 else param.field_info.title
-
- if original_title:
- use_original_model = True
- param_body["title"] = original_title
- else:
- param_body["title"] = name
-
- body = param_body
-
- if not use_original_model:
- body["title"] = f"{prefix}:Payload"
-
- return body
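
The removed `get_model_schema` flattened single-argument handlers so the lone field's schema became the payload instead of a wrapping model. Roughly, under pydantic v2:

    from pydantic import create_model

    Payload = create_model("HandlerPayload", name=(str, ...))
    body = Payload.model_json_schema()

    # Multi-field handlers kept the whole model schema; for the single
    # plain field above, the old helper returned just
    # body["properties"]["name"], retitled to "<prefix>:Payload".
    print(body["properties"]["name"])  # {'title': 'Name', 'type': 'string'}
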
diff --git a/faststream/asyncapi/proto.py b/faststream/asyncapi/proto.py
deleted file mode 100644
index 81a76da837..0000000000
--- a/faststream/asyncapi/proto.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from abc import abstractmethod
-from typing import TYPE_CHECKING, Any, Dict, Optional, Protocol, Sequence, Union
-
-if TYPE_CHECKING:
- from faststream.asyncapi.schema import (
- Contact,
- ContactDict,
- ExternalDocs,
- ExternalDocsDict,
- License,
- LicenseDict,
- Tag,
- TagDict,
- )
- from faststream.asyncapi.schema.channels import Channel
- from faststream.broker.core.usecase import BrokerUsecase
- from faststream.types import (
- AnyDict,
- AnyHttpUrl,
- )
-
-
-class AsyncAPIApplication(Protocol):
- broker: Optional["BrokerUsecase[Any, Any]"]
-
- title: str
- version: str
- description: str
- terms_of_service: Optional["AnyHttpUrl"]
- license: Optional[Union["License", "LicenseDict", "AnyDict"]]
- contact: Optional[Union["Contact", "ContactDict", "AnyDict"]]
- asyncapi_tags: Optional[Sequence[Union["Tag", "TagDict", "AnyDict"]]]
- external_docs: Optional[Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]]
- identifier: Optional[str]
-
-
-class AsyncAPIProto(Protocol):
- """A class representing an asynchronous API operation."""
-
- title_: Optional[str]
- """AsyncAPI object title."""
-
- description_: Optional[str]
- """AsyncAPI object description."""
-
- include_in_schema: bool
- """Whetever to include operation in AsyncAPI schema or not."""
-
- @property
- @abstractmethod
- def name(self) -> str:
- """Returns the name of the API operation."""
- ...
-
- @property
- @abstractmethod
- def description(self) -> Optional[str]:
- """Returns the description of the API operation."""
- ...
-
- @abstractmethod
- def schema(self) -> Dict[str, "Channel"]:
- """Generate AsyncAPI schema."""
- ...
diff --git a/faststream/asyncapi/schema/__init__.py b/faststream/asyncapi/schema/__init__.py
deleted file mode 100644
index b9d626e5c6..0000000000
--- a/faststream/asyncapi/schema/__init__.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""AsyncAPI schema related functions."""
-
-from faststream.asyncapi.schema.bindings import (
- ChannelBinding,
- OperationBinding,
- ServerBinding,
-)
-from faststream.asyncapi.schema.channels import Channel
-from faststream.asyncapi.schema.info import (
- Contact,
- ContactDict,
- Info,
- License,
- LicenseDict,
-)
-from faststream.asyncapi.schema.main import ASYNC_API_VERSION, Components, Schema
-from faststream.asyncapi.schema.message import CorrelationId, Message
-from faststream.asyncapi.schema.operations import Operation
-from faststream.asyncapi.schema.security import SecuritySchemaComponent
-from faststream.asyncapi.schema.servers import Server
-from faststream.asyncapi.schema.utils import (
- ExternalDocs,
- ExternalDocsDict,
- Reference,
- Tag,
- TagDict,
-)
-
-__all__ = (
- # main
- "ASYNC_API_VERSION",
- # channels
- "Channel",
- "ChannelBinding",
- "Components",
- "Contact",
- "ContactDict",
- "CorrelationId",
- "ExternalDocs",
- "ExternalDocsDict",
- # info
- "Info",
- "License",
- "LicenseDict",
- # messages
- "Message",
- # subscription
- "Operation",
- "OperationBinding",
- "Reference",
- "Schema",
- # security
- "SecuritySchemaComponent",
- # servers
- "Server",
- # bindings
- "ServerBinding",
- # utils
- "Tag",
- "TagDict",
-)
diff --git a/faststream/asyncapi/schema/bindings/__init__.py b/faststream/asyncapi/schema/bindings/__init__.py
deleted file mode 100644
index 4b29e49a83..0000000000
--- a/faststream/asyncapi/schema/bindings/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""AsyncAPI schema bindings related functions."""
-
-from faststream.asyncapi.schema.bindings.main import (
- ChannelBinding,
- OperationBinding,
- ServerBinding,
-)
-
-__all__ = (
- "ChannelBinding",
- "OperationBinding",
- "ServerBinding",
-)
diff --git a/faststream/asyncapi/schema/bindings/amqp.py b/faststream/asyncapi/schema/bindings/amqp.py
deleted file mode 100644
index 8d9ead8dd0..0000000000
--- a/faststream/asyncapi/schema/bindings/amqp.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""AsyncAPI AMQP bindings.
-
-References: https://github.com/asyncapi/bindings/tree/master/amqp
-"""
-
-from typing import Literal, Optional
-
-from pydantic import BaseModel, Field, PositiveInt
-
-
-class Queue(BaseModel):
- """A class to represent a queue.
-
- Attributes:
- name : name of the queue
- durable : indicates if the queue is durable
- exclusive : indicates if the queue is exclusive
- autoDelete : indicates if the queue should be automatically deleted
- vhost : virtual host of the queue (default is "/")
- """
-
- name: str
- durable: bool
- exclusive: bool
- autoDelete: bool
- vhost: str = "/"
-
-
-class Exchange(BaseModel):
- """A class to represent an exchange.
-
- Attributes:
- name : name of the exchange (optional)
- type : type of the exchange, can be one of "default", "direct", "topic", "fanout", "headers"
- durable : whether the exchange is durable (optional)
- autoDelete : whether the exchange is automatically deleted (optional)
- vhost : virtual host of the exchange, default is "/"
- """
-
- type: Literal[
- "default",
- "direct",
- "topic",
- "fanout",
- "headers",
- "x-delayed-message",
- "x-consistent-hash",
- "x-modulus-hash",
- ]
-
- name: Optional[str] = None
- durable: Optional[bool] = None
- autoDelete: Optional[bool] = None
- vhost: str = "/"
-
-
-class ServerBinding(BaseModel):
- """A class to represent a server binding.
-
- Attributes:
- bindingVersion : version of the binding (default: "0.2.0")
- """
-
- bindingVersion: str = "0.2.0"
-
-
-class ChannelBinding(BaseModel):
- """A class to represent channel binding.
-
- Attributes:
- is_ : Type of binding, can be "queue" or "routingKey"
- bindingVersion : Version of the binding
- queue : Optional queue object
- exchange : Optional exchange object
- """
-
- is_: Literal["queue", "routingKey"] = Field(..., alias="is")
- bindingVersion: str = "0.2.0"
- queue: Optional[Queue] = None
- exchange: Optional[Exchange] = None
-
-
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- cc : optional string representing the cc
- ack : boolean indicating if the operation is acknowledged
- replyTo : optional dictionary representing the replyTo
- bindingVersion : string representing the binding version
- """
-
- cc: Optional[str] = None
- ack: bool = True
- replyTo: Optional[str] = None
- deliveryMode: Optional[int] = None
- mandatory: Optional[bool] = None
- priority: Optional[PositiveInt] = None
- bindingVersion: str = "0.2.0"
diff --git a/faststream/asyncapi/schema/bindings/kafka.py b/faststream/asyncapi/schema/bindings/kafka.py
deleted file mode 100644
index 8f54abb0aa..0000000000
--- a/faststream/asyncapi/schema/bindings/kafka.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""AsyncAPI Kafka bindings.
-
-References: https://github.com/asyncapi/bindings/tree/master/kafka
-"""
-
-from typing import Any, Dict, Optional
-
-from pydantic import BaseModel, PositiveInt
-
-
-class ServerBinding(BaseModel):
- """A class to represent a server binding.
-
- Attributes:
- bindingVersion : version of the binding (default: "0.4.0")
- """
-
- bindingVersion: str = "0.4.0"
-
-
-class ChannelBinding(BaseModel):
- """A class to represent a channel binding.
-
- Attributes:
- topic : optional string representing the topic
- partitions : optional positive integer representing the number of partitions
- replicas : optional positive integer representing the number of replicas
- bindingVersion : string representing the binding version
- """
-
- topic: Optional[str] = None
- partitions: Optional[PositiveInt] = None
- replicas: Optional[PositiveInt] = None
- # TODO:
- # topicConfiguration
- bindingVersion: str = "0.4.0"
-
-
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- groupId : optional dictionary representing the group ID
- clientId : optional dictionary representing the client ID
- replyTo : optional dictionary representing the reply-to
- bindingVersion : version of the binding (default: "0.4.0")
- """
-
- groupId: Optional[Dict[str, Any]] = None
- clientId: Optional[Dict[str, Any]] = None
- replyTo: Optional[Dict[str, Any]] = None
- bindingVersion: str = "0.4.0"
diff --git a/faststream/asyncapi/schema/bindings/main.py b/faststream/asyncapi/schema/bindings/main.py
deleted file mode 100644
index 582db39bf7..0000000000
--- a/faststream/asyncapi/schema/bindings/main.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from typing import Optional
-
-from pydantic import BaseModel
-
-from faststream._compat import PYDANTIC_V2
-from faststream.asyncapi.schema.bindings import amqp as amqp_bindings
-from faststream.asyncapi.schema.bindings import kafka as kafka_bindings
-from faststream.asyncapi.schema.bindings import nats as nats_bindings
-from faststream.asyncapi.schema.bindings import redis as redis_bindings
-from faststream.asyncapi.schema.bindings import sqs as sqs_bindings
-
-
-class ServerBinding(BaseModel):
- """A class to represent server bindings.
-
- Attributes:
- amqp : AMQP server binding (optional)
- kafka : Kafka server binding (optional)
- sqs : SQS server binding (optional)
- nats : NATS server binding (optional)
- redis : Redis server binding (optional)
-
- """
-
- amqp: Optional[amqp_bindings.ServerBinding] = None
- kafka: Optional[kafka_bindings.ServerBinding] = None
- sqs: Optional[sqs_bindings.ServerBinding] = None
- nats: Optional[nats_bindings.ServerBinding] = None
- redis: Optional[redis_bindings.ServerBinding] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class ChannelBinding(BaseModel):
- """A class to represent channel bindings.
-
- Attributes:
- amqp : AMQP channel binding (optional)
- kafka : Kafka channel binding (optional)
- sqs : SQS channel binding (optional)
- nats : NATS channel binding (optional)
- redis : Redis channel binding (optional)
-
- """
-
- amqp: Optional[amqp_bindings.ChannelBinding] = None
- kafka: Optional[kafka_bindings.ChannelBinding] = None
- sqs: Optional[sqs_bindings.ChannelBinding] = None
- nats: Optional[nats_bindings.ChannelBinding] = None
- redis: Optional[redis_bindings.ChannelBinding] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- amqp : AMQP operation binding (optional)
- kafka : Kafka operation binding (optional)
- sqs : SQS operation binding (optional)
- nats : NATS operation binding (optional)
- redis : Redis operation binding (optional)
-
- """
-
- amqp: Optional[amqp_bindings.OperationBinding] = None
- kafka: Optional[kafka_bindings.OperationBinding] = None
- sqs: Optional[sqs_bindings.OperationBinding] = None
- nats: Optional[nats_bindings.OperationBinding] = None
- redis: Optional[redis_bindings.OperationBinding] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/bindings/nats.py b/faststream/asyncapi/schema/bindings/nats.py
deleted file mode 100644
index 3016c91075..0000000000
--- a/faststream/asyncapi/schema/bindings/nats.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""AsyncAPI NATS bindings.
-
-References: https://github.com/asyncapi/bindings/tree/master/nats
-"""
-
-from typing import Any, Dict, Optional
-
-from pydantic import BaseModel
-
-
-class ServerBinding(BaseModel):
- """A class to represent a server binding.
-
- Attributes:
- bindingVersion : version of the binding (default: "custom")
- """
-
- bindingVersion: str = "custom"
-
-
-class ChannelBinding(BaseModel):
- """A class to represent channel binding.
-
- Attributes:
- subject : subject of the channel binding
- queue : optional queue for the channel binding
- bindingVersion : version of the channel binding, default is "custom"
- """
-
- subject: str
- queue: Optional[str] = None
- bindingVersion: str = "custom"
-
-
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- replyTo : optional dictionary containing reply information
- bindingVersion : version of the binding (default is "custom")
- """
-
- replyTo: Optional[Dict[str, Any]] = None
- bindingVersion: str = "custom"
diff --git a/faststream/asyncapi/schema/bindings/redis.py b/faststream/asyncapi/schema/bindings/redis.py
deleted file mode 100644
index fe82e94d1f..0000000000
--- a/faststream/asyncapi/schema/bindings/redis.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""AsyncAPI Redis bindings.
-
-References: https://github.com/asyncapi/bindings/tree/master/redis
-"""
-
-from typing import Any, Dict, Optional
-
-from pydantic import BaseModel
-
-
-class ServerBinding(BaseModel):
- """A class to represent a server binding.
-
- Attributes:
- bindingVersion : version of the binding (default: "custom")
- """
-
- bindingVersion: str = "custom"
-
-
-class ChannelBinding(BaseModel):
- """A class to represent channel binding.
-
- Attributes:
- channel : the channel name
- method : the method used for binding (ssubscribe, psubscribe, subscribe)
- bindingVersion : the version of the binding
- """
-
- channel: str
- method: Optional[str] = None
- group_name: Optional[str] = None
- consumer_name: Optional[str] = None
- bindingVersion: str = "custom"
-
-
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- replyTo : optional dictionary containing reply information
- bindingVersion : version of the binding (default is "custom")
- """
-
- replyTo: Optional[Dict[str, Any]] = None
- bindingVersion: str = "custom"
diff --git a/faststream/asyncapi/schema/bindings/sqs.py b/faststream/asyncapi/schema/bindings/sqs.py
deleted file mode 100644
index 0aba239d8c..0000000000
--- a/faststream/asyncapi/schema/bindings/sqs.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""AsyncAPI SQS bindings.
-
-References: https://github.com/asyncapi/bindings/tree/master/sqs
-"""
-
-from typing import Any, Dict, Optional
-
-from pydantic import BaseModel
-
-
-class ServerBinding(BaseModel):
- """A class to represent a server binding.
-
- Attributes:
- bindingVersion : version of the binding (default: "custom")
- """
-
- bindingVersion: str = "custom"
-
-
-class ChannelBinding(BaseModel):
- """A class to represent channel binding.
-
- Attributes:
- queue : a dictionary representing the queue
- bindingVersion : a string representing the binding version (default: "custom")
- """
-
- queue: Dict[str, Any]
- bindingVersion: str = "custom"
-
-
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- replyTo : optional dictionary containing reply information
- bindingVersion : version of the binding, default is "custom"
- """
-
- replyTo: Optional[Dict[str, Any]] = None
- bindingVersion: str = "custom"
diff --git a/faststream/asyncapi/schema/channels.py b/faststream/asyncapi/schema/channels.py
deleted file mode 100644
index cfee0d342b..0000000000
--- a/faststream/asyncapi/schema/channels.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from typing import List, Optional
-
-from pydantic import BaseModel
-
-from faststream._compat import PYDANTIC_V2
-from faststream.asyncapi.schema.bindings import ChannelBinding
-from faststream.asyncapi.schema.operations import Operation
-from faststream.asyncapi.schema.utils import Parameter
-
-
-class Channel(BaseModel):
- """A class to represent a channel.
-
- Attributes:
- description : optional description of the channel
- servers : optional list of servers associated with the channel
- bindings : optional channel binding
- subscribe : optional operation for subscribing to the channel
- publish : optional operation for publishing to the channel
- parameters : optional parameters associated with the channel
-
- Configurations:
- model_config : configuration for the model (only applicable for Pydantic version 2)
- Config : configuration for the class (only applicable for Pydantic version 1)
-
- """
-
- description: Optional[str] = None
- servers: Optional[List[str]] = None
- bindings: Optional[ChannelBinding] = None
- subscribe: Optional[Operation] = None
- publish: Optional[Operation] = None
- parameters: Optional[Parameter] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/info.py b/faststream/asyncapi/schema/info.py
deleted file mode 100644
index 1e5a1a2d6f..0000000000
--- a/faststream/asyncapi/schema/info.py
+++ /dev/null
@@ -1,185 +0,0 @@
-from typing import Any, Callable, Dict, Iterable, Optional, Type, Union
-
-from pydantic import AnyHttpUrl, BaseModel
-from typing_extensions import Required, TypedDict
-
-from faststream._compat import (
- PYDANTIC_V2,
- CoreSchema,
- GetJsonSchemaHandler,
- JsonSchemaValue,
- with_info_plain_validator_function,
-)
-from faststream.log import logger
-
-try:
- import email_validator
-
- if email_validator is None:
- raise ImportError
- from pydantic import EmailStr
-
-except ImportError: # pragma: no cover
- # NOTE: EmailStr mock was copied from the FastAPI
- # https://github.com/tiangolo/fastapi/blob/master/fastapi/openapi/models.py#24
- class EmailStr(str): # type: ignore
- """EmailStr is a string that should be an email.
-
- Note: EmailStr mock was copied from the FastAPI:
- https://github.com/tiangolo/fastapi/blob/master/fastapi/openapi/models.py#24
-
- """
-
- @classmethod
- def __get_validators__(cls) -> Iterable[Callable[..., Any]]:
- """Returns the validators for the EmailStr class."""
- yield cls.validate
-
- @classmethod
- def validate(cls, v: Any) -> str:
- """Validates the EmailStr class."""
- logger.warning(
- "email-validator bot installed, email fields will be treated as str.\n"
- "To install, run: pip install email-validator"
- )
- return str(v)
-
- @classmethod
- def _validate(cls, __input_value: Any, _: Any) -> str:
- logger.warning(
- "email-validator bot installed, email fields will be treated as str.\n"
- "To install, run: pip install email-validator"
- )
- return str(__input_value)
-
- @classmethod
- def __get_pydantic_json_schema__(
- cls,
- core_schema: CoreSchema,
- handler: GetJsonSchemaHandler,
- ) -> JsonSchemaValue:
- """Returns the JSON schema for the EmailStr class.
-
- Args:
- core_schema : the core schema
- handler : the handler
- """
- return {"type": "string", "format": "email"}
-
- @classmethod
- def __get_pydantic_core_schema__(
- cls,
- source: Type[Any],
- handler: Callable[[Any], CoreSchema],
- ) -> JsonSchemaValue:
- """Returns the core schema for the EmailStr class.
-
- Args:
- source : the source
- handler : the handler
- """
- return with_info_plain_validator_function(cls._validate)
-
-
-class ContactDict(TypedDict, total=False):
- """A class to represent a dictionary of contact information.
-
- Attributes:
- name : required name of the contact (type: str)
- url : URL of the contact (type: AnyHttpUrl)
- email : email address of the contact (type: EmailStr)
-
- """
-
- name: Required[str]
- url: AnyHttpUrl
- email: EmailStr
-
-
-class Contact(BaseModel):
- """A class to represent a contact.
-
- Attributes:
- name : name of the contact (str)
- url : URL of the contact (Optional[AnyHttpUrl])
- email : email of the contact (Optional[EmailStr])
-
- """
-
- name: str
- url: Optional[AnyHttpUrl] = None
- email: Optional[EmailStr] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class LicenseDict(TypedDict, total=False):
- """A dictionary-like class to represent a license.
-
- Attributes:
- name : required name of the license (type: str)
- url : URL of the license (type: AnyHttpUrl)
-
- """
-
- name: Required[str]
- url: AnyHttpUrl
-
-
-class License(BaseModel):
- """A class to represent a license.
-
- Attributes:
- name : name of the license
- url : URL of the license (optional)
-
- Config:
- extra : allow additional attributes in the model (PYDANTIC_V2)
-
- """
-
- name: str
- url: Optional[AnyHttpUrl] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class Info(BaseModel):
- """A class to represent information.
-
- Attributes:
- title : title of the information
- version : version of the information (default: "1.0.0")
- description : description of the information (default: "")
- termsOfService : terms of service for the information (default: None)
- contact : contact information for the information (default: None)
- license : license information for the information (default: None)
-
- """
-
- title: str
- version: str = "1.0.0"
- description: str = ""
- termsOfService: Optional[AnyHttpUrl] = None
- contact: Optional[Union[Contact, ContactDict, Dict[str, Any]]] = None
- license: Optional[Union[License, LicenseDict, Dict[str, Any]]] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/main.py b/faststream/asyncapi/schema/main.py
deleted file mode 100644
index acceca985d..0000000000
--- a/faststream/asyncapi/schema/main.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from typing import Any, Dict, List, Optional, Union
-
-from pydantic import BaseModel
-
-from faststream._compat import PYDANTIC_V2, model_to_json, model_to_jsonable
-from faststream.asyncapi.schema.channels import Channel
-from faststream.asyncapi.schema.info import Info
-from faststream.asyncapi.schema.message import Message
-from faststream.asyncapi.schema.servers import Server
-from faststream.asyncapi.schema.utils import (
- ExternalDocs,
- ExternalDocsDict,
- Tag,
- TagDict,
-)
-
-ASYNC_API_VERSION = "2.6.0"
-
-
-class Components(BaseModel):
- # TODO
- # servers
- # serverVariables
- # channels
- """A class to represent components in a system.
-
- Attributes:
- messages : Optional dictionary of messages
- schemas : Optional dictionary of schemas
-
- Note:
- The following attributes are not implemented yet:
- - servers
- - serverVariables
- - channels
- - securitySchemes
- - parameters
- - correlationIds
- - operationTraits
- - messageTraits
- - serverBindings
- - channelBindings
- - operationBindings
- - messageBindings
-
- """
-
- messages: Optional[Dict[str, Message]] = None
- schemas: Optional[Dict[str, Dict[str, Any]]] = None
- securitySchemes: Optional[Dict[str, Dict[str, Any]]] = None
- # parameters
- # correlationIds
- # operationTraits
- # messageTraits
- # serverBindings
- # channelBindings
- # operationBindings
- # messageBindings
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class Schema(BaseModel):
- """A class to represent a schema.
-
- Attributes:
- asyncapi : version of the async API
- id : optional ID
- defaultContentType : optional default content type
- info : information about the schema
- servers : optional dictionary of servers
- channels : dictionary of channels
- components : optional components of the schema
- tags : optional list of tags
- externalDocs : optional external documentation
-
- Methods:
- to_jsonable() -> Any: Convert the schema to a JSON-serializable object.
- to_json() -> str: Convert the schema to a JSON string.
- to_yaml() -> str: Convert the schema to a YAML string.
-
- """
-
- asyncapi: str = ASYNC_API_VERSION
- id: Optional[str] = None
- defaultContentType: Optional[str] = None
- info: Info
- servers: Optional[Dict[str, Server]] = None
- channels: Dict[str, Channel]
- components: Optional[Components] = None
- tags: Optional[List[Union[Tag, TagDict, Dict[str, Any]]]] = None
- externalDocs: Optional[Union[ExternalDocs, ExternalDocsDict, Dict[str, Any]]] = None
-
- def to_jsonable(self) -> Any:
- """Convert the schema to a JSON-serializable object."""
- return model_to_jsonable(
- self,
- by_alias=True,
- exclude_none=True,
- )
-
- def to_json(self) -> str:
- """Convert the schema to a JSON string."""
- return model_to_json(
- self,
- by_alias=True,
- exclude_none=True,
- )
-
- def to_yaml(self) -> str:
- """Convert the schema to a YAML string."""
- from io import StringIO
-
- import yaml
-
- io = StringIO(initial_value="", newline="\n")
- yaml.dump(self.to_jsonable(), io, sort_keys=False)
- return io.getvalue()
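
`Schema.to_yaml` was a thin wrapper: serialize to a jsonable dict, then `yaml.dump` preserving insertion order. Its moving parts, standalone:

    from io import StringIO

    import yaml

    doc = {"asyncapi": "2.6.0", "info": {"title": "FastStream", "version": "0.1.0"}}

    buf = StringIO(newline="\n")
    yaml.dump(doc, buf, sort_keys=False)  # sort_keys=False keeps field order
    print(buf.getvalue())
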
diff --git a/faststream/asyncapi/schema/message.py b/faststream/asyncapi/schema/message.py
deleted file mode 100644
index 3c9a09f22e..0000000000
--- a/faststream/asyncapi/schema/message.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from typing import Any, Dict, List, Optional, Union
-
-from pydantic import BaseModel
-
-from faststream._compat import PYDANTIC_V2
-from faststream.asyncapi.schema.utils import (
- ExternalDocs,
- Tag,
-)
-
-
-class CorrelationId(BaseModel):
- """A class to represent a correlation ID.
-
- Attributes:
- description : optional description of the correlation ID
- location : location of the correlation ID
-
- Configurations:
- extra : allows extra fields in the correlation ID model
-
- """
-
- description: Optional[str] = None
- location: str
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class Message(BaseModel):
- """A class to represent a message.
-
- Attributes:
- title : title of the message
- name : name of the message
- summary : summary of the message
- description : description of the message
- messageId : ID of the message
- correlationId : correlation ID of the message
- contentType : content type of the message
- payload : dictionary representing the payload of the message
- tags : list of tags associated with the message
- externalDocs : external documentation associated with the message
-
- """
-
- title: Optional[str] = None
- name: Optional[str] = None
- summary: Optional[str] = None
- description: Optional[str] = None
- messageId: Optional[str] = None
- correlationId: Optional[CorrelationId] = None
- contentType: Optional[str] = None
-
- payload: Dict[str, Any]
- # TODO:
- # headers
- # schemaFormat
- # bindings
- # examples
- # traits
-
- tags: Optional[List[Union[Tag, Dict[str, Any]]]] = (
- None # TODO: weird TagDict behavior
- )
- externalDocs: Optional[Union[ExternalDocs, Dict[str, Any]]] = (
- None # TODO: weird ExternalDocsDict behavior
- )
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/operations.py b/faststream/asyncapi/schema/operations.py
deleted file mode 100644
index c929d71263..0000000000
--- a/faststream/asyncapi/schema/operations.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from typing import Any, Dict, List, Optional, Union
-
-from pydantic import BaseModel
-
-from faststream._compat import PYDANTIC_V2
-from faststream.asyncapi.schema.bindings import OperationBinding
-from faststream.asyncapi.schema.message import Message
-from faststream.asyncapi.schema.utils import (
- ExternalDocs,
- ExternalDocsDict,
- Reference,
- Tag,
- TagDict,
-)
-
-
-class Operation(BaseModel):
- """A class to represent an operation.
-
- Attributes:
- operationId : ID of the operation
- summary : summary of the operation
- description : description of the operation
- bindings : bindings of the operation
- message : message of the operation
- security : security details of the operation
- tags : tags associated with the operation
- externalDocs : external documentation for the operation
-
- """
-
- operationId: Optional[str] = None
- summary: Optional[str] = None
- description: Optional[str] = None
-
- bindings: Optional[OperationBinding] = None
-
- message: Union[Message, Reference]
-
- security: Optional[Dict[str, List[str]]] = None
-
- # TODO
- # traits
-
- tags: Optional[List[Union[Tag, TagDict, Dict[str, Any]]]] = None
- externalDocs: Optional[Union[ExternalDocs, ExternalDocsDict, Dict[str, Any]]] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/security.py b/faststream/asyncapi/schema/security.py
deleted file mode 100644
index a157dc5cf5..0000000000
--- a/faststream/asyncapi/schema/security.py
+++ /dev/null
@@ -1,108 +0,0 @@
-from typing import Dict, Literal, Optional
-
-from pydantic import AnyHttpUrl, BaseModel, Field
-
-from faststream._compat import PYDANTIC_V2
-
-
-class OauthFlowObj(BaseModel):
- """A class to represent an OAuth flow object.
-
- Attributes:
- authorizationUrl : Optional[AnyHttpUrl] : The URL for authorization
- tokenUrl : Optional[AnyHttpUrl] : The URL for token
- refreshUrl : Optional[AnyHttpUrl] : The URL for refresh
- scopes : Dict[str, str] : The scopes for the OAuth flow
-
- """
-
- authorizationUrl: Optional[AnyHttpUrl] = None
- tokenUrl: Optional[AnyHttpUrl] = None
- refreshUrl: Optional[AnyHttpUrl] = None
- scopes: Dict[str, str]
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class OauthFlows(BaseModel):
- """A class to represent OAuth flows.
-
- Attributes:
- implicit : Optional[OauthFlowObj] : Implicit OAuth flow object
- password : Optional[OauthFlowObj] : Password OAuth flow object
- clientCredentials : Optional[OauthFlowObj] : Client credentials OAuth flow object
- authorizationCode : Optional[OauthFlowObj] : Authorization code OAuth flow object
-
- """
-
- implicit: Optional[OauthFlowObj] = None
- password: Optional[OauthFlowObj] = None
- clientCredentials: Optional[OauthFlowObj] = None
- authorizationCode: Optional[OauthFlowObj] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class SecuritySchemaComponent(BaseModel):
- """A class to represent a security schema component.
-
- Attributes:
- type : Literal, the type of the security schema component
- name : optional name of the security schema component
- description : optional description of the security schema component
- in_ : optional location of the security schema component
- schema_ : optional schema of the security schema component
- bearerFormat : optional bearer format of the security schema component
- openIdConnectUrl : optional OpenID Connect URL of the security schema component
- flows : optional OAuth flows of the security schema component
-
- """
-
- type: Literal[
- "userPassword",
- "apikey",
- "X509",
- "symmetricEncryption",
- "asymmetricEncryption",
- "httpApiKey",
- "http",
- "oauth2",
- "openIdConnect",
- "plain",
- "scramSha256",
- "scramSha512",
- "gssapi",
- ]
- name: Optional[str] = None
- description: Optional[str] = None
- in_: Optional[str] = Field(
- default=None,
- alias="in",
- )
- schema_: Optional[str] = Field(
- default=None,
- alias="schema",
- )
- bearerFormat: Optional[str] = None
- openIdConnectUrl: Optional[str] = None
- flows: Optional[OauthFlows] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/servers.py b/faststream/asyncapi/schema/servers.py
deleted file mode 100644
index 06e2829c69..0000000000
--- a/faststream/asyncapi/schema/servers.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from typing import Any, Dict, List, Optional, Union
-
-from pydantic import BaseModel
-
-from faststream._compat import PYDANTIC_V2
-from faststream.asyncapi.schema.bindings import ServerBinding
-from faststream.asyncapi.schema.utils import Reference, Tag, TagDict
-
-SecurityRequirement = List[Dict[str, List[str]]]
-
-
-class ServerVariable(BaseModel):
- """A class to represent a server variable.
-
- Attributes:
- enum : list of possible values for the server variable (optional)
- default : default value for the server variable (optional)
- description : description of the server variable (optional)
- examples : list of example values for the server variable (optional)
-
- """
-
- enum: Optional[List[str]] = None
- default: Optional[str] = None
- description: Optional[str] = None
- examples: Optional[List[str]] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class Server(BaseModel):
- """A class to represent a server.
-
- Attributes:
- url : URL of the server
- protocol : protocol used by the server
- description : optional description of the server
- protocolVersion : optional version of the protocol used by the server
- tags : optional list of tags associated with the server
- security : optional security requirement for the server
- variables : optional dictionary of server variables
- bindings : optional server binding
-
- Note:
- The attributes `description`, `protocolVersion`, `tags`, `security`, `variables`, and `bindings` are all optional.
-
- Configurations:
- If `PYDANTIC_V2` is True, the model configuration is set to allow extra attributes.
- Otherwise, the `Config` class is defined with the `extra` attribute set to "allow".
-
- """
-
- url: str
- protocol: str
- description: Optional[str] = None
- protocolVersion: Optional[str] = None
- tags: Optional[List[Union[Tag, TagDict, Dict[str, Any]]]] = None
- security: Optional[SecurityRequirement] = None
- variables: Optional[Dict[str, Union[ServerVariable, Reference]]] = None
- bindings: Optional[Union[ServerBinding, Reference]] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
diff --git a/faststream/asyncapi/schema/utils.py b/faststream/asyncapi/schema/utils.py
deleted file mode 100644
index 6857f93552..0000000000
--- a/faststream/asyncapi/schema/utils.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from typing import Optional, Union
-
-from pydantic import AnyHttpUrl, BaseModel, Field
-from typing_extensions import Required, TypedDict
-
-from faststream._compat import PYDANTIC_V2
-
-
-class ExternalDocsDict(TypedDict, total=False):
- """A dictionary type for representing external documentation.
-
- Attributes:
- url : Required URL for the external documentation
- description : Description of the external documentation
-
- """
-
- url: Required[AnyHttpUrl]
- description: str
-
-
-class ExternalDocs(BaseModel):
- """A class to represent external documentation.
-
- Attributes:
- url : URL of the external documentation
- description : optional description of the external documentation
-
- """
-
- url: AnyHttpUrl
- description: Optional[str] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class TagDict(TypedDict, total=False):
- """A dictionary-like class for storing tags.
-
- Attributes:
- name : required name of the tag
- description : description of the tag
- externalDocs : external documentation for the tag
-
- """
-
- name: Required[str]
- description: str
- externalDocs: Union[ExternalDocs, ExternalDocsDict]
-
-
-class Tag(BaseModel):
- """A class to represent a tag.
-
- Attributes:
- name : name of the tag
- description : description of the tag (optional)
- externalDocs : external documentation for the tag (optional)
-
- """
-
- name: str
- description: Optional[str] = None
- externalDocs: Optional[Union[ExternalDocs, ExternalDocsDict]] = None
-
- if PYDANTIC_V2:
- model_config = {"extra": "allow"}
-
- else:
-
- class Config:
- extra = "allow"
-
-
-class Reference(BaseModel):
- """A class to represent a reference.
-
- Attributes:
- ref : the reference string
-
- """
-
- ref: str = Field(..., alias="$ref")
-
-
-class Parameter(BaseModel):
- """A class to represent a parameter."""
-
- # TODO
- ...
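
And the matching sketch for the tag/reference helpers above (same pre-refactor path assumption). Note how `Reference` maps the non-identifier key `$ref` onto a regular field via an alias:

```python
from faststream.asyncapi.schema.utils import ExternalDocs, Reference, Tag

tag = Tag(
    name="orders",
    description="Order processing events",
    externalDocs=ExternalDocs(url="https://docs.example.com/orders"),
)

# "$ref" is not a valid Python identifier, so the field is populated by alias:
ref = Reference(**{"$ref": "#/components/messages/Order"})
assert ref.ref == "#/components/messages/Order"
```
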
diff --git a/faststream/asyncapi/utils.py b/faststream/asyncapi/utils.py
deleted file mode 100644
index 4edddae6ad..0000000000
--- a/faststream/asyncapi/utils.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import TYPE_CHECKING, List, Tuple
-
-if TYPE_CHECKING:
- from faststream.types import AnyDict
-
-
-def to_camelcase(*names: str) -> str:
- return " ".join(names).replace("_", " ").title().replace(" ", "")
-
-
-def resolve_payloads(
- payloads: List[Tuple["AnyDict", str]],
- extra: str = "",
- served_words: int = 1,
-) -> "AnyDict":
- ln = len(payloads)
- payload: AnyDict
- if ln > 1:
- one_of_payloads = {}
-
- for body, handler_name in payloads:
- title = body["title"]
- words = title.split(":")
-
- if len(words) > 1: # not pydantic model case
- body["title"] = title = ":".join(
- filter(
- lambda x: bool(x),
- (
- handler_name,
- extra if extra not in words else "",
- *words[served_words:],
- ),
- )
- )
-
- one_of_payloads[title] = body
-
- payload = {"oneOf": one_of_payloads}
-
- elif ln == 1:
- payload = payloads[0][0]
-
- else:
- payload = {}
-
- return payload
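
A behavior sketch for the two helpers above (pre-refactor import path assumed): `to_camelcase` normalizes snake_case fragments, and `resolve_payloads` passes a single payload through untouched while merging several into a `oneOf` keyed by rewritten titles:

```python
from faststream.asyncapi.utils import resolve_payloads, to_camelcase

assert to_camelcase("user", "created_event") == "UserCreatedEvent"

# A single payload is returned as-is...
assert resolve_payloads([({"title": "T"}, "handler")]) == {"title": "T"}

# ...while multiple payloads are merged and retitled per handler:
merged = resolve_payloads(
    [
        ({"title": "A:Payload"}, "on_a"),
        ({"title": "B:Payload"}, "on_b"),
    ]
)
assert set(merged["oneOf"]) == {"on_a:Payload", "on_b:Payload"}
```
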
diff --git a/faststream/broker/acknowledgement_watcher.py b/faststream/broker/acknowledgement_watcher.py
deleted file mode 100644
index 4084274095..0000000000
--- a/faststream/broker/acknowledgement_watcher.py
+++ /dev/null
@@ -1,220 +0,0 @@
-import logging
-from abc import ABC, abstractmethod
-from collections import Counter
-from typing import TYPE_CHECKING, Any, Optional, Type, Union
-from typing import Counter as CounterType
-
-from faststream.exceptions import (
- AckMessage,
- HandlerException,
- NackMessage,
- RejectMessage,
- SkipMessage,
-)
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.types import MsgType
- from faststream.types import LoggerProto
-
-
-class BaseWatcher(ABC):
- """A base class for a watcher."""
-
- max_tries: int
-
- def __init__(
- self,
- max_tries: int = 0,
- logger: Optional["LoggerProto"] = None,
- ) -> None:
- self.logger = logger
- self.max_tries = max_tries
-
- @abstractmethod
- def add(self, message_id: str) -> None:
- """Add a message."""
- raise NotImplementedError()
-
- @abstractmethod
- def is_max(self, message_id: str) -> bool:
- """Check if the given message ID is the maximum attempt."""
- raise NotImplementedError()
-
- @abstractmethod
- def remove(self, message_id: str) -> None:
- """Remove a message."""
- raise NotImplementedError()
-
-
-class EndlessWatcher(BaseWatcher):
- """A class to watch and track messages."""
-
- def add(self, message_id: str) -> None:
- """Add a message to the list."""
- pass
-
- def is_max(self, message_id: str) -> bool:
- """Check if the given message ID is the maximum attempt."""
- return False
-
- def remove(self, message_id: str) -> None:
- """Remove a message."""
- pass
-
-
-class OneTryWatcher(BaseWatcher):
- """A class to watch and track messages."""
-
- def add(self, message_id: str) -> None:
- """Add a message."""
- pass
-
- def is_max(self, message_id: str) -> bool:
- """Check if the given message ID is the maximum attempt."""
- return True
-
- def remove(self, message_id: str) -> None:
- """Remove a message."""
- pass
-
-
-class CounterWatcher(BaseWatcher):
- """A class to watch and track the count of messages."""
-
- memory: CounterType[str]
-
- def __init__(
- self,
- max_tries: int = 3,
- logger: Optional["LoggerProto"] = None,
- ) -> None:
- super().__init__(logger=logger, max_tries=max_tries)
- self.memory = Counter()
-
- def add(self, message_id: str) -> None:
- """Check if the given message ID is the maximum attempt."""
- self.memory[message_id] += 1
-
- def is_max(self, message_id: str) -> bool:
- """Check if the number of tries for a message has exceeded the maximum allowed tries."""
- is_max = self.memory[message_id] > self.max_tries
- if self.logger is not None:
- if is_max:
- self.logger.log(
- logging.ERROR, f"Already retried {self.max_tries} times. Skipped."
- )
- else:
- self.logger.log(
- logging.ERROR, "Error is occurred. Pushing back to queue."
- )
- return is_max
-
- def remove(self, message_id: str) -> None:
- """Remove a message from memory."""
- self.memory[message_id] = 0
- self.memory += Counter()
-
-
-class WatcherContext:
- """A class representing a context for a watcher."""
-
- def __init__(
- self,
- message: "StreamMessage[MsgType]",
- watcher: BaseWatcher,
- logger: Optional["LoggerProto"] = None,
- **extra_options: Any,
- ) -> None:
- self.watcher = watcher
- self.message = message
- self.extra_options = extra_options
- self.logger = logger
-
- async def __aenter__(self) -> None:
- self.watcher.add(self.message.message_id)
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional["TracebackType"],
- ) -> bool:
- """Exit the asynchronous context manager."""
- if not exc_type:
- await self.__ack()
-
- elif isinstance(exc_val, HandlerException):
- if isinstance(exc_val, SkipMessage):
- self.watcher.remove(self.message.message_id)
-
- elif isinstance(exc_val, AckMessage):
- await self.__ack(**exc_val.extra_options)
-
- elif isinstance(exc_val, NackMessage):
- await self.__nack(**exc_val.extra_options)
-
- elif isinstance(exc_val, RejectMessage): # pragma: no branch
- await self.__reject(**exc_val.extra_options)
-
- # Exception was processed and suppressed
- return True
-
- elif self.watcher.is_max(self.message.message_id):
- await self.__reject()
-
- else:
- await self.__nack()
-
- # Exception was not processed
- return False
-
- async def __ack(self, **exc_extra_options: Any) -> None:
- try:
- await self.message.ack(**self.extra_options, **exc_extra_options)
- except Exception as er:
- if self.logger is not None:
- self.logger.log(logging.ERROR, er, exc_info=er)
- else:
- self.watcher.remove(self.message.message_id)
-
- async def __nack(self, **exc_extra_options: Any) -> None:
- try:
- await self.message.nack(**self.extra_options, **exc_extra_options)
- except Exception as er:
- if self.logger is not None:
- self.logger.log(logging.ERROR, er, exc_info=er)
-
- async def __reject(self, **exc_extra_options: Any) -> None:
- try:
- await self.message.reject(**self.extra_options, **exc_extra_options)
- except Exception as er:
- if self.logger is not None:
- self.logger.log(logging.ERROR, er, exc_info=er)
- else:
- self.watcher.remove(self.message.message_id)
-
-
-def get_watcher(
- logger: Optional["LoggerProto"],
- try_number: Union[bool, int],
-) -> BaseWatcher:
- """Get a watcher object based on the provided parameters.
-
- Args:
- logger: Optional logger object for logging messages.
- try_number: Specifies the type of watcher to create.
- - If set to True, an EndlessWatcher object will be returned.
- - If set to False, a OneTryWatcher object will be returned.
- - If set to an integer, a CounterWatcher object with the specified maximum number of tries will be returned.
- """
- watcher: Optional[BaseWatcher]
- if try_number is True:
- watcher = EndlessWatcher()
- elif try_number is False:
- watcher = OneTryWatcher()
- else:
- watcher = CounterWatcher(logger=logger, max_tries=try_number)
- return watcher
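
The watcher selection above maps `try_number=True` to unbounded redelivery, `False` to a single attempt, and an integer to a bounded counter. A behavior sketch (pre-refactor import path assumed):

```python
from faststream.broker.acknowledgement_watcher import (
    CounterWatcher,
    EndlessWatcher,
    OneTryWatcher,
    get_watcher,
)

assert isinstance(get_watcher(None, True), EndlessWatcher)  # retry forever
assert isinstance(get_watcher(None, False), OneTryWatcher)  # single attempt

watcher = get_watcher(None, 3)  # bounded retries
assert isinstance(watcher, CounterWatcher)

# is_max() trips only once the counter *exceeds* max_tries:
for _ in range(3):
    watcher.add("msg-1")
assert not watcher.is_max("msg-1")  # third try, limit is 3
watcher.add("msg-1")
assert watcher.is_max("msg-1")      # fourth try exceeds the limit
```
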
diff --git a/faststream/broker/core/abc.py b/faststream/broker/core/abc.py
deleted file mode 100644
index c514814b96..0000000000
--- a/faststream/broker/core/abc.py
+++ /dev/null
@@ -1,148 +0,0 @@
-from abc import abstractmethod
-from typing import (
- TYPE_CHECKING,
- Any,
- Generic,
- Iterable,
- Mapping,
- Optional,
- Sequence,
-)
-
-from faststream.broker.types import MsgType
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.publisher.proto import PublisherProto
- from faststream.broker.subscriber.proto import SubscriberProto
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- )
-
-
-class ABCBroker(Generic[MsgType]):
- _subscribers: Mapping[int, "SubscriberProto[MsgType]"]
- _publishers: Mapping[int, "PublisherProto[MsgType]"]
-
- def __init__(
- self,
- *,
- prefix: str,
- dependencies: Iterable["Depends"],
- middlewares: Sequence["BrokerMiddleware[MsgType]"],
- parser: Optional["CustomCallable"],
- decoder: Optional["CustomCallable"],
- include_in_schema: Optional[bool],
- ) -> None:
- self.prefix = prefix
- self.include_in_schema = include_in_schema
-
- self._subscribers = {}
- self._publishers = {}
-
- self._dependencies = dependencies
- self._middlewares = middlewares
- self._parser = parser
- self._decoder = decoder
-
- def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
- """Append BrokerMiddleware to the end of middlewares list.
-
- The current middleware will be the innermost of the already registered ones.
- """
- self._middlewares = (*self._middlewares, middleware)
-
- for sub in self._subscribers.values():
- sub.add_middleware(middleware)
-
- for pub in self._publishers.values():
- pub.add_middleware(middleware)
-
- @abstractmethod
- def subscriber(
- self,
- subscriber: "SubscriberProto[MsgType]",
- ) -> "SubscriberProto[MsgType]":
- subscriber.add_prefix(self.prefix)
- key = hash(subscriber)
- subscriber = self._subscribers.get(key, subscriber)
- self._subscribers = {**self._subscribers, key: subscriber}
- return subscriber
-
- @abstractmethod
- def publisher(
- self,
- publisher: "PublisherProto[MsgType]",
- ) -> "PublisherProto[MsgType]":
- publisher.add_prefix(self.prefix)
- key = hash(publisher)
- publisher = self._publishers.get(key, publisher)
- self._publishers = {**self._publishers, key: publisher}
- return publisher
-
- def include_router(
- self,
- router: "ABCBroker[Any]",
- *,
- prefix: str = "",
- dependencies: Iterable["Depends"] = (),
- middlewares: Iterable["BrokerMiddleware[MsgType]"] = (),
- include_in_schema: Optional[bool] = None,
- ) -> None:
- """Includes a router in the current object."""
- for h in router._subscribers.values():
- h.add_prefix("".join((self.prefix, prefix)))
-
- if (key := hash(h)) not in self._subscribers:
- if include_in_schema is None:
- h.include_in_schema = self._solve_include_in_schema(
- h.include_in_schema
- )
- else:
- h.include_in_schema = include_in_schema
-
- h._broker_middlewares = (
- *self._middlewares,
- *middlewares,
- *h._broker_middlewares,
- )
- h._broker_dependencies = (
- *self._dependencies,
- *dependencies,
- *h._broker_dependencies,
- )
- self._subscribers = {**self._subscribers, key: h}
-
- for p in router._publishers.values():
- p.add_prefix(self.prefix)
-
- if (key := hash(p)) not in self._publishers:
- if include_in_schema is None:
- p.include_in_schema = self._solve_include_in_schema(
- p.include_in_schema
- )
- else:
- p.include_in_schema = include_in_schema
-
- p._broker_middlewares = (
- *self._middlewares,
- *middlewares,
- *p._broker_middlewares,
- )
- self._publishers = {**self._publishers, key: p}
-
- def include_routers(
- self,
- *routers: "ABCBroker[MsgType]",
- ) -> None:
- """Includes routers in the object."""
- for r in routers:
- self.include_router(r)
-
- def _solve_include_in_schema(self, include_in_schema: bool) -> bool:
- if self.include_in_schema is None or self.include_in_schema:
- return include_in_schema
- else:
- return self.include_in_schema
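
The `include_in_schema` resolution above is easy to misread: a router's explicit `False` hides everything beneath it, while `None`/`True` defer to the handler. A standalone restatement of that rule:

```python
from typing import Optional


def solve_include_in_schema(router_flag: Optional[bool], handler_flag: bool) -> bool:
    # Mirrors ABCBroker._solve_include_in_schema above.
    if router_flag is None or router_flag:
        return handler_flag
    return router_flag


assert solve_include_in_schema(None, True) is True    # router has no opinion
assert solve_include_in_schema(True, False) is False  # handler opted out
assert solve_include_in_schema(False, True) is False  # router hides everything
```
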
diff --git a/faststream/broker/core/logging.py b/faststream/broker/core/logging.py
deleted file mode 100644
index 06412bf7f2..0000000000
--- a/faststream/broker/core/logging.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import logging
-from abc import abstractmethod
-from typing import TYPE_CHECKING, Any, Optional
-
-from typing_extensions import Annotated, Doc
-
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.types import MsgType
-from faststream.types import EMPTY
-
-if TYPE_CHECKING:
- from faststream.types import AnyDict, LoggerProto
-
-
-class LoggingBroker(ABCBroker[MsgType]):
- """A mixin class for logging."""
-
- logger: Optional["LoggerProto"]
-
- @abstractmethod
- def get_fmt(self) -> str:
- """Fallback method to get log format if `log_fmt` if not specified."""
- raise NotImplementedError()
-
- @abstractmethod
- def _setup_log_context(self) -> None:
- raise NotImplementedError()
-
- def __init__(
- self,
- *args: Any,
- default_logger: Annotated[
- logging.Logger,
- Doc("Logger object to use if `logger` is not set."),
- ],
- logger: Annotated[
- Optional["LoggerProto"],
- Doc("User specified logger to pass into Context and log service messages."),
- ],
- log_level: Annotated[
- int,
- Doc("Service messages log level."),
- ],
- log_fmt: Annotated[
- Optional[str],
- Doc("Default logger log format."),
- ],
- **kwargs: Any,
- ) -> None:
- if logger is not EMPTY:
- self.logger = logger
- self.use_custom = True
- else:
- self.logger = default_logger
- self.use_custom = False
-
- self._msg_log_level = log_level
- self._fmt = log_fmt
-
- super().__init__(*args, **kwargs)
-
- def _get_fmt(self) -> str:
- """Get default logger format at broker startup."""
- return self._fmt or self.get_fmt()
-
- def _log(
- self,
- message: Annotated[
- str,
- Doc("Log message."),
- ],
- log_level: Annotated[
- Optional[int],
- Doc("Log record level. Use `__init__: log_level` option if not specified."),
- ] = None,
- extra: Annotated[
- Optional["AnyDict"],
- Doc("Log record extra information."),
- ] = None,
- exc_info: Annotated[
- Optional[Exception],
- Doc("Exception object to log traceback."),
- ] = None,
- ) -> None:
- """Logs a message."""
- if self.logger is not None:
- self.logger.log(
- (log_level or self._msg_log_level),
- message,
- extra=extra,
- exc_info=exc_info,
- )
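
The `logger is not EMPTY` check above exists because `None` is itself meaningful (logging disabled), so a separate sentinel is needed to detect "argument not passed". A minimal sketch of the pattern with a stand-in sentinel:

```python
import logging
from typing import Any, Optional

EMPTY = object()  # stand-in for faststream.types.EMPTY

_default = logging.getLogger("broker")


def resolve_logger(logger: Any = EMPTY) -> Optional[logging.Logger]:
    if logger is not EMPTY:
        return logger  # may be None (disabled) or a user logger
    return _default


assert resolve_logger() is _default  # not passed -> fall back to default
assert resolve_logger(None) is None  # passed None -> logging disabled
```
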
diff --git a/faststream/broker/core/usecase.py b/faststream/broker/core/usecase.py
deleted file mode 100644
index 6da370b38d..0000000000
--- a/faststream/broker/core/usecase.py
+++ /dev/null
@@ -1,386 +0,0 @@
-import logging
-from abc import abstractmethod
-from contextlib import AsyncExitStack
-from functools import partial
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Generic,
- Iterable,
- List,
- Optional,
- Sequence,
- Type,
- Union,
- cast,
-)
-
-from typing_extensions import Annotated, Doc, Self
-
-from faststream._compat import is_test_env
-from faststream.broker.core.logging import LoggingBroker
-from faststream.broker.message import SourceType
-from faststream.broker.middlewares.logging import CriticalLogMiddleware
-from faststream.broker.proto import SetupAble
-from faststream.broker.subscriber.proto import SubscriberProto
-from faststream.broker.types import (
- AsyncCustomCallable,
- BrokerMiddleware,
- ConnectionType,
- CustomCallable,
- MsgType,
-)
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.log.logging import set_logger_fmt
-from faststream.utils.context.repository import context
-from faststream.utils.functions import return_input, to_async
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from fast_depends.dependencies import Depends
-
- from faststream.asyncapi.schema import Tag, TagDict
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import ProducerProto, PublisherProto
- from faststream.security import BaseSecurity
- from faststream.types import AnyDict, Decorator, LoggerProto
-
-
-class BrokerUsecase(
- LoggingBroker[MsgType],
- SetupAble,
- Generic[MsgType, ConnectionType],
-):
- """A class representing a broker async use case."""
-
- url: Union[str, Sequence[str]]
- _connection: Optional[ConnectionType]
- _producer: Optional["ProducerProto"]
-
- def __init__(
- self,
- *,
- decoder: Annotated[
- Optional["CustomCallable"],
- Doc("Custom decoder object."),
- ],
- parser: Annotated[
- Optional["CustomCallable"],
- Doc("Custom parser object."),
- ],
- dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies to apply to all broker subscribers."),
- ],
- middlewares: Annotated[
- Sequence["BrokerMiddleware[MsgType]"],
- Doc("Middlewares to apply to all broker publishers/subscribers."),
- ],
- graceful_timeout: Annotated[
- Optional[float],
- Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
- ),
- ],
- # Logging args
- default_logger: Annotated[
- logging.Logger,
- Doc("Logger object to use if `logger` is not set."),
- ],
- logger: Annotated[
- Optional["LoggerProto"],
- Doc("User specified logger to pass into Context and log service messages."),
- ],
- log_level: Annotated[
- int,
- Doc("Service messages log level."),
- ],
- log_fmt: Annotated[
- Optional[str],
- Doc("Default logger log format."),
- ],
- # FastDepends args
- apply_types: Annotated[
- bool,
- Doc("Whether to use FastDepends or not."),
- ],
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ],
- _get_dependant: Annotated[
- Optional[Callable[..., Any]],
- Doc("Custom library dependant generator callback."),
- ],
- _call_decorators: Annotated[
- Iterable["Decorator"],
- Doc("Any custom decorator to apply to wrapped functions."),
- ],
- # AsyncAPI kwargs
- protocol: Annotated[
- Optional[str],
- Doc("AsyncAPI server protocol."),
- ],
- protocol_version: Annotated[
- Optional[str],
- Doc("AsyncAPI server protocol version."),
- ],
- description: Annotated[
- Optional[str],
- Doc("AsyncAPI server description."),
- ],
- tags: Annotated[
- Optional[Iterable[Union["Tag", "TagDict"]]],
- Doc("AsyncAPI server tags."),
- ],
- asyncapi_url: Annotated[
- Union[str, List[str]],
- Doc("AsyncAPI hardcoded server addresses."),
- ],
- security: Annotated[
- Optional["BaseSecurity"],
- Doc(
- "Security options to connect broker and generate AsyncAPI server security."
- ),
- ],
- **connection_kwargs: Any,
- ) -> None:
- super().__init__(
- middlewares=middlewares,
- dependencies=dependencies,
- decoder=cast(
- Optional["AsyncCustomCallable"],
- to_async(decoder) if decoder else None,
- ),
- parser=cast(
- Optional["AsyncCustomCallable"],
- to_async(parser) if parser else None,
- ),
- # Broker is a root router
- include_in_schema=True,
- prefix="",
- # Logging args
- default_logger=default_logger,
- log_level=log_level,
- log_fmt=log_fmt,
- logger=logger,
- )
-
- self.running = False
- self.graceful_timeout = graceful_timeout
-
- self._connection_kwargs = connection_kwargs
- self._connection = None
- self._producer = None
-
- # TODO: remove useless middleware filter
- if not is_test_env():
- self._middlewares = (
- CriticalLogMiddleware(self.logger, log_level),
- *self._middlewares,
- )
-
- # TODO: move this context to Handlers' extra_context to support multiple brokers
- context.set_global("logger", self.logger)
- context.set_global("broker", self)
-
- # FastDepends args
- self._is_apply_types = apply_types
- self._is_validate = validate
- self._get_dependant = _get_dependant
- self._call_decorators = _call_decorators
-
- # AsyncAPI information
- self.url = asyncapi_url
- self.protocol = protocol
- self.protocol_version = protocol_version
- self.description = description
- self.tags = tags
- self.security = security
-
- async def __aenter__(self) -> "Self":
- await self.connect()
- return self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional["TracebackType"],
- ) -> None:
- await self.close(exc_type, exc_val, exc_tb)
-
- @abstractmethod
- async def start(self) -> None:
- """Start the broker async use case."""
- self._abc_start()
- await self.connect()
-
- async def connect(self, **kwargs: Any) -> ConnectionType:
- """Connect to a remote server."""
- if self._connection is None:
- connection_kwargs = self._connection_kwargs.copy()
- connection_kwargs.update(kwargs)
- self._connection = await self._connect(**connection_kwargs)
- self.setup()
- return self._connection
-
- @abstractmethod
- async def _connect(self) -> ConnectionType:
- """Connect to a resource."""
- raise NotImplementedError()
-
- def setup(self) -> None:
- """Prepare all Broker entities to startup."""
- for h in self._subscribers.values():
- self.setup_subscriber(h)
-
- for p in self._publishers.values():
- self.setup_publisher(p)
-
- def setup_subscriber(
- self,
- subscriber: SubscriberProto[MsgType],
- **kwargs: Any,
- ) -> None:
- """Setup the Subscriber to prepare it to starting."""
- data = self._subscriber_setup_extra.copy()
- data.update(kwargs)
- subscriber.setup(**data)
-
- def setup_publisher(
- self,
- publisher: "PublisherProto[MsgType]",
- **kwargs: Any,
- ) -> None:
- """Setup the Publisher to prepare it to starting."""
- data = self._publisher_setup_extra.copy()
- data.update(kwargs)
- publisher.setup(**data)
-
- @property
- def _subscriber_setup_extra(self) -> "AnyDict":
- return {
- "logger": self.logger,
- "producer": self._producer,
- "graceful_timeout": self.graceful_timeout,
- "extra_context": {},
- # broker options
- "broker_parser": self._parser,
- "broker_decoder": self._decoder,
- # dependant args
- "apply_types": self._is_apply_types,
- "is_validate": self._is_validate,
- "_get_dependant": self._get_dependant,
- "_call_decorators": self._call_decorators,
- }
-
- @property
- def _publisher_setup_extra(self) -> "AnyDict":
- return {
- "producer": self._producer,
- }
-
- def publisher(self, *args: Any, **kwargs: Any) -> "PublisherProto[MsgType]":
- pub = super().publisher(*args, **kwargs)
- if self.running:
- self.setup_publisher(pub)
- return pub
-
- def _abc_start(self) -> None:
- for h in self._subscribers.values():
- log_context = h.get_log_context(None)
- log_context.pop("message_id", None)
- self._setup_log_context(**log_context)
-
- if not self.running:
- self.running = True
-
- if not self.use_custom and self.logger is not None:
- set_logger_fmt(
- cast(logging.Logger, self.logger),
- self._get_fmt(),
- )
-
- async def close(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> None:
- """Closes the object."""
- self.running = False
-
- for h in self._subscribers.values():
- await h.close()
-
- if self._connection is not None:
- await self._close(exc_type, exc_val, exc_tb)
-
- @abstractmethod
- async def _close(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> None:
- """Close the object."""
- self._connection = None
-
- async def publish(
- self,
- msg: Any,
- *,
- producer: Optional["ProducerProto"],
- correlation_id: Optional[str] = None,
- **kwargs: Any,
- ) -> Optional[Any]:
- """Publish message directly."""
- assert producer, NOT_CONNECTED_YET # nosec B101
-
- publish = producer.publish
-
- for m in self._middlewares[::-1]:
- publish = partial(m(None).publish_scope, publish)
-
- return await publish(msg, correlation_id=correlation_id, **kwargs)
-
- async def request(
- self,
- msg: Any,
- *,
- producer: Optional["ProducerProto"],
- correlation_id: Optional[str] = None,
- **kwargs: Any,
- ) -> Any:
- """Publish message directly."""
- assert producer, NOT_CONNECTED_YET # nosec B101
-
- request = producer.request
- for m in self._middlewares[::-1]:
- request = partial(m(None).publish_scope, request)
-
- published_msg = await request(
- msg,
- correlation_id=correlation_id,
- **kwargs,
- )
-
- async with AsyncExitStack() as stack:
- return_msg = return_input
- for m in self._middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg: StreamMessage[Any] = await producer._parser(published_msg)
- parsed_msg._decoded_body = await producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
-
- @abstractmethod
- async def ping(self, timeout: Optional[float]) -> bool:
- """Check connection alive."""
- raise NotImplementedError()
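
The reversed `partial` chaining in `publish`/`request` above determines middleware order: iterating in reverse makes the first-registered middleware the outermost wrapper. A self-contained demonstration:

```python
import asyncio
from functools import partial

calls = []


def make_middleware(name: str):
    async def scope(call_next, msg):
        calls.append(name)  # runs before the inner layers
        return await call_next(msg)

    return scope


async def send(msg: str) -> str:
    calls.append("send")
    return msg


middlewares = [make_middleware("first"), make_middleware("second")]

publish = send
for m in middlewares[::-1]:  # same trick as BrokerUsecase.publish above
    publish = partial(m, publish)

asyncio.run(publish("hello"))
assert calls == ["first", "second", "send"]
```
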
diff --git a/faststream/broker/fastapi/__init__.py b/faststream/broker/fastapi/__init__.py
deleted file mode 100644
index 4b683d238c..0000000000
--- a/faststream/broker/fastapi/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from faststream.broker.fastapi.route import StreamMessage
-from faststream.broker.fastapi.router import StreamRouter
-
-__all__ = (
- "StreamMessage",
- "StreamRouter",
-)
diff --git a/faststream/broker/fastapi/_compat.py b/faststream/broker/fastapi/_compat.py
deleted file mode 100644
index c51826690c..0000000000
--- a/faststream/broker/fastapi/_compat.py
+++ /dev/null
@@ -1,137 +0,0 @@
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, List, Optional
-
-from fastapi import __version__ as FASTAPI_VERSION # noqa: N812
-from fastapi.dependencies.utils import solve_dependencies
-from starlette.background import BackgroundTasks
-from typing_extensions import Never
-
-from faststream.types import AnyDict
-
-if TYPE_CHECKING:
- from fastapi.dependencies.models import Dependant
- from fastapi.requests import Request
-
-major, minor, patch, *_ = FASTAPI_VERSION.split(".")
-
-_FASTAPI_MAJOR, _FASTAPI_MINOR = int(major), int(minor)
-
-FASTAPI_V2 = _FASTAPI_MAJOR > 0 or _FASTAPI_MINOR > 100
-FASTAPI_V106 = _FASTAPI_MAJOR > 0 or _FASTAPI_MINOR >= 106
-
-try:
- _FASTAPI_PATCH = int(patch)
-except ValueError:
- FASTAPI_v102_3 = True
- FASTAPI_v102_4 = True
-else:
- FASTAPI_v102_3 = (
- _FASTAPI_MAJOR > 0
- or _FASTAPI_MINOR > 112
- or (_FASTAPI_MINOR == 112 and _FASTAPI_PATCH > 2)
- )
- FASTAPI_v102_4 = (
- _FASTAPI_MAJOR > 0
- or _FASTAPI_MINOR > 112
- or (_FASTAPI_MINOR == 112 and _FASTAPI_PATCH > 3)
- )
-
-__all__ = (
- "RequestValidationError",
- "create_response_field",
- "raise_fastapi_validation_error",
- "solve_faststream_dependency",
-)
-
-
-@dataclass
-class SolvedDependency:
- values: AnyDict
- errors: List[Any]
- background_tasks: Optional[BackgroundTasks]
-
-
-if FASTAPI_V2:
- from fastapi._compat import _normalize_errors
- from fastapi.exceptions import RequestValidationError
-
- def raise_fastapi_validation_error(errors: List[Any], body: AnyDict) -> Never:
- raise RequestValidationError(_normalize_errors(errors), body=body)
-
-else:
- from pydantic import ( # type: ignore[assignment]
- ValidationError as RequestValidationError,
- )
- from pydantic import create_model
-
- ROUTER_VALIDATION_ERROR_MODEL = create_model("StreamRoute")
-
- def raise_fastapi_validation_error(errors: List[Any], body: AnyDict) -> Never:
- raise RequestValidationError(errors, ROUTER_VALIDATION_ERROR_MODEL) # type: ignore[misc]
-
-
-if FASTAPI_v102_3:
- from fastapi.utils import (
- create_model_field as create_response_field,
- )
-
- extra = {"embed_body_fields": False} if FASTAPI_v102_4 else {}
-
- async def solve_faststream_dependency(
- request: "Request",
- dependant: "Dependant",
- dependency_overrides_provider: Optional[Any],
- **kwargs: Any,
- ) -> SolvedDependency:
- solved_result = await solve_dependencies(
- request=request,
- body=request._body, # type: ignore[arg-type]
- dependant=dependant,
- dependency_overrides_provider=dependency_overrides_provider,
- **extra, # type: ignore[arg-type]
- **kwargs,
- )
- values, errors, background = (
- solved_result.values,
- solved_result.errors,
- solved_result.background_tasks,
- )
-
- return SolvedDependency(
- values=values,
- errors=errors,
- background_tasks=background,
- )
-
-else:
- from fastapi.utils import ( # type: ignore[attr-defined,no-redef]
- create_response_field as create_response_field,
- )
-
- async def solve_faststream_dependency(
- request: "Request",
- dependant: "Dependant",
- dependency_overrides_provider: Optional[Any],
- **kwargs: Any,
- ) -> SolvedDependency:
- solved_result = await solve_dependencies(
- request=request,
- body=request._body, # type: ignore[arg-type]
- dependant=dependant,
- dependency_overrides_provider=dependency_overrides_provider,
- **kwargs,
- )
-
- (
- values,
- errors,
- background,
- _response,
- _dependency_cache,
- ) = solved_result # type: ignore[misc]
-
- return SolvedDependency(
- values=values, # type: ignore[has-type]
- errors=errors, # type: ignore[has-type]
- background_tasks=background, # type: ignore[has-type]
- )
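
Despite their `v102_*` names, the two constants above appear to gate on FastAPI 0.112.x patch releases, where `create_model_field` and the `embed_body_fields` argument landed. A sketch of the same computation on representative versions:

```python
from typing import Tuple


def fastapi_gates(version: str) -> Tuple[bool, bool]:
    major_s, minor_s, patch_s, *_ = version.split(".")
    major, minor = int(major_s), int(minor_s)
    try:
        patch = int(patch_s)
    except ValueError:
        return True, True  # pre-release suffix: assume the newest behavior
    after_112_2 = major > 0 or minor > 112 or (minor == 112 and patch > 2)
    after_112_3 = major > 0 or minor > 112 or (minor == 112 and patch > 3)
    return after_112_2, after_112_3


assert fastapi_gates("0.112.2") == (False, False)
assert fastapi_gates("0.112.3") == (True, False)
assert fastapi_gates("0.115.0") == (True, True)
```
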
diff --git a/faststream/broker/fastapi/context.py b/faststream/broker/fastapi/context.py
deleted file mode 100644
index 25edd313b7..0000000000
--- a/faststream/broker/fastapi/context.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import logging
-from typing import Any, Callable, Optional
-
-from fastapi import params
-from typing_extensions import Annotated
-
-from faststream.types import EMPTY
-from faststream.utils.context import ContextRepo as CR
-from faststream.utils.context.types import resolve_context_by_name
-
-
-def Context( # noqa: N802
- name: str,
- *,
- default: Any = EMPTY,
- initial: Optional[Callable[..., Any]] = None,
-) -> Any:
- """Get access to objects of the Context."""
- return params.Depends(
- lambda: resolve_context_by_name(
- name=name,
- default=default,
- initial=initial,
- ),
- use_cache=True,
- )
-
-
-Logger = Annotated[logging.Logger, Context("logger")]
-ContextRepo = Annotated[CR, Context("context")]
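
A usage sketch for the FastAPI-flavored `Context` above (pre-refactor import path assumed): because it compiles down to `fastapi.params.Depends`, context members inject like any other FastAPI dependency:

```python
from faststream.broker.fastapi.context import Context, ContextRepo, Logger


async def handler(
    body: str,
    logger: Logger,            # Annotated[logging.Logger, Context("logger")]
    context: ContextRepo,      # the repository itself
    broker=Context("broker"),  # ad-hoc lookup by name
) -> None:
    logger.info("received: %s", body)
```
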
diff --git a/faststream/broker/fastapi/router.py b/faststream/broker/fastapi/router.py
deleted file mode 100644
index 6df6a85825..0000000000
--- a/faststream/broker/fastapi/router.py
+++ /dev/null
@@ -1,545 +0,0 @@
-import json
-import warnings
-from abc import abstractmethod
-from contextlib import asynccontextmanager
-from enum import Enum
-from typing import (
- TYPE_CHECKING,
- Any,
- AsyncIterator,
- Awaitable,
- Callable,
- Dict,
- Generic,
- Iterable,
- List,
- Mapping,
- Optional,
- Sequence,
- Type,
- Union,
- cast,
- overload,
-)
-from weakref import WeakSet
-
-from fastapi.background import BackgroundTasks
-from fastapi.datastructures import Default
-from fastapi.responses import HTMLResponse
-from fastapi.routing import APIRoute, APIRouter
-from fastapi.utils import generate_unique_id
-from starlette.responses import JSONResponse, Response
-from starlette.routing import BaseRoute, _DefaultLifespan
-
-from faststream.asyncapi.proto import AsyncAPIApplication
-from faststream.asyncapi.site import get_asyncapi_html
-from faststream.broker.fastapi.get_dependant import get_fastapi_dependant
-from faststream.broker.fastapi.route import wrap_callable_to_fastapi_compatible
-from faststream.broker.middlewares import BaseMiddleware
-from faststream.broker.router import BrokerRouter
-from faststream.broker.types import (
- MsgType,
- P_HandlerParams,
- T_HandlerReturn,
-)
-from faststream.utils.context.repository import context
-from faststream.utils.functions import fake_context, to_async
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from fastapi import FastAPI, params
- from fastapi.types import IncEx
- from starlette import routing
- from starlette.types import ASGIApp, AppType, Lifespan
-
- from faststream.asyncapi import schema as asyncapi
- from faststream.asyncapi.schema import Schema
- from faststream.broker.core.usecase import BrokerUsecase
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import PublisherProto
- from faststream.broker.schemas import NameRequired
- from faststream.broker.types import BrokerMiddleware
- from faststream.broker.wrapper.call import HandlerCallWrapper
- from faststream.types import AnyDict
-
-
-class _BackgroundMiddleware(BaseMiddleware):
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> Optional[bool]:
- if not exc_type and (
- background := cast(
- Optional[BackgroundTasks],
- getattr(context.get_local("message"), "background", None),
- )
- ):
- await background()
-
- return await super().after_processed(exc_type, exc_val, exc_tb)
-
-
-class StreamRouter(
- APIRouter,
- AsyncAPIApplication,
- Generic[MsgType],
-):
- """A class to route streams."""
-
- broker_class: Type["BrokerUsecase[MsgType, Any]"]
- broker: "BrokerUsecase[MsgType, Any]"
- docs_router: Optional[APIRouter]
- _after_startup_hooks: List[Callable[[Any], Awaitable[Optional[Mapping[str, Any]]]]]
- _on_shutdown_hooks: List[Callable[[Any], Awaitable[None]]]
- schema: Optional["Schema"]
-
- title: str
- description: str
- version: str
- license: Optional["AnyDict"]
- contact: Optional["AnyDict"]
-
- def __init__(
- self,
- *connection_args: Any,
- middlewares: Sequence["BrokerMiddleware[MsgType]"] = (),
- prefix: str = "",
- tags: Optional[List[Union[str, Enum]]] = None,
- dependencies: Optional[Sequence["params.Depends"]] = None,
- default_response_class: Type["Response"] = Default(JSONResponse),
- responses: Optional[Dict[Union[int, str], "AnyDict"]] = None,
- callbacks: Optional[List["routing.BaseRoute"]] = None,
- routes: Optional[List["routing.BaseRoute"]] = None,
- redirect_slashes: bool = True,
- default: Optional["ASGIApp"] = None,
- dependency_overrides_provider: Optional[Any] = None,
- route_class: Type["APIRoute"] = APIRoute,
- on_startup: Optional[Sequence[Callable[[], Any]]] = None,
- on_shutdown: Optional[Sequence[Callable[[], Any]]] = None,
- deprecated: Optional[bool] = None,
- include_in_schema: bool = True,
- setup_state: bool = True,
- lifespan: Optional["Lifespan[Any]"] = None,
- generate_unique_id_function: Callable[["APIRoute"], str] = Default(
- generate_unique_id
- ),
- # AsyncAPI information
- asyncapi_tags: Optional[
- Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]
- ] = None,
- schema_url: Optional[str] = "/asyncapi",
- **connection_kwargs: Any,
- ) -> None:
- assert ( # nosec B101
- self.broker_class
- ), "You should specify `broker_class` at your implementation"
-
- self.broker = self.broker_class(
- *connection_args,
- middlewares=(
- *middlewares,
- # allow to catch background exceptions in user middlewares
- _BackgroundMiddleware,
- ),
- _get_dependant=get_fastapi_dependant,
- tags=asyncapi_tags,
- apply_types=False,
- **connection_kwargs,
- )
-
- self.setup_state = setup_state
-
- # AsyncAPI information
- # Empty
- self.terms_of_service = None
- self.identifier = None
- self.asyncapi_tags = None
- self.external_docs = None
- # parse from FastAPI app on startup
- self.title = ""
- self.version = ""
- self.description = ""
- self.license = None
- self.contact = None
-
- self.schema = None
- # Flag to prevent double lifespan start
- self._lifespan_started = False
-
- super().__init__(
- prefix=prefix,
- tags=tags,
- dependencies=dependencies,
- default_response_class=default_response_class,
- responses=responses,
- callbacks=callbacks,
- routes=routes,
- redirect_slashes=redirect_slashes,
- default=default,
- dependency_overrides_provider=dependency_overrides_provider,
- route_class=route_class,
- deprecated=deprecated,
- include_in_schema=include_in_schema,
- generate_unique_id_function=generate_unique_id_function,
- lifespan=self._wrap_lifespan(lifespan),
- on_startup=on_startup,
- on_shutdown=on_shutdown,
- )
-
- self.weak_dependencies_provider: WeakSet[Any] = WeakSet()
- if dependency_overrides_provider is not None:
- self.weak_dependencies_provider.add(dependency_overrides_provider)
-
- if self.include_in_schema:
- self.docs_router = self._asyncapi_router(schema_url)
- else:
- self.docs_router = None
-
- self._after_startup_hooks = []
- self._on_shutdown_hooks = []
-
- def _get_dependencies_overrides_provider(self) -> Optional[Any]:
- """Dependency provider WeakRef resolver."""
- if self.dependency_overrides_provider is not None:
- return self.dependency_overrides_provider
- else:
- return next(iter(self.weak_dependencies_provider), None)
-
- def _add_api_mq_route(
- self,
- dependencies: Iterable["params.Depends"],
- response_model: Any,
- response_model_include: Optional["IncEx"],
- response_model_exclude: Optional["IncEx"],
- response_model_by_alias: bool,
- response_model_exclude_unset: bool,
- response_model_exclude_defaults: bool,
- response_model_exclude_none: bool,
- ) -> Callable[
- [Callable[..., Any]],
- Callable[["StreamMessage[Any]"], Awaitable[Any]],
- ]:
- """Decorator before `broker.subscriber`, that wraps function to FastAPI-compatible one."""
-
- def wrapper(
- endpoint: Callable[..., Any],
- ) -> Callable[["StreamMessage[Any]"], Awaitable[Any]]:
- """Patch user function to make it FastAPI-compatible."""
- return wrap_callable_to_fastapi_compatible(
- user_callable=endpoint,
- dependencies=dependencies,
- response_model=response_model,
- response_model_include=response_model_include,
- response_model_exclude=response_model_exclude,
- response_model_by_alias=response_model_by_alias,
- response_model_exclude_unset=response_model_exclude_unset,
- response_model_exclude_defaults=response_model_exclude_defaults,
- response_model_exclude_none=response_model_exclude_none,
- provider_factory=self._get_dependencies_overrides_provider,
- )
-
- return wrapper
-
- def subscriber(
- self,
- *extra: Union["NameRequired", str],
- dependencies: Iterable["params.Depends"],
- response_model: Any,
- response_model_include: Optional["IncEx"],
- response_model_exclude: Optional["IncEx"],
- response_model_by_alias: bool,
- response_model_exclude_unset: bool,
- response_model_exclude_defaults: bool,
- response_model_exclude_none: bool,
- **broker_kwargs: Any,
- ) -> Callable[
- [Callable[P_HandlerParams, T_HandlerReturn]],
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- ]:
- """A function decorator for subscribing to a message queue."""
- dependencies = (*self.dependencies, *dependencies)
-
- sub = self.broker.subscriber( # type: ignore[call-arg]
- *extra, # type: ignore[arg-type]
- dependencies=dependencies,
- **broker_kwargs,
- )
-
- sub._call_decorators = ( # type: ignore[attr-defined]
- self._add_api_mq_route(
- dependencies=dependencies,
- response_model=response_model,
- response_model_include=response_model_include,
- response_model_exclude=response_model_exclude,
- response_model_by_alias=response_model_by_alias,
- response_model_exclude_unset=response_model_exclude_unset,
- response_model_exclude_defaults=response_model_exclude_defaults,
- response_model_exclude_none=response_model_exclude_none,
- ),
- )
-
- return sub
-
- def _wrap_lifespan(
- self, lifespan: Optional["Lifespan[Any]"] = None
- ) -> "Lifespan[Any]":
- lifespan_context = lifespan if lifespan is not None else _DefaultLifespan(self)
-
- @asynccontextmanager
- async def start_broker_lifespan(
- app: "FastAPI",
- ) -> AsyncIterator[Mapping[str, Any]]:
- """Starts the lifespan of a broker."""
- if not len(self.weak_dependencies_provider):
- self.weak_dependencies_provider.add(app)
-
- if self.docs_router:
- self.title = app.title
- self.description = app.description
- self.version = app.version
- self.contact = app.contact
- self.license = app.license_info
-
- from faststream.asyncapi.generate import get_app_schema
-
- self.schema = get_app_schema(self)
-
- app.include_router(self.docs_router)
-
- if not len(self.weak_dependencies_provider):
- self.weak_dependencies_provider.add(app)
-
- async with lifespan_context(app) as maybe_context:
- if maybe_context is None:
- context: AnyDict = {}
- else:
- context = dict(maybe_context)
-
- context.update({"broker": self.broker})
-
- if not self._lifespan_started:
- await self.broker.start()
- self._lifespan_started = True
- else:
- warnings.warn(
- "Specifying 'lifespan_context' manually is no longer necessary with FastAPI >= 0.112.2.",
- stacklevel=2,
- )
-
- for h in self._after_startup_hooks:
- h_context = await h(app)
- if h_context: # pragma: no branch
- context.update(h_context)
-
- try:
- if self.setup_state:
- yield context
- else:
- # NOTE: old asgi compatibility
- yield # type: ignore
-
- for h in self._on_shutdown_hooks:
- await h(app)
-
- finally:
- await self.broker.close()
-
- return start_broker_lifespan
-
- @overload
- def after_startup(
- self,
- func: Callable[["AppType"], Mapping[str, Any]],
- ) -> Callable[["AppType"], Mapping[str, Any]]: ...
-
- @overload
- def after_startup(
- self,
- func: Callable[["AppType"], Awaitable[Mapping[str, Any]]],
- ) -> Callable[["AppType"], Awaitable[Mapping[str, Any]]]: ...
-
- @overload
- def after_startup(
- self,
- func: Callable[["AppType"], None],
- ) -> Callable[["AppType"], None]: ...
-
- @overload
- def after_startup(
- self,
- func: Callable[["AppType"], Awaitable[None]],
- ) -> Callable[["AppType"], Awaitable[None]]: ...
-
- def after_startup(
- self,
- func: Union[
- Callable[["AppType"], Mapping[str, Any]],
- Callable[["AppType"], Awaitable[Mapping[str, Any]]],
- Callable[["AppType"], None],
- Callable[["AppType"], Awaitable[None]],
- ],
- ) -> Union[
- Callable[["AppType"], Mapping[str, Any]],
- Callable[["AppType"], Awaitable[Mapping[str, Any]]],
- Callable[["AppType"], None],
- Callable[["AppType"], Awaitable[None]],
- ]:
- """Register a function to be executed after startup."""
- self._after_startup_hooks.append(to_async(func))
- return func
-
- @overload
- def on_broker_shutdown(
- self,
- func: Callable[["AppType"], None],
- ) -> Callable[["AppType"], None]: ...
-
- @overload
- def on_broker_shutdown(
- self,
- func: Callable[["AppType"], Awaitable[None]],
- ) -> Callable[["AppType"], Awaitable[None]]: ...
-
- def on_broker_shutdown(
- self,
- func: Union[
- Callable[["AppType"], None],
- Callable[["AppType"], Awaitable[None]],
- ],
- ) -> Union[
- Callable[["AppType"], None],
- Callable[["AppType"], Awaitable[None]],
- ]:
- """Register a function to be executed before broker stop."""
- self._on_shutdown_hooks.append(to_async(func))
- return func
-
- @abstractmethod
- def publisher(self) -> "PublisherProto[MsgType]":
- """Create Publisher object."""
- raise NotImplementedError()
-
- def _asyncapi_router(self, schema_url: Optional[str]) -> Optional[APIRouter]:
- """Creates an API router for serving AsyncAPI documentation."""
- if not self.include_in_schema or not schema_url:
- return None
-
- def download_app_json_schema() -> Response:
- assert ( # nosec B101
- self.schema
- ), "You need to run application lifespan at first"
-
- return Response(
- content=json.dumps(self.schema.to_jsonable(), indent=2),
- headers={"Content-Type": "application/octet-stream"},
- )
-
- def download_app_yaml_schema() -> Response:
- assert ( # nosec B101
- self.schema
- ), "You need to run application lifespan at first"
-
- return Response(
- content=self.schema.to_yaml(),
- headers={
- "Content-Type": "application/octet-stream",
- },
- )
-
- def serve_asyncapi_schema(
- sidebar: bool = True,
- info: bool = True,
- servers: bool = True,
- operations: bool = True,
- messages: bool = True,
- schemas: bool = True,
- errors: bool = True,
- expandMessageExamples: bool = True,
- ) -> HTMLResponse:
- """Serve the AsyncAPI schema as an HTML response."""
- assert ( # nosec B101
- self.schema
- ), "You need to run application lifespan at first"
-
- return HTMLResponse(
- content=get_asyncapi_html(
- self.schema,
- sidebar=sidebar,
- info=info,
- servers=servers,
- operations=operations,
- messages=messages,
- schemas=schemas,
- errors=errors,
- expand_message_examples=expandMessageExamples,
- title=self.schema.info.title,
- )
- )
-
- docs_router = APIRouter(
- prefix=self.prefix,
- tags=["asyncapi"],
- redirect_slashes=self.redirect_slashes,
- default=self.default,
- deprecated=self.deprecated,
- )
- docs_router.get(schema_url)(serve_asyncapi_schema)
- docs_router.get(f"{schema_url}.json")(download_app_json_schema)
- docs_router.get(f"{schema_url}.yaml")(download_app_yaml_schema)
- return docs_router
-
- def include_router( # type: ignore[override]
- self,
- router: Union["StreamRouter[MsgType]", "BrokerRouter[MsgType]"],
- *,
- prefix: str = "",
- tags: Optional[List[Union[str, Enum]]] = None,
- dependencies: Optional[Sequence["params.Depends"]] = None,
- default_response_class: Type[Response] = Default(JSONResponse),
- responses: Optional[Dict[Union[int, str], "AnyDict"]] = None,
- callbacks: Optional[List["BaseRoute"]] = None,
- deprecated: Optional[bool] = None,
- include_in_schema: bool = True,
- generate_unique_id_function: Callable[["APIRoute"], str] = Default(
- generate_unique_id
- ),
- ) -> None:
- """Includes a router in the API."""
- if isinstance(router, BrokerRouter):
- for sub in router._subscribers.values():
- sub._call_decorators = ( # type: ignore[attr-defined]
- self._add_api_mq_route(
- dependencies=(),
- response_model=Default(None),
- response_model_include=None,
- response_model_exclude=None,
- response_model_by_alias=True,
- response_model_exclude_unset=False,
- response_model_exclude_defaults=False,
- response_model_exclude_none=False,
- ),
- )
-
- self.broker.include_router(router)
- return
-
- if isinstance(router, StreamRouter): # pragma: no branch
- router.lifespan_context = fake_context
- self.broker.include_router(router.broker)
- router.weak_dependencies_provider = self.weak_dependencies_provider
-
- super().include_router(
- router=router,
- prefix=prefix,
- tags=tags,
- dependencies=dependencies,
- default_response_class=default_response_class,
- responses=responses,
- callbacks=callbacks,
- deprecated=deprecated,
- include_in_schema=include_in_schema,
- generate_unique_id_function=generate_unique_id_function,
- )
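
A usage sketch for the hook API above, using the Kafka implementation as an assumed concrete `StreamRouter`. `after_startup` hooks may return a mapping that is merged into the lifespan state (when `setup_state=True`); with FastAPI older than 0.112.2 the router lifespan must be passed explicitly, as noted in the warning above:

```python
from fastapi import FastAPI

from faststream.kafka.fastapi import KafkaRouter  # assumed concrete StreamRouter

router = KafkaRouter("localhost:9092")


@router.after_startup
async def seed_state(app: FastAPI) -> dict:
    return {"ready": True}  # merged into the lifespan state


@router.on_broker_shutdown
async def drain(app: FastAPI) -> None:
    print("broker is about to stop")


app = FastAPI(lifespan=router.lifespan_context)
app.include_router(router)
```
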
diff --git a/faststream/broker/message.py b/faststream/broker/message.py
deleted file mode 100644
index 7c1dcae73a..0000000000
--- a/faststream/broker/message.py
+++ /dev/null
@@ -1,164 +0,0 @@
-import json
-from contextlib import suppress
-from dataclasses import dataclass, field
-from enum import Enum
-from typing import (
- TYPE_CHECKING,
- Any,
- Generic,
- List,
- Optional,
- Sequence,
- Tuple,
- TypeVar,
- Union,
- cast,
-)
-from uuid import uuid4
-
-from typing_extensions import deprecated
-
-from faststream._compat import dump_json, json_loads
-from faststream.constants import ContentTypes
-from faststream.types import EMPTY
-
-if TYPE_CHECKING:
- from faststream.types import AnyDict, DecodedMessage, SendableMessage
-
-# prevent circular imports
-MsgType = TypeVar("MsgType")
-
-
-class AckStatus(str, Enum):
- acked = "acked"
- nacked = "nacked"
- rejected = "rejected"
-
-
-class SourceType(str, Enum):
- Consume = "Consume"
- """Message consumed by basic subscriber flow."""
-
- Response = "Response"
- """RPC response consumed."""
-
-
-def gen_cor_id() -> str:
- """Generate random string to use as ID."""
- return str(uuid4())
-
-
-@dataclass
-class StreamMessage(Generic[MsgType]):
- """Generic class to represent a stream message."""
-
- raw_message: "MsgType"
-
- body: Union[bytes, Any]
- headers: "AnyDict" = field(default_factory=dict)
- batch_headers: List["AnyDict"] = field(default_factory=list)
- path: "AnyDict" = field(default_factory=dict)
-
- content_type: Optional[str] = None
- reply_to: str = ""
- message_id: str = field(default_factory=gen_cor_id) # pragma: no cover
- correlation_id: str = field(
- default_factory=gen_cor_id # pragma: no cover
- )
-
- processed: bool = field(default=False, init=False)
- committed: Optional[AckStatus] = field(default=None, init=False)
- _source_type: SourceType = field(default=SourceType.Consume)
- _decoded_body: Optional["DecodedMessage"] = field(default=None, init=False)
-
- async def ack(self) -> None:
- if not self.committed:
- self.committed = AckStatus.acked
-
- async def nack(self) -> None:
- if not self.committed:
- self.committed = AckStatus.nacked
-
- async def reject(self) -> None:
- if not self.committed:
- self.committed = AckStatus.rejected
-
- async def decode(self) -> Optional["DecodedMessage"]:
- """Serialize the message by lazy decoder."""
- # TODO: make it lazy after `decoded_body` removed
- return self._decoded_body
-
- @property
- @deprecated(
- "Deprecated in **FastStream 0.5.19**. "
- "Please, use `decode` lazy method instead. "
- "Argument will be removed in **FastStream 0.6.0**.",
- category=DeprecationWarning,
- stacklevel=1,
- )
- def decoded_body(self) -> Optional["DecodedMessage"]:
- return self._decoded_body
-
- @decoded_body.setter
- @deprecated(
- "Deprecated in **FastStream 0.5.19**. "
- "Please, use `decode` lazy method instead. "
- "Argument will be removed in **FastStream 0.6.0**.",
- category=DeprecationWarning,
- stacklevel=1,
- )
- def decoded_body(self, value: Optional["DecodedMessage"]) -> None:
- self._decoded_body = value
-
-
-def decode_message(message: "StreamMessage[Any]") -> "DecodedMessage":
- """Decodes a message."""
- body: Any = getattr(message, "body", message)
- m: DecodedMessage = body
-
- if (content_type := getattr(message, "content_type", EMPTY)) is not EMPTY:
- content_type = cast(Optional[str], content_type)
-
- if not content_type:
- with suppress(json.JSONDecodeError, UnicodeDecodeError):
- m = json_loads(body)
-
- elif ContentTypes.text.value in content_type:
- m = body.decode()
-
- elif ContentTypes.json.value in content_type:
- m = json_loads(body)
-
- else:
- with suppress(json.JSONDecodeError, UnicodeDecodeError):
- m = json_loads(body)
-
- return m
-
-
-def encode_message(
- msg: Union[Sequence["SendableMessage"], "SendableMessage"],
-) -> Tuple[bytes, Optional[str]]:
- """Encodes a message."""
- if msg is None:
- return (
- b"",
- None,
- )
-
- if isinstance(msg, bytes):
- return (
- msg,
- None,
- )
-
- if isinstance(msg, str):
- return (
- msg.encode(),
- ContentTypes.text.value,
- )
-
- return (
- dump_json(msg),
- ContentTypes.json.value,
- )
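
A round-trip sketch for the codec helpers above (pre-refactor import path assumed): `encode_message` picks the body bytes and content type, and `decode_message` inverts that choice based on the content type:

```python
from faststream.broker.message import StreamMessage, decode_message, encode_message

body, content_type = encode_message({"id": 1})
assert content_type == "application/json"

msg = StreamMessage(raw_message=None, body=body, content_type=content_type)
assert decode_message(msg) == {"id": 1}

assert encode_message("hi") == (b"hi", "text/plain")
assert encode_message(b"raw") == (b"raw", None)  # bytes pass through untyped
```
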
diff --git a/faststream/broker/middlewares/__init__.py b/faststream/broker/middlewares/__init__.py
deleted file mode 100644
index c10aa33c3d..0000000000
--- a/faststream/broker/middlewares/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from faststream.broker.middlewares.base import BaseMiddleware
-from faststream.broker.middlewares.exception import ExceptionMiddleware
-
-__all__ = ("BaseMiddleware", "ExceptionMiddleware")
diff --git a/faststream/broker/middlewares/base.py b/faststream/broker/middlewares/base.py
deleted file mode 100644
index 5710c8ec1c..0000000000
--- a/faststream/broker/middlewares/base.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from typing import TYPE_CHECKING, Any, Optional, Type
-
-from typing_extensions import Self
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.broker.message import StreamMessage
- from faststream.types import AsyncFunc, AsyncFuncAny
-
-
-class BaseMiddleware:
- """A base middleware class."""
-
- def __init__(self, msg: Optional[Any] = None) -> None:
- self.msg = msg
-
- async def on_receive(self) -> None:
- """Hook to call on message receive."""
- pass
-
- async def after_processed(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> Optional[bool]:
- """Asynchronously called after processing."""
- return False
-
- async def __aenter__(self) -> Self:
- await self.on_receive()
- return self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> Optional[bool]:
- """Exit the asynchronous context manager."""
- return await self.after_processed(exc_type, exc_val, exc_tb)
-
- async def on_consume(
- self,
- msg: "StreamMessage[Any]",
- ) -> "StreamMessage[Any]":
- """Asynchronously consumes a message."""
- return msg
-
- async def after_consume(self, err: Optional[Exception]) -> None:
- """A function to handle the result of consuming a resource asynchronously."""
- if err is not None:
- raise err
-
- async def consume_scope(
- self,
- call_next: "AsyncFuncAny",
- msg: "StreamMessage[Any]",
- ) -> Any:
- """Asynchronously consumes a message and returns an asynchronous iterator of decoded messages."""
- err: Optional[Exception] = None
- try:
- result = await call_next(await self.on_consume(msg))
-
- except Exception as e:
- err = e
-
- else:
- return result
-
- finally:
- await self.after_consume(err)
-
- async def on_publish(
- self,
- msg: Any,
- *args: Any,
- **kwargs: Any,
- ) -> Any:
- """Asynchronously handle a publish event."""
- return msg
-
- async def after_publish(
- self,
- err: Optional[Exception],
- ) -> None:
- """Asynchronous function to handle the after publish event."""
- if err is not None:
- raise err
-
- async def publish_scope(
- self,
- call_next: "AsyncFunc",
- msg: Any,
- *args: Any,
- **kwargs: Any,
- ) -> Any:
- """Publish a message and return an async iterator."""
- err: Optional[Exception] = None
- try:
- result = await call_next(
- await self.on_publish(msg, *args, **kwargs),
- *args,
- **kwargs,
- )
-
- except Exception as e:
- err = e
-
- else:
- return result
-
- finally:
- await self.after_publish(err)
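
A minimal subclass sketch on top of `BaseMiddleware` above: override only the hooks you need, and the base class wires them into `consume_scope`/`publish_scope`. Pre-refactor import path assumed; the class itself (not an instance) is what gets registered, since brokers instantiate middlewares per message:

```python
import time
from typing import Any, Optional

from faststream.broker.middlewares import BaseMiddleware


class TimingMiddleware(BaseMiddleware):
    """Log how long the handler (plus inner middlewares) took."""

    async def on_consume(self, msg: Any) -> Any:
        self._started = time.monotonic()
        return msg

    async def after_consume(self, err: Optional[Exception]) -> None:
        print(f"handled in {time.monotonic() - self._started:.3f}s")
        if err is not None:
            raise err  # keep the base-class contract: re-raise failures


# e.g. KafkaBroker(middlewares=(TimingMiddleware,))
```
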
diff --git a/faststream/broker/middlewares/exception.py b/faststream/broker/middlewares/exception.py
deleted file mode 100644
index f0325a1788..0000000000
--- a/faststream/broker/middlewares/exception.py
+++ /dev/null
@@ -1,212 +0,0 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- ContextManager,
- Dict,
- List,
- NoReturn,
- Optional,
- Tuple,
- Type,
- Union,
- cast,
- overload,
-)
-
-from typing_extensions import Literal, TypeAlias
-
-from faststream.broker.middlewares.base import BaseMiddleware
-from faststream.exceptions import IgnoredException
-from faststream.utils import apply_types, context
-from faststream.utils.functions import sync_fake_context, to_async
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.broker.message import StreamMessage
- from faststream.types import AsyncFuncAny
-
-
-GeneralExceptionHandler: TypeAlias = Union[
- Callable[..., None], Callable[..., Awaitable[None]]
-]
-PublishingExceptionHandler: TypeAlias = Callable[..., "Any"]
-
-CastedGeneralExceptionHandler: TypeAlias = Callable[..., Awaitable[None]]
-CastedPublishingExceptionHandler: TypeAlias = Callable[..., Awaitable["Any"]]
-CastedHandlers: TypeAlias = List[
- Tuple[
- Type[Exception],
- CastedGeneralExceptionHandler,
- ]
-]
-CastedPublishingHandlers: TypeAlias = List[
- Tuple[
- Type[Exception],
- CastedPublishingExceptionHandler,
- ]
-]
-
-
-class BaseExceptionMiddleware(BaseMiddleware):
- def __init__(
- self,
- handlers: CastedHandlers,
- publish_handlers: CastedPublishingHandlers,
- msg: Optional[Any] = None,
- ) -> None:
- super().__init__(msg)
- self._handlers = handlers
- self._publish_handlers = publish_handlers
-
- async def consume_scope(
- self,
- call_next: "AsyncFuncAny",
- msg: "StreamMessage[Any]",
- ) -> Any:
- try:
- return await call_next(await self.on_consume(msg))
-
- except Exception as exc:
- exc_type = type(exc)
-
- for handler_type, handler in self._publish_handlers:
- if issubclass(exc_type, handler_type):
- return await handler(exc)
-
- raise exc
-
- async def after_processed(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> Optional[bool]:
- if exc_type:
- for handler_type, handler in self._handlers:
- if issubclass(exc_type, handler_type):
- # TODO: remove it after context will be moved to middleware
- # In case parser/decoder error occurred
- scope: ContextManager[Any]
- if not context.get_local("message"):
- scope = context.scope("message", self.msg)
- else:
- scope = sync_fake_context()
-
- with scope:
- await handler(exc_val)
-
- return True
-
- return False
-
- return None
-
-
-class ExceptionMiddleware:
- __slots__ = ("_handlers", "_publish_handlers")
-
- _handlers: CastedHandlers
- _publish_handlers: CastedPublishingHandlers
-
- def __init__(
- self,
- handlers: Optional[
- Dict[
- Type[Exception],
- GeneralExceptionHandler,
- ]
- ] = None,
- publish_handlers: Optional[
- Dict[
- Type[Exception],
- PublishingExceptionHandler,
- ]
- ] = None,
- ) -> None:
- self._handlers: CastedHandlers = [
- (IgnoredException, ignore_handler),
- *(
- (
- exc_type,
- apply_types(
- cast(Callable[..., Awaitable[None]], to_async(handler))
- ),
- )
- for exc_type, handler in (handlers or {}).items()
- ),
- ]
-
- self._publish_handlers: CastedPublishingHandlers = [
- (IgnoredException, ignore_handler),
- *(
- (exc_type, apply_types(to_async(handler)))
- for exc_type, handler in (publish_handlers or {}).items()
- ),
- ]
-
- @overload
- def add_handler(
- self,
- exc: Type[Exception],
- publish: Literal[False] = False,
- ) -> Callable[[GeneralExceptionHandler], GeneralExceptionHandler]: ...
-
- @overload
- def add_handler(
- self,
- exc: Type[Exception],
- publish: Literal[True],
- ) -> Callable[[PublishingExceptionHandler], PublishingExceptionHandler]: ...
-
- def add_handler(
- self,
- exc: Type[Exception],
- publish: bool = False,
- ) -> Union[
- Callable[[GeneralExceptionHandler], GeneralExceptionHandler],
- Callable[[PublishingExceptionHandler], PublishingExceptionHandler],
- ]:
- if publish:
-
- def pub_wrapper(
- func: PublishingExceptionHandler,
- ) -> PublishingExceptionHandler:
- self._publish_handlers.append(
- (
- exc,
- apply_types(to_async(func)),
- )
- )
- return func
-
- return pub_wrapper
-
- else:
-
- def default_wrapper(
- func: GeneralExceptionHandler,
- ) -> GeneralExceptionHandler:
- self._handlers.append(
- (
- exc,
- apply_types(to_async(func)),
- )
- )
- return func
-
- return default_wrapper
-
- def __call__(self, msg: Optional[Any]) -> BaseExceptionMiddleware:
- """Real middleware runtime constructor."""
- return BaseExceptionMiddleware(
- handlers=self._handlers,
- publish_handlers=self._publish_handlers,
- msg=msg,
- )
-
-
-async def ignore_handler(exception: IgnoredException) -> NoReturn:
- raise exception
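A minimal usage sketch of the removed `ExceptionMiddleware` (broker wiring is hypothetical). The two handler kinds follow the `add_handler` overloads above: plain handlers run in `after_processed` and suppress the error, while `publish=True` handlers run in `consume_scope` and their return value is published as the response.

from faststream.broker.middlewares.exception import ExceptionMiddleware

exc_middleware = ExceptionMiddleware()

@exc_middleware.add_handler(ValueError)
def handle_value_error(exc: ValueError) -> None:
    print(f"suppressed: {exc}")  # error is swallowed after logging

@exc_middleware.add_handler(KeyError, publish=True)
def handle_key_error(exc: KeyError) -> str:
    return "fallback"  # published instead of the failed result

# broker = SomeBroker(middlewares=(exc_middleware,))  # hypothetical wiring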
diff --git a/faststream/broker/middlewares/logging.py b/faststream/broker/middlewares/logging.py
deleted file mode 100644
index 18c4c365e5..0000000000
--- a/faststream/broker/middlewares/logging.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import logging
-from typing import TYPE_CHECKING, Any, Optional, Type
-
-from typing_extensions import Self
-
-from faststream.broker.middlewares.base import BaseMiddleware
-from faststream.exceptions import IgnoredException
-from faststream.utils.context.repository import context
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.broker.message import StreamMessage
- from faststream.types import LoggerProto
-
-
-class CriticalLogMiddleware(BaseMiddleware):
- """A middleware class for logging critical errors."""
-
- def __init__(
- self,
- logger: Optional["LoggerProto"],
- log_level: int,
- ) -> None:
- """Initialize the class."""
- self.logger = logger
- self.log_level = log_level
-
- def __call__(self, msg: Optional[Any]) -> Self:
- """Call the object with a message."""
- self.msg = msg
- return self
-
- async def on_consume(
- self,
- msg: "StreamMessage[Any]",
- ) -> "StreamMessage[Any]":
- if self.logger is not None:
- c = context.get_local("log_context", {})
- self.logger.log(self.log_level, "Received", extra=c)
-
- return await super().on_consume(msg)
-
- async def after_processed(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> bool:
- """Asynchronously called after processing."""
- if self.logger is not None:
- c = context.get_local("log_context", {})
-
- if exc_type:
- if issubclass(exc_type, IgnoredException):
- self.logger.log(
- logging.INFO,
- exc_val,
- extra=c,
- )
- else:
- self.logger.log(
- logging.ERROR,
- f"{exc_type.__name__}: {exc_val}",
- exc_info=exc_val,
- extra=c,
- )
-
- self.logger.log(self.log_level, "Processed", extra=c)
-
- await super().after_processed(exc_type, exc_val, exc_tb)
-
- # Exception was not processed
- return False
diff --git a/faststream/broker/proto.py b/faststream/broker/proto.py
deleted file mode 100644
index c1083a92ba..0000000000
--- a/faststream/broker/proto.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from abc import abstractmethod
-from typing import Hashable, Protocol
-
-
-class SetupAble(Protocol):
- @abstractmethod
- def setup(self) -> None: ...
-
-
-class EndpointProto(SetupAble, Hashable, Protocol):
- @abstractmethod
- def add_prefix(self, prefix: str) -> None: ...
diff --git a/faststream/broker/publisher/fake.py b/faststream/broker/publisher/fake.py
deleted file mode 100644
index 83d681726b..0000000000
--- a/faststream/broker/publisher/fake.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from functools import partial
-from itertools import chain
-from typing import TYPE_CHECKING, Any, Optional, Sequence
-
-from faststream.broker.publisher.proto import BasePublisherProto
-
-if TYPE_CHECKING:
- from faststream.broker.types import PublisherMiddleware
- from faststream.types import AnyDict, AsyncFunc, SendableMessage
-
-
-class FakePublisher(BasePublisherProto):
- """Publisher Interface implementation to use as RPC or REPLY TO publisher."""
-
- def __init__(
- self,
- method: "AsyncFunc",
- *,
- publish_kwargs: "AnyDict",
- middlewares: Sequence["PublisherMiddleware"] = (),
- ) -> None:
- """Initialize an object."""
- self.method = method
- self.publish_kwargs = publish_kwargs
- self.middlewares = middlewares
-
- async def publish(
- self,
- message: "SendableMessage",
- *,
- correlation_id: Optional[str] = None,
- _extra_middlewares: Sequence["PublisherMiddleware"] = (),
- **kwargs: Any,
- ) -> Any:
- """Publish a message."""
- publish_kwargs = {
- "correlation_id": correlation_id,
- **self.publish_kwargs,
- **kwargs,
- }
-
- call: AsyncFunc = self.method
- for m in chain(_extra_middlewares, self.middlewares):
- call = partial(m, call)
-
- return await call(message, **publish_kwargs)
-
- async def request(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- _extra_middlewares: Sequence["PublisherMiddleware"] = (),
- ) -> Any:
- raise NotImplementedError(
- "`FakePublisher` can be used only to publish "
- "a response for `reply-to` or `RPC` messages."
- )
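Note on the loop above: each `partial(m, call)` wraps the previous callable, so the last middleware yielded by `chain(_extra_middlewares, self.middlewares)` ends up outermost and runs first. A tiny sketch of the same pattern with illustrative names:

from functools import partial

async def send(msg, **kwargs):
    print("send:", msg)

async def m1(call_next, msg, **kwargs):
    print("m1")
    return await call_next(msg, **kwargs)

async def m2(call_next, msg, **kwargs):
    print("m2")
    return await call_next(msg, **kwargs)

call = send
for m in (m1, m2):  # same loop shape as FakePublisher.publish
    call = partial(m, call)

# awaiting call("hi") prints: m2, m1, send: hi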
diff --git a/faststream/broker/publisher/proto.py b/faststream/broker/publisher/proto.py
deleted file mode 100644
index 9bb1a7be97..0000000000
--- a/faststream/broker/publisher/proto.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from abc import abstractmethod
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Generic,
- Optional,
- Protocol,
- Sequence,
-)
-
-from typing_extensions import override
-
-from faststream.asyncapi.proto import AsyncAPIProto
-from faststream.broker.proto import EndpointProto
-from faststream.broker.types import MsgType
-
-if TYPE_CHECKING:
- from faststream.broker.types import (
- AsyncCallable,
- BrokerMiddleware,
- P_HandlerParams,
- PublisherMiddleware,
- T_HandlerReturn,
- )
- from faststream.types import SendableMessage
-
-
-class ProducerProto(Protocol):
- _parser: "AsyncCallable"
- _decoder: "AsyncCallable"
-
- @abstractmethod
- async def publish(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- ) -> Optional[Any]:
- """Publishes a message asynchronously."""
- ...
-
- @abstractmethod
- async def request(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- ) -> Any:
- """Publishes a message synchronously."""
- ...
-
-
-class BasePublisherProto(Protocol):
- @abstractmethod
- async def publish(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- _extra_middlewares: Sequence["PublisherMiddleware"] = (),
- ) -> Optional[Any]:
- """Publishes a message asynchronously."""
- ...
-
- @abstractmethod
- async def request(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- _extra_middlewares: Sequence["PublisherMiddleware"] = (),
- ) -> Optional[Any]:
- """Publishes a message synchronously."""
- ...
-
-
-class PublisherProto(
- AsyncAPIProto,
- EndpointProto,
- BasePublisherProto,
- Generic[MsgType],
-):
- schema_: Any
-
- _broker_middlewares: Sequence["BrokerMiddleware[MsgType]"]
- _middlewares: Sequence["PublisherMiddleware"]
- _producer: Optional["ProducerProto"]
-
- @abstractmethod
- def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ...
-
- @staticmethod
- @abstractmethod
- def create() -> "PublisherProto[MsgType]":
- """Abstract factory to create a real Publisher."""
- ...
-
- @override
- @abstractmethod
- def setup( # type: ignore[override]
- self,
- *,
- producer: Optional["ProducerProto"],
- ) -> None: ...
-
- @abstractmethod
- def __call__(
- self,
- func: "Callable[P_HandlerParams, T_HandlerReturn]",
- ) -> "Callable[P_HandlerParams, T_HandlerReturn]": ...
diff --git a/faststream/broker/publisher/usecase.py b/faststream/broker/publisher/usecase.py
deleted file mode 100644
index 6ac51053c9..0000000000
--- a/faststream/broker/publisher/usecase.py
+++ /dev/null
@@ -1,174 +0,0 @@
-from abc import ABC
-from inspect import unwrap
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- List,
- Optional,
- Sequence,
- Tuple,
-)
-from unittest.mock import MagicMock
-
-from fast_depends._compat import create_model, get_config_base
-from fast_depends.core import CallModel, build_call_model
-from typing_extensions import Annotated, Doc, override
-
-from faststream.asyncapi.abc import AsyncAPIOperation
-from faststream.asyncapi.message import get_response_schema
-from faststream.asyncapi.utils import to_camelcase
-from faststream.broker.publisher.proto import PublisherProto
-from faststream.broker.types import (
- MsgType,
- P_HandlerParams,
- T_HandlerReturn,
-)
-from faststream.broker.wrapper.call import HandlerCallWrapper
-
-if TYPE_CHECKING:
- from faststream.broker.publisher.proto import ProducerProto
- from faststream.broker.types import (
- BrokerMiddleware,
- PublisherMiddleware,
- )
- from faststream.types import AnyDict
-
-
-class PublisherUsecase(
- ABC,
- AsyncAPIOperation,
- PublisherProto[MsgType],
-):
- """A base class for publishers in an asynchronous API."""
-
- mock: Optional[MagicMock]
- calls: List[Callable[..., Any]]
-
- def __init__(
- self,
- *,
- broker_middlewares: Annotated[
- Sequence["BrokerMiddleware[MsgType]"],
- Doc("Top-level middlewares to use in direct `.publish` call."),
- ],
- middlewares: Annotated[
- Sequence["PublisherMiddleware"],
- Doc("Publisher middlewares."),
- ],
- # AsyncAPI args
- schema_: Annotated[
- Optional[Any],
- Doc(
- "AsyncAPI publishing message type"
- "Should be any python-native object annotation or `pydantic.BaseModel`."
- ),
- ],
- title_: Annotated[
- Optional[str],
- Doc("AsyncAPI object title."),
- ],
- description_: Annotated[
- Optional[str],
- Doc("AsyncAPI object description."),
- ],
- include_in_schema: Annotated[
- bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
- ],
- ) -> None:
- self.calls = []
- self._middlewares = middlewares
- self._broker_middlewares = broker_middlewares
- self._producer = None
-
- self._fake_handler = False
- self.mock = None
-
- # AsyncAPI
- self.title_ = title_
- self.description_ = description_
- self.include_in_schema = include_in_schema
- self.schema_ = schema_
-
- def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
- self._broker_middlewares = (*self._broker_middlewares, middleware)
-
- @override
- def setup( # type: ignore[override]
- self,
- *,
- producer: Optional["ProducerProto"],
- ) -> None:
- self._producer = producer
-
- def set_test(
- self,
- *,
- mock: Annotated[
- MagicMock,
- Doc("Mock object to check in tests."),
- ],
- with_fake: Annotated[
- bool,
- Doc("Whetevet publisher's fake subscriber created or not."),
- ],
- ) -> None:
- """Turn publisher to testing mode."""
- self.mock = mock
- self._fake_handler = with_fake
-
- def reset_test(self) -> None:
- """Turn off publisher's testing mode."""
- self._fake_handler = False
- self.mock = None
-
- def __call__(
- self,
- func: Callable[P_HandlerParams, T_HandlerReturn],
- ) -> HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]:
- """Decorate user's function by current publisher."""
- handler_call = HandlerCallWrapper[
- MsgType,
- P_HandlerParams,
- T_HandlerReturn,
- ](func)
- handler_call._publishers.append(self)
- self.calls.append(handler_call._original_call)
- return handler_call
-
- def get_payloads(self) -> List[Tuple["AnyDict", str]]:
- payloads: List[Tuple[AnyDict, str]] = []
-
- if self.schema_:
- params = {"response__": (self.schema_, ...)}
-
- call_model: CallModel[Any, Any] = CallModel(
- call=lambda: None,
- model=create_model("Fake"),
- response_model=create_model( # type: ignore[call-overload]
- "",
- __config__=get_config_base(),
- **params,
- ),
- params=params,
- )
-
- body = get_response_schema(
- call_model,
- prefix=f"{self.name}:Message",
- )
- if body: # pragma: no branch
- payloads.append((body, ""))
-
- else:
- for call in self.calls:
- call_model = build_call_model(call)
- body = get_response_schema(
- call_model,
- prefix=f"{self.name}:Message",
- )
- if body:
- payloads.append((body, to_camelcase(unwrap(call).__name__)))
-
- return payloads
diff --git a/faststream/broker/response.py b/faststream/broker/response.py
deleted file mode 100644
index fb08993251..0000000000
--- a/faststream/broker/response.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from typing import TYPE_CHECKING, Any, Optional, Union
-
-if TYPE_CHECKING:
- from faststream.types import AnyDict
-
-
-class Response:
- def __init__(
- self,
- body: "Any",
- *,
- headers: Optional["AnyDict"] = None,
- correlation_id: Optional[str] = None,
- ) -> None:
- """Initialize a handler."""
- self.body = body
- self.headers = headers or {}
- self.correlation_id = correlation_id
-
- def add_headers(
- self,
- extra_headers: "AnyDict",
- *,
- override: bool = True,
- ) -> None:
- if override:
- self.headers = {**self.headers, **extra_headers}
- else:
- self.headers = {**extra_headers, **self.headers}
-
- def as_publish_kwargs(self) -> "AnyDict":
- publish_options = {
- "headers": self.headers,
- "correlation_id": self.correlation_id,
- }
- return publish_options
-
-
-def ensure_response(response: Union["Response", "Any"]) -> "Response":
- if isinstance(response, Response):
- return response
-
- return Response(response)
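A sketch of how the removed `Response` class was used: a handler may return it to attach publish metadata, and `ensure_response` wraps plain return values so the rest of the pipeline can rely on the `Response` interface (the handler below is illustrative):

from faststream.broker.response import Response

async def handler(body: str) -> Response:
    return Response(
        {"echo": body},
        headers={"x-source": "handler"},  # forwarded via as_publish_kwargs()
        correlation_id=None,  # filled in from the inbound message when unset
    )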
diff --git a/faststream/broker/router.py b/faststream/broker/router.py
deleted file mode 100644
index e9dcb399d0..0000000000
--- a/faststream/broker/router.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Iterable,
- Optional,
- Sequence,
-)
-
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.types import MsgType
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.types import AnyDict
-
-
-class ArgsContainer:
- """Class to store any arguments."""
-
- args: Iterable[Any]
- kwargs: "AnyDict"
-
- def __init__(
- self,
- *args: Any,
- **kwargs: Any,
- ) -> None:
- self.args = args
- self.kwargs = kwargs
-
-
-class SubscriberRoute(ArgsContainer):
- """A generic class to represent a broker route."""
-
- call: Callable[..., Any]
- publishers: Iterable[Any]
-
- def __init__(
- self,
- call: Callable[..., Any],
- *args: Any,
- publishers: Iterable[ArgsContainer] = (),
- **kwargs: Any,
- ) -> None:
- """Initialize a callable object with arguments and keyword arguments."""
- self.call = call
- self.publishers = publishers
-
- super().__init__(*args, **kwargs)
-
-
-class BrokerRouter(ABCBroker[MsgType]):
- """A generic class representing a broker router."""
-
- def __init__(
- self,
- *,
- handlers: Iterable[SubscriberRoute],
- # base options
- prefix: str,
- dependencies: Iterable["Depends"],
- middlewares: Sequence["BrokerMiddleware[MsgType]"],
- parser: Optional["CustomCallable"],
- decoder: Optional["CustomCallable"],
- include_in_schema: Optional[bool],
- ) -> None:
- super().__init__(
- prefix=prefix,
- dependencies=dependencies,
- middlewares=middlewares,
- parser=parser,
- decoder=decoder,
- include_in_schema=include_in_schema,
- )
-
- for h in handlers:
- call = h.call
-
- for p in h.publishers:
- call = self.publisher(*p.args, **p.kwargs)(call)
-
- self.subscriber(*h.args, **h.kwargs)(call)
diff --git a/faststream/broker/schemas.py b/faststream/broker/schemas.py
deleted file mode 100644
index 75b624df87..0000000000
--- a/faststream/broker/schemas.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import Any, Optional, Type, TypeVar, Union, overload
-
-NameRequiredCls = TypeVar("NameRequiredCls", bound="NameRequired")
-
-
-class NameRequired:
- """Required name option object."""
-
- def __eq__(self, __value: object) -> bool:
- """Compares the current object with another object for equality."""
- if __value is None:
- return False
-
- if not isinstance(__value, NameRequired):
- return NotImplemented
-
- return self.name == __value.name
-
- def __init__(self, name: str) -> None:
- self.name = name
-
- @overload
- @classmethod
- def validate(
- cls: Type[NameRequiredCls],
- value: Union[str, NameRequiredCls],
- **kwargs: Any,
- ) -> NameRequiredCls: ...
-
- @overload
- @classmethod
- def validate(
- cls: Type[NameRequiredCls],
- value: None,
- **kwargs: Any,
- ) -> None: ...
-
- @classmethod
- def validate(
- cls: Type[NameRequiredCls],
- value: Union[str, NameRequiredCls, None],
- **kwargs: Any,
- ) -> Optional[NameRequiredCls]:
- """Factory to create object."""
- if value is not None and isinstance(value, str):
- value = cls(value, **kwargs)
- return value
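The `validate` factory above coerces strings into the subclass and passes instances and `None` through unchanged - a sketch with a hypothetical subclass:

class Queue(NameRequired):
    pass

assert Queue.validate("tasks") == Queue("tasks")  # str is coerced
assert Queue.validate(Queue("tasks")).name == "tasks"  # instance passes through
assert Queue.validate(None) is None  # None passes through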
diff --git a/faststream/broker/subscriber/call_item.py b/faststream/broker/subscriber/call_item.py
deleted file mode 100644
index 72c8ae840c..0000000000
--- a/faststream/broker/subscriber/call_item.py
+++ /dev/null
@@ -1,177 +0,0 @@
-from functools import partial
-from inspect import unwrap
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Generic,
- Iterable,
- Optional,
- Sequence,
- cast,
-)
-
-from typing_extensions import override
-
-from faststream.broker.proto import SetupAble
-from faststream.broker.types import MsgType
-from faststream.exceptions import IgnoredException, SetupError
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.types import (
- AsyncCallable,
- AsyncFilter,
- CustomCallable,
- SubscriberMiddleware,
- )
- from faststream.broker.wrapper.call import HandlerCallWrapper
- from faststream.types import AsyncFuncAny, Decorator
-
-
-class HandlerItem(SetupAble, Generic[MsgType]):
- """A class representing handler overloaded item."""
-
- __slots__ = (
- "dependant",
- "dependencies",
- "filter",
- "handler",
- "item_decoder",
- "item_middlewares",
- "item_parser",
- )
-
- dependant: Optional[Any]
-
- def __init__(
- self,
- *,
- handler: "HandlerCallWrapper[MsgType, ..., Any]",
- filter: "AsyncFilter[StreamMessage[MsgType]]",
- item_parser: Optional["CustomCallable"],
- item_decoder: Optional["CustomCallable"],
- item_middlewares: Sequence["SubscriberMiddleware[StreamMessage[MsgType]]"],
- dependencies: Iterable["Depends"],
- ) -> None:
- self.handler = handler
- self.filter = filter
- self.item_parser = item_parser
- self.item_decoder = item_decoder
- self.item_middlewares = item_middlewares
- self.dependencies = dependencies
- self.dependant = None
-
- def __repr__(self) -> str:
- filter_call = unwrap(self.filter)
- filter_name = getattr(filter_call, "__name__", str(filter_call))
- return f"<'{self.call_name}': filter='{filter_name}'>"
-
- @override
- def setup( # type: ignore[override]
- self,
- *,
- parser: "AsyncCallable",
- decoder: "AsyncCallable",
- broker_dependencies: Iterable["Depends"],
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
- ) -> None:
- if self.dependant is None:
- self.item_parser = parser
- self.item_decoder = decoder
-
- dependencies = (*broker_dependencies, *self.dependencies)
-
- dependant = self.handler.set_wrapped(
- apply_types=apply_types,
- is_validate=is_validate,
- dependencies=dependencies,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
- )
-
- if _get_dependant is None:
- self.dependant = dependant
- else:
- self.dependant = _get_dependant(
- self.handler._original_call,
- dependencies,
- )
-
- @property
- def call_name(self) -> str:
- """Returns the name of the original call."""
- if self.handler is None:
- return ""
-
- caller = unwrap(self.handler._original_call)
- name = getattr(caller, "__name__", str(caller))
- return name
-
- @property
- def description(self) -> Optional[str]:
- """Returns the description of original call."""
- if self.handler is None:
- return None
-
- caller = unwrap(self.handler._original_call)
- description = getattr(caller, "__doc__", None)
- return description
-
- async def is_suitable(
- self,
- msg: MsgType,
- cache: Dict[Any, Any],
- ) -> Optional["StreamMessage[MsgType]"]:
- """Check is message suite for current filter."""
- if not (parser := cast(Optional["AsyncCallable"], self.item_parser)) or not (
- decoder := cast(Optional["AsyncCallable"], self.item_decoder)
- ):
- raise SetupError("You should setup `HandlerItem` at first.")
-
- message = cache[parser] = cast(
- "StreamMessage[MsgType]", cache.get(parser) or await parser(msg)
- )
-
- message._decoded_body = cache[decoder] = cache.get(decoder) or await decoder(
- message
- )
-
- if await self.filter(message):
- return message
-
- return None
-
- async def call(
- self,
- /,
- message: "StreamMessage[MsgType]",
- _extra_middlewares: Iterable["SubscriberMiddleware[Any]"],
- ) -> Any:
- """Execute wrapped handler with consume middlewares."""
- call: AsyncFuncAny = self.handler.call_wrapped
-
- for middleware in chain(self.item_middlewares[::-1], _extra_middlewares):
- call = partial(middleware, call)
-
- try:
- result = await call(message)
-
- except (IgnoredException, SystemExit):
- self.handler.trigger()
- raise
-
- except Exception as e:
- self.handler.trigger(error=e)
- raise e
-
- else:
- self.handler.trigger(result=result)
- return result
diff --git a/faststream/broker/subscriber/proto.py b/faststream/broker/subscriber/proto.py
deleted file mode 100644
index be296eec17..0000000000
--- a/faststream/broker/subscriber/proto.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from abc import abstractmethod
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Iterable,
- List,
- Optional,
- Sequence,
-)
-
-from typing_extensions import Self, override
-
-from faststream.asyncapi.proto import AsyncAPIProto
-from faststream.broker.proto import EndpointProto
-from faststream.broker.types import MsgType
-from faststream.broker.wrapper.proto import WrapperProto
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import BasePublisherProto, ProducerProto
- from faststream.broker.response import Response
- from faststream.broker.subscriber.call_item import HandlerItem
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- Filter,
- SubscriberMiddleware,
- )
- from faststream.types import AnyDict, Decorator, LoggerProto
-
-
-class SubscriberProto(
- AsyncAPIProto,
- EndpointProto,
- WrapperProto[MsgType],
-):
- calls: List["HandlerItem[MsgType]"]
- running: bool
-
- _broker_dependencies: Iterable["Depends"]
- _broker_middlewares: Sequence["BrokerMiddleware[MsgType]"]
- _producer: Optional["ProducerProto"]
-
- @abstractmethod
- def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ...
-
- @abstractmethod
- def get_log_context(
- self,
- msg: Optional["StreamMessage[MsgType]"],
- /,
- ) -> Dict[str, str]: ...
-
- @override
- @abstractmethod
- def setup( # type: ignore[override]
- self,
- *,
- logger: Optional["LoggerProto"],
- graceful_timeout: Optional[float],
- broker_parser: Optional["CustomCallable"],
- broker_decoder: Optional["CustomCallable"],
- producer: Optional["ProducerProto"],
- extra_context: "AnyDict",
- # FastDepends options
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
- ) -> None: ...
-
- @abstractmethod
- def _make_response_publisher(
- self,
- message: "StreamMessage[MsgType]",
- ) -> Iterable["BasePublisherProto"]: ...
-
- @property
- @abstractmethod
- def call_name(self) -> str: ...
-
- @abstractmethod
- async def start(self) -> None: ...
-
- @abstractmethod
- async def close(self) -> None: ...
-
- @abstractmethod
- async def consume(self, msg: MsgType) -> Any: ...
-
- @abstractmethod
- async def process_message(self, msg: MsgType) -> "Response": ...
-
- @abstractmethod
- async def get_one(
- self, *, timeout: float = 5.0
- ) -> "Optional[StreamMessage[MsgType]]": ...
-
- @abstractmethod
- def add_call(
- self,
- *,
- filter_: "Filter[Any]",
- parser_: "CustomCallable",
- decoder_: "CustomCallable",
- middlewares_: Sequence["SubscriberMiddleware[Any]"],
- dependencies_: Iterable["Depends"],
- ) -> Self: ...
diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py
deleted file mode 100644
index 9641a99513..0000000000
--- a/faststream/broker/subscriber/usecase.py
+++ /dev/null
@@ -1,472 +0,0 @@
-from abc import abstractmethod
-from contextlib import AsyncExitStack
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- ContextManager,
- Dict,
- Iterable,
- List,
- Optional,
- Sequence,
- Tuple,
- Union,
- overload,
-)
-
-from typing_extensions import Self, override
-
-from faststream.asyncapi.abc import AsyncAPIOperation
-from faststream.asyncapi.message import parse_handler_params
-from faststream.asyncapi.utils import to_camelcase
-from faststream.broker.response import ensure_response
-from faststream.broker.subscriber.call_item import HandlerItem
-from faststream.broker.subscriber.proto import SubscriberProto
-from faststream.broker.types import (
- MsgType,
- P_HandlerParams,
- T_HandlerReturn,
-)
-from faststream.broker.utils import MultiLock, get_watcher_context, resolve_custom_func
-from faststream.broker.wrapper.call import HandlerCallWrapper
-from faststream.exceptions import SetupError, StopConsume, SubscriberNotFound
-from faststream.utils.context.repository import context
-from faststream.utils.functions import sync_fake_context, to_async
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.middlewares import BaseMiddleware
- from faststream.broker.publisher.proto import BasePublisherProto, ProducerProto
- from faststream.broker.response import Response
- from faststream.broker.types import (
- AsyncCallable,
- BrokerMiddleware,
- CustomCallable,
- Filter,
- SubscriberMiddleware,
- )
- from faststream.types import AnyDict, Decorator, LoggerProto
-
-
-class _CallOptions:
- __slots__ = (
- "decoder",
- "dependencies",
- "filter",
- "middlewares",
- "parser",
- )
-
- def __init__(
- self,
- *,
- filter: "Filter[Any]",
- parser: Optional["CustomCallable"],
- decoder: Optional["CustomCallable"],
- middlewares: Sequence["SubscriberMiddleware[Any]"],
- dependencies: Iterable["Depends"],
- ) -> None:
- self.filter = filter
- self.parser = parser
- self.decoder = decoder
- self.middlewares = middlewares
- self.dependencies = dependencies
-
-
-class SubscriberUsecase(
- AsyncAPIOperation,
- SubscriberProto[MsgType],
-):
- """A class representing an asynchronous handler."""
-
- lock: ContextManager[Any]
- extra_watcher_options: "AnyDict"
- extra_context: "AnyDict"
- graceful_timeout: Optional[float]
-
- _broker_dependencies: Iterable["Depends"]
- _call_options: Optional["_CallOptions"]
- _call_decorators: Iterable["Decorator"]
-
- def __init__(
- self,
- *,
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
- default_parser: "AsyncCallable",
- default_decoder: "AsyncCallable",
- # AsyncAPI information
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- """Initialize a new instance of the class."""
- self.calls = []
-
- self._parser = default_parser
- self._decoder = default_decoder
- self._no_reply = no_reply
- # Watcher args
- self._no_ack = no_ack
- self._retry = retry
-
- self._call_options = None
- self._call_decorators = ()
- self.running = False
- self.lock = sync_fake_context()
-
- # Setup in include
- self._broker_dependencies = broker_dependencies
- self._broker_middlewares = broker_middlewares
-
- # register in setup later
- self._producer = None
- self.graceful_timeout = None
- self.extra_context = {}
- self.extra_watcher_options = {}
-
- # AsyncAPI
- self.title_ = title_
- self.description_ = description_
- self.include_in_schema = include_in_schema
-
- def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
- self._broker_middlewares = (*self._broker_middlewares, middleware)
-
- @override
- def setup( # type: ignore[override]
- self,
- *,
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
- extra_context: "AnyDict",
- # broker options
- broker_parser: Optional["CustomCallable"],
- broker_decoder: Optional["CustomCallable"],
- # dependant args
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
- ) -> None:
- self.lock = MultiLock()
-
- self._producer = producer
- self.graceful_timeout = graceful_timeout
- self.extra_context = extra_context
-
- self.watcher = get_watcher_context(logger, self._no_ack, self._retry)
-
- for call in self.calls:
- if parser := call.item_parser or broker_parser:
- async_parser = resolve_custom_func(to_async(parser), self._parser)
- else:
- async_parser = self._parser
-
- if decoder := call.item_decoder or broker_decoder:
- async_decoder = resolve_custom_func(to_async(decoder), self._decoder)
- else:
- async_decoder = self._decoder
-
- self._parser = async_parser
- self._decoder = async_decoder
-
- call.setup(
- parser=async_parser,
- decoder=async_decoder,
- apply_types=apply_types,
- is_validate=is_validate,
- _get_dependant=_get_dependant,
- _call_decorators=(*self._call_decorators, *_call_decorators),
- broker_dependencies=self._broker_dependencies,
- )
-
- call.handler.refresh(with_mock=False)
-
- @abstractmethod
- async def start(self) -> None:
- """Start the handler."""
- self.running = True
-
- @abstractmethod
- async def close(self) -> None:
- """Close the handler.
-
- Blocks the event loop for up to graceful_timeout seconds.
- """
- self.running = False
- if isinstance(self.lock, MultiLock):
- await self.lock.wait_release(self.graceful_timeout)
-
- def add_call(
- self,
- *,
- filter_: "Filter[Any]",
- parser_: Optional["CustomCallable"],
- decoder_: Optional["CustomCallable"],
- middlewares_: Sequence["SubscriberMiddleware[Any]"],
- dependencies_: Iterable["Depends"],
- ) -> Self:
- self._call_options = _CallOptions(
- filter=filter_,
- parser=parser_,
- decoder=decoder_,
- middlewares=middlewares_,
- dependencies=dependencies_,
- )
- return self
-
- @overload
- def __call__(
- self,
- func: None = None,
- *,
- filter: Optional["Filter[Any]"] = None,
- parser: Optional["CustomCallable"] = None,
- decoder: Optional["CustomCallable"] = None,
- middlewares: Sequence["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
- ) -> Callable[
- [Callable[P_HandlerParams, T_HandlerReturn]],
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- ]: ...
-
- @overload
- def __call__(
- self,
- func: Callable[P_HandlerParams, T_HandlerReturn],
- *,
- filter: Optional["Filter[Any]"] = None,
- parser: Optional["CustomCallable"] = None,
- decoder: Optional["CustomCallable"] = None,
- middlewares: Sequence["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
- ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]": ...
-
- def __call__(
- self,
- func: Optional[Callable[P_HandlerParams, T_HandlerReturn]] = None,
- *,
- filter: Optional["Filter[Any]"] = None,
- parser: Optional["CustomCallable"] = None,
- decoder: Optional["CustomCallable"] = None,
- middlewares: Sequence["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
- ) -> Any:
- if (options := self._call_options) is None:
- raise SetupError(
- "You can't create subscriber directly. Please, use `add_call` at first."
- )
-
- total_deps = (*options.dependencies, *dependencies)
- total_middlewares = (*options.middlewares, *middlewares)
-
- def real_wrapper(
- func: Callable[P_HandlerParams, T_HandlerReturn],
- ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
- handler = HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn](
- func
- )
-
- self.calls.append(
- HandlerItem[MsgType](
- handler=handler,
- filter=to_async(filter or options.filter),
- item_parser=parser or options.parser,
- item_decoder=decoder or options.decoder,
- item_middlewares=total_middlewares,
- dependencies=total_deps,
- )
- )
-
- return handler
-
- if func is None:
- return real_wrapper
-
- else:
- return real_wrapper(func)
-
- async def consume(self, msg: MsgType) -> Any:
- """Consume a message asynchronously."""
- if not self.running:
- return None
-
- try:
- return await self.process_message(msg)
-
- except StopConsume:
- # Stop handler at StopConsume exception
- await self.close()
-
- except SystemExit:
- # Stop handler at `exit()` call
- await self.close()
-
- if app := context.get("app"):
- app.exit()
-
- except Exception: # nosec B110
- # All other exceptions were logged by CriticalLogMiddleware
- pass
-
- async def process_message(self, msg: MsgType) -> "Response":
- """Execute all message processing stages."""
- async with AsyncExitStack() as stack:
- stack.enter_context(self.lock)
-
- # Enter context before middlewares
- for k, v in self.extra_context.items():
- stack.enter_context(context.scope(k, v))
-
- stack.enter_context(context.scope("handler_", self))
-
- # enter all middlewares
- middlewares: List[BaseMiddleware] = []
- for base_m in self._broker_middlewares:
- middleware = base_m(msg)
- middlewares.append(middleware)
- await middleware.__aenter__()
-
- cache: Dict[Any, Any] = {}
- parsing_error: Optional[Exception] = None
- for h in self.calls:
- try:
- message = await h.is_suitable(msg, cache)
- except Exception as e:
- parsing_error = e
- break
-
- if message is not None:
- # Acknowledgement scope
- # TODO: move it to the scope entry once the `retry` option is deprecated
- await stack.enter_async_context(
- self.watcher(
- message,
- **self.extra_watcher_options,
- )
- )
-
- stack.enter_context(
- context.scope("log_context", self.get_log_context(message))
- )
- stack.enter_context(context.scope("message", message))
-
- # Middlewares should be exited before scope release
- for m in middlewares:
- stack.push_async_exit(m.__aexit__)
-
- result_msg = ensure_response(
- await h.call(
- message=message,
- # consumer middlewares
- _extra_middlewares=(
- m.consume_scope for m in middlewares[::-1]
- ),
- )
- )
-
- if not result_msg.correlation_id:
- result_msg.correlation_id = message.correlation_id
-
- for p in chain(
- self.__get_response_publisher(message),
- h.handler._publishers,
- ):
- await p.publish(
- result_msg.body,
- **result_msg.as_publish_kwargs(),
- # publisher middlewares
- _extra_middlewares=[
- m.publish_scope for m in middlewares[::-1]
- ],
- )
-
- # Return data for tests
- return result_msg
-
- # A suitable handler was not found, or a
- # parsing/decoding exception occurred
- for m in middlewares:
- stack.push_async_exit(m.__aexit__)
-
- if parsing_error:
- raise parsing_error
-
- else:
- raise SubscriberNotFound(f"There is no suitable handler for {msg=}")
-
- # An error was raised and processed by some middleware
- return ensure_response(None)
-
- def __get_response_publisher(
- self,
- message: "StreamMessage[MsgType]",
- ) -> Iterable["BasePublisherProto"]:
- if not message.reply_to or self._no_reply:
- return ()
-
- else:
- return self._make_response_publisher(message)
-
- def get_log_context(
- self,
- message: Optional["StreamMessage[MsgType]"],
- ) -> Dict[str, str]:
- """Generate log context."""
- return {
- "message_id": getattr(message, "message_id", ""),
- }
-
- # AsyncAPI methods
-
- @property
- def call_name(self) -> str:
- """Returns the name of the handler call."""
- if not self.calls:
- return "Subscriber"
-
- return to_camelcase(self.calls[0].call_name)
-
- def get_description(self) -> Optional[str]:
- """Returns the description of the handler."""
- if not self.calls: # pragma: no cover
- return None
-
- else:
- return self.calls[0].description
-
- def get_payloads(self) -> List[Tuple["AnyDict", str]]:
- """Get the payloads of the handler."""
- payloads: List[Tuple[AnyDict, str]] = []
-
- for h in self.calls:
- if h.dependant is None:
- raise SetupError("You should setup `Handler` at first.")
-
- body = parse_handler_params(
- h.dependant,
- prefix=f"{self.title_ or self.call_name}:Message",
- )
-
- payloads.append((body, to_camelcase(h.call_name)))
-
- if not self.calls:
- payloads.append(
- (
- {
- "title": f"{self.title_ or self.call_name}:Message:Payload",
- },
- to_camelcase(self.call_name),
- )
- )
-
- return payloads
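The `add_call`/`__call__` machinery above is what backs attaching several filtered handlers to a single subscription. A broker-agnostic sketch (the `broker` object and queue name are hypothetical; `filter=` follows the subscriber API shown in the overloads above):

@broker.subscriber("events", filter=lambda m: m.content_type == "application/json")
async def json_handler(body: dict):
    ...  # chosen when is_suitable() passes this filter

@broker.subscriber("events")
async def fallback_handler(body: str):
    ...  # default_filter accepts any message not yet processed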
diff --git a/faststream/broker/types.py b/faststream/broker/types.py
deleted file mode 100644
index 145a37418e..0000000000
--- a/faststream/broker/types.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from typing import (
- Any,
- Awaitable,
- Callable,
- Optional,
- Protocol,
- TypeVar,
- Union,
-)
-
-from typing_extensions import ParamSpec, TypeAlias
-
-from faststream.broker.message import StreamMessage
-from faststream.broker.middlewares import BaseMiddleware
-from faststream.types import AsyncFunc, AsyncFuncAny
-
-MsgType = TypeVar("MsgType")
-StreamMsg = TypeVar("StreamMsg", bound=StreamMessage[Any])
-ConnectionType = TypeVar("ConnectionType")
-
-
-SyncFilter: TypeAlias = Callable[[StreamMsg], bool]
-AsyncFilter: TypeAlias = Callable[[StreamMsg], Awaitable[bool]]
-Filter: TypeAlias = Union[
- SyncFilter[StreamMsg],
- AsyncFilter[StreamMsg],
-]
-
-SyncCallable: TypeAlias = Callable[
- [Any],
- Any,
-]
-AsyncCallable: TypeAlias = Callable[
- [Any],
- Awaitable[Any],
-]
-AsyncCustomCallable: TypeAlias = Union[
- AsyncCallable,
- Callable[
- [Any, AsyncCallable],
- Awaitable[Any],
- ],
-]
-CustomCallable: TypeAlias = Union[
- AsyncCustomCallable,
- SyncCallable,
-]
-
-P_HandlerParams = ParamSpec("P_HandlerParams")
-T_HandlerReturn = TypeVar("T_HandlerReturn")
-
-
-AsyncWrappedHandlerCall: TypeAlias = Callable[
- [StreamMessage[MsgType]],
- Awaitable[Optional[T_HandlerReturn]],
-]
-SyncWrappedHandlerCall: TypeAlias = Callable[
- [StreamMessage[MsgType]],
- Optional[T_HandlerReturn],
-]
-WrappedHandlerCall: TypeAlias = Union[
- AsyncWrappedHandlerCall[MsgType, T_HandlerReturn],
- SyncWrappedHandlerCall[MsgType, T_HandlerReturn],
-]
-
-
-BrokerMiddleware: TypeAlias = Callable[[Optional[MsgType]], BaseMiddleware]
-SubscriberMiddleware: TypeAlias = Callable[
- [AsyncFuncAny, MsgType],
- MsgType,
-]
-
-
-class PublisherMiddleware(Protocol):
- """Publisher middleware interface."""
-
- def __call__(
- self,
- call_next: AsyncFunc,
- *__args: Any,
- **__kwargs: Any,
- ) -> Any: ...
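A sketch of callables satisfying two of the aliases above (the names and header stamping are illustrative):

# SubscriberMiddleware: receives the next call and the message
async def log_message(call_next, msg):
    print("got:", msg)
    return await call_next(msg)

# PublisherMiddleware: receives the next call plus the publish args/kwargs
async def stamp_headers(call_next, msg, **kwargs):
    kwargs.setdefault("headers", {})["x-stamp"] = "1"
    return await call_next(msg, **kwargs)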
diff --git a/faststream/broker/utils.py b/faststream/broker/utils.py
deleted file mode 100644
index f605fc1d20..0000000000
--- a/faststream/broker/utils.py
+++ /dev/null
@@ -1,155 +0,0 @@
-import asyncio
-import inspect
-from contextlib import AsyncExitStack, suppress
-from functools import partial
-from typing import (
- TYPE_CHECKING,
- Any,
- AsyncContextManager,
- Awaitable,
- Callable,
- Optional,
- Sequence,
- Type,
- Union,
- cast,
-)
-
-import anyio
-from typing_extensions import Self
-
-from faststream.broker.acknowledgement_watcher import WatcherContext, get_watcher
-from faststream.broker.types import MsgType
-from faststream.utils.functions import fake_context, return_input, to_async
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.types import (
- AsyncCallable,
- BrokerMiddleware,
- CustomCallable,
- SyncCallable,
- )
- from faststream.types import LoggerProto
-
-
-async def process_msg(
- msg: Optional[MsgType],
- middlewares: Sequence["BrokerMiddleware[MsgType]"],
- parser: Callable[[MsgType], Awaitable["StreamMessage[MsgType]"]],
- decoder: Callable[["StreamMessage[MsgType]"], "Any"],
-) -> Optional["StreamMessage[MsgType]"]:
- if msg is None:
- return None
-
- async with AsyncExitStack() as stack:
- return_msg: Callable[
- [StreamMessage[MsgType]],
- Awaitable[StreamMessage[MsgType]],
- ] = return_input
-
- for m in middlewares[::-1]:
- mid = m(msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await parser(msg)
- parsed_msg._decoded_body = await decoder(parsed_msg)
- return await return_msg(parsed_msg)
-
- raise AssertionError("unreachable")
-
-
-async def default_filter(msg: "StreamMessage[Any]") -> bool:
- """A function to filter stream messages."""
- return not msg.processed
-
-
-def get_watcher_context(
- logger: Optional["LoggerProto"],
- no_ack: bool,
- retry: Union[bool, int],
- **extra_options: Any,
-) -> Callable[..., AsyncContextManager[None]]:
- """Create Acknowledgement scope."""
- if no_ack:
- return fake_context
-
- else:
- return partial(
- WatcherContext,
- watcher=get_watcher(logger, retry),
- logger=logger,
- **extra_options,
- )
-
-
-class MultiLock:
- """A class representing a multi lock."""
-
- def __init__(self) -> None:
- """Initialize a new instance of the class."""
- self.queue: asyncio.Queue[None] = asyncio.Queue()
-
- def __enter__(self) -> Self:
- """Enter the context."""
- self.acquire()
- return self
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional["TracebackType"],
- ) -> None:
- """Exit the context."""
- self.release()
-
- def acquire(self) -> None:
- """Acquire lock."""
- self.queue.put_nowait(None)
-
- def release(self) -> None:
- """Release lock."""
- with suppress(asyncio.QueueEmpty, ValueError):
- self.queue.get_nowait()
- self.queue.task_done()
-
- @property
- def qsize(self) -> int:
- """Return the size of the queue."""
- return self.queue.qsize()
-
- @property
- def empty(self) -> bool:
- """Return whether the queue is empty."""
- return self.queue.empty()
-
- async def wait_release(self, timeout: Optional[float] = None) -> None:
- """Wait for the queue to be released.
-
- Used for graceful shutdown.
- """
- if timeout:
- with anyio.move_on_after(timeout):
- await self.queue.join()
-
-
-def resolve_custom_func(
- custom_func: Optional["CustomCallable"],
- default_func: "AsyncCallable",
-) -> "AsyncCallable":
- """Resolve a custom parser/decoder with default one."""
- if custom_func is None:
- return default_func
-
- original_params = inspect.signature(custom_func).parameters
-
- if len(original_params) == 1:
- return to_async(cast(Union["SyncCallable", "AsyncCallable"], custom_func))
-
- else:
- name = tuple(original_params.items())[1][0]
- return partial(to_async(custom_func), **{name: default_func})
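`resolve_custom_func` above dispatches on arity: a one-parameter callable replaces the default parser/decoder outright, while a two-parameter callable receives the default as its second argument via `partial`. A sketch (both functions are illustrative):

async def replacing_parser(msg):
    return my_parse(msg)  # hypothetical helper; the default parser is ignored

async def wrapping_parser(msg, original_parser):
    parsed = await original_parser(msg)  # default injected by resolve_custom_func
    parsed.headers["x-custom"] = "1"
    return parsed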
diff --git a/faststream/broker/wrapper/call.py b/faststream/broker/wrapper/call.py
deleted file mode 100644
index 3072547688..0000000000
--- a/faststream/broker/wrapper/call.py
+++ /dev/null
@@ -1,205 +0,0 @@
-import asyncio
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Generic,
- Iterable,
- List,
- Mapping,
- Optional,
- Sequence,
- Union,
-)
-from unittest.mock import MagicMock
-
-import anyio
-from fast_depends.core import CallModel, build_call_model
-from fast_depends.use import _InjectWrapper, inject
-
-from faststream.broker.types import (
- MsgType,
- P_HandlerParams,
- T_HandlerReturn,
-)
-from faststream.exceptions import SetupError
-from faststream.utils.functions import to_async
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import PublisherProto
- from faststream.types import Decorator
-
-
-class HandlerCallWrapper(Generic[MsgType, P_HandlerParams, T_HandlerReturn]):
- """A generic class to wrap handler calls."""
-
- mock: Optional[MagicMock]
- future: Optional["asyncio.Future[Any]"]
- is_test: bool
-
- _wrapped_call: Optional[Callable[..., Awaitable[Any]]]
- _original_call: Callable[P_HandlerParams, T_HandlerReturn]
- _publishers: List["PublisherProto[MsgType]"]
-
- __slots__ = (
- "_original_call",
- "_publishers",
- "_wrapped_call",
- "future",
- "is_test",
- "mock",
- )
-
- def __new__(
- cls,
- call: Union[
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- Callable[P_HandlerParams, T_HandlerReturn],
- ],
- ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
- """Create a new instance of the class."""
- if isinstance(call, cls):
- return call
- else:
- return super().__new__(cls)
-
- def __init__(
- self,
- call: Callable[P_HandlerParams, T_HandlerReturn],
- ) -> None:
- """Initialize a handler."""
- if not isinstance(call, HandlerCallWrapper):
- self._original_call = call
- self._wrapped_call = None
- self._publishers = []
-
- self.mock = None
- self.future = None
- self.is_test = False
-
- def __call__(
- self,
- *args: P_HandlerParams.args,
- **kwargs: P_HandlerParams.kwargs,
- ) -> T_HandlerReturn:
- """Calls the object as a function."""
- return self._original_call(*args, **kwargs)
-
- def call_wrapped(
- self,
- message: "StreamMessage[MsgType]",
- ) -> Awaitable[Any]:
- """Calls the wrapped function with the given message."""
- assert self._wrapped_call, "You should use `set_wrapped` first" # nosec B101
- if self.is_test:
- assert self.mock # nosec B101
- self.mock(message._decoded_body)
- return self._wrapped_call(message)
-
- async def wait_call(self, timeout: Optional[float] = None) -> None:
- """Waits for a call with an optional timeout."""
- assert ( # nosec B101
- self.future is not None
- ), "You can use this method only with TestClient"
- with anyio.fail_after(timeout):
- await self.future
-
- def set_test(self) -> None:
- self.is_test = True
- if self.mock is None:
- self.mock = MagicMock()
- self.refresh(with_mock=True)
-
- def reset_test(self) -> None:
- self.is_test = False
- self.mock = None
- self.future = None
-
- def trigger(
- self,
- result: Any = None,
- error: Optional[BaseException] = None,
- ) -> None:
- if not self.is_test:
- return
-
- if self.future is None:
- raise SetupError("You can use this method only with TestClient")
-
- if self.future.done():
- self.future = asyncio.Future()
-
- if error:
- self.future.set_exception(error)
- else:
- self.future.set_result(result)
-
- def refresh(self, with_mock: bool = False) -> None:
- if asyncio.events._get_running_loop() is not None:
- self.future = asyncio.Future()
-
- if with_mock and self.mock is not None:
- self.mock.reset_mock()
-
- def set_wrapped(
- self,
- *,
- apply_types: bool,
- is_validate: bool,
- dependencies: Iterable["Depends"],
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
- ) -> Optional["CallModel[..., Any]"]:
- call = self._original_call
- for decor in _call_decorators:
- call = decor(call)
- self._original_call = call
-
- f: Callable[..., Awaitable[Any]] = to_async(call)
-
- dependent: Optional[CallModel[..., Any]] = None
- if _get_dependant is None:
- dependent = build_call_model(
- f,
- cast=is_validate,
- extra_dependencies=dependencies, # type: ignore[arg-type]
- )
-
- if apply_types:
- wrapper: _InjectWrapper[Any, Any] = inject(func=None)
- f = wrapper(func=f, model=dependent)
-
- f = _wrap_decode_message(
- func=f,
- params_ln=len(dependent.flat_params),
- )
-
- self._wrapped_call = f
- return dependent
-
-
-def _wrap_decode_message(
- func: Callable[..., Awaitable[T_HandlerReturn]],
- params_ln: int,
-) -> Callable[["StreamMessage[MsgType]"], Awaitable[T_HandlerReturn]]:
- """Wraps a function to decode a message and pass it as an argument to the wrapped function."""
-
- async def decode_wrapper(message: "StreamMessage[MsgType]") -> T_HandlerReturn:
- """A wrapper function to decode and handle a message."""
- msg = await message.decode()
-
- if params_ln > 1:
- if isinstance(msg, Mapping):
- return await func(**msg)
- elif isinstance(msg, Sequence):
- return await func(*msg)
- else:
- return await func(msg)
-
- raise AssertionError("unreachable")
-
- return decode_wrapper
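`_wrap_decode_message` above unpacks the decoded body only when the handler declares more than one parameter - a sketch of the resulting dispatch (the handler is illustrative):

async def handler(a, b):  # params_ln == 2
    return a + b

# decoded Mapping {"a": 1, "b": 2} -> await handler(**msg)
# decoded Sequence [1, 2]          -> await handler(*msg)
# a single-parameter handler always receives the decoded body as-is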
diff --git a/faststream/broker/wrapper/proto.py b/faststream/broker/wrapper/proto.py
deleted file mode 100644
index 9dfcb98af5..0000000000
--- a/faststream/broker/wrapper/proto.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Iterable,
- Optional,
- Protocol,
- Sequence,
- Union,
- overload,
-)
-
-from faststream.broker.types import (
- CustomCallable,
- Filter,
- MsgType,
- P_HandlerParams,
- SubscriberMiddleware,
- T_HandlerReturn,
-)
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.wrapper.call import HandlerCallWrapper
-
-
-class WrapperProto(Protocol[MsgType]):
- """Annotation class to represent @subscriber return type."""
-
- @overload
- def __call__(
- self,
- func: None = None,
- *,
- filter: Optional["Filter[Any]"] = None,
- parser: Optional["CustomCallable"] = None,
- decoder: Optional["CustomCallable"] = None,
- middlewares: Sequence["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
- ) -> Callable[
- [Callable[P_HandlerParams, T_HandlerReturn]],
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- ]: ...
-
- @overload
- def __call__(
- self,
- func: Union[
- Callable[P_HandlerParams, T_HandlerReturn],
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- ],
- *,
- filter: Optional["Filter[Any]"] = None,
- parser: Optional["CustomCallable"] = None,
- decoder: Optional["CustomCallable"] = None,
- middlewares: Sequence["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
- ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]": ...
-
- def __call__(
- self,
- func: Union[
- Callable[P_HandlerParams, T_HandlerReturn],
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- None,
- ] = None,
- *,
- filter: Optional["Filter[Any]"] = None,
- parser: Optional["CustomCallable"] = None,
- decoder: Optional["CustomCallable"] = None,
- middlewares: Sequence["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
- ) -> Union[
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- Callable[
- [Callable[P_HandlerParams, T_HandlerReturn]],
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- ],
- ]: ...
diff --git a/faststream/cli/docs/app.py b/faststream/cli/docs/app.py
deleted file mode 100644
index 1346543b1a..0000000000
--- a/faststream/cli/docs/app.py
+++ /dev/null
@@ -1,186 +0,0 @@
-import json
-import sys
-import warnings
-from pathlib import Path
-from typing import Optional, Sequence
-
-import typer
-
-from faststream._compat import json_dumps, model_parse
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Schema
-from faststream.asyncapi.site import serve_app
-from faststream.cli.utils.imports import import_from_string
-from faststream.exceptions import INSTALL_WATCHFILES, INSTALL_YAML
-
-docs_app = typer.Typer(pretty_exceptions_short=True)
-
-
-@docs_app.command(name="serve")
-def serve(
- app: str = typer.Argument(
- ...,
- help="[python_module:FastStream] or [asyncapi.yaml/.json] - path to your application or documentation.",
- ),
- host: str = typer.Option(
- "localhost",
- help="Documentation hosting address.",
- ),
- port: int = typer.Option(
- 8000,
- help="Documentation hosting port.",
- ),
- reload: bool = typer.Option(
- False,
- "--reload",
- is_flag=True,
- help="Restart documentation at directory files changes.",
- ),
- app_dir: str = typer.Option(
- ".",
- "--app-dir",
- help=(
- "Look for APP in the specified directory, by adding this to the PYTHONPATH."
- " Defaults to the current working directory."
- ),
- ),
- is_factory: bool = typer.Option(
- False,
- "--factory",
- is_flag=True,
- help="Treat APP as an application factory.",
- ),
-) -> None:
- """Serve project AsyncAPI schema."""
- if ":" in app:
- if app_dir: # pragma: no branch
- sys.path.insert(0, app_dir)
-
- module, _ = import_from_string(app, is_factory=is_factory)
-
- module_parent = module.parent
- extra_extensions: Sequence[str] = ()
-
- else:
- module_parent = Path.cwd()
- schema_filepath = module_parent / app
- extra_extensions = (schema_filepath.suffix,)
-
- if reload:
- try:
- from faststream.cli.supervisors.watchfiles import WatchReloader
-
- except ImportError:
- warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1)
- _parse_and_serve(app, host, port, is_factory)
-
- else:
- WatchReloader(
- target=_parse_and_serve,
- args=(app, host, port, is_factory),
- reload_dirs=(str(module_parent),),
- extra_extensions=extra_extensions,
- ).run()
-
- else:
- _parse_and_serve(app, host, port, is_factory)
-
-
-@docs_app.command(name="gen")
-def gen(
- app: str = typer.Argument(
- ...,
- help="[python_module:FastStream] - path to your application.",
- ),
- yaml: bool = typer.Option(
- False,
- "--yaml",
- is_flag=True,
- help="Generate `asyncapi.yaml` schema.",
- ),
- out: Optional[str] = typer.Option(
- None,
- help="Output filename.",
- ),
- app_dir: str = typer.Option(
- ".",
- "--app-dir",
- help=(
- "Look for APP in the specified directory, by adding this to the PYTHONPATH."
- " Defaults to the current working directory."
- ),
- ),
- is_factory: bool = typer.Option(
- False,
- "--factory",
- is_flag=True,
- help="Treat APP as an application factory.",
- ),
-) -> None:
- """Generate project AsyncAPI schema."""
- if app_dir: # pragma: no branch
- sys.path.insert(0, app_dir)
-
- _, app_obj = import_from_string(app, is_factory=is_factory)
-
- raw_schema = get_app_schema(app_obj)
-
- if yaml:
- try:
- schema = raw_schema.to_yaml()
- except ImportError as e: # pragma: no cover
- typer.echo(INSTALL_YAML, err=True)
- raise typer.Exit(1) from e
-
- name = out or "asyncapi.yaml"
-
- with Path(name).open("w") as f:
- f.write(schema)
-
- else:
- schema = raw_schema.to_jsonable()
- name = out or "asyncapi.json"
-
- with Path(name).open("w") as f:
- json.dump(schema, f, indent=2)
-
- typer.echo(f"Your project AsyncAPI scheme was placed to `{name}`")
-
-
-def _parse_and_serve(
- app: str,
- host: str = "localhost",
- port: int = 8000,
- is_factory: bool = False,
-) -> None:
- if ":" in app:
- _, app_obj = import_from_string(app, is_factory=is_factory)
-
- raw_schema = get_app_schema(app_obj)
-
- else:
- schema_filepath = Path.cwd() / app
-
- if schema_filepath.suffix == ".json":
- data = schema_filepath.read_bytes()
-
- elif schema_filepath.suffix == ".yaml" or schema_filepath.suffix == ".yml":
- try:
- import yaml
- except ImportError as e: # pragma: no cover
- typer.echo(INSTALL_YAML, err=True)
- raise typer.Exit(1) from e
-
- with schema_filepath.open("r") as f:
- schema = yaml.safe_load(f)
-
- data = json_dumps(schema)
-
- else:
- raise ValueError(
- f"Unknown extension given - {app}; Please provide app in format [python_module:FastStream] or [asyncapi.yaml/.json] - path to your application or documentation"
- )
-
- raw_schema = model_parse(Schema, data)
-
- serve_app(raw_schema, host, port)
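Illustrative invocations of the two commands removed above, using the flags defined in this hunk (module path and filenames are placeholders):

faststream docs serve my_module:app --host localhost --port 8000 --reload
faststream docs gen my_module:app --yaml --out asyncapi.yaml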
diff --git a/faststream/cli/main.py b/faststream/cli/main.py
deleted file mode 100644
index b149e7b8d0..0000000000
--- a/faststream/cli/main.py
+++ /dev/null
@@ -1,295 +0,0 @@
-import logging
-import sys
-import warnings
-from contextlib import suppress
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
-
-import anyio
-import typer
-from click.exceptions import MissingParameter
-from typer.core import TyperOption
-
-from faststream import FastStream
-from faststream.__about__ import __version__
-from faststream._internal.application import Application
-from faststream.asgi.app import AsgiFastStream
-from faststream.cli.docs.app import docs_app
-from faststream.cli.utils.imports import import_from_string
-from faststream.cli.utils.logs import LogLevels, get_log_level, set_log_level
-from faststream.cli.utils.parser import parse_cli_args
-from faststream.exceptions import INSTALL_WATCHFILES, SetupError, ValidationError
-
-if TYPE_CHECKING:
- from faststream.broker.core.usecase import BrokerUsecase
- from faststream.types import AnyDict, SettingField
-
-cli = typer.Typer(pretty_exceptions_short=True)
-cli.add_typer(docs_app, name="docs", help="AsyncAPI schema commands")
-
-
-def version_callback(version: bool) -> None:
- """Callback function for displaying version information."""
- if version:
- import platform
-
- typer.echo(
- f"Running FastStream {__version__} with {platform.python_implementation()} "
- f"{platform.python_version()} on {platform.system()}"
- )
-
- raise typer.Exit()
-
-
-@cli.callback()
-def main(
- version: Optional[bool] = typer.Option(
- False,
- "-v",
- "--version",
- callback=version_callback,
- is_eager=True,
- help="Show current platform, python and FastStream version.",
- ),
-) -> None:
- """Generate, run and manage FastStream apps to greater development experience."""
-
-
-@cli.command(
- context_settings={"allow_extra_args": True, "ignore_unknown_options": True}
-)
-def run(
- ctx: typer.Context,
- app: str = typer.Argument(
- ...,
- help="[python_module:FastStream] - path to your application.",
- ),
- workers: int = typer.Option(
- 1,
- show_default=False,
- help="Run [workers] applications with process spawning.",
- envvar="FASTSTREAM_WORKERS",
- ),
- log_level: LogLevels = typer.Option(
- LogLevels.notset,
- case_sensitive=False,
- help="Set selected level for FastStream and brokers logger objects.",
- envvar="FASTSTREAM_LOG_LEVEL",
- ),
- reload: bool = typer.Option(
- False,
- "--reload",
- is_flag=True,
- help="Restart app at directory files changes.",
- ),
- watch_extensions: List[str] = typer.Option(
- (),
- "--extension",
- "--ext",
- "--reload-extension",
- "--reload-ext",
- help="List of file extensions to watch by.",
- ),
- app_dir: str = typer.Option(
- ".",
- "--app-dir",
- help=(
- "Look for APP in the specified directory, by adding this to the PYTHONPATH."
- " Defaults to the current working directory."
- ),
- envvar="FASTSTREAM_APP_DIR",
- ),
- is_factory: bool = typer.Option(
- False,
- "--factory",
- is_flag=True,
- help="Treat APP as an application factory.",
- ),
-) -> None:
- """Run [MODULE:APP] FastStream application."""
- if watch_extensions and not reload:
- typer.echo(
- "Extra reload extensions has no effect without `--reload` flag."
- "\nProbably, you forgot it?"
- )
-
- app, extra = parse_cli_args(app, *ctx.args)
- casted_log_level = get_log_level(log_level)
-
- if app_dir: # pragma: no branch
- sys.path.insert(0, app_dir)
-
- # Should be imported after sys.path changes
- module_path, app_obj = import_from_string(app, is_factory=is_factory)
-
- args = (app, extra, is_factory, casted_log_level)
-
- if reload and workers > 1:
- raise SetupError("You can't use reload option with multiprocessing")
-
- if reload:
- try:
- from faststream.cli.supervisors.watchfiles import WatchReloader
- except ImportError:
- warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1)
- _run(*args)
-
- else:
- if app_dir != ".":
- reload_dirs = [str(module_path), app_dir]
- else:
- reload_dirs = [str(module_path)]
-
- WatchReloader(
- target=_run,
- args=args,
- reload_dirs=reload_dirs,
- extra_extensions=watch_extensions,
- ).run()
-
- elif workers > 1:
- if isinstance(app_obj, FastStream):
- from faststream.cli.supervisors.multiprocess import Multiprocess
-
- Multiprocess(
- target=_run,
- args=(*args, logging.DEBUG),
- workers=workers,
- ).run()
- elif isinstance(app_obj, AsgiFastStream):
- from faststream.cli.supervisors.asgi_multiprocess import ASGIMultiprocess
-
- ASGIMultiprocess(
- target=app,
- args=args, # type: ignore[arg-type]
- workers=workers,
- ).run()
- else:
- raise typer.BadParameter(
- f"Unexpected app type, expected FastStream or AsgiFastStream, got: {type(app_obj)}."
- )
-
- else:
- _run_imported_app(
- app_obj,
- extra_options=extra,
- log_level=casted_log_level,
- )
-
-
-def _run(
- # NOTE: we should pass `str` because FastStream is not picklable
- app: str,
- extra_options: Dict[str, "SettingField"],
- is_factory: bool,
- log_level: int = logging.NOTSET,
- app_level: int = logging.INFO, # option for reloader only
-) -> None:
- """Runs the specified application."""
- _, app_obj = import_from_string(app, is_factory=is_factory)
- _run_imported_app(
- app_obj,
- extra_options=extra_options,
- log_level=log_level,
- app_level=app_level,
- )
-
-
-def _run_imported_app(
- app_obj: "Application",
- extra_options: Dict[str, "SettingField"],
- log_level: int = logging.NOTSET,
- app_level: int = logging.INFO, # option for reloader only
-) -> None:
- if not isinstance(app_obj, Application):
- raise typer.BadParameter(
- f'Imported object "{app_obj}" must be "Application" type.',
- )
-
- if log_level > 0:
- set_log_level(log_level, app_obj)
-
- if sys.platform not in ("win32", "cygwin", "cli"): # pragma: no cover
- with suppress(ImportError):
- import uvloop
-
- uvloop.install()
-
- try:
- anyio.run(
- app_obj.run,
- app_level,
- extra_options,
- )
-
- except ValidationError as e:
- ex = MissingParameter(
- message=(
- "You registered extra options in your application "
- "`lifespan/on_startup` hook, but does not set in CLI."
- ),
- param=TyperOption(param_decls=[f"--{x}" for x in e.fields]),
- )
-
- try:
- from typer import rich_utils
-
- rich_utils.rich_format_error(ex)
- except ImportError:
- ex.show()
-
- sys.exit(1)
-
-
-@cli.command(
- context_settings={"allow_extra_args": True, "ignore_unknown_options": True}
-)
-def publish(
- ctx: typer.Context,
- app: str = typer.Argument(..., help="FastStream app instance, e.g., main:app."),
- message: str = typer.Argument(..., help="Message to be published."),
- rpc: bool = typer.Option(False, help="Enable RPC mode and system output."),
- is_factory: bool = typer.Option(
- False,
- "--factory",
- is_flag=True,
- help="Treat APP as an application factory.",
- ),
-) -> None:
- """Publish a message using the specified broker in a FastStream application.
-
- This command publishes a message to a broker configured in a FastStream app instance.
- It supports various brokers and can handle extra arguments specific to each broker type.
- These are parsed and passed to the broker's publish method.
- """
- app, extra = parse_cli_args(app, *ctx.args)
- extra["message"] = message
- extra["rpc"] = rpc
-
- try:
- if not app:
- raise ValueError("App parameter is required.")
- if not message:
- raise ValueError("Message parameter is required.")
-
- _, app_obj = import_from_string(app, is_factory=is_factory)
-
- if not app_obj.broker:
- raise ValueError("Broker instance not found in the app.")
-
- result = anyio.run(publish_message, app_obj.broker, extra)
-
- if rpc:
- typer.echo(result)
-
- except Exception as e:
- typer.echo(f"Publish error: {e}")
- sys.exit(1)
-
-
-async def publish_message(broker: "BrokerUsecase[Any, Any]", extra: "AnyDict") -> Any:
- try:
- async with broker:
- return await broker.publish(**extra)
- except Exception as e:
- typer.echo(f"Error when broker was publishing: {e}")
- sys.exit(1)
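A detail worth noting in the deleted `run` command: `_run` receives the *import string* rather than the app object, because a `FastStream` instance is not picklable and every spawned worker has to re-import it. A toy sketch of that pattern (the `build_app` helper is hypothetical and stands in for `import_from_string`):

```python
import multiprocessing


def build_app(import_str: str) -> str:
    # Hypothetical stand-in for import_from_string(): the app object is
    # re-created inside each child instead of being pickled across.
    return f"app built from {import_str!r}"


def worker(import_str: str) -> None:
    app = build_app(import_str)
    print(app)


if __name__ == "__main__":
    spawn = multiprocessing.get_context("spawn")
    procs = [spawn.Process(target=worker, args=("main:app",)) for _ in range(2)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
```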
diff --git a/faststream/cli/supervisors/asgi_multiprocess.py b/faststream/cli/supervisors/asgi_multiprocess.py
deleted file mode 100644
index 2696398f8a..0000000000
--- a/faststream/cli/supervisors/asgi_multiprocess.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import inspect
-from typing import Dict, Tuple
-
-from faststream.asgi.app import cast_uvicorn_params
-
-
-class ASGIMultiprocess:
- def __init__(
- self, target: str, args: Tuple[str, Dict[str, str], bool, int], workers: int
- ) -> None:
- _, uvicorn_kwargs, is_factory, log_level = args
- self._target = target
- self._uvicorn_kwargs = cast_uvicorn_params(uvicorn_kwargs or {})
- self._workers = workers
- self._is_factory = is_factory
- self._log_level = log_level
-
- def run(self) -> None:
- try:
- import uvicorn
- except ImportError as e:
- raise RuntimeError(
- "You need uvicorn to run FastStream ASGI App via CLI. pip install uvicorn"
- ) from e
-
- uvicorn_params = set(inspect.signature(uvicorn.run).parameters.keys())
-
- uvicorn.run(
- self._target,
- factory=self._is_factory,
- workers=self._workers,
- log_level=self._log_level,
- **{
- key: v
- for key, v in self._uvicorn_kwargs.items()
- if key in uvicorn_params
- },
- )
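The `run` method above forwards only the CLI kwargs that `uvicorn.run` actually declares, using `inspect.signature` as an allow-list. The same guard works for any callable; a generic sketch:

```python
import inspect
from typing import Any, Callable


def filter_kwargs(func: Callable[..., Any], kwargs: dict) -> dict:
    """Drop any kwargs that func's signature does not accept."""
    allowed = set(inspect.signature(func).parameters)
    return {key: value for key, value in kwargs.items() if key in allowed}


def serve(host: str = "127.0.0.1", port: int = 8000) -> str:
    return f"{host}:{port}"


# "workers" is silently dropped because serve() does not declare it.
print(serve(**filter_kwargs(serve, {"host": "0.0.0.0", "port": 80, "workers": 4})))
```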
diff --git a/faststream/cli/supervisors/utils.py b/faststream/cli/supervisors/utils.py
deleted file mode 100644
index 0d39460bd8..0000000000
--- a/faststream/cli/supervisors/utils.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import asyncio
-import multiprocessing
-import os
-import signal
-import sys
-from contextlib import suppress
-from typing import TYPE_CHECKING, Any, Callable, Optional
-
-if TYPE_CHECKING:
- from multiprocessing.context import SpawnProcess
- from types import FrameType
-
- from faststream.types import DecoratedCallableNone
-
-multiprocessing.allow_connection_pickling()
-spawn = multiprocessing.get_context("spawn")
-
-
-HANDLED_SIGNALS = (
- signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
- signal.SIGTERM, # Unix signal 15. Sent by `kill <pid>`.
-)
-
-
-def set_exit(
- func: Callable[[int, Optional["FrameType"]], Any],
- *,
- sync: bool = False,
-) -> None:
- """Set exit handler for signals.
-
- Args:
- func: A callable object that takes an integer and an optional frame type as arguments and returns any value.
- sync: set sync or async signal callback.
- """
- if not sync:
- with suppress(NotImplementedError):
- loop = asyncio.get_event_loop()
-
- for sig in HANDLED_SIGNALS:
- loop.add_signal_handler(sig, func, sig, None)
-
- return
-
- # Windows or sync mode
- for sig in HANDLED_SIGNALS:
- signal.signal(sig, func)
-
-
-def get_subprocess(target: "DecoratedCallableNone", args: Any) -> "SpawnProcess":
- """Spawn a subprocess."""
- stdin_fileno: Optional[int]
- try:
- stdin_fileno = sys.stdin.fileno()
- except OSError:
- stdin_fileno = None
-
- return spawn.Process(
- target=subprocess_started,
- args=args,
- kwargs={"t": target, "stdin_fileno": stdin_fileno},
- )
-
-
-def subprocess_started(
- *args: Any,
- t: "DecoratedCallableNone",
- stdin_fileno: Optional[int],
-) -> None:
- """Start a subprocess."""
- if stdin_fileno is not None: # pragma: no cover
- sys.stdin = os.fdopen(stdin_fileno)
- t(*args)
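`set_exit` above tries the asyncio-native `loop.add_signal_handler` first and falls back to `signal.signal` where that is unavailable (Windows, or no usable event loop). A compact sketch of the same fallback for a single signal:

```python
import asyncio
import signal
from contextlib import suppress
from typing import Any, Callable


def install_sigint_handler(callback: Callable[..., Any]) -> None:
    """Prefer the event-loop handler; fall back to a sync signal handler."""
    with suppress(NotImplementedError, RuntimeError):
        loop = asyncio.get_event_loop()
        loop.add_signal_handler(signal.SIGINT, callback, signal.SIGINT, None)
        return

    # Windows or no event loop available: plain sync handler.
    signal.signal(signal.SIGINT, callback)
```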
diff --git a/faststream/cli/utils/imports.py b/faststream/cli/utils/imports.py
deleted file mode 100644
index a23fc1914e..0000000000
--- a/faststream/cli/utils/imports.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import importlib
-from importlib.util import module_from_spec, spec_from_file_location
-from pathlib import Path
-from typing import Tuple
-
-import typer
-
-from faststream._internal.application import Application
-from faststream.exceptions import SetupError
-
-
-def try_import_app(module: Path, app: str) -> "Application":
- """Tries to import a FastStream app from a module."""
- try:
- app_object = import_object(module, app)
-
- except FileNotFoundError as e:
- typer.echo(e, err=True)
- raise typer.BadParameter(
- "Please, input module like [python_file:faststream_app_name] or [module:attribute]"
- ) from e
-
- else:
- return app_object # type: ignore
-
-
-def import_object(module: Path, app: str) -> object:
- """Import an object from a module."""
- spec = spec_from_file_location(
- "mode",
- f"{module}.py",
- submodule_search_locations=[str(module.parent.absolute())],
- )
-
- if spec is None: # pragma: no cover
- raise FileNotFoundError(module)
-
- mod = module_from_spec(spec)
- loader = spec.loader
-
- if loader is None: # pragma: no cover
- raise SetupError(f"{spec} has no loader")
-
- loader.exec_module(mod)
-
- try:
- obj = getattr(mod, app)
- except AttributeError as e:
- raise FileNotFoundError(module) from e
-
- return obj
-
-
-def get_app_path(app: str) -> Tuple[Path, str]:
- """Get the application path."""
- if ":" not in app:
- raise SetupError(f"`{app}` is not a FastStream")
-
- module, app_name = app.split(":", 2)
-
- mod_path = Path.cwd()
- for i in module.split("."):
- mod_path = mod_path / i
-
- return mod_path, app_name
-
-
-def import_from_string(
- import_str: str,
- *,
- is_factory: bool = False,
-) -> Tuple[Path, "Application"]:
- module_path, instance = _import_obj_or_factory(import_str)
-
- if is_factory:
- if callable(instance):
- instance = instance()
- else:
- raise typer.BadParameter(f'"{instance}" is not a factory')
-
- if callable(instance) and not is_factory and not isinstance(instance, Application):
- raise typer.BadParameter("Please, use --factory option for callable object")
-
- return module_path, instance
-
-
-def _import_obj_or_factory(import_str: str) -> Tuple[Path, "Application"]:
- """Import FastStream application from module specified by a string."""
- if not isinstance(import_str, str):
- raise typer.BadParameter("Given value is not of type string")
-
- module_str, _, attrs_str = import_str.partition(":")
- if not module_str or not attrs_str:
- raise typer.BadParameter(
- f'Import string "{import_str}" must be in format ":"'
- )
-
- try:
- module = importlib.import_module( # nosemgrep: python.lang.security.audit.non-literal-import.non-literal-import
- module_str
- )
-
- except ModuleNotFoundError:
- module_path, app_name = get_app_path(import_str)
- instance = try_import_app(module_path, app_name)
-
- else:
- attr = module
- try:
- for attr_str in attrs_str.split("."):
- attr = getattr(attr, attr_str)
- instance = attr # type: ignore[assignment]
-
- except AttributeError as e:
- typer.echo(e, err=True)
- raise typer.BadParameter(
- f'Attribute "{attrs_str}" not found in module "{module_str}".'
- ) from e
-
- if module.__file__:
- module_path = Path(module.__file__).resolve().parent
- else:
- module_path = Path.cwd()
-
- return module_path, instance
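The attribute walk in `_import_obj_or_factory` above means `pkg.mod:obj.attr` resolves `obj` first and then `attr` on it. A standard-library-only sketch of that resolution:

```python
import importlib
from typing import Any


def resolve(import_str: str) -> Any:
    """Resolve 'module[.sub]:attr[.attr]' the way the deleted CLI code does."""
    module_str, _, attrs_str = import_str.partition(":")
    if not module_str or not attrs_str:
        raise ValueError(
            f'Import string "{import_str}" must be in format "<module>:<attribute>"'
        )

    obj: Any = importlib.import_module(module_str)
    for attr_str in attrs_str.split("."):
        obj = getattr(obj, attr_str)
    return obj


print(resolve("os.path:join.__name__"))  # -> "join"
```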
diff --git a/faststream/cli/utils/logs.py b/faststream/cli/utils/logs.py
deleted file mode 100644
index c695b2e5be..0000000000
--- a/faststream/cli/utils/logs.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import logging
-from collections import defaultdict
-from enum import Enum
-from typing import TYPE_CHECKING, DefaultDict, Optional, Union
-
-if TYPE_CHECKING:
- from faststream._internal.application import Application
- from faststream.types import LoggerProto
-
-
-class LogLevels(str, Enum):
- """A class to represent log levels.
-
- Attributes:
- critical : critical log level
- error : error log level
- warning : warning log level
- info : info log level
- debug : debug log level
- """
-
- critical = "critical"
- fatal = "fatal"
- error = "error"
- warning = "warning"
- warn = "warn"
- info = "info"
- debug = "debug"
- notset = "notset"
-
-
-LOG_LEVELS: DefaultDict[str, int] = defaultdict(
- lambda: logging.INFO,
- **{
- "critical": logging.CRITICAL,
- "fatal": logging.FATAL,
- "error": logging.ERROR,
- "warning": logging.WARNING,
- "warn": logging.WARN,
- "info": logging.INFO,
- "debug": logging.DEBUG,
- "notset": logging.NOTSET,
- },
-)
-
-
-def get_log_level(level: Union[LogLevels, str, int]) -> int:
- """Get the log level.
-
- Args:
- level: The log level to get. Can be an integer, a LogLevels enum value, or a string.
-
- Returns:
- The log level as an integer.
-
- """
- if isinstance(level, int):
- return level
-
- if isinstance(level, LogLevels):
- return LOG_LEVELS[level.value]
-
- if isinstance(level, str): # pragma: no branch
- return LOG_LEVELS[level.lower()]
-
-
-def set_log_level(level: int, app: "Application") -> None:
- """Sets the log level for an application."""
- if app.logger and getattr(app.logger, "setLevel", None):
- app.logger.setLevel(level) # type: ignore[attr-defined]
-
- broker_logger: Optional[LoggerProto] = getattr(app.broker, "logger", None)
- if broker_logger is not None and getattr(broker_logger, "setLevel", None):
- broker_logger.setLevel(level) # type: ignore[attr-defined]
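Since `LOG_LEVELS` above is a `defaultdict`, an unrecognized level name quietly resolves to `logging.INFO` instead of raising a `KeyError`. A quick illustration of that lookup behavior:

```python
import logging
from collections import defaultdict

LOG_LEVELS = defaultdict(
    lambda: logging.INFO,
    critical=logging.CRITICAL,
    error=logging.ERROR,
    warning=logging.WARNING,
    info=logging.INFO,
    debug=logging.DEBUG,
)

print(LOG_LEVELS["debug"])    # 10
print(LOG_LEVELS["verbose"])  # 20 -- unknown names fall back to INFO
```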
diff --git a/faststream/cli/utils/parser.py b/faststream/cli/utils/parser.py
deleted file mode 100644
index f36c54935c..0000000000
--- a/faststream/cli/utils/parser.py
+++ /dev/null
@@ -1,83 +0,0 @@
-import re
-from functools import reduce
-from typing import TYPE_CHECKING, Dict, List, Tuple
-
-if TYPE_CHECKING:
- from faststream.types import SettingField
-
-
-def is_bind_arg(arg: str) -> bool:
- """Determine whether the received argument refers to --bind.
-
- bind arguments are like: 0.0.0.0:8000, [::]:8000, fd://2, /tmp/socket.sock
-
- """
- bind_regex = re.compile(r":\d+$|:/+\d|:/[a-zA-Z0-9._-]+/[a-zA-Z0-9._-]+")
- return bool(bind_regex.search(arg))
-
-
-def parse_cli_args(*args: str) -> Tuple[str, Dict[str, "SettingField"]]:
- """Parses command line arguments."""
- extra_kwargs: Dict[str, SettingField] = {}
-
- k: str = ""
- v: SettingField
-
- field_args: List[str] = []
- app = ""
- for item in [
- *reduce(
- lambda acc, x: acc + x.split("="), # type: ignore
- args,
- [],
- ),
- "-",
- ]:
- if ":" in item and not is_bind_arg(item):
- app = item
-
- else:
- if "-" in item:
- if k:
- k = k.strip().lstrip("-").replace("-", "_")
-
- if len(field_args) == 0:
- v = not k.startswith("no_")
- elif len(field_args) == 1:
- v = field_args[0]
- else:
- v = field_args
-
- key = remove_prefix(k, "no_")
- if (exists := extra_kwargs.get(key)) is not None:
- v = [
- *(exists if isinstance(exists, list) else [exists]),
- *(v if isinstance(v, list) else [v]),
- ]
-
- extra_kwargs[key] = v
- field_args = []
-
- k = item
-
- else:
- field_args.append(item)
-
- return app, extra_kwargs
-
-
-def remove_prefix(text: str, prefix: str) -> str:
- """Removes a prefix from a given text.
-
- Python 3.8 compatibility function
-
- Args:
- text (str): The text from which the prefix will be removed.
- prefix (str): The prefix to be removed from the text.
-
- Returns:
- str: The text with the prefix removed. If the text does not start with the prefix, the original text is returned.
- """
- if text.startswith(prefix):
- return text[len(prefix) :]
- return text
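To make the parsing rules above concrete: a flag with no trailing values becomes a boolean (a `no_` prefix negates it), one value stays a scalar, and a repeated key accumulates into a list. A hypothetical session against the deleted parser, with expected results derived from the logic above shown as comments:

```python
# This import path is removed by this PR; shown only to illustrate behavior.
from faststream.cli.utils.parser import parse_cli_args

app, extra = parse_cli_args(
    "main:app", "--workers", "2", "--no-ping", "--tag", "a", "--tag", "b"
)

print(app)    # "main:app"
print(extra)  # {"workers": "2", "ping": False, "tag": ["a", "b"]}
```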
diff --git a/faststream/confluent/__init__.py b/faststream/confluent/__init__.py
index 17b9d19d01..8cd702ba5f 100644
--- a/faststream/confluent/__init__.py
+++ b/faststream/confluent/__init__.py
@@ -1,10 +1,10 @@
+from faststream._internal.testing.app import TestApp
from faststream.confluent.annotations import KafkaMessage
from faststream.confluent.broker import KafkaBroker
from faststream.confluent.response import KafkaResponse
from faststream.confluent.router import KafkaPublisher, KafkaRoute, KafkaRouter
from faststream.confluent.schemas import TopicPartition
from faststream.confluent.testing import TestKafkaBroker
-from faststream.testing.app import TestApp
__all__ = (
"KafkaBroker",
diff --git a/faststream/confluent/annotations.py b/faststream/confluent/annotations.py
index fec41b3817..e3c1f82af9 100644
--- a/faststream/confluent/annotations.py
+++ b/faststream/confluent/annotations.py
@@ -1,10 +1,11 @@
-from typing_extensions import Annotated
+from typing import Annotated
-from faststream.annotations import ContextRepo, Logger, NoCast
+from faststream._internal.context import Context
+from faststream.annotations import ContextRepo, Logger
from faststream.confluent.broker import KafkaBroker as KB
from faststream.confluent.message import KafkaMessage as KM
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
-from faststream.utils.context import Context
+from faststream.params import NoCast
__all__ = (
"ContextRepo",
diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py
index 201249349e..6465c6cac4 100644
--- a/faststream/confluent/broker/broker.py
+++ b/faststream/confluent/broker/broker.py
@@ -1,69 +1,76 @@
import logging
+from collections.abc import Iterable, Sequence
from functools import partial
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
Literal,
Optional,
- Tuple,
- Type,
TypeVar,
Union,
)
import anyio
-from typing_extensions import Annotated, Doc, override
+import confluent_kafka
+from typing_extensions import Doc, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.message import gen_cor_id
-from faststream.confluent.broker.logging import KafkaLoggingBroker
-from faststream.confluent.broker.registrator import KafkaRegistrator
-from faststream.confluent.client import (
- AsyncConfluentConsumer,
- AsyncConfluentProducer,
-)
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.constants import EMPTY
+from faststream._internal.utils.data import filter_by_dict
+from faststream.confluent.client import AsyncConfluentConsumer, AsyncConfluentProducer
from faststream.confluent.config import ConfluentFastConfig
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
+from faststream.confluent.response import KafkaPublishCommand
from faststream.confluent.schemas.params import ConsumerConnectionParams
from faststream.confluent.security import parse_security
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.types import EMPTY
-from faststream.utils.data import filter_by_dict
+from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
+
+from .logging import make_kafka_logger_state
+from .registrator import KafkaRegistrator
if TYPE_CHECKING:
+ import asyncio
from types import TracebackType
from confluent_kafka import Message
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.confluent.config import ConfluentConfig
- from faststream.security import BaseSecurity
- from faststream.types import (
+ from faststream._internal.basic_types import (
AnyDict,
- AsyncFunc,
Decorator,
LoggerProto,
SendableMessage,
)
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.confluent.config import ConfluentConfig
+ from faststream.confluent.message import KafkaMessage
+ from faststream.security import BaseSecurity
+ from faststream.specification.schema.extra import Tag, TagDict
Partition = TypeVar("Partition")
class KafkaBroker( # type: ignore[misc]
KafkaRegistrator,
- KafkaLoggingBroker,
+ BrokerUsecase[
+ Union[
+ confluent_kafka.Message,
+ tuple[confluent_kafka.Message, ...],
+ ],
+ Callable[..., AsyncConfluentConsumer],
+ ],
):
- url: List[str]
- _producer: Optional[AsyncConfluentFastProducer]
+ url: list[str]
+ _producer: AsyncConfluentFastProducer
def __init__(
self,
@@ -77,7 +84,7 @@ def __init__(
This does not have to be the full node list.
It just needs to have at least one broker that will respond to a
Metadata API Request. Default port is 9092.
- """
+ """,
),
] = "localhost",
*,
@@ -98,7 +105,7 @@ def __init__(
which we force a refresh of metadata even if we haven't seen any
partition leadership changes to proactively discover any new
brokers or partitions.
- """
+ """,
),
] = 5 * 60 * 1000,
connections_max_idle_ms: Annotated[
@@ -108,7 +115,7 @@ def __init__(
Close idle connections after the number
of milliseconds specified by this config. Specifying `None` will
disable idle checks.
- """
+ """,
),
] = 9 * 60 * 1000,
client_id: Annotated[
@@ -120,7 +127,7 @@ def __init__(
server-side log entries that correspond to this client. Also
submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`
for logging with respect to consumer group administration.
- """
+ """,
),
] = SERVICE_NAME,
allow_auto_create_topics: Annotated[
@@ -128,7 +135,7 @@ def __init__(
Doc(
"""
Allow automatic topic creation on the broker when subscribing to or assigning non-existent topics.
- """
+ """,
),
] = True,
config: Annotated[
@@ -137,7 +144,7 @@ def __init__(
"""
Extra configuration for the confluent-kafka-python
producer/consumer. See the `confluent_kafka.Config` documentation.
- """
+ """,
),
] = None,
# publisher args
@@ -169,7 +176,7 @@ def __init__(
If unset, defaults to ``acks=1``. If `enable_idempotence` is
:data:`True` defaults to ``acks=all``.
- """
+ """,
),
] = EMPTY,
compression_type: Annotated[
@@ -180,14 +187,14 @@ def __init__(
Compression is of full batches of data, so the efficacy of batching
will also impact the compression ratio (more batching means better
compression).
- """
+ """,
),
] = None,
partitioner: Annotated[
Union[
str,
Callable[
- [bytes, List[Partition], List[Partition]],
+ [bytes, list[Partition], list[Partition]],
Partition,
],
],
@@ -201,7 +208,7 @@ def __init__(
messages with the same key are assigned to the same partition.
When a key is :data:`None`, the message is delivered to a random partition
(filtered to partitions with available leaders only, if possible).
- """
+ """,
),
] = "consistent_random",
max_request_size: Annotated[
@@ -213,7 +220,7 @@ def __init__(
has its own cap on record size which may be different from this.
This setting will limit the number of record batches the producer
will send in a single request to avoid sending huge requests.
- """
+ """,
),
] = 1024 * 1024,
linger_ms: Annotated[
@@ -228,7 +235,7 @@ def __init__(
This setting accomplishes this by adding a small amount of
artificial delay; that is, if first request is processed faster,
than `linger_ms`, producer will wait ``linger_ms - process_time``.
- """
+ """,
),
] = 0,
enable_idempotence: Annotated[
@@ -241,7 +248,7 @@ def __init__(
etc., may write duplicates of the retried message in the stream.
Note that enabling idempotence requires ``acks`` to be set to ``all``. If it is not
explicitly set by the user it will be chosen.
- """
+ """,
),
] = False,
transactional_id: Optional[str] = None,
@@ -250,7 +257,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -262,26 +269,25 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
- Iterable[
- Union[
- "BrokerMiddleware[Message]",
- "BrokerMiddleware[Tuple[Message, ...]]",
- ]
- ],
+ Sequence["BrokerMiddleware[Union[Message, tuple[Message, ...]]]"],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[Message]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
# AsyncAPI args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Union[str, Iterable[str], None],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -298,9 +304,9 @@ def __init__(
Doc("AsyncAPI server description."),
] = None,
tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -319,10 +325,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -344,13 +347,13 @@ def __init__(
else list(bootstrap_servers)
)
- if asyncapi_url is not None:
- if isinstance(asyncapi_url, str):
- asyncapi_url = [asyncapi_url]
+ if specification_url is not None:
+ if isinstance(specification_url, str):
+ specification_url = [specification_url]
else:
- asyncapi_url = list(asyncapi_url)
+ specification_url = list(specification_url)
else:
- asyncapi_url = servers
+ specification_url = servers
super().__init__(
bootstrap_servers=servers,
@@ -376,38 +379,48 @@ def __init__(
decoder=decoder,
parser=parser,
middlewares=middlewares,
+ routers=routers,
# AsyncAPI args
description=description,
- asyncapi_url=asyncapi_url,
+ specification_url=specification_url,
protocol=protocol,
protocol_version=protocol_version,
security=security,
tags=tags,
# Logging args
- logger=logger,
- log_level=log_level,
- log_fmt=log_fmt,
+ logger_state=make_kafka_logger_state(
+ logger=logger,
+ log_level=log_level,
+ log_fmt=log_fmt,
+ ),
# FastDepends args
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
)
self.client_id = client_id
- self._producer = None
+
self.config = ConfluentFastConfig(config)
- async def _close(
+ self._state.patch_value(
+ producer=AsyncConfluentFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
+
+ async def close(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> None:
- if self._producer is not None: # pragma: no branch
- await self._producer.stop()
- self._producer = None
+ await super().close(exc_type, exc_val, exc_tb)
+
+ await self._producer.disconnect()
- await super()._close(exc_type, exc_val, exc_tb)
+ self._connection = None
async def connect(
self,
@@ -435,33 +448,25 @@ async def _connect( # type: ignore[override]
native_producer = AsyncConfluentProducer(
**kwargs,
client_id=client_id,
- logger=self.logger,
config=self.config,
+ logger=self._state.get().logger_state,
)
- self._producer = AsyncConfluentFastProducer(
- producer=native_producer,
- parser=self._parser,
- decoder=self._decoder,
- )
+ self._producer.connect(native_producer)
+ connection_kwargs, _ = filter_by_dict(ConsumerConnectionParams, kwargs)
return partial(
AsyncConfluentConsumer,
- **filter_by_dict(ConsumerConnectionParams, kwargs),
- logger=self.logger,
+ **connection_kwargs,
+ logger=self._state.get().logger_state,
config=self.config,
)
async def start(self) -> None:
+ await self.connect()
+ self._setup()
await super().start()
- for handler in self._subscribers.values():
- self._log(
- f"`{handler.call_name}` waiting for messages",
- extra=handler.get_log_context(None),
- )
- await handler.start()
-
@property
def _subscriber_setup_extra(self) -> "AnyDict":
return {
@@ -473,104 +478,125 @@ def _subscriber_setup_extra(self) -> "AnyDict":
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
+ message: Annotated[
+ "SendableMessage",
+ Doc("Message body to send."),
+ ],
+ topic: Annotated[
+ str,
+ Doc("Topic where the message will be published."),
+ ],
+ *,
+ key: Union[bytes, str, None] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- reply_to: str = "",
- no_confirm: bool = False,
- # extra options to be compatible with test client
- **kwargs: Any,
- ) -> Optional[Any]:
- correlation_id = correlation_id or gen_cor_id()
+ headers: Annotated[
+ Optional[dict[str, str]],
+ Doc("Message headers to store metainformation."),
+ ] = None,
+ correlation_id: Annotated[
+ Optional[str],
+ Doc(
+ "Manual message **correlation_id** setter. "
+ "**correlation_id** is a useful option to trace messages.",
+ ),
+ ] = None,
+ reply_to: Annotated[
+ str,
+ Doc("Reply message topic name to send response."),
+ ] = "",
+ no_confirm: Annotated[
+ bool,
+ Doc("Do not wait for Kafka publish confirmation."),
+ ] = False,
+ ) -> "asyncio.Future":
+ """Publish message directly.
- return await super().publish(
+ This method allows you to publish a message in a way that is not documented in the AsyncAPI schema. You can use it
+ from other frameworks' applications or to publish occasional messages.
+
+ For the regular case, please use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead.
+ """
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
reply_to=reply_to,
no_confirm=no_confirm,
- **kwargs,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
@override
async def request( # type: ignore[override]
self,
message: "SendableMessage",
topic: str,
- key: Optional[bytes] = None,
+ *,
+ key: Union[bytes, str, None] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
timeout: float = 0.5,
- ) -> Optional[Any]:
- correlation_id = correlation_id or gen_cor_id()
-
- return await super().request(
+ ) -> "KafkaMessage":
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
timeout=timeout,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
)
+ msg: KafkaMessage = await super()._basic_request(cmd, producer=self._producer)
+ return msg
+
async def publish_batch(
self,
- *msgs: "SendableMessage",
+ *messages: "SendableMessage",
topic: str,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
reply_to: str = "",
correlation_id: Optional[str] = None,
no_confirm: bool = False,
) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
- for m in self._middlewares[::-1]:
- call = partial(m(None).publish_scope, call)
-
- await call(
- *msgs,
+ cmd = KafkaPublishCommand(
+ *messages,
topic=topic,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
reply_to=reply_to,
- correlation_id=correlation_id,
no_confirm=no_confirm,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish_batch(cmd, producer=self._producer)
+
@override
async def ping(self, timeout: Optional[float]) -> bool:
sleep_time = (timeout or 10) / 10
with anyio.move_on_after(timeout) as cancel_scope:
- if self._producer is None:
+ if not self._producer:
return False
while True:
if cancel_scope.cancel_called:
return False
- if await self._producer._producer.ping(timeout=timeout):
+ if await self._producer.ping(timeout=timeout):
return True
await anyio.sleep(sleep_time)
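Although `publish` and `request` above now wrap their arguments into a `KafkaPublishCommand`, the public call shape stays keyword-based. A hedged usage sketch against the new API (addresses, topics, and timeouts are illustrative, and a reachable broker is assumed):

```python
import asyncio

from faststream.confluent import KafkaBroker


async def main() -> None:
    broker = KafkaBroker("localhost:9092")
    await broker.connect()

    # Fire-and-forget publish; a correlation_id is generated when omitted.
    await broker.publish("hello", topic="test-topic", key=b"user-1")

    # RPC-style request: waits for a reply or fails on timeout.
    msg = await broker.request("ping", topic="rpc-topic", timeout=3.0)
    print(msg)

    await broker.close()


asyncio.run(main())
```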
diff --git a/faststream/confluent/broker/logging.py b/faststream/confluent/broker/logging.py
index 758e4285ba..b4523d2b40 100644
--- a/faststream/confluent/broker/logging.py
+++ b/faststream/confluent/broker/logging.py
@@ -1,72 +1,76 @@
import logging
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Tuple, Union
+from functools import partial
+from typing import TYPE_CHECKING, Optional
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.confluent.client import AsyncConfluentConsumer
-from faststream.log.logging import get_broker_logger
-from faststream.types import EMPTY
+from faststream._internal.log.logging import get_broker_logger
+from faststream._internal.state.logger import (
+ DefaultLoggerStorage,
+ make_logger_state,
+)
if TYPE_CHECKING:
- import confluent_kafka
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
- from faststream.types import LoggerProto
-
-
-class KafkaLoggingBroker(
- BrokerUsecase[
- Union["confluent_kafka.Message", Tuple["confluent_kafka.Message", ...]],
- Callable[..., AsyncConfluentConsumer],
- ]
-):
- """A class that extends the LoggingMixin class and adds additional functionality for logging Kafka related information."""
-
- _max_topic_len: int
- _max_group_len: int
- __max_msg_id_ln: ClassVar[int] = 10
+class KafkaParamsStorage(DefaultLoggerStorage):
def __init__(
self,
- *args: Any,
- logger: Optional["LoggerProto"] = EMPTY,
- log_level: int = logging.INFO,
- log_fmt: Optional[str] = None,
- **kwargs: Any,
+ log_fmt: Optional[str],
) -> None:
- """Initialize the class."""
- super().__init__(
- *args,
- logger=logger,
- # TODO: generate unique logger names to not share between brokers
- default_logger=get_broker_logger(
- name="confluent",
- default_context={
- "topic": "",
- "group_id": "",
- },
- message_id_ln=self.__max_msg_id_ln,
- ),
- log_level=log_level,
- log_fmt=log_fmt,
- **kwargs,
- )
+ super().__init__(log_fmt)
+
self._max_topic_len = 4
self._max_group_len = 0
- def get_fmt(self) -> str:
- return (
- "%(asctime)s %(levelname)-8s - "
- + f"%(topic)-{self._max_topic_len}s | "
- + (f"%(group_id)-{self._max_group_len}s | " if self._max_group_len else "")
- + f"%(message_id)-{self.__max_msg_id_ln}s "
- + "- %(message)s"
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ self._max_topic_len = max(
+ (
+ self._max_topic_len,
+ len(params.get("topic", "")),
+ ),
+ )
+ self._max_group_len = max(
+ (
+ self._max_group_len,
+ len(params.get("group_id", "")),
+ ),
)
- def _setup_log_context(
- self,
- *,
- topic: str = "",
- group_id: Optional[str] = None,
- ) -> None:
- """Set up log context."""
- self._max_topic_len = max((self._max_topic_len, len(topic)))
- self._max_group_len = max((self._max_group_len, len(group_id or "")))
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
+ message_id_ln = 10
+
+ # TODO: generate unique logger names to not share between brokers
+ return get_broker_logger(
+ name="confluent",
+ default_context={
+ "topic": "",
+ "group_id": "",
+ },
+ message_id_ln=message_id_ln,
+ fmt=self._log_fmt
+ or "".join((
+ "%(asctime)s %(levelname)-8s - ",
+ f"%(topic)-{self._max_topic_len}s | ",
+ (
+ f"%(group_id)-{self._max_group_len}s | "
+ if self._max_group_len
+ else ""
+ ),
+ f"%(message_id)-{message_id_ln}s ",
+ "- %(message)s",
+ )),
+ context=context,
+ log_level=self.logger_log_level,
+ )
+
+
+make_kafka_logger_state = partial(
+ make_logger_state,
+ default_storage_cls=KafkaParamsStorage,
+)
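`KafkaParamsStorage` above grows the `%(topic)s` and `%(group_id)s` column widths as longer names register, then bakes those widths into the format string. The padding itself is ordinary `%`-style logging formatting:

```python
max_topic_len = max(4, len("orders"))  # widened as new topics are seen

fmt = (
    "%(asctime)s %(levelname)-8s - "
    f"%(topic)-{max_topic_len}s | "
    "%(message)s"
)

print(fmt % {
    "asctime": "2024-01-01 00:00:00",
    "levelname": "INFO",
    "topic": "orders",
    "message": "consumed",
})
```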
diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py
index 55edd17846..ae30e0e181 100644
--- a/faststream/confluent/broker/registrator.py
+++ b/faststream/confluent/broker/registrator.py
@@ -1,45 +1,42 @@
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
- Dict,
- Iterable,
Literal,
Optional,
- Sequence,
- Tuple,
Union,
cast,
overload,
)
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.utils import default_filter
-from faststream.confluent.publisher.asyncapi import AsyncAPIPublisher
+from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.confluent.publisher.factory import create_publisher
from faststream.confluent.subscriber.factory import create_subscriber
-from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from confluent_kafka import Message
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.types import (
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.confluent.message import KafkaMessage
- from faststream.confluent.publisher.asyncapi import (
- AsyncAPIBatchPublisher,
- AsyncAPIDefaultPublisher,
+ from faststream.confluent.publisher.specified import (
+ SpecificationBatchPublisher,
+ SpecificationDefaultPublisher,
)
from faststream.confluent.schemas import TopicPartition
- from faststream.confluent.subscriber.asyncapi import (
- AsyncAPIBatchSubscriber,
- AsyncAPIConcurrentDefaultSubscriber,
- AsyncAPIDefaultSubscriber,
+ from faststream.confluent.subscriber.specified import (
+ SpecificationBatchSubscriber,
+ SpecificationConcurrentDefaultSubscriber,
+ SpecificationDefaultSubscriber,
)
@@ -47,22 +44,24 @@ class KafkaRegistrator(
ABCBroker[
Union[
"Message",
- Tuple["Message", ...],
+ tuple["Message", ...],
]
- ]
+ ],
):
"""Includable to KafkaBroker router."""
- _subscribers: Dict[
- int,
+ _subscribers: list[ # type: ignore[assignment]
Union[
- "AsyncAPIBatchSubscriber",
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
],
]
- _publishers: Dict[ # type: ignore[assignment]
- int, Union["AsyncAPIBatchPublisher", "AsyncAPIDefaultPublisher"]
+ _publishers: list[ # type: ignore[assignment]
+ Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+ ]
]
@overload # type: ignore[override]
@@ -82,7 +81,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -95,7 +94,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -106,7 +105,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -121,7 +120,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -131,7 +130,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -146,7 +145,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -158,7 +157,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -167,15 +166,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in 0.7.0 release.
+ Please, use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -186,7 +191,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -202,7 +207,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -215,7 +220,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -230,7 +235,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -246,7 +251,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -276,7 +281,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -289,8 +294,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -302,50 +307,44 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIBatchSubscriber": ...
+ ) -> "SpecificationBatchSubscriber": ...
@overload
def subscriber(
@@ -364,7 +363,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -377,7 +376,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -388,7 +387,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -403,7 +402,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -413,7 +412,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -428,7 +427,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -440,7 +439,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -449,15 +448,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in 0.7.0 release.
+ Please, use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -468,7 +473,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -484,7 +489,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -497,7 +502,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -512,7 +517,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -528,7 +533,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -558,7 +563,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -571,8 +576,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -584,50 +589,47 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIDefaultSubscriber": ...
+ ) -> Union[
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
+ ]: ...
@overload
def subscriber(
@@ -646,7 +648,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -659,7 +661,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -670,7 +672,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -685,7 +687,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -695,7 +697,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -710,7 +712,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -722,7 +724,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -731,15 +733,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in 0.7.0 release.
+ Please, use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -750,7 +758,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -766,7 +774,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -779,7 +787,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -794,7 +802,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -810,7 +818,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -840,7 +848,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -853,8 +861,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -866,52 +874,47 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]: ...
@override
@@ -931,7 +934,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -944,7 +947,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -955,7 +958,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -970,7 +973,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -980,7 +983,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -995,7 +998,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1007,7 +1010,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1016,15 +1019,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1035,7 +1044,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1051,7 +1060,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -1064,7 +1073,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -1079,7 +1088,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1095,7 +1104,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -1125,7 +1134,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -1138,8 +1147,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -1151,61 +1160,52 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
max_workers: Annotated[
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]:
- if not auto_commit and not group_id:
- raise SetupError("You should install `group_id` with manual commit mode")
-
subscriber = create_subscriber(
*topics,
max_workers=max_workers,
@@ -1221,7 +1221,6 @@ def subscriber(
"fetch_min_bytes": fetch_min_bytes,
"max_partition_fetch_bytes": max_partition_fetch_bytes,
"auto_offset_reset": auto_offset_reset,
- "enable_auto_commit": auto_commit,
"auto_commit_interval_ms": auto_commit_interval_ms,
"check_crcs": check_crcs,
"partition_assignment_strategy": partition_assignment_strategy,
@@ -1230,31 +1229,29 @@ def subscriber(
"heartbeat_interval_ms": heartbeat_interval_ms,
"isolation_level": isolation_level,
},
- is_manual=not auto_commit,
+ auto_commit=auto_commit,
# subscriber args
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
- # AsyncAPI
+ # Specification
title_=title,
description_=description,
include_in_schema=self._solve_include_in_schema(include_in_schema),
)
if batch:
- subscriber = cast("AsyncAPIBatchSubscriber", subscriber)
+ subscriber = cast("SpecificationBatchSubscriber", subscriber)
+ elif max_workers > 1:
+ subscriber = cast("SpecificationConcurrentDefaultSubscriber", subscriber)
else:
- if max_workers > 1:
- subscriber = cast("AsyncAPIConcurrentDefaultSubscriber", subscriber)
- else:
- subscriber = cast("AsyncAPIDefaultSubscriber", subscriber)
+ subscriber = cast("SpecificationDefaultSubscriber", subscriber)
subscriber = super().subscriber(subscriber) # type: ignore[assignment]
return subscriber.add_call(
- filter_=filter,
parser_=parser or self._parser,
decoder_=decoder or self._decoder,
dependencies_=dependencies,
@@ -1280,7 +1277,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1289,15 +1286,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1311,29 +1308,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIDefaultPublisher": ...
+ ) -> "SpecificationDefaultPublisher": ...
@overload
def publisher(
@@ -1354,7 +1355,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1363,15 +1364,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1385,29 +1386,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIBatchPublisher": ...
+ ) -> "SpecificationBatchPublisher": ...
@overload
def publisher(
@@ -1428,7 +1433,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1437,15 +1442,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1459,31 +1464,35 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]: ...
@override
@@ -1505,7 +1514,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1514,15 +1523,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1536,40 +1545,44 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]:
- """Creates long-living and AsyncAPI-documented publisher object.
+ """Creates long-living and Specification-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
In such case publisher will publish your handler return value.
Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`.
"""
- publisher = AsyncAPIPublisher.create(
+ publisher = create_publisher(
# batch flag
batch=batch,
# default args
@@ -1580,9 +1593,9 @@ def publisher(
headers=headers,
reply_to=reply_to,
# publisher-specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
- # AsyncAPI
+ # Specification
title_=title,
description_=description,
schema_=schema,
@@ -1590,8 +1603,8 @@ def publisher(
)
if batch:
- publisher = cast("AsyncAPIBatchPublisher", publisher)
+ publisher = cast("SpecificationBatchPublisher", publisher)
else:
- publisher = cast("AsyncAPIDefaultPublisher", publisher)
+ publisher = cast("SpecificationDefaultPublisher", publisher)
- return super().publisher(publisher) # type: ignore[return-value,arg-type]
+ return super().publisher(publisher)
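
Taken together, the hunks above replace the `auto_commit`, `no_ack`, `retry`, and `filter` knobs with a single `ack_policy` enum. A minimal usage sketch of the migration, assuming only the import paths this diff itself shows (`faststream.confluent` for the broker, `faststream.middlewares` for `AckPolicy`); the address, topics, and handler are illustrative, not part of the change:

from faststream import FastStream
from faststream.confluent import KafkaBroker
from faststream.middlewares import AckPolicy

broker = KafkaBroker("localhost:9092")  # illustrative address
app = FastStream(broker)

# Before 0.6.0 (both spellings now deprecated, removal planned for 0.7.0):
#   @broker.subscriber("in-topic", group_id="demo", auto_commit=True)
#   @broker.subscriber("in-topic", group_id="demo", no_ack=True)
@broker.subscriber(
    "in-topic",
    group_id="demo",
    ack_policy=AckPolicy.ACK_FIRST,  # replaces auto_commit=True; DO_NOTHING replaces no_ack=True
)
@broker.publisher("out-topic")  # the handler's return value is published here
async def handle(msg: str) -> str:
    return msg.upper()
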
diff --git a/faststream/confluent/client.py b/faststream/confluent/client.py
index eb8ce40a18..92b91dd3e1 100644
--- a/faststream/confluent/client.py
+++ b/faststream/confluent/client.py
@@ -1,17 +1,14 @@
import asyncio
import logging
+from collections.abc import Iterable, Sequence
+from concurrent.futures import ThreadPoolExecutor
from contextlib import suppress
from time import time
from typing import (
TYPE_CHECKING,
Any,
Callable,
- Dict,
- Iterable,
- List,
Optional,
- Sequence,
- Tuple,
Union,
)
@@ -19,22 +16,23 @@
from confluent_kafka import Consumer, KafkaError, KafkaException, Message, Producer
from confluent_kafka.admin import AdminClient, NewTopic
+from faststream._internal.constants import EMPTY
+from faststream._internal.log import logger as faststream_logger
+from faststream._internal.utils.functions import call_or_await, run_in_executor
from faststream.confluent import config as config_module
from faststream.confluent.schemas import TopicPartition
from faststream.exceptions import SetupError
-from faststream.log import logger as faststream_logger
-from faststream.types import EMPTY
-from faststream.utils.functions import call_or_await
if TYPE_CHECKING:
from typing_extensions import NotRequired, TypedDict
- from faststream.types import AnyDict, LoggerProto
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.state.logger import LoggerState
class _SendKwargs(TypedDict):
value: Optional[Union[str, bytes]]
key: Optional[Union[str, bytes]]
- headers: Optional[List[Tuple[str, Union[str, bytes]]]]
+ headers: Optional[list[tuple[str, Union[str, bytes]]]]
partition: NotRequired[int]
timestamp: NotRequired[int]
on_delivery: NotRequired[Callable[..., None]]
@@ -46,9 +44,9 @@ class AsyncConfluentProducer:
def __init__(
self,
*,
- logger: Optional["LoggerProto"],
+ logger: "LoggerState",
config: config_module.ConfluentFastConfig,
- bootstrap_servers: Union[str, List[str]] = "localhost",
+ bootstrap_servers: Union[str, list[str]] = "localhost",
client_id: Optional[str] = None,
metadata_max_age_ms: int = 300000,
request_timeout_ms: int = 40000,
@@ -68,10 +66,11 @@ def __init__(
sasl_plain_password: Optional[str] = None,
sasl_plain_username: Optional[str] = None,
) -> None:
- self.logger = logger
+ self.logger_state = logger
if isinstance(bootstrap_servers, Iterable) and not isinstance(
- bootstrap_servers, str
+ bootstrap_servers,
+ str,
):
bootstrap_servers = ",".join(bootstrap_servers)
@@ -103,16 +102,16 @@ def __init__(
final_config = {**config.as_config_dict(), **config_from_params}
- if sasl_mechanism in ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"]:
+ if sasl_mechanism in {"PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"}:
final_config.update(
{
"sasl.mechanism": sasl_mechanism,
"sasl.username": sasl_plain_username,
"sasl.password": sasl_plain_password,
- }
+ },
)
- self.producer = Producer(final_config, logger=self.logger) # type: ignore[call-arg]
+ self.producer = Producer(final_config, logger=self.logger_state.logger.logger) # type: ignore[call-arg]
self.__running = True
self._poll_task = asyncio.create_task(self._poll_loop())
@@ -137,9 +136,9 @@ async def send(
key: Optional[Union[str, bytes]] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[List[Tuple[str, Union[str, bytes]]]] = None,
+ headers: Optional[list[tuple[str, Union[str, bytes]]]] = None,
no_confirm: bool = False,
- ) -> None:
+ ) -> "Union[asyncio.Future[Optional[Message]], Optional[Message]]":
"""Sends a single message to a Kafka topic."""
kwargs: _SendKwargs = {
"value": value,
@@ -153,22 +152,25 @@ async def send(
if timestamp_ms is not None:
kwargs["timestamp"] = timestamp_ms
- if not no_confirm:
- result_future: asyncio.Future[Optional[Message]] = asyncio.Future()
+ loop = asyncio.get_running_loop()
+ result_future: asyncio.Future[Optional[Message]] = loop.create_future()
- def ack_callback(err: Any, msg: Optional[Message]) -> None:
- if err or (msg is not None and (err := msg.error())):
- result_future.set_exception(KafkaException(err))
- else:
- result_future.set_result(msg)
+ def ack_callback(err: Any, msg: Optional[Message]) -> None:
+ if err or (msg is not None and (err := msg.error())):
+ loop.call_soon_threadsafe(
+ result_future.set_exception, KafkaException(err)
+ )
+ else:
+ loop.call_soon_threadsafe(result_future.set_result, msg)
- kwargs["on_delivery"] = ack_callback
+ kwargs["on_delivery"] = ack_callback
# should be sync to prevent segfault
self.producer.produce(topic, **kwargs)
if not no_confirm:
- await result_future
+ return await result_future
+ return result_future
def create_batch(self) -> "BatchBuilder":
"""Creates a batch for sending multiple messages."""
@@ -223,9 +225,9 @@ def __init__(
self,
*topics: str,
partitions: Sequence["TopicPartition"],
- logger: Optional["LoggerProto"],
+ logger: "LoggerState",
config: config_module.ConfluentFastConfig,
- bootstrap_servers: Union[str, List[str]] = "localhost",
+ bootstrap_servers: Union[str, list[str]] = "localhost",
client_id: Optional[str] = "confluent-kafka-consumer",
group_id: Optional[str] = None,
group_instance_id: Optional[str] = None,
@@ -239,7 +241,7 @@ def __init__(
auto_commit_interval_ms: int = 5000,
check_crcs: bool = True,
metadata_max_age_ms: int = 5 * 60 * 1000,
- partition_assignment_strategy: Union[str, List[Any]] = "roundrobin",
+ partition_assignment_strategy: Union[str, list[Any]] = "roundrobin",
max_poll_interval_ms: int = 300000,
session_timeout_ms: int = 10000,
heartbeat_interval_ms: int = 3000,
@@ -251,10 +253,11 @@ def __init__(
sasl_plain_password: Optional[str] = None,
sasl_plain_username: Optional[str] = None,
) -> None:
- self.logger = logger
+ self.logger_state = logger
if isinstance(bootstrap_servers, Iterable) and not isinstance(
- bootstrap_servers, str
+ bootstrap_servers,
+ str,
):
bootstrap_servers = ",".join(bootstrap_servers)
@@ -266,7 +269,7 @@ def __init__(
[
x if isinstance(x, str) else x().name
for x in partition_assignment_strategy
- ]
+ ],
)
final_config = config.as_config_dict()
@@ -302,53 +305,63 @@ def __init__(
self.allow_auto_create_topics = allow_auto_create_topics
final_config.update(config_from_params)
- if sasl_mechanism in ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"]:
+ if sasl_mechanism in {"PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"}:
final_config.update(
{
"sasl.mechanism": sasl_mechanism,
"sasl.username": sasl_plain_username,
"sasl.password": sasl_plain_password,
- }
+ },
)
self.config = final_config
- self.consumer = Consumer(final_config, logger=self.logger) # type: ignore[call-arg]
+ self.consumer = Consumer(final_config, logger=self.logger_state.logger.logger) # type: ignore[call-arg]
- # We shouldn't read messages and close consumer concurrently
- # https://github.com/airtai/faststream/issues/1904#issuecomment-2506990895
- self._lock = anyio.Lock()
+ # A single-threaded pool is used to execute the consumer's commands sequentially:
+ self._thread_pool = ThreadPoolExecutor(max_workers=1)
@property
- def topics_to_create(self) -> List[str]:
+ def topics_to_create(self) -> list[str]:
return list({*self.topics, *(p.topic for p in self.partitions)})
async def start(self) -> None:
"""Starts the Kafka consumer and subscribes to the specified topics."""
if self.allow_auto_create_topics:
- await call_or_await(
- create_topics, self.topics_to_create, self.config, self.logger
+ await run_in_executor(
+ self._thread_pool,
+ create_topics,
+ topics=self.topics_to_create,
+ config=self.config,
+ logger_=self.logger_state.logger.logger,
)
- elif self.logger:
- self.logger.log(
- logging.WARNING,
- "Auto create topics is disabled. Make sure the topics exist.",
+ else:
+ self.logger_state.log(
+ log_level=logging.WARNING,
+ message="Auto create topics is disabled. Make sure the topics exist.",
)
if self.topics:
- await call_or_await(self.consumer.subscribe, self.topics)
+ await run_in_executor(
+ self._thread_pool, self.consumer.subscribe, topics=self.topics
+ )
elif self.partitions:
- await call_or_await(
- self.consumer.assign, [p.to_confluent() for p in self.partitions]
+ await run_in_executor(
+ self._thread_pool,
+ self.consumer.assign,
+ [p.to_confluent() for p in self.partitions],
)
else:
- raise SetupError("You must provide either `topics` or `partitions` option.")
+ msg = "You must provide either `topics` or `partitions` option."
+ raise SetupError(msg)
async def commit(self, asynchronous: bool = True) -> None:
"""Commits the offsets of all messages returned by the last poll operation."""
- await call_or_await(self.consumer.commit, asynchronous=asynchronous)
+ await run_in_executor(
+ self._thread_pool, self.consumer.commit, asynchronous=asynchronous
+ )
async def stop(self) -> None:
"""Stops the Kafka consumer and releases all resources."""
@@ -363,44 +376,51 @@ async def stop(self) -> None:
# No offset stored issue is not a problem - https://github.com/confluentinc/confluent-kafka-python/issues/295#issuecomment-355907183
if "No offset stored" in str(e):
pass
- elif self.logger:
- self.logger.log(
- logging.ERROR,
- "Consumer closing error occurred.",
+ else:
+ self.logger_state.log(
+ log_level=logging.ERROR,
+ message="Consumer closing error occurred.",
exc_info=e,
)
# Wrap calls to async to make method cancelable by timeout
- async with self._lock:
- await call_or_await(self.consumer.close)
+ # We shouldn't read messages and close the consumer concurrently:
+ # https://github.com/airtai/faststream/issues/1904#issuecomment-2506990895
+ # It now works without a lock because `ThreadPoolExecutor(max_workers=1)`
+ # makes all consumer calls sequential.
+ await run_in_executor(self._thread_pool, self.consumer.close)
+
+ self._thread_pool.shutdown(wait=False)
async def getone(self, timeout: float = 0.1) -> Optional[Message]:
"""Consumes a single message from Kafka."""
- async with self._lock:
- msg = await call_or_await(self.consumer.poll, timeout)
+ msg = await run_in_executor(self._thread_pool, self.consumer.poll, timeout)
return check_msg_error(msg)
async def getmany(
self,
timeout: float = 0.1,
max_records: Optional[int] = 10,
- ) -> Tuple[Message, ...]:
+ ) -> tuple[Message, ...]:
"""Consumes a batch of messages from Kafka and groups them by topic and partition."""
- async with self._lock:
- raw_messages: List[Optional[Message]] = await call_or_await(
- self.consumer.consume, # type: ignore[arg-type]
- num_messages=max_records or 10,
- timeout=timeout,
- )
-
+ raw_messages: list[Optional[Message]] = await run_in_executor(
+ self._thread_pool,
+ self.consumer.consume, # type: ignore[arg-type]
+ num_messages=max_records or 10,
+ timeout=timeout,
+ )
return tuple(x for x in map(check_msg_error, raw_messages) if x is not None)
async def seek(self, topic: str, partition: int, offset: int) -> None:
"""Seeks to the specified offset in the specified topic and partition."""
topic_partition = TopicPartition(
- topic=topic, partition=partition, offset=offset
+ topic=topic,
+ partition=partition,
+ offset=offset,
+ )
+ await run_in_executor(
+ self._thread_pool, self.consumer.seek, topic_partition.to_confluent()
)
- await call_or_await(self.consumer.seek, topic_partition.to_confluent())
def check_msg_error(msg: Optional[Message]) -> Optional[Message]:
@@ -416,7 +436,7 @@ class BatchBuilder:
def __init__(self) -> None:
"""Initializes a new BatchBuilder instance."""
- self._builder: List[AnyDict] = []
+ self._builder: list[AnyDict] = []
def append(
self,
@@ -424,12 +444,12 @@ def append(
timestamp: Optional[int] = None,
key: Optional[Union[str, bytes]] = None,
value: Optional[Union[str, bytes]] = None,
- headers: Optional[List[Tuple[str, bytes]]] = None,
+ headers: Optional[list[tuple[str, bytes]]] = None,
) -> None:
"""Appends a message to the batch with optional timestamp, key, value, and headers."""
if key is None and value is None:
raise KafkaException(
- KafkaError(40, reason="Both key and value can't be None")
+ KafkaError(40, reason="Both key and value can't be None"),
)
self._builder.append(
@@ -438,24 +458,24 @@ def append(
"key": key,
"value": value,
"headers": headers or [],
- }
+ },
)
def create_topics(
- topics: List[str],
- config: Dict[str, Optional[Union[str, int, float, bool, Any]]],
+ topics: list[str],
+ config: dict[str, Optional[Union[str, int, float, bool, Any]]],
logger_: Optional["LoggerProto"] = None,
) -> None:
"""Creates Kafka topics using the provided configuration."""
logger_ = logger_ or faststream_logger
admin_client = AdminClient(
- {x: config[x] for x in ADMINCLIENT_CONFIG_PARAMS if x in config}
+ {x: config[x] for x in ADMINCLIENT_CONFIG_PARAMS if x in config},
)
fs = admin_client.create_topics(
- [NewTopic(topic, num_partitions=1, replication_factor=1) for topic in topics]
+ [NewTopic(topic, num_partitions=1, replication_factor=1) for topic in topics],
)
for topic, f in fs.items():
@@ -463,7 +483,7 @@ def create_topics(
f.result() # The result itself is None
except Exception as e: # noqa: PERF203
if "TOPIC_ALREADY_EXISTS" not in str(e):
- logger_.log(logging.WARN, f"Failed to create topic {topic}: {e}")
+ logger_.log(logging.WARNING, f"Failed to create topic {topic}: {e}")
else:
logger_.log(logging.INFO, f"Topic `{topic}` created.")
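
The client rewrite above leans on two standard-library concurrency patterns worth spelling out. A self-contained sketch (plain asyncio and concurrent.futures, no FastStream code): a single-worker executor serializes every call into the non-thread-safe client, which is why the old `anyio.Lock` could be dropped, and callbacks fired on a foreign thread must hand results back to the event loop via `call_soon_threadsafe`:

import asyncio
from concurrent.futures import ThreadPoolExecutor

# One worker thread: every submitted call runs strictly after the previous
# one, so no extra lock is needed around the shared client object.
pool = ThreadPoolExecutor(max_workers=1)

async def main() -> None:
    loop = asyncio.get_running_loop()
    fut: asyncio.Future[str] = loop.create_future()

    def delivery_callback(result: str) -> None:
        # Runs on the worker thread; asyncio futures are not thread-safe,
        # so the completion is marshalled back onto the event loop.
        loop.call_soon_threadsafe(fut.set_result, result)

    # Stand-in for Producer.produce(..., on_delivery=ack_callback).
    pool.submit(delivery_callback, "delivered")
    print(await fut)

    # Stand-in for run_in_executor(self._thread_pool, self.consumer.poll, ...).
    print(await loop.run_in_executor(pool, lambda: "polled"))

asyncio.run(main())
pool.shutdown(wait=False)
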
diff --git a/faststream/confluent/config.py b/faststream/confluent/config.py
index 16de28b3f1..4474ff300a 100644
--- a/faststream/confluent/config.py
+++ b/faststream/confluent/config.py
@@ -4,7 +4,7 @@
from typing_extensions import TypedDict
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
class BuiltinFeatures(str, Enum):
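
The `faststream.types` to `faststream._internal.basic_types` move recurs in every file below; the aliases are only ever imported under `TYPE_CHECKING`, so the relocation has no runtime cost. A minimal sketch of the guard pattern these hunks use (the `describe` function is invented for illustration, and the `_internal` path is private, so it may move again between releases):

import logging
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Resolved by type checkers only; never imported at runtime.
    from faststream._internal.basic_types import AnyDict, LoggerProto

def describe(config: "AnyDict", logger: "LoggerProto") -> None:
    # String annotations keep the guarded names usable without importing them.
    logger.log(logging.INFO, f"{len(config)} config keys")

describe({"bootstrap.servers": "localhost"}, logging.getLogger(__name__))
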
diff --git a/faststream/confluent/fastapi/__init__.py b/faststream/confluent/fastapi/__init__.py
index 352142194a..21354fcf98 100644
--- a/faststream/confluent/fastapi/__init__.py
+++ b/faststream/confluent/fastapi/__init__.py
@@ -1,11 +1,12 @@
-from typing_extensions import Annotated
+from typing import Annotated
-from faststream.broker.fastapi.context import Context, ContextRepo, Logger
+from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.confluent.broker import KafkaBroker as KB
-from faststream.confluent.fastapi.fastapi import KafkaRouter
from faststream.confluent.message import KafkaMessage as KM
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
+from .fastapi import KafkaRouter
+
__all__ = (
"Context",
"ContextRepo",
diff --git a/faststream/confluent/fastapi/fastapi.py b/faststream/confluent/fastapi/fastapi.py
index dd4178543e..9df8c7ccb7 100644
--- a/faststream/confluent/fastapi/fastapi.py
+++ b/faststream/confluent/fastapi/fastapi.py
@@ -1,16 +1,12 @@
import logging
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
Literal,
Optional,
- Sequence,
- Tuple,
- Type,
TypeVar,
Union,
cast,
@@ -23,13 +19,13 @@
from fastapi.utils import generate_unique_id
from starlette.responses import JSONResponse, Response
from starlette.routing import BaseRoute
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.utils import default_filter
+from faststream._internal.constants import EMPTY
+from faststream._internal.fastapi.router import StreamRouter
from faststream.confluent.broker.broker import KafkaBroker as KB
-from faststream.types import EMPTY
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from enum import Enum
@@ -38,34 +34,33 @@
from fastapi.types import IncEx
from starlette.types import ASGIApp, Lifespan
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.confluent.config import ConfluentConfig
from faststream.confluent.message import KafkaMessage
- from faststream.confluent.publisher.asyncapi import (
- AsyncAPIBatchPublisher,
- AsyncAPIDefaultPublisher,
+ from faststream.confluent.publisher.specified import (
+ SpecificationBatchPublisher,
+ SpecificationDefaultPublisher,
)
from faststream.confluent.schemas import TopicPartition
- from faststream.confluent.subscriber.asyncapi import (
- AsyncAPIBatchSubscriber,
- AsyncAPIConcurrentDefaultSubscriber,
- AsyncAPIDefaultSubscriber,
+ from faststream.confluent.subscriber.specified import (
+ SpecificationBatchSubscriber,
+ SpecificationConcurrentDefaultSubscriber,
+ SpecificationDefaultSubscriber,
)
from faststream.security import BaseSecurity
- from faststream.types import AnyDict, LoggerProto
+ from faststream.specification.schema.extra import Tag, TagDict
Partition = TypeVar("Partition")
-class KafkaRouter(StreamRouter[Union[Message, Tuple[Message, ...]]]):
+class KafkaRouter(StreamRouter[Union[Message, tuple[Message, ...]]]):
"""A class to represent a Kafka router."""
broker_class = KB
@@ -83,7 +78,7 @@ def __init__(
This does not have to be the full node list.
It just needs to have at least one broker that will respond to a
Metadata API Request. Default port is 9092.
- """
+ """,
),
] = "localhost",
*,
@@ -104,7 +99,7 @@ def __init__(
which we force a refresh of metadata even if we haven't seen any
partition leadership changes to proactively discover any new
brokers or partitions.
- """
+ """,
),
] = 5 * 60 * 1000,
connections_max_idle_ms: Annotated[
@@ -114,7 +109,7 @@ def __init__(
Close idle connections after the number
of milliseconds specified by this config. Specifying `None` will
disable idle checks.
- """
+ """,
),
] = 9 * 60 * 1000,
client_id: Annotated[
@@ -126,7 +121,7 @@ def __init__(
server-side log entries that correspond to this client. Also
submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`
for logging with respect to consumer group administration.
- """
+ """,
),
] = SERVICE_NAME,
allow_auto_create_topics: Annotated[
@@ -134,7 +129,7 @@ def __init__(
Doc(
"""
Allow automatic topic creation on the broker when subscribing to or assigning non-existent topics.
- """
+ """,
),
] = True,
config: Annotated[
@@ -143,7 +138,7 @@ def __init__(
"""
Extra configuration for the confluent-kafka-python
producer/consumer. See `confluent_kafka.Config `_.
- """
+ """,
),
] = None,
# publisher args
@@ -175,7 +170,7 @@ def __init__(
If unset, defaults to ``acks=1``. If `enable_idempotence` is
:data:`True` defaults to ``acks=all``.
- """
+ """,
),
] = EMPTY,
compression_type: Annotated[
@@ -186,14 +181,14 @@ def __init__(
Compression is of full batches of data, so the efficacy of batching
will also impact the compression ratio (more batching means better
compression).
- """
+ """,
),
] = None,
partitioner: Annotated[
Union[
str,
Callable[
- [bytes, List[Partition], List[Partition]],
+ [bytes, list[Partition], list[Partition]],
Partition,
],
],
@@ -207,7 +202,7 @@ def __init__(
messages with the same key are assigned to the same partition.
When a key is :data:`None`, the message is delivered to a random partition
(filtered to partitions with available leaders only, if possible).
- """
+ """,
),
] = "consistent_random",
max_request_size: Annotated[
@@ -219,7 +214,7 @@ def __init__(
has its own cap on record size which may be different from this.
This setting will limit the number of record batches the producer
will send in a single request to avoid sending huge requests.
- """
+ """,
),
] = 1024 * 1024,
linger_ms: Annotated[
@@ -234,7 +229,7 @@ def __init__(
This setting accomplishes this by adding a small amount of
artificial delay; that is, if first request is processed faster,
than `linger_ms`, producer will wait ``linger_ms - process_time``.
- """
+ """,
),
] = 0,
enable_idempotence: Annotated[
@@ -247,7 +242,7 @@ def __init__(
etc., may write duplicates of the retried message in the stream.
Note that enabling idempotence acks to set to ``all``. If it is not
explicitly set by the user it will be chosen.
- """
+ """,
),
] = False,
transactional_id: Optional[str] = None,
@@ -256,7 +251,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -271,38 +266,40 @@ def __init__(
Sequence[
Union[
"BrokerMiddleware[Message]",
- "BrokerMiddleware[Tuple[Message, ...]]",
+ "BrokerMiddleware[tuple[Message, ...]]",
]
],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
- # AsyncAPI args
+ # Specification args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate Specification server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Optional[str],
- Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
+ Doc(
+ "Specification hardcoded server addresses. Use `servers` if not specified.",
+ ),
] = None,
protocol: Annotated[
Optional[str],
- Doc("AsyncAPI server protocol."),
+ Doc("Specification server protocol."),
] = None,
protocol_version: Annotated[
Optional[str],
- Doc("AsyncAPI server protocol version."),
+ Doc("Specification server protocol version."),
] = "auto",
description: Annotated[
Optional[str],
- Doc("AsyncAPI server description."),
- ] = None,
- asyncapi_tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
- Doc("AsyncAPI server tags."),
+ Doc("Specification server description."),
] = None,
+ specification_tags: Annotated[
+ Iterable[Union["Tag", "TagDict"]],
+ Doc("Specification server tags."),
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -321,13 +318,13 @@ def __init__(
bool,
Doc(
"Whether to add broker to app scope in lifespan. "
- "You should disable this option at old ASGI servers."
+ "You should disable this option at old ASGI servers.",
),
] = True,
schema_url: Annotated[
Optional[str],
Doc(
- "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all."
+ "Specification schema url. You should set this option to `None` to disable Specification routes at all.",
),
] = "/asyncapi",
# FastAPI args
@@ -336,7 +333,7 @@ def __init__(
Doc("An optional path prefix for the router."),
] = "",
tags: Annotated[
- Optional[List[Union[str, "Enum"]]],
+ Optional[list[Union[str, "Enum"]]],
Doc(
"""
A list of tags to be applied to all the *path operations* in this
@@ -346,7 +343,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
dependencies: Annotated[
@@ -358,22 +355,22 @@ def __init__(
Read more about it in the
[FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
default_response_class: Annotated[
- Type["Response"],
+ type["Response"],
Doc(
"""
The default response class to be used.
Read more in the
[FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
- """
+ """,
),
] = Default(JSONResponse),
responses: Annotated[
- Optional[Dict[Union[int, str], "AnyDict"]],
+ Optional[dict[Union[int, str], "AnyDict"]],
Doc(
"""
Additional responses to be shown in OpenAPI.
@@ -385,11 +382,11 @@ def __init__(
And in the
[FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
callbacks: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
OpenAPI callbacks that should apply to all *path operations* in this
@@ -399,11 +396,11 @@ def __init__(
Read more about it in the
[FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
- """
+ """,
),
] = None,
routes: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
**Note**: you probably shouldn't use this parameter, it is inherited
@@ -412,7 +409,7 @@ def __init__(
---
A list of routes to serve incoming HTTP and WebSocket requests.
- """
+ """,
),
deprecated(
"""
@@ -421,7 +418,7 @@ def __init__(
In FastAPI, you normally would use the *path operation methods*,
like `router.get()`, `router.post()`, etc.
- """
+ """,
),
] = None,
redirect_slashes: Annotated[
@@ -430,7 +427,7 @@ def __init__(
"""
Whether to detect and redirect slashes in URLs when the client doesn't
use the same format.
- """
+ """,
),
] = True,
default: Annotated[
@@ -439,7 +436,7 @@ def __init__(
"""
Default function handler for this router. Used to handle
404 Not Found errors.
- """
+ """,
),
] = None,
dependency_overrides_provider: Annotated[
@@ -450,18 +447,18 @@ def __init__(
You shouldn't need to use it. It normally points to the `FastAPI` app
object.
- """
+ """,
),
] = None,
route_class: Annotated[
- Type["APIRoute"],
+ type["APIRoute"],
Doc(
"""
Custom route (*path operation*) class to be used by this router.
Read more about it in the
[FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router).
- """
+ """,
),
] = APIRoute,
on_startup: Annotated[
@@ -473,7 +470,7 @@ def __init__(
You should instead use the `lifespan` handlers.
Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
on_shutdown: Annotated[
@@ -486,7 +483,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
lifespan: Annotated[
@@ -498,7 +495,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
deprecated: Annotated[
@@ -511,7 +508,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
include_in_schema: Annotated[
@@ -525,7 +522,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi).
- """
+ """,
),
] = True,
generate_unique_id_function: Annotated[
@@ -540,7 +537,7 @@ def __init__(
Read more about it in the
[FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
- """
+ """,
),
] = Default(generate_unique_id),
) -> None:
@@ -572,13 +569,13 @@ def __init__(
logger=logger,
log_level=log_level,
log_fmt=log_fmt,
- # AsyncAPI options
+ # Specification options
security=security,
protocol=protocol,
description=description,
protocol_version=protocol_version,
- asyncapi_tags=asyncapi_tags,
- asyncapi_url=asyncapi_url,
+ specification_tags=specification_tags,
+ specification_url=specification_url,
# FastAPI kwargs
prefix=prefix,
tags=tags,
@@ -616,7 +613,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -629,7 +626,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -640,7 +637,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -655,7 +652,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -665,7 +662,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -680,7 +677,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -692,7 +689,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -701,15 +698,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -720,7 +723,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -736,7 +739,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -749,7 +752,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -764,7 +767,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -780,7 +783,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -810,7 +813,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -836,48 +839,42 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -911,7 +908,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -923,7 +920,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -935,7 +932,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -947,7 +944,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -965,7 +962,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -982,7 +979,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -999,10 +996,13 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
- ) -> "AsyncAPIDefaultSubscriber": ...
+ ) -> Union[
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
+ ]: ...
@overload
def subscriber(
@@ -1021,7 +1021,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -1034,7 +1034,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -1045,7 +1045,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -1060,7 +1060,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -1070,7 +1070,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -1085,7 +1085,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1097,7 +1097,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1106,15 +1106,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1125,7 +1131,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1141,7 +1147,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -1154,7 +1160,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -1169,7 +1175,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1185,7 +1191,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -1215,7 +1221,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -1241,34 +1247,27 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- # AsyncAPI args
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -1302,7 +1301,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -1314,7 +1313,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -1326,7 +1325,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -1338,7 +1337,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -1356,7 +1355,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -1373,7 +1372,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -1390,10 +1389,10 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
- ) -> "AsyncAPIBatchSubscriber": ...
+ ) -> "SpecificationBatchSubscriber": ...
@overload
def subscriber(
@@ -1412,7 +1411,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -1425,7 +1424,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -1436,7 +1435,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -1451,7 +1450,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -1461,7 +1460,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -1476,7 +1475,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1488,7 +1487,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1497,15 +1496,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1516,7 +1521,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1532,7 +1537,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -1545,7 +1550,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -1560,7 +1565,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1576,7 +1581,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -1606,7 +1611,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -1632,48 +1637,42 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -1707,7 +1706,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -1719,7 +1718,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -1731,7 +1730,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -1743,7 +1742,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -1761,7 +1760,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -1778,7 +1777,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -1795,12 +1794,13 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
) -> Union[
- "AsyncAPIBatchSubscriber",
- "AsyncAPIDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]: ...
@override
@@ -1820,7 +1820,7 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -1833,7 +1833,7 @@ def subscriber(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -1844,7 +1844,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -1859,7 +1859,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -1869,7 +1869,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -1884,7 +1884,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1896,7 +1896,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1905,15 +1905,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1924,7 +1930,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1940,7 +1946,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -1953,7 +1959,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -1968,7 +1974,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1984,7 +1990,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -2014,7 +2020,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -2040,48 +2046,42 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -2115,7 +2115,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -2127,7 +2127,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -2139,7 +2139,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -2151,7 +2151,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -2169,7 +2169,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -2186,7 +2186,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -2203,7 +2203,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
max_workers: Annotated[
@@ -2211,9 +2211,9 @@ def subscriber(
Doc("Number of workers to process messages concurrently."),
] = 1,
) -> Union[
- "AsyncAPIBatchSubscriber",
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]:
subscriber = super().subscriber(
*topics,
@@ -2242,8 +2242,7 @@ def subscriber(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
title=title,
@@ -2260,12 +2259,10 @@ def subscriber(
)
if batch:
- return cast("AsyncAPIBatchSubscriber", subscriber)
- else:
- if max_workers > 1:
- return cast("AsyncAPIConcurrentDefaultSubscriber", subscriber)
- else:
- return cast("AsyncAPIDefaultSubscriber", subscriber)
+ return cast("SpecificationBatchSubscriber", subscriber)
+ if max_workers > 1:
+ return cast("SpecificationConcurrentDefaultSubscriber", subscriber)
+ return cast("SpecificationDefaultSubscriber", subscriber)
@overload # type: ignore[override]
def publisher(
@@ -2286,7 +2283,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2295,15 +2292,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2317,29 +2314,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIDefaultPublisher": ...
+ ) -> "SpecificationDefaultPublisher": ...
@overload
def publisher(
@@ -2360,7 +2361,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2369,15 +2370,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2391,29 +2392,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIBatchPublisher": ...
+ ) -> "SpecificationBatchPublisher": ...
@overload
def publisher(
@@ -2434,7 +2439,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2443,15 +2448,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2465,31 +2470,35 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]: ...
@override
@@ -2511,7 +2520,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2520,15 +2529,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2542,31 +2551,35 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]:
return self.broker.publisher(
topic=topic,
@@ -2577,7 +2590,7 @@ def publisher(
reply_to=reply_to,
# broker options
middlewares=middlewares,
- # AsyncAPI options
+ # Specification options
title=title,
description=description,
schema=schema,
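Note: with `filter`, `retry`, `auto_commit`, and `no_ack` removed or deprecated above, acknowledgement for these subscribers is now driven by `ack_policy` alone. A minimal migration sketch, assuming `AckPolicy` is importable from the package root and that the router touched here is the confluent FastAPI router (topic name illustrative):

    from faststream import AckPolicy
    from faststream.confluent.fastapi import KafkaRouter

    router = KafkaRouter("localhost:9092")

    # 0.5.x: auto_commit=True committed offsets in the background;
    # 0.6.x: the deprecation text above points to AckPolicy.ACK_FIRST,
    # while no_ack=True maps to AckPolicy.DO_NOTHING.
    @router.subscriber("in-topic", ack_policy=AckPolicy.ACK_FIRST)
    async def handle(msg: str) -> None:
        ...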
diff --git a/faststream/confluent/message.py b/faststream/confluent/message.py
index 83ee0e814b..8adb249384 100644
--- a/faststream/confluent/message.py
+++ b/faststream/confluent/message.py
@@ -1,6 +1,6 @@
-from typing import TYPE_CHECKING, Any, Optional, Protocol, Tuple, Union
+from typing import TYPE_CHECKING, Any, Optional, Protocol, Union
-from faststream.broker.message import StreamMessage
+from faststream.message import AckStatus, StreamMessage
if TYPE_CHECKING:
from confluent_kafka import Message
@@ -41,9 +41,9 @@ class KafkaMessage(
StreamMessage[
Union[
"Message",
- Tuple["Message", ...],
+ tuple["Message", ...],
]
- ]
+ ],
):
"""Represents a Kafka message in the FastStream framework.
@@ -59,9 +59,12 @@ def __init__(
) -> None:
super().__init__(*args, **kwargs)
- self.is_manual = is_manual
self.consumer = consumer
+ self.is_manual = is_manual
+ if not is_manual:
+ self.committed = AckStatus.ACKED
+
async def ack(self) -> None:
"""Acknowledge the Kafka message."""
if self.is_manual and not self.committed:
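In effect, auto-committed (non-manual) messages are now born acknowledged, so the `ack` guard above short-circuits for them. A hand-reduced behavioural sketch, not the full class (the string "ACKED" stands in for faststream.message.AckStatus):

    class KafkaMessageSketch:
        def __init__(self, is_manual: bool) -> None:
            self.is_manual = is_manual
            # non-manual messages are pre-marked as committed, per the diff
            self.committed = None if is_manual else "ACKED"

        async def ack(self) -> None:
            # only manual, not-yet-committed messages reach the consumer commit
            if self.is_manual and not self.committed:
                ...  # await self.consumer.commit() in the real implementation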
diff --git a/faststream/confluent/opentelemetry/middleware.py b/faststream/confluent/opentelemetry/middleware.py
index d8e5906dd3..a0c265438f 100644
--- a/faststream/confluent/opentelemetry/middleware.py
+++ b/faststream/confluent/opentelemetry/middleware.py
@@ -6,10 +6,11 @@
from faststream.confluent.opentelemetry.provider import (
telemetry_attributes_provider_factory,
)
+from faststream.confluent.response import KafkaPublishCommand
from faststream.opentelemetry.middleware import TelemetryMiddleware
-class KafkaTelemetryMiddleware(TelemetryMiddleware):
+class KafkaTelemetryMiddleware(TelemetryMiddleware[KafkaPublishCommand]):
def __init__(
self,
*,
diff --git a/faststream/confluent/opentelemetry/provider.py b/faststream/confluent/opentelemetry/provider.py
index 3c157851d9..73bd7ead3c 100644
--- a/faststream/confluent/opentelemetry/provider.py
+++ b/faststream/confluent/opentelemetry/provider.py
@@ -1,16 +1,19 @@
-from typing import TYPE_CHECKING, Sequence, Tuple, Union, cast
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union, cast
from opentelemetry.semconv.trace import SpanAttributes
-from faststream.broker.types import MsgType
+from faststream._internal.types import MsgType
from faststream.opentelemetry import TelemetrySettingsProvider
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
if TYPE_CHECKING:
from confluent_kafka import Message
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.confluent.response import KafkaPublishCommand
+ from faststream.message import StreamMessage
+ from faststream.response import PublishCommand
class BaseConfluentTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
@@ -19,33 +22,33 @@ class BaseConfluentTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType])
def __init__(self) -> None:
self.messaging_system = "kafka"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "KafkaPublishCommand",
) -> "AnyDict":
- attrs = {
+ attrs: AnyDict = {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["topic"],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
- if (partition := kwargs.get("partition")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = partition
+ if cmd.partition is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = cmd.partition
- if (key := kwargs.get("key")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key
+ if cmd.key is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = cmd.key
return attrs
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- return cast(str, kwargs["topic"])
+ return cmd.destination
class ConfluentTelemetrySettingsProvider(
- BaseConfluentTelemetrySettingsProvider["Message"]
+ BaseConfluentTelemetrySettingsProvider["Message"],
):
def get_consume_attrs_from_message(
self,
@@ -70,36 +73,34 @@ def get_consume_destination_name(
self,
msg: "StreamMessage[Message]",
) -> str:
- return cast(str, msg.raw_message.topic())
+ return cast("str", msg.raw_message.topic())
class BatchConfluentTelemetrySettingsProvider(
- BaseConfluentTelemetrySettingsProvider[Tuple["Message", ...]]
+ BaseConfluentTelemetrySettingsProvider[tuple["Message", ...]],
):
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[Tuple[Message, ...]]",
+ msg: "StreamMessage[tuple[Message, ...]]",
) -> "AnyDict":
raw_message = msg.raw_message[0]
- attrs = {
+ return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id,
SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id,
SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT: len(msg.raw_message),
SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(
- bytearray().join(cast(Sequence[bytes], msg.body))
+ bytearray().join(cast("Sequence[bytes]", msg.body)),
),
SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION: raw_message.partition(),
MESSAGING_DESTINATION_PUBLISH_NAME: raw_message.topic(),
}
- return attrs
-
def get_consume_destination_name(
self,
- msg: "StreamMessage[Tuple[Message, ...]]",
+ msg: "StreamMessage[tuple[Message, ...]]",
) -> str:
- return cast(str, msg.raw_message[0].topic())
+ return cast("str", msg.raw_message[0].topic())
def telemetry_attributes_provider_factory(
@@ -110,5 +111,4 @@ def telemetry_attributes_provider_factory(
]:
if isinstance(msg, Sequence):
return BatchConfluentTelemetrySettingsProvider()
- else:
- return ConfluentTelemetrySettingsProvider()
+ return ConfluentTelemetrySettingsProvider()
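The telemetry provider thus reads typed command attributes instead of digging through publish kwargs. A hedged sketch of the new contract (the command object below is a hypothetical stand-in exposing only the fields the provider reads; the real `KafkaPublishCommand` constructor is not shown in this diff):

    from opentelemetry.semconv.trace import SpanAttributes

    class _Cmd:  # hypothetical; mirrors the fields get_publish_attrs_from_cmd reads
        destination = "orders"
        correlation_id = "abc-123"
        partition = 0
        key = b"user-1"

    provider = ConfluentTelemetrySettingsProvider()  # imported from this module
    attrs = provider.get_publish_attrs_from_cmd(_Cmd())
    assert attrs[SpanAttributes.MESSAGING_DESTINATION_NAME] == "orders"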
diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py
index fff96a8f12..790f492364 100644
--- a/faststream/confluent/parser.py
+++ b/faststream/confluent/parser.py
@@ -1,22 +1,30 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Union
-from faststream.broker.message import decode_message, gen_cor_id
-from faststream.confluent.message import FAKE_CONSUMER, KafkaMessage
-from faststream.utils.context.repository import context
+from faststream.message import decode_message
+
+from .message import FAKE_CONSUMER, KafkaMessage
if TYPE_CHECKING:
from confluent_kafka import Message
- from faststream.broker.message import StreamMessage
- from faststream.confluent.subscriber.usecase import LogicSubscriber
- from faststream.types import DecodedMessage
+ from faststream._internal.basic_types import DecodedMessage
+
+ from .message import ConsumerProtocol, StreamMessage
class AsyncConfluentParser:
"""A class to parse Kafka messages."""
- @staticmethod
+ def __init__(self, is_manual: bool = False) -> None:
+ self.is_manual = is_manual
+ self._consumer: ConsumerProtocol = FAKE_CONSUMER
+
+ def _setup(self, consumer: "ConsumerProtocol") -> None:
+ self._consumer = consumer
+
async def parse_message(
+ self,
message: "Message",
) -> KafkaMessage:
"""Parses a Kafka message."""
@@ -26,27 +34,25 @@ async def parse_message(
offset = message.offset()
_, timestamp = message.timestamp()
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
-
return KafkaMessage(
body=body,
headers=headers,
reply_to=headers.get("reply_to", ""),
content_type=headers.get("content-type"),
message_id=f"{offset}-{timestamp}",
- correlation_id=headers.get("correlation_id", gen_cor_id()),
+ correlation_id=headers.get("correlation_id"),
raw_message=message,
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
- is_manual=getattr(handler, "is_manual", True),
+ consumer=self._consumer,
+ is_manual=self.is_manual,
)
- @staticmethod
async def parse_message_batch(
- message: Tuple["Message", ...],
+ self,
+ message: tuple["Message", ...],
) -> KafkaMessage:
"""Parses a batch of messages from a Kafka consumer."""
- body: List[Any] = []
- batch_headers: List[Dict[str, str]] = []
+ body: list[Any] = []
+ batch_headers: list[dict[str, str]] = []
first = message[0]
last = message[-1]
@@ -59,8 +65,6 @@ async def parse_message_batch(
_, first_timestamp = first.timestamp()
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
-
return KafkaMessage(
body=body,
headers=headers,
@@ -68,29 +72,28 @@ async def parse_message_batch(
reply_to=headers.get("reply_to", ""),
content_type=headers.get("content-type"),
message_id=f"{first.offset()}-{last.offset()}-{first_timestamp}",
- correlation_id=headers.get("correlation_id", gen_cor_id()),
+ correlation_id=headers.get("correlation_id"),
raw_message=message,
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
- is_manual=getattr(handler, "is_manual", True),
+ consumer=self._consumer,
+ is_manual=self.is_manual,
)
- @staticmethod
async def decode_message(
+ self,
msg: "StreamMessage[Message]",
) -> "DecodedMessage":
"""Decodes a message."""
return decode_message(msg)
- @classmethod
async def decode_message_batch(
- cls,
- msg: "StreamMessage[Tuple[Message, ...]]",
+ self,
+ msg: "StreamMessage[tuple[Message, ...]]",
) -> "DecodedMessage":
"""Decode a batch of messages."""
- return [decode_message(await cls.parse_message(m)) for m in msg.raw_message]
+ return [decode_message(await self.parse_message(m)) for m in msg.raw_message]
def _parse_msg_headers(
- headers: Sequence[Tuple[str, Union[bytes, str]]],
-) -> Dict[str, str]:
+ headers: Sequence[tuple[str, Union[bytes, str]]],
+) -> dict[str, str]:
return {i: j if isinstance(j, str) else j.decode() for i, j in headers}
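The parser is now a stateful, per-subscriber instance wired to its consumer explicitly, replacing the old static methods that pulled the handler out of the context repository. A minimal wiring sketch using only names from this diff (the consumer and raw message are stand-ins):

    parser = AsyncConfluentParser(is_manual=True)    # one instance per subscriber
    parser._setup(consumer)                          # subscriber injects its consumer once created
    kafka_msg = await parser.parse_message(raw_msg)  # messages carry that consumer directly
    assert kafka_msg.consumer is consumer            # no context-based FAKE_CONSUMER fallback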
diff --git a/faststream/confluent/prometheus/middleware.py b/faststream/confluent/prometheus/middleware.py
index 2ac27dacea..4f737d181b 100644
--- a/faststream/confluent/prometheus/middleware.py
+++ b/faststream/confluent/prometheus/middleware.py
@@ -1,14 +1,23 @@
-from typing import TYPE_CHECKING, Optional, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional, Union
+from confluent_kafka import Message
+
+from faststream._internal.constants import EMPTY
from faststream.confluent.prometheus.provider import settings_provider_factory
-from faststream.prometheus.middleware import BasePrometheusMiddleware
-from faststream.types import EMPTY
+from faststream.confluent.response import KafkaPublishCommand
+from faststream.prometheus.middleware import PrometheusMiddleware
if TYPE_CHECKING:
from prometheus_client import CollectorRegistry
-class KafkaPrometheusMiddleware(BasePrometheusMiddleware):
+class KafkaPrometheusMiddleware(
+ PrometheusMiddleware[
+ KafkaPublishCommand,
+ Union[Message, Sequence[Message]],
+ ]
+):
def __init__(
self,
*,
@@ -18,7 +27,7 @@ def __init__(
received_messages_size_buckets: Optional[Sequence[float]] = None,
) -> None:
super().__init__(
- settings_provider_factory=settings_provider_factory,
+ settings_provider_factory=settings_provider_factory, # type: ignore[arg-type]
registry=registry,
app_name=app_name,
metrics_prefix=metrics_prefix,
diff --git a/faststream/confluent/prometheus/provider.py b/faststream/confluent/prometheus/provider.py
index bdcb26728a..c5b2814687 100644
--- a/faststream/confluent/prometheus/provider.py
+++ b/faststream/confluent/prometheus/provider.py
@@ -1,6 +1,7 @@
-from typing import TYPE_CHECKING, Sequence, Tuple, Union, cast
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union, cast
-from faststream.broker.message import MsgType, StreamMessage
+from faststream.message.message import MsgType, StreamMessage
from faststream.prometheus import (
ConsumeAttrs,
MetricsSettingsProvider,
@@ -9,7 +10,7 @@
if TYPE_CHECKING:
from confluent_kafka import Message
- from faststream.types import AnyDict
+ from faststream.response import PublishCommand
class BaseConfluentMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
@@ -18,11 +19,11 @@ class BaseConfluentMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "kafka"
- def get_publish_destination_name_from_kwargs(
+ def get_publish_destination_name_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- return cast(str, kwargs["topic"])
+ return cmd.destination
class ConfluentMetricsSettingsProvider(BaseConfluentMetricsSettingsProvider["Message"]):
@@ -31,23 +32,23 @@ def get_consume_attrs_from_message(
msg: "StreamMessage[Message]",
) -> ConsumeAttrs:
return {
- "destination_name": cast(str, msg.raw_message.topic()),
+ "destination_name": cast("str", msg.raw_message.topic()),
"message_size": len(msg.body),
"messages_count": 1,
}
class BatchConfluentMetricsSettingsProvider(
- BaseConfluentMetricsSettingsProvider[Tuple["Message", ...]]
+ BaseConfluentMetricsSettingsProvider[tuple["Message", ...]]
):
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[Tuple[Message, ...]]",
+ msg: "StreamMessage[tuple[Message, ...]]",
) -> ConsumeAttrs:
raw_message = msg.raw_message[0]
return {
- "destination_name": cast(str, raw_message.topic()),
- "message_size": len(bytearray().join(cast(Sequence[bytes], msg.body))),
+ "destination_name": cast("str", raw_message.topic()),
+ "message_size": len(bytearray().join(cast("Sequence[bytes]", msg.body))),
"messages_count": len(msg.raw_message),
}
@@ -60,5 +61,4 @@ def settings_provider_factory(
]:
if isinstance(msg, Sequence):
return BatchConfluentMetricsSettingsProvider()
- else:
- return ConfluentMetricsSettingsProvider()
+ return ConfluentMetricsSettingsProvider()
diff --git a/faststream/confluent/publisher/asyncapi.py b/faststream/confluent/publisher/asyncapi.py
deleted file mode 100644
index 552b792601..0000000000
--- a/faststream/confluent/publisher/asyncapi.py
+++ /dev/null
@@ -1,204 +0,0 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- Literal,
- Optional,
- Sequence,
- Tuple,
- Union,
- cast,
- overload,
-)
-
-from typing_extensions import override
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import kafka
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.broker.types import MsgType
-from faststream.confluent.publisher.usecase import (
- BatchPublisher,
- DefaultPublisher,
- LogicPublisher,
-)
-from faststream.exceptions import SetupError
-
-if TYPE_CHECKING:
- from confluent_kafka import Message as ConfluentMsg
-
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
-
-
-class AsyncAPIPublisher(LogicPublisher[MsgType]):
- """A class representing a publisher."""
-
- def get_name(self) -> str:
- return f"{self.topic}:Publisher"
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- publish=Operation(
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads, "Publisher"),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(kafka=kafka.ChannelBinding(topic=self.topic)),
- )
- }
-
- @overload # type: ignore[override]
- @staticmethod
- def create(
- *,
- batch: Literal[False],
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[ConfluentMsg]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "AsyncAPIDefaultPublisher": ...
-
- @overload
- @staticmethod
- def create(
- *,
- batch: Literal[True],
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "AsyncAPIBatchPublisher": ...
-
- @overload
- @staticmethod
- def create(
- *,
- batch: bool,
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Union[
- Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
- Sequence["BrokerMiddleware[ConfluentMsg]"],
- ],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
- ]: ...
-
- @override
- @staticmethod
- def create(
- *,
- batch: bool,
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Union[
- Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
- Sequence["BrokerMiddleware[ConfluentMsg]"],
- ],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
- ]:
- if batch:
- if key:
- raise SetupError("You can't setup `key` with batch publisher")
-
- return AsyncAPIBatchPublisher(
- topic=topic,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=cast(
- Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
- broker_middlewares,
- ),
- middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
- else:
- return AsyncAPIDefaultPublisher(
- key=key,
- # basic args
- topic=topic,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=cast(
- Sequence["BrokerMiddleware[ConfluentMsg]"], broker_middlewares
- ),
- middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
-
-class AsyncAPIBatchPublisher(
- BatchPublisher,
- AsyncAPIPublisher[Tuple["ConfluentMsg", ...]],
-):
- pass
-
-
-class AsyncAPIDefaultPublisher(
- DefaultPublisher,
- AsyncAPIPublisher["ConfluentMsg"],
-):
- pass
diff --git a/faststream/confluent/publisher/factory.py b/faststream/confluent/publisher/factory.py
new file mode 100644
index 0000000000..e5e68dc049
--- /dev/null
+++ b/faststream/confluent/publisher/factory.py
@@ -0,0 +1,148 @@
+from collections.abc import Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Literal,
+ Optional,
+ Union,
+ cast,
+ overload,
+)
+
+from faststream.exceptions import SetupError
+
+from .specified import SpecificationBatchPublisher, SpecificationDefaultPublisher
+
+if TYPE_CHECKING:
+ from confluent_kafka import Message as ConfluentMsg
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[True],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Sequence["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationBatchPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[False],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Sequence["BrokerMiddleware[ConfluentMsg]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationDefaultPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Union[
+ Sequence["BrokerMiddleware[ConfluentMsg]"],
+ Sequence["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
+ ],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]: ...
+
+
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Union[
+ Sequence["BrokerMiddleware[ConfluentMsg]"],
+ Sequence["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
+ ],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]:
+ if batch:
+ if key:
+ msg = "You can't setup `key` with batch publisher"
+ raise SetupError(msg)
+
+ return SpecificationBatchPublisher(
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=cast(
+ "Sequence[BrokerMiddleware[tuple[ConfluentMsg, ...]]]",
+ broker_middlewares,
+ ),
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ return SpecificationDefaultPublisher(
+ key=key,
+ # basic args
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=cast(
+ "Sequence[BrokerMiddleware[ConfluentMsg]]",
+ broker_middlewares,
+ ),
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
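`create_publisher` replaces the deleted `AsyncAPIPublisher.create` classmethod with a module-level factory. A hedged usage sketch (empty middleware sequences, default-publisher branch):

    pub = create_publisher(
        batch=False,
        key=None,
        topic="orders",
        partition=None,
        headers=None,
        reply_to="",
        broker_middlewares=(),
        middlewares=(),
        schema_=None,
        title_=None,
        description_=None,
        include_in_schema=True,
    )  # returns a SpecificationDefaultPublisher

    # As in the old classmethod, batch=True combined with a key raises SetupError.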
diff --git a/faststream/confluent/publisher/fake.py b/faststream/confluent/publisher/fake.py
new file mode 100644
index 0000000000..6e302ccb51
--- /dev/null
+++ b/faststream/confluent/publisher/fake.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.confluent.response import KafkaPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class KafkaFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ topic: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.topic = topic
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "KafkaPublishCommand"]
+ ) -> "KafkaPublishCommand":
+ cmd = super().patch_command(cmd)
+ real_cmd = KafkaPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.topic
+ return real_cmd
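`patch_command` upgrades whatever command arrives into a `KafkaPublishCommand` bound to the reply topic. A short behavioural sketch, assuming `KafkaPublishCommand.from_cmd` copies the source command as the diff implies:

    fake = KafkaFakePublisher(producer=producer, topic="replies")
    patched = fake.patch_command(incoming_cmd)
    assert patched.destination == "replies"  # reply is always routed to the fixed topic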
diff --git a/faststream/confluent/publisher/producer.py b/faststream/confluent/publisher/producer.py
index 6e3d25259e..af725bf72d 100644
--- a/faststream/confluent/publisher/producer.py
+++ b/faststream/confluent/publisher/producer.py
@@ -1,17 +1,23 @@
-from typing import TYPE_CHECKING, Any, Dict, Optional
+from typing import TYPE_CHECKING, NoReturn, Optional, Union
from typing_extensions import override
-from faststream.broker.message import encode_message
-from faststream.broker.publisher.proto import ProducerProto
-from faststream.broker.utils import resolve_custom_func
+from faststream._internal.publisher.proto import ProducerProto
+from faststream._internal.subscriber.utils import resolve_custom_func
from faststream.confluent.parser import AsyncConfluentParser
-from faststream.exceptions import OperationForbiddenError
+from faststream.exceptions import FeatureNotSupportedException
+from faststream.message import encode_message
+
+from .state import EmptyProducerState, ProducerState, RealProducer
if TYPE_CHECKING:
- from faststream.broker.types import CustomCallable
+ import asyncio
+
+ from confluent_kafka import Message
+
+ from faststream._internal.types import CustomCallable
from faststream.confluent.client import AsyncConfluentProducer
- from faststream.types import SendableMessage
+ from faststream.confluent.response import KafkaPublishCommand
class AsyncConfluentFastProducer(ProducerProto):
@@ -19,82 +25,63 @@ class AsyncConfluentFastProducer(ProducerProto):
def __init__(
self,
- producer: "AsyncConfluentProducer",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._producer = producer
+ self._producer: ProducerState = EmptyProducerState()
# NOTE: register default parser to be compatible with request
- default = AsyncConfluentParser
+ default = AsyncConfluentParser()
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ def connect(self, producer: "AsyncConfluentProducer") -> None:
+ self._producer = RealProducer(producer)
+
+ async def disconnect(self) -> None:
+ await self._producer.stop()
+ self._producer = EmptyProducerState()
+
+ def __bool__(self) -> bool:
+ return bool(self._producer)
+
+ async def ping(self, timeout: float) -> bool:
+ return await self._producer.ping(timeout=timeout)
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- *,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- correlation_id: str = "",
- reply_to: str = "",
- no_confirm: bool = False,
- ) -> None:
+ cmd: "KafkaPublishCommand",
+ ) -> "Union[asyncio.Future[Optional[Message]], Optional[Message]]":
"""Publish a message to a topic."""
- message, content_type = encode_message(message)
+ message, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- await self._producer.send(
- topic=topic,
+ return await self._producer.producer.send(
+ topic=cmd.destination,
value=message,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
headers=[(i, (j or "").encode()) for i, j in headers_to_send.items()],
- no_confirm=no_confirm,
+ no_confirm=cmd.no_confirm,
)
- async def stop(self) -> None:
- await self._producer.stop()
-
- async def publish_batch(
+ @override
+ async def publish_batch( # type: ignore[override]
self,
- *msgs: "SendableMessage",
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- correlation_id: str = "",
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a batch of messages to a topic."""
- batch = self._producer.create_batch()
+ batch = self._producer.producer.create_batch()
- headers_to_send = {"correlation_id": correlation_id, **(headers or {})}
+ headers_to_send = cmd.headers_to_publish()
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- for msg in msgs:
+ for msg in cmd.batch_bodies:
message, content_type = encode_message(msg)
if content_type:
@@ -108,19 +95,21 @@ async def publish_batch(
batch.append(
key=None,
value=message,
- timestamp=timestamp_ms,
+ timestamp=cmd.timestamp_ms,
headers=[(i, j.encode()) for i, j in final_headers.items()],
)
- await self._producer.send_batch(
+ await self._producer.producer.send_batch(
batch,
- topic,
- partition=partition,
- no_confirm=no_confirm,
+ cmd.destination,
+ partition=cmd.partition,
+ no_confirm=cmd.no_confirm,
)
@override
- async def request(self, *args: Any, **kwargs: Any) -> Optional[Any]:
- raise OperationForbiddenError(
- "Kafka doesn't support `request` method without test client."
- )
+ async def request( # type: ignore[override]
+ self,
+ cmd: "KafkaPublishCommand",
+ ) -> NoReturn:
+ msg = "Kafka doesn't support `request` method without test client."
+ raise FeatureNotSupportedException(msg)
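
Note: the header-encoding comprehension above exists because Kafka transports headers as `(str, bytes)` pairs. A small self-contained sketch of that exact expression, substituting `b""` for missing values:

```python
# Kafka headers are (str, bytes) pairs; None values are sent as empty bytes,
# mirroring `(j or "").encode()` in the producer above.
headers_to_send = {
    "content-type": "application/json",
    "correlation_id": "abc-123",
    "reply_to": None,  # unset values must still encode cleanly
}
encoded = [(k, (v or "").encode()) for k, v in headers_to_send.items()]
assert encoded == [
    ("content-type", b"application/json"),
    ("correlation_id", b"abc-123"),
    ("reply_to", b""),
]
```
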
diff --git a/faststream/confluent/publisher/specified.py b/faststream/confluent/publisher/specified.py
new file mode 100644
index 0000000000..69b4ca499b
--- /dev/null
+++ b/faststream/confluent/publisher/specified.py
@@ -0,0 +1,43 @@
+from faststream._internal.publisher.specified import (
+ SpecificationPublisher as SpecificationPublisherMixin,
+)
+from faststream.confluent.publisher.usecase import BatchPublisher, DefaultPublisher
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, PublisherSpec
+from faststream.specification.schema.bindings import ChannelBinding, kafka
+
+
+class SpecificationPublisher(SpecificationPublisherMixin):
+ """A class representing a publisher."""
+
+ def get_default_name(self) -> str:
+ return f"{self.topic}:Publisher"
+
+ def get_schema(self) -> dict[str, PublisherSpec]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: PublisherSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ kafka=kafka.ChannelBinding(
+ topic=self.topic, partitions=None, replicas=None
+ )
+ ),
+ ),
+ }
+
+
+class SpecificationBatchPublisher(SpecificationPublisher, BatchPublisher):
+ pass
+
+
+class SpecificationDefaultPublisher(SpecificationPublisher, DefaultPublisher):
+ pass
diff --git a/faststream/confluent/publisher/state.py b/faststream/confluent/publisher/state.py
new file mode 100644
index 0000000000..13f658903a
--- /dev/null
+++ b/faststream/confluent/publisher/state.py
@@ -0,0 +1,50 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from faststream.confluent.client import AsyncConfluentProducer
+
+
+class ProducerState(Protocol):
+ producer: "AsyncConfluentProducer"
+
+ def __bool__(self) -> bool: ...
+
+ async def ping(self, timeout: float) -> bool: ...
+
+ async def stop(self) -> None: ...
+
+
+class EmptyProducerState(ProducerState):
+ __slots__ = ()
+
+ @property
+ def producer(self) -> "AsyncConfluentProducer":
+ msg = "You can't use producer here, please connect broker first."
+ raise IncorrectState(msg)
+
+ async def ping(self, timeout: float) -> bool:
+ return False
+
+ def __bool__(self) -> bool:
+ return False
+
+ async def stop(self) -> None:
+ pass
+
+
+class RealProducer(ProducerState):
+ __slots__ = ("producer",)
+
+ def __init__(self, producer: "AsyncConfluentProducer") -> None:
+ self.producer = producer
+
+ def __bool__(self) -> bool:
+ return True
+
+ async def stop(self) -> None:
+ await self.producer.stop()
+
+ async def ping(self, timeout: float) -> bool:
+ return await self.producer.ping(timeout=timeout)
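
Note: `EmptyProducerState` / `RealProducer` form a Null Object pattern, so the producer slot is never `Optional` and misuse before `connect()` fails with a clear setup error. A standalone sketch of the idea, with hypothetical minimal classes:

```python
class IncorrectState(Exception):
    """Raised when the producer is used before the broker connects."""


class EmptyProducerState:
    """Null Object: reports 'not connected' and fails loudly on real use."""

    def __bool__(self) -> bool:
        return False

    @property
    def producer(self) -> object:
        msg = "You can't use the producer here, please connect the broker first."
        raise IncorrectState(msg)


class RealProducerState:
    def __init__(self, producer: object) -> None:
        self.producer = producer

    def __bool__(self) -> bool:
        return True


state = EmptyProducerState()
assert not state  # truthiness doubles as a connection check
try:
    _ = state.producer
except IncorrectState:
    pass  # a clear setup error instead of an AttributeError on None
state = RealProducerState(producer=object())  # what connect() swaps in
assert state
```
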
diff --git a/faststream/confluent/publisher/usecase.py b/faststream/confluent/publisher/usecase.py
index 3601b2a018..e8fe4d554e 100644
--- a/faststream/confluent/publisher/usecase.py
+++ b/faststream/confluent/publisher/usecase.py
@@ -1,79 +1,53 @@
-from contextlib import AsyncExitStack
-from functools import partial
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Dict,
- Iterable,
- Optional,
- Sequence,
- Tuple,
- Union,
- cast,
-)
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Optional, Union
from confluent_kafka import Message
from typing_extensions import override
-from faststream.broker.message import SourceType, gen_cor_id
-from faststream.broker.publisher.usecase import PublisherUsecase
-from faststream.broker.types import MsgType
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.utils.functions import return_input
+from faststream._internal.publisher.usecase import PublisherUsecase
+from faststream._internal.types import MsgType
+from faststream.confluent.response import KafkaPublishCommand
+from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
+ import asyncio
+
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.confluent.message import KafkaMessage
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
- from faststream.types import AnyDict, AsyncFunc, SendableMessage
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[MsgType]):
"""A class to publish messages to a Kafka topic."""
- _producer: Optional["AsyncConfluentFastProducer"]
+ _producer: "AsyncConfluentFastProducer"
def __init__(
self,
*,
topic: str,
partition: Optional[int],
- headers: Optional[Dict[str, str]],
+ headers: Optional[dict[str, str]],
reply_to: Optional[str],
# Publisher args
broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.topic = topic
self.partition = partition
self.reply_to = reply_to
- self.headers = headers
-
- self._producer = None
-
- def __hash__(self) -> int:
- return hash(self.topic)
+ self.headers = headers or {}
def add_prefix(self, prefix: str) -> None:
- self.topic = "".join((prefix, self.topic))
+ self.topic = f"{prefix}{self.topic}"
@override
async def request(
@@ -84,70 +58,38 @@ async def request(
key: Optional[bytes] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
timeout: float = 0.5,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> "KafkaMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "key": key,
- # basic args
- "timeout": timeout,
- "timestamp_ms": timestamp_ms,
- "topic": topic or self.topic,
- "partition": partition or self.partition,
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- }
-
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(message, **kwargs)
-
- async with AsyncExitStack() as stack:
- return_msg: Callable[[KafkaMessage], Awaitable[KafkaMessage]] = return_input
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
+ cmd = KafkaPublishCommand(
+ message,
+ topic=topic or self.topic,
+ key=key,
+ partition=partition or self.partition,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ timeout=timeout,
+ _publish_type=PublishType.REQUEST,
+ )
- raise AssertionError("unreachable")
+ msg: KafkaMessage = await self._basic_request(cmd)
+ return msg
class DefaultPublisher(LogicPublisher[Message]):
def __init__(
self,
*,
- key: Optional[bytes],
+ key: Union[bytes, str, None],
topic: str,
partition: Optional[int],
- headers: Optional[Dict[str, str]],
+ headers: Optional[dict[str, str]],
reply_to: Optional[str],
# Publisher args
broker_middlewares: Sequence["BrokerMiddleware[Message]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
topic=topic,
@@ -157,11 +99,6 @@ def __init__(
# publisher args
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.key = key
@@ -175,39 +112,43 @@ async def publish(
key: Optional[bytes] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
reply_to: str = "",
no_confirm: bool = False,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
- ) -> Optional[Any]:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ ) -> "asyncio.Future":
+ cmd = KafkaPublishCommand(
+ message,
+ topic=topic or self.topic,
+ key=key or self.key,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
+ )
+ return await self._basic_publish(cmd, _extra_middlewares=())
- kwargs: AnyDict = {
- "key": key or self.key,
- # basic args
- "no_confirm": no_confirm,
- "topic": topic or self.topic,
- "partition": partition or self.partition,
- "timestamp_ms": timestamp_ms,
- "headers": headers or self.headers,
- "reply_to": reply_to or self.reply_to,
- "correlation_id": correlation_id or gen_cor_id(),
- }
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd)
- call: AsyncFunc = self._producer.publish
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
+ cmd.partition = cmd.partition or self.partition
+ cmd.key = cmd.key or self.key
- return await call(message, **kwargs)
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -218,14 +159,12 @@ async def request(
key: Optional[bytes] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
timeout: float = 0.5,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> "KafkaMessage":
return await super().request(
- message=message,
+ message,
topic=topic,
key=key or self.key,
partition=partition,
@@ -233,53 +172,51 @@ async def request(
headers=headers,
correlation_id=correlation_id,
timeout=timeout,
- _extra_middlewares=_extra_middlewares,
)
-class BatchPublisher(LogicPublisher[Tuple[Message, ...]]):
+class BatchPublisher(LogicPublisher[tuple[Message, ...]]):
@override
async def publish(
self,
- message: Union["SendableMessage", Iterable["SendableMessage"]],
- *extra_messages: "SendableMessage",
+ *messages: "SendableMessage",
topic: str = "",
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
reply_to: str = "",
no_confirm: bool = False,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ cmd = KafkaPublishCommand(
+ *messages,
+ key=None,
+ topic=topic or self.topic,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
+ )
- msgs: Iterable[SendableMessage]
- if extra_messages:
- msgs = (cast("SendableMessage", message), *extra_messages)
- else:
- msgs = cast(Iterable["SendableMessage"], message)
+ return await self._basic_publish_batch(cmd, _extra_middlewares=())
- kwargs: AnyDict = {
- "topic": topic or self.topic,
- "no_confirm": no_confirm,
- "partition": partition or self.partition,
- "timestamp_ms": timestamp_ms,
- "headers": headers or self.headers,
- "reply_to": reply_to or self.reply_to,
- "correlation_id": correlation_id or gen_cor_id(),
- }
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd, batch=True)
- call: AsyncFunc = self._producer.publish_batch
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
+ cmd.partition = cmd.partition or self.partition
- await call(*msgs, **kwargs)
+ await self._basic_publish_batch(cmd, _extra_middlewares=_extra_middlewares)
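
Note: every publish path above merges headers as `self.headers | (headers or {})`. The operand order matters, since the right operand of `|` wins on key conflicts, so call-site headers override the publisher's defaults:

```python
# Per-call headers (right operand of `|`) override the publisher's defaults.
publisher_defaults = {"source": "svc-a", "trace": "off"}
per_call = {"trace": "on"}
assert publisher_defaults | per_call == {"source": "svc-a", "trace": "on"}
```
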
diff --git a/faststream/confluent/response.py b/faststream/confluent/response.py
index da420aa286..454b0ca2cd 100644
--- a/faststream/confluent/response.py
+++ b/faststream/confluent/response.py
@@ -1,11 +1,12 @@
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import override
-from faststream.broker.response import Response
+from faststream.response.publish_type import PublishType
+from faststream.response.response import BatchPublishCommand, PublishCommand, Response
if TYPE_CHECKING:
- from faststream.types import AnyDict, SendableMessage
+ from faststream._internal.basic_types import AnyDict, SendableMessage
class KafkaResponse(Response):
@@ -28,10 +29,84 @@ def __init__(
self.key = key
@override
- def as_publish_kwargs(self) -> "AnyDict":
- publish_options = {
- **super().as_publish_kwargs(),
- "timestamp_ms": self.timestamp_ms,
- "key": self.key,
- }
- return publish_options
+ def as_publish_command(self) -> "KafkaPublishCommand":
+ return KafkaPublishCommand(
+ self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.PUBLISH,
+ # Kafka specific
+ topic="",
+ key=self.key,
+ timestamp_ms=self.timestamp_ms,
+ )
+
+
+class KafkaPublishCommand(BatchPublishCommand):
+ def __init__(
+ self,
+ message: "SendableMessage",
+ /,
+ *messages: "SendableMessage",
+ topic: str,
+ _publish_type: PublishType,
+ key: Union[bytes, str, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: bool = False,
+ timeout: float = 0.5,
+ ) -> None:
+ super().__init__(
+ message,
+ *messages,
+ destination=topic,
+ reply_to=reply_to,
+ correlation_id=correlation_id,
+ headers=headers,
+ _publish_type=_publish_type,
+ )
+
+ self.key = key
+ self.partition = partition
+ self.timestamp_ms = timestamp_ms
+ self.no_confirm = no_confirm
+
+ # request option
+ self.timeout = timeout
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ batch: bool = False,
+ ) -> "KafkaPublishCommand":
+ if isinstance(cmd, KafkaPublishCommand):
+ # NOTE: should probably return a copy.
+ return cmd
+
+ body, extra_bodies = cls._parse_bodies(cmd.body, batch=batch)
+
+ return cls(
+ body,
+ *extra_bodies,
+ topic=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
+
+ def headers_to_publish(self) -> dict[str, str]:
+ headers = {}
+
+ if self.correlation_id:
+ headers["correlation_id"] = self.correlation_id
+
+ if self.reply_to:
+ headers["reply_to"] = self.reply_to
+
+ return headers | self.headers
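
Note: `headers_to_publish` merges the other way around, `generated | self.headers`, so explicit headers set on the command override the framework-generated `correlation_id` / `reply_to` entries. A quick self-contained check:

```python
# User-supplied command headers (right operand) win over framework-generated ones.
generated = {"correlation_id": "auto-0001", "reply_to": "replies"}
user_headers = {"correlation_id": "trace-42"}
assert generated | user_headers == {
    "correlation_id": "trace-42",
    "reply_to": "replies",
}
```
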
diff --git a/faststream/confluent/router.py b/faststream/confluent/router.py
index 14dcb9b943..dd513e7ec5 100644
--- a/faststream/confluent/router.py
+++ b/faststream/confluent/router.py
@@ -1,37 +1,39 @@
+from collections.abc import Awaitable, Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
- Awaitable,
Callable,
- Dict,
- Iterable,
Literal,
Optional,
- Sequence,
- Tuple,
Union,
)
-from typing_extensions import Annotated, Doc, deprecated
+from typing_extensions import Doc, deprecated
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream._internal.constants import EMPTY
from faststream.confluent.broker.registrator import KafkaRegistrator
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from confluent_kafka import Message
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.confluent.message import KafkaMessage
from faststream.confluent.schemas import TopicPartition
- from faststream.types import SendableMessage
class KafkaPublisher(ArgsContainer):
@@ -58,7 +60,7 @@ def __init__(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -67,15 +69,15 @@ def __init__(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -89,6 +91,10 @@ def __init__(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI args
@@ -104,7 +110,7 @@ def __init__(
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
@@ -159,7 +165,7 @@ def __init__(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
group_instance_id: Annotated[
@@ -172,7 +178,7 @@ def __init__(
partition assignment, rebalances). This can be used to assign
partitions to specific consumers, rather than letting the group
assign partitions based on consumer metadata.
- """
+ """,
),
] = None,
fetch_max_wait_ms: Annotated[
@@ -183,7 +189,7 @@ def __init__(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
fetch_max_bytes: Annotated[
@@ -198,7 +204,7 @@ def __init__(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -208,7 +214,7 @@ def __init__(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
max_partition_fetch_bytes: Annotated[
@@ -223,7 +229,7 @@ def __init__(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -235,7 +241,7 @@ def __init__(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -244,15 +250,21 @@ def __init__(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -263,7 +275,7 @@ def __init__(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -279,7 +291,7 @@ def __init__(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = ("roundrobin",),
max_poll_interval_ms: Annotated[
@@ -292,7 +304,7 @@ def __init__(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
session_timeout_ms: Annotated[
@@ -307,7 +319,7 @@ def __init__(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -323,7 +335,7 @@ def __init__(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
isolation_level: Annotated[
@@ -353,7 +365,7 @@ def __init__(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch: Annotated[
@@ -366,8 +378,8 @@ def __init__(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -379,31 +391,25 @@ def __init__(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI args
@@ -415,7 +421,7 @@ def __init__(
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
@@ -456,14 +462,12 @@ def __init__(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
no_reply=no_reply,
# AsyncAPI args
title=title,
description=description,
include_in_schema=include_in_schema,
- # FastDepends args
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
)
@@ -473,7 +477,7 @@ class KafkaRouter(
BrokerRouter[
Union[
"Message",
- Tuple["Message", ...],
+ tuple["Message", ...],
]
],
):
@@ -491,20 +495,24 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers."
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
Sequence[
Union[
"BrokerMiddleware[Message]",
- "BrokerMiddleware[Tuple[Message, ...]]",
+ "BrokerMiddleware[tuple[Message, ...]]",
]
],
Doc("Router middlewares to apply to all routers' publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[Message]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc("Parser to map original **Message** object to FastStream one."),
@@ -524,6 +532,7 @@ def __init__(
prefix=prefix,
dependencies=dependencies,
middlewares=middlewares, # type: ignore[arg-type]
+ routers=routers,
parser=parser,
decoder=decoder,
include_in_schema=include_in_schema,
diff --git a/faststream/confluent/schemas/params.py b/faststream/confluent/schemas/params.py
index 91cb83591a..f7b4c2bcdb 100644
--- a/faststream/confluent/schemas/params.py
+++ b/faststream/confluent/schemas/params.py
@@ -1,4 +1,4 @@
-from typing import List, Literal, Union
+from typing import Literal, Union
from typing_extensions import TypedDict
@@ -6,7 +6,7 @@
class ConsumerConnectionParams(TypedDict, total=False):
"""A class to represent the connection parameters for a consumer."""
- bootstrap_servers: Union[str, List[str]]
+ bootstrap_servers: Union[str, list[str]]
client_id: str
retry_backoff_ms: int
metadata_max_age_ms: int
diff --git a/faststream/confluent/security.py b/faststream/confluent/security.py
index 4e3e0c0e48..044402bb24 100644
--- a/faststream/confluent/security.py
+++ b/faststream/confluent/security.py
@@ -12,31 +12,32 @@
)
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
def parse_security(security: Optional[BaseSecurity]) -> "AnyDict":
if security and isinstance(security.ssl_context, ssl.SSLContext):
+ msg = "ssl_context in not supported by confluent-kafka-python, please use config instead."
raise SetupError(
- "ssl_context in not supported by confluent-kafka-python, please use config instead."
+ msg,
)
if security is None:
return {}
- elif isinstance(security, SASLPlaintext):
+ if isinstance(security, SASLPlaintext):
return _parse_sasl_plaintext(security)
- elif isinstance(security, SASLScram256):
+ if isinstance(security, SASLScram256):
return _parse_sasl_scram256(security)
- elif isinstance(security, SASLScram512):
+ if isinstance(security, SASLScram512):
return _parse_sasl_scram512(security)
- elif isinstance(security, SASLOAuthBearer):
+ if isinstance(security, SASLOAuthBearer):
return _parse_sasl_oauthbearer(security)
- elif isinstance(security, SASLGSSAPI):
+ if isinstance(security, SASLGSSAPI):
return _parse_sasl_gssapi(security)
- elif isinstance(security, BaseSecurity):
+ if isinstance(security, BaseSecurity):
return _parse_base_security(security)
- else:
- raise NotImplementedError(f"KafkaBroker does not support `{type(security)}`.")
+ msg = f"KafkaBroker does not support `{type(security)}`."
+ raise NotImplementedError(msg)
def _parse_base_security(security: BaseSecurity) -> "AnyDict":
diff --git a/faststream/confluent/subscriber/asyncapi.py b/faststream/confluent/subscriber/asyncapi.py
deleted file mode 100644
index bb0d592f76..0000000000
--- a/faststream/confluent/subscriber/asyncapi.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Dict,
- Tuple,
-)
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import kafka
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.broker.types import MsgType
-from faststream.confluent.subscriber.usecase import (
- BatchSubscriber,
- ConcurrentDefaultSubscriber,
- DefaultSubscriber,
- LogicSubscriber,
-)
-
-if TYPE_CHECKING:
- from confluent_kafka import Message as ConfluentMsg
-
-
-class AsyncAPISubscriber(LogicSubscriber[MsgType]):
- """A class to handle logic and async API operations."""
-
- def get_name(self) -> str:
- return f'{",".join(self.topics)}:{self.call_name}'
-
- def get_schema(self) -> Dict[str, Channel]:
- channels = {}
-
- payloads = self.get_payloads()
-
- topics = chain(self.topics, {part.topic for part in self.partitions})
-
- for t in topics:
- handler_name = self.title_ or f"{t}:{self.call_name}"
-
- channels[handler_name] = Channel(
- description=self.description,
- subscribe=Operation(
- message=Message(
- title=f"{handler_name}:Message",
- payload=resolve_payloads(payloads),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- kafka=kafka.ChannelBinding(topic=t),
- ),
- )
-
- return channels
-
-
-class AsyncAPIDefaultSubscriber(
- DefaultSubscriber,
- AsyncAPISubscriber["ConfluentMsg"],
-):
- pass
-
-
-class AsyncAPIBatchSubscriber(
- BatchSubscriber,
- AsyncAPISubscriber[Tuple["ConfluentMsg", ...]],
-):
- pass
-
-
-class AsyncAPIConcurrentDefaultSubscriber(
- ConcurrentDefaultSubscriber,
- AsyncAPISubscriber["ConfluentMsg"],
-):
- pass
diff --git a/faststream/confluent/subscriber/factory.py b/faststream/confluent/subscriber/factory.py
index ae6b907c28..a5ee415d67 100644
--- a/faststream/confluent/subscriber/factory.py
+++ b/faststream/confluent/subscriber/factory.py
@@ -1,29 +1,30 @@
+import warnings
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
- Iterable,
Literal,
Optional,
- Sequence,
- Tuple,
Union,
cast,
overload,
)
-from faststream.confluent.subscriber.asyncapi import (
- AsyncAPIBatchSubscriber,
- AsyncAPIConcurrentDefaultSubscriber,
- AsyncAPIDefaultSubscriber,
+from faststream._internal.constants import EMPTY
+from faststream.confluent.subscriber.specified import (
+ SpecificationBatchSubscriber,
+ SpecificationConcurrentDefaultSubscriber,
+ SpecificationDefaultSubscriber,
)
from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from confluent_kafka import Message as ConfluentMsg
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import BrokerMiddleware
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
from faststream.confluent.schemas import TopicPartition
- from faststream.types import AnyDict
@overload
@@ -36,19 +37,19 @@ def create_subscriber(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
no_ack: bool,
max_workers: int,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
- # AsyncAPI args
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
-) -> "AsyncAPIBatchSubscriber": ...
+) -> "SpecificationBatchSubscriber": ...
@overload
@@ -61,21 +62,21 @@ def create_subscriber(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
no_ack: bool,
max_workers: int,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[ConfluentMsg]"],
- # AsyncAPI args
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]: ...
@@ -89,25 +90,25 @@ def create_subscriber(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
no_ack: bool,
max_workers: int,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Union[
- Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
+ Sequence["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
Sequence["BrokerMiddleware[ConfluentMsg]"],
],
- # AsyncAPI args
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]: ...
@@ -120,89 +121,158 @@ def create_subscriber(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
no_ack: bool,
max_workers: int,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Union[
- Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
+ Sequence["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
Sequence["BrokerMiddleware[ConfluentMsg]"],
],
- # AsyncAPI args
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]:
- if is_manual and max_workers > 1:
- raise SetupError("Max workers not work with manual commit mode.")
+ _validate_input_for_misconfigure(
+ *topics,
+ partitions=partitions,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
+ auto_commit=auto_commit,
+ group_id=group_id,
+ max_workers=max_workers,
+ )
+
+ if auto_commit is not EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST if auto_commit else AckPolicy.REJECT_ON_ERROR
+
+ if no_ack is not EMPTY:
+ ack_policy = AckPolicy.DO_NOTHING if no_ack else EMPTY
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST
+
+ if ack_policy is AckPolicy.ACK_FIRST:
+ connection_data["enable_auto_commit"] = True
+ ack_policy = AckPolicy.DO_NOTHING
if batch:
- return AsyncAPIBatchSubscriber(
+ return SpecificationBatchSubscriber(
*topics,
partitions=partitions,
polling_interval=polling_interval,
max_records=max_records,
group_id=group_id,
connection_data=connection_data,
- is_manual=is_manual,
- no_ack=no_ack,
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=cast(
+ "Sequence[BrokerMiddleware[tuple[ConfluentMsg, ...]]]",
+ broker_middlewares,
+ ),
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ if max_workers > 1:
+ return SpecificationConcurrentDefaultSubscriber(
+ *topics,
+ partitions=partitions,
+ polling_interval=polling_interval,
+ group_id=group_id,
+ connection_data=connection_data,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=cast(
- Sequence["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"],
+ "Sequence[BrokerMiddleware[ConfluentMsg]]",
broker_middlewares,
),
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
+ # concurrent arg
+ max_workers=max_workers,
)
- else:
- if max_workers > 1:
- return AsyncAPIConcurrentDefaultSubscriber(
- *topics,
- max_workers=max_workers,
- partitions=partitions,
- polling_interval=polling_interval,
- group_id=group_id,
- connection_data=connection_data,
- is_manual=is_manual,
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=cast(
- Sequence["BrokerMiddleware[ConfluentMsg]"],
- broker_middlewares,
- ),
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
- else:
- return AsyncAPIDefaultSubscriber(
- *topics,
- partitions=partitions,
- polling_interval=polling_interval,
- group_id=group_id,
- connection_data=connection_data,
- is_manual=is_manual,
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=cast(
- Sequence["BrokerMiddleware[ConfluentMsg]"],
- broker_middlewares,
- ),
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
+
+ return SpecificationDefaultSubscriber(
+ *topics,
+ partitions=partitions,
+ polling_interval=polling_interval,
+ group_id=group_id,
+ connection_data=connection_data,
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=cast(
+ "Sequence[BrokerMiddleware[ConfluentMsg]]",
+ broker_middlewares,
+ ),
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+
+def _validate_input_for_misconfigure(
+ *topics: str,
+ partitions: Sequence["TopicPartition"],
+ ack_policy: "AckPolicy",
+ auto_commit: bool,
+ no_ack: bool,
+ group_id: Optional[str],
+ max_workers: int,
+) -> None:
+ if auto_commit is not EMPTY:
+ warnings.warn(
+ "`auto_commit` option was deprecated in prior to `ack_policy=AckPolicy.ACK_FIRST`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use deprecated `auto_commit` and `ack_policy` simultaneously. Please, use `ack_policy` only."
+ raise SetupError(msg)
+
+ ack_policy = AckPolicy.ACK_FIRST if auto_commit else AckPolicy.REJECT_ON_ERROR
+
+ if no_ack is not EMPTY:
+ warnings.warn(
+ "`no_ack` option was deprecated in prior to `ack_policy=AckPolicy.DO_NOTHING`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use deprecated `no_ack` and `ack_policy` simultaneously. Please, use `ack_policy` only."
+ raise SetupError(msg)
+
+ ack_policy = AckPolicy.DO_NOTHING if no_ack else EMPTY
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST
+
+ if ack_policy is not AckPolicy.ACK_FIRST and max_workers > 1:
+ msg = "Max workers do not work with manual commit mode."
+ raise SetupError(msg)
+
+ if not group_id and ack_policy is not AckPolicy.ACK_FIRST:
+ msg = "You must use `group_id` with manual commit mode."
+ raise SetupError(msg)
+
+ if not topics and not partitions:
+ msg = "You should provide either `topics` or `partitions`."
+ raise SetupError(msg)
+ if topics and partitions:
+ msg = "You can't provide both `topics` and `partitions`."
+ raise SetupError(msg)
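
Note: `create_subscriber` and `_validate_input_for_misconfigure` fold the deprecated `auto_commit` / `no_ack` flags into a single `ack_policy`. A standalone sketch of that resolution order (the `EMPTY` object here stands in for the `faststream._internal.constants.EMPTY` sentinel):

```python
import enum

EMPTY = object()  # stand-in sentinel meaning "not provided by the user"


class AckPolicy(enum.Enum):
    ACK_FIRST = "ack_first"
    REJECT_ON_ERROR = "reject_on_error"
    DO_NOTHING = "do_nothing"


def resolve_ack_policy(ack_policy=EMPTY, auto_commit=EMPTY, no_ack=EMPTY):
    # Deprecated flags are folded into ack_policy in the same order as the factory above.
    if auto_commit is not EMPTY:
        ack_policy = AckPolicy.ACK_FIRST if auto_commit else AckPolicy.REJECT_ON_ERROR
    if no_ack is not EMPTY:
        ack_policy = AckPolicy.DO_NOTHING if no_ack else EMPTY
    if ack_policy is EMPTY:
        ack_policy = AckPolicy.ACK_FIRST  # default keeps the old auto-commit behavior
    return ack_policy


assert resolve_ack_policy() is AckPolicy.ACK_FIRST
assert resolve_ack_policy(auto_commit=False) is AckPolicy.REJECT_ON_ERROR
assert resolve_ack_policy(no_ack=True) is AckPolicy.DO_NOTHING
```

The factory then maps a final `ACK_FIRST` onto `enable_auto_commit=True` plus `AckPolicy.DO_NOTHING`, since in that mode the client library already acknowledges on fetch.
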
diff --git a/faststream/confluent/subscriber/specified.py b/faststream/confluent/subscriber/specified.py
new file mode 100644
index 0000000000..ae74ad845c
--- /dev/null
+++ b/faststream/confluent/subscriber/specified.py
@@ -0,0 +1,72 @@
+from collections.abc import Iterable
+from itertools import chain
+from typing import TYPE_CHECKING
+
+from faststream._internal.subscriber.specified import (
+ SpecificationSubscriber as SpecificationSubscriberMixin,
+)
+from faststream.confluent.subscriber.usecase import (
+ BatchSubscriber,
+ ConcurrentDefaultSubscriber,
+ DefaultSubscriber,
+)
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, SubscriberSpec
+from faststream.specification.schema.bindings import ChannelBinding, kafka
+
+if TYPE_CHECKING:
+ from faststream.confluent.schemas import TopicPartition
+
+
+class SpecificationSubscriber(SpecificationSubscriberMixin):
+ """A class to handle logic and async API operations."""
+
+ topics: Iterable[str]
+ partitions: Iterable["TopicPartition"] # TODO: support partitions
+
+ def get_default_name(self) -> str:
+ return f"{','.join(self.topics)}:{self.call_name}"
+
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ channels = {}
+
+ payloads = self.get_payloads()
+ for t in chain(self.topics, {p.topic for p in self.partitions}):
+ handler_name = self.title_ or f"{t}:{self.call_name}"
+
+ channels[handler_name] = SubscriberSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{handler_name}:Message",
+ payload=resolve_payloads(payloads),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ kafka=kafka.ChannelBinding(topic=t, partitions=None, replicas=None),
+ ),
+ )
+
+ return channels
+
+
+class SpecificationDefaultSubscriber(
+ SpecificationSubscriber,
+ DefaultSubscriber,
+):
+ pass
+
+
+class SpecificationBatchSubscriber(
+ SpecificationSubscriber,
+ BatchSubscriber,
+):
+ pass
+
+
+class SpecificationConcurrentDefaultSubscriber(
+ SpecificationSubscriber,
+ ConcurrentDefaultSubscriber,
+):
+ pass
diff --git a/faststream/confluent/subscriber/usecase.py b/faststream/confluent/subscriber/usecase.py
index b435f35433..a2a0392978 100644
--- a/faststream/confluent/subscriber/usecase.py
+++ b/faststream/confluent/subscriber/usecase.py
@@ -1,40 +1,38 @@
from abc import abstractmethod
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
Any,
Callable,
- Dict,
- Iterable,
- List,
Optional,
- Sequence,
- Tuple,
)
import anyio
from confluent_kafka import KafkaException, Message
from typing_extensions import override
-from faststream.broker.publisher.fake import FakePublisher
-from faststream.broker.subscriber.mixins import ConcurrentMixin, TasksMixin
-from faststream.broker.subscriber.usecase import SubscriberUsecase
-from faststream.broker.types import MsgType
-from faststream.broker.utils import process_msg
+from faststream._internal.subscriber.mixins import ConcurrentMixin, TasksMixin
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream._internal.subscriber.utils import process_msg
+from faststream._internal.types import MsgType
from faststream.confluent.parser import AsyncConfluentParser
+from faststream.confluent.publisher.fake import KafkaFakePublisher
from faststream.confluent.schemas import TopicPartition
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import ProducerProto
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
+ from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
CustomCallable,
)
from faststream.confluent.client import AsyncConfluentConsumer
- from faststream.types import AnyDict, Decorator, LoggerProto
+ from faststream.message import StreamMessage
class LogicSubscriber(TasksMixin, SubscriberUsecase[MsgType]):
@@ -45,6 +43,7 @@ class LogicSubscriber(TasksMixin, SubscriberUsecase[MsgType]):
builder: Optional[Callable[..., "AsyncConfluentConsumer"]]
consumer: Optional["AsyncConfluentConsumer"]
+ parser: AsyncConfluentParser
client_id: Optional[str]
@@ -56,33 +55,22 @@ def __init__(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
# Subscriber args
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.__connection_data = connection_data
@@ -90,7 +78,6 @@ def __init__(
self.group_id = group_id
self.topics = topics
self.partitions = partitions
- self.is_manual = is_manual
self.consumer = None
self.polling_interval = polling_interval
@@ -100,39 +87,27 @@ def __init__(
self.builder = None
@override
- def setup( # type: ignore[override]
+ def _setup( # type: ignore[override]
self,
*,
client_id: Optional[str],
builder: Callable[..., "AsyncConfluentConsumer"],
- # basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
+ # basic args
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
+ state: "BrokerState",
) -> None:
self.client_id = client_id
self.builder = builder
- super().setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
+ super()._setup(
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
- apply_types=apply_types,
- is_validate=is_validate,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
+ state=state,
)
@override
@@ -147,6 +122,7 @@ async def start(self) -> None:
client_id=self.client_id,
**self.__connection_data,
)
+ self.parser._setup(consumer)
await consumer.start()
await super().start()
@@ -174,9 +150,13 @@ async def get_one(
raw_message = await self.consumer.getone(timeout=timeout)
+ context = self._state.get().di_state.context
+
return await process_msg(
msg=raw_message, # type: ignore[arg-type]
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -184,16 +164,11 @@ async def get_one(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence[FakePublisher]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "topic": message.reply_to,
- },
+ KafkaFakePublisher(
+ self._state.get().producer,
+ topic=message.reply_to,
),
)
@@ -202,7 +177,7 @@ async def consume_one(self, msg: MsgType) -> None:
@abstractmethod
async def get_msg(self) -> Optional[MsgType]:
- raise NotImplementedError()
+ raise NotImplementedError
async def _consume(self) -> None:
assert self.consumer, "You should start subscriber at first." # nosec B101
@@ -224,28 +199,17 @@ async def _consume(self) -> None:
await self.consume_one(msg)
@property
- def topic_names(self) -> List[str]:
+ def topic_names(self) -> list[str]:
if self.topics:
return list(self.topics)
- else:
- return [f"{p.topic}-{p.partition}" for p in self.partitions]
-
- @staticmethod
- def get_routing_hash(topics: Iterable[str], group_id: Optional[str] = None) -> int:
- return hash("".join((*topics, group_id or "")))
-
- def __hash__(self) -> int:
- return self.get_routing_hash(
- topics=self.topic_names,
- group_id=self.group_id,
- )
+ return [f"{p.topic}-{p.partition}" for p in self.partitions]
@staticmethod
def build_log_context(
message: Optional["StreamMessage[Any]"],
topic: str,
group_id: Optional[str] = None,
- ) -> Dict[str, str]:
+ ) -> dict[str, str]:
return {
"topic": topic,
"group_id": group_id or "",
@@ -253,11 +217,11 @@ def build_log_context(
}
def add_prefix(self, prefix: str) -> None:
- self.topics = tuple("".join((prefix, t)) for t in self.topics)
+ self.topics = tuple(f"{prefix}{t}" for t in self.topics)
self.partitions = [
TopicPartition(
- topic="".join((prefix, p.topic)),
+ topic=f"{prefix}{p.topic}",
partition=p.partition,
offset=p.offset,
metadata=p.metadata,
@@ -276,38 +240,30 @@ def __init__(
polling_interval: float,
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[Message]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
+ self.parser = AsyncConfluentParser(
+ is_manual=ack_policy is not AckPolicy.ACK_FIRST
+ )
+
super().__init__(
*topics,
partitions=partitions,
polling_interval=polling_interval,
group_id=group_id,
connection_data=connection_data,
- is_manual=is_manual,
# subscriber args
- default_parser=AsyncConfluentParser.parse_message,
- default_decoder=AsyncConfluentParser.decode_message,
+ default_parser=self.parser.parse_message,
+ default_decoder=self.parser.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
async def get_msg(self) -> Optional["Message"]:
@@ -317,7 +273,7 @@ async def get_msg(self) -> Optional["Message"]:
def get_log_context(
self,
message: Optional["StreamMessage[Message]"],
- ) -> Dict[str, str]:
+ ) -> dict[str, str]:
if message is None:
topic = ",".join(self.topic_names)
else:
@@ -330,7 +286,16 @@ def get_log_context(
)
-class BatchSubscriber(LogicSubscriber[Tuple[Message, ...]]):
+class ConcurrentDefaultSubscriber(ConcurrentMixin["Message"], DefaultSubscriber):
+ async def start(self) -> None:
+ await super().start()
+ self.start_consume_task()
+
+ async def consume_one(self, msg: "Message") -> None:
+ await self._put_msg(msg)
+
+
+class BatchSubscriber(LogicSubscriber[tuple[Message, ...]]):
def __init__(
self,
*topics: str,
@@ -340,60 +305,48 @@ def __init__(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[Tuple[Message, ...]]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[tuple[Message, ...]]"],
) -> None:
self.max_records = max_records
+ self.parser = AsyncConfluentParser(
+ is_manual=ack_policy is not AckPolicy.ACK_FIRST
+ )
+
super().__init__(
*topics,
partitions=partitions,
polling_interval=polling_interval,
group_id=group_id,
connection_data=connection_data,
- is_manual=is_manual,
# subscriber args
- default_parser=AsyncConfluentParser.parse_message_batch,
- default_decoder=AsyncConfluentParser.decode_message_batch,
+ default_parser=self.parser.parse_message_batch,
+ default_decoder=self.parser.decode_message_batch,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
- async def get_msg(self) -> Optional[Tuple["Message", ...]]:
+ async def get_msg(self) -> Optional[tuple["Message", ...]]:
assert self.consumer, "You should setup subscriber at first." # nosec B101
-
- messages = await self.consumer.getmany(
- timeout=self.polling_interval,
- max_records=self.max_records,
+ return (
+ await self.consumer.getmany(
+ timeout=self.polling_interval,
+ max_records=self.max_records,
+ )
+ or None
)
- if not messages: # TODO: why we are sleeping here?
- await anyio.sleep(self.polling_interval)
- return None
-
- return messages
-
def get_log_context(
self,
- message: Optional["StreamMessage[Tuple[Message, ...]]"],
- ) -> Dict[str, str]:
+ message: Optional["StreamMessage[tuple[Message, ...]]"],
+ ) -> dict[str, str]:
if message is None:
topic = ",".join(self.topic_names)
else:
@@ -404,54 +357,3 @@ def get_log_context(
topic=topic,
group_id=self.group_id,
)
-
-
-class ConcurrentDefaultSubscriber(ConcurrentMixin[Message], DefaultSubscriber):
- def __init__(
- self,
- *topics: str,
- # Kafka information
- partitions: Sequence["TopicPartition"],
- polling_interval: float,
- group_id: Optional[str],
- connection_data: "AnyDict",
- is_manual: bool,
- # Subscriber args
- max_workers: int,
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[Message]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- *topics,
- partitions=partitions,
- polling_interval=polling_interval,
- group_id=group_id,
- connection_data=connection_data,
- is_manual=is_manual,
- # subscriber args
- max_workers=max_workers,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- async def start(self) -> None:
- await super().start()
- self.start_consume_task()
-
- async def consume_one(self, msg: "Message") -> None:
- await self._put_msg(msg)
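The constructors above collapse the old `is_manual` / `no_ack` / `retry` switches into a single `ack_policy`, and the parser's manual-commit mode is now derived from it. A minimal sketch of that mapping (the `build_parser` helper name is illustrative, not part of this patch):

    from faststream.confluent.parser import AsyncConfluentParser
    from faststream.middlewares import AckPolicy

    def build_parser(ack_policy: AckPolicy) -> AsyncConfluentParser:
        # ACK_FIRST commits offsets eagerly on receive, so the consumer may
        # keep auto-committing; every other policy requires manual commits.
        return AsyncConfluentParser(is_manual=ack_policy is not AckPolicy.ACK_FIRST)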
diff --git a/faststream/confluent/testing.py b/faststream/confluent/testing.py
index 10d0fd5b20..5f4d8711a8 100644
--- a/faststream/confluent/testing.py
+++ b/faststream/confluent/testing.py
@@ -1,26 +1,34 @@
-from datetime import datetime
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple
+from collections.abc import Generator, Iterable, Iterator
+from contextlib import contextmanager
+from datetime import datetime, timezone
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+)
from unittest.mock import AsyncMock, MagicMock
import anyio
from typing_extensions import override
-from faststream.broker.message import encode_message, gen_cor_id
-from faststream.broker.utils import resolve_custom_func
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream._internal.testing.broker import TestBroker
from faststream.confluent.broker import KafkaBroker
from faststream.confluent.parser import AsyncConfluentParser
-from faststream.confluent.publisher.asyncapi import AsyncAPIBatchPublisher
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
+from faststream.confluent.publisher.specified import SpecificationBatchPublisher
from faststream.confluent.schemas import TopicPartition
-from faststream.confluent.subscriber.asyncapi import AsyncAPIBatchSubscriber
+from faststream.confluent.subscriber.usecase import BatchSubscriber
from faststream.exceptions import SubscriberNotFound
-from faststream.testing.broker import TestBroker
-from faststream.utils.functions import timeout_scope
+from faststream.message import encode_message, gen_cor_id
if TYPE_CHECKING:
- from faststream.confluent.publisher.asyncapi import AsyncAPIPublisher
+ from faststream._internal.basic_types import SendableMessage
+ from faststream.confluent.publisher.specified import SpecificationPublisher
+ from faststream.confluent.response import KafkaPublishCommand
from faststream.confluent.subscriber.usecase import LogicSubscriber
- from faststream.types import SendableMessage
+
__all__ = ("TestKafkaBroker",)
@@ -28,24 +36,32 @@
class TestKafkaBroker(TestBroker[KafkaBroker]):
"""A class to test Kafka brokers."""
+    @contextmanager
+    def _patch_producer(self, broker: KafkaBroker) -> Iterator[None]:
+        old_producer = broker._state.get().producer
+        broker._state.patch_value(producer=FakeProducer(broker))
+        try:
+            yield
+        finally:
+            # Restore the real producer even if the test body raises.
+            broker._state.patch_value(producer=old_producer)
+
@staticmethod
async def _fake_connect( # type: ignore[override]
broker: KafkaBroker,
*args: Any,
**kwargs: Any,
) -> Callable[..., AsyncMock]:
- broker._producer = FakeProducer(broker)
return _fake_connection
@staticmethod
def create_publisher_fake_subscriber(
broker: KafkaBroker,
- publisher: "AsyncAPIPublisher[Any]",
- ) -> Tuple["LogicSubscriber[Any]", bool]:
+ publisher: "SpecificationPublisher[Any, Any]",
+ ) -> tuple["LogicSubscriber[Any]", bool]:
sub: Optional[LogicSubscriber[Any]] = None
- for handler in broker._subscribers.values():
+ for handler in broker._subscribers:
if _is_handler_matches(
- handler, topic=publisher.topic, partition=publisher.partition
+ handler,
+ topic=publisher.topic,
+ partition=publisher.partition,
):
sub = handler
break
@@ -55,17 +71,18 @@ def create_publisher_fake_subscriber(
if publisher.partition:
tp = TopicPartition(
- topic=publisher.topic, partition=publisher.partition
+ topic=publisher.topic,
+ partition=publisher.partition,
)
sub = broker.subscriber(
partitions=[tp],
- batch=isinstance(publisher, AsyncAPIBatchPublisher),
+ batch=isinstance(publisher, SpecificationBatchPublisher),
auto_offset_reset="earliest",
)
else:
sub = broker.subscriber(
publisher.topic,
- batch=isinstance(publisher, AsyncAPIBatchPublisher),
+ batch=isinstance(publisher, SpecificationBatchPublisher),
auto_offset_reset="earliest",
)
@@ -84,131 +101,104 @@ class FakeProducer(AsyncConfluentFastProducer):
def __init__(self, broker: KafkaBroker) -> None:
self.broker = broker
- default = AsyncConfluentParser
+ default = AsyncConfluentParser()
self._parser = resolve_custom_func(broker._parser, default.parse_message)
self._decoder = resolve_custom_func(broker._decoder, default.decode_message)
+ def __bool__(self) -> bool:
+ return True
+
+    async def ping(self, timeout: float) -> bool:
+ return True
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- no_confirm: bool = False,
- reply_to: str = "",
- rpc: bool = False,
- rpc_timeout: Optional[float] = None,
- raise_timeout: bool = False,
- ) -> Optional[Any]:
+ cmd: "KafkaPublishCommand",
+ ) -> None:
"""Publish a message to the Kafka broker."""
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id or gen_cor_id(),
- reply_to=reply_to,
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
- return_value = None
-
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_matches(handler, topic, partition):
- msg_to_send = (
- [incoming]
- if isinstance(handler, AsyncAPIBatchSubscriber)
- else incoming
- )
-
- with timeout_scope(rpc_timeout, raise_timeout):
- response_msg = await self._execute_handler(
- msg_to_send, topic, handler
- )
- if rpc:
- return_value = return_value or await self._decoder(
- await self._parser(response_msg)
- )
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.partition,
+ ):
+ msg_to_send = (
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
+ )
- return return_value
+ await self._execute_handler(msg_to_send, cmd.destination, handler)
async def publish_batch(
self,
- *msgs: "SendableMessage",
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- correlation_id: Optional[str] = None,
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a batch of messages to the Kafka broker."""
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_matches(handler, topic, partition):
- messages = (
- build_message(
- message=message,
- topic=topic,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id or gen_cor_id(),
- reply_to=reply_to,
- )
- for message in msgs
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.partition,
+ ):
+ messages = (
+ build_message(
+ message=message,
+ topic=cmd.destination,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
+ for message in cmd.batch_bodies
+ )
- if isinstance(handler, AsyncAPIBatchSubscriber):
- await self._execute_handler(list(messages), topic, handler)
+ if isinstance(handler, BatchSubscriber):
+ await self._execute_handler(list(messages), cmd.destination, handler)
- else:
- for m in messages:
- await self._execute_handler(m, topic, handler)
-
- return None
+ else:
+ for m in messages:
+ await self._execute_handler(m, cmd.destination, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- timeout: Optional[float] = 0.5,
+ cmd: "KafkaPublishCommand",
) -> "MockConfluentMessage":
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id or gen_cor_id(),
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_matches(handler, topic, partition):
- msg_to_send = (
- [incoming]
- if isinstance(handler, AsyncAPIBatchSubscriber)
- else incoming
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.partition,
+ ):
+ msg_to_send = (
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
+ )
+
+ with anyio.fail_after(cmd.timeout):
+ return await self._execute_handler(
+ msg_to_send, cmd.destination, handler
)
- with anyio.fail_after(timeout):
- return await self._execute_handler(msg_to_send, topic, handler)
-
raise SubscriberNotFound
async def _execute_handler(
@@ -233,13 +223,13 @@ def __init__(
raw_msg: bytes,
topic: str,
key: bytes,
- headers: List[Tuple[str, bytes]],
+ headers: list[tuple[str, bytes]],
offset: int,
partition: int,
timestamp_type: int,
timestamp_ms: int,
error: Optional[str] = None,
- ):
+ ) -> None:
self._raw_msg = raw_msg
self._topic = topic
self._key = key
@@ -255,7 +245,7 @@ def len(self) -> int:
def error(self) -> Optional[str]:
return self._error
- def headers(self) -> List[Tuple[str, bytes]]:
+ def headers(self) -> list[tuple[str, bytes]]:
return self._headers
def key(self) -> bytes:
@@ -267,7 +257,7 @@ def offset(self) -> int:
def partition(self) -> int:
return self._partition
- def timestamp(self) -> Tuple[int, int]:
+ def timestamp(self) -> tuple[int, int]:
return self._timestamp
def topic(self) -> str:
@@ -281,11 +271,11 @@ def build_message(
message: "SendableMessage",
topic: str,
*,
- correlation_id: str,
+ correlation_id: Optional[str] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
key: Optional[bytes] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
reply_to: str = "",
) -> MockConfluentMessage:
"""Build a mock confluent_kafka.Message for a sendable message."""
@@ -293,7 +283,7 @@ def build_message(
k = key or b""
headers = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
+ "correlation_id": correlation_id or gen_cor_id(),
"reply_to": reply_to,
**(headers or {}),
}
@@ -307,7 +297,7 @@ def build_message(
offset=0,
partition=partition or 0,
timestamp_type=0 + 1,
- timestamp_ms=timestamp_ms or int(datetime.now().timestamp()),
+            timestamp_ms=timestamp_ms or int(datetime.now(timezone.utc).timestamp() * 1000),
)
@@ -318,6 +308,22 @@ def _fake_connection(*args: Any, **kwargs: Any) -> AsyncMock:
return mock
+def _find_handler(
+ subscribers: Iterable["LogicSubscriber[Any]"],
+ topic: str,
+ partition: Optional[int],
+) -> Generator["LogicSubscriber[Any]", None, None]:
+ published_groups = set()
+ for handler in subscribers: # pragma: no branch
+ if _is_handler_matches(handler, topic, partition):
+ if handler.group_id:
+ if handler.group_id in published_groups:
+ continue
+ else:
+ published_groups.add(handler.group_id)
+ yield handler
+
+
def _is_handler_matches(
handler: "LogicSubscriber[Any]",
topic: str,
@@ -328,5 +334,5 @@ def _is_handler_matches(
p.topic == topic and (partition is None or p.partition == partition)
for p in handler.partitions
)
- or topic in handler.topics
+ or topic in handler.topics,
)
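With the producer patched, `TestKafkaBroker` delivers published messages straight to matching in-memory subscribers, deduplicating by `group_id` exactly as `_find_handler` does. A usage sketch, assuming a pytest suite with an async test runner:

    import pytest

    from faststream.confluent import KafkaBroker, TestKafkaBroker

    broker = KafkaBroker()

    @broker.subscriber("test-topic", group_id="demo")
    async def handler(msg: str) -> None: ...

    @pytest.mark.asyncio
    async def test_publish() -> None:
        async with TestKafkaBroker(broker) as br:
            # FakeProducer is swapped in for the duration of the context, so
            # publish() invokes the matching subscriber directly in memory.
            await br.publish("hi", topic="test-topic")
            handler.mock.assert_called_once_with("hi")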
diff --git a/faststream/constants.py b/faststream/constants.py
deleted file mode 100644
index d3f7c3e25d..0000000000
--- a/faststream/constants.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from enum import Enum
-
-ContentType = str
-
-
-class ContentTypes(str, Enum):
- """A class to represent content types."""
-
- text = "text/plain"
- json = "application/json"
diff --git a/faststream/exceptions.py b/faststream/exceptions.py
index 231b3eb778..32557dd42c 100644
--- a/faststream/exceptions.py
+++ b/faststream/exceptions.py
@@ -1,4 +1,6 @@
-from typing import Any, Iterable
+from collections.abc import Iterable
+from pprint import pformat
+from typing import Any
class FastStreamException(Exception): # noqa: N818
@@ -45,7 +47,7 @@ class AckMessage(HandlerException):
extra_options (Any): Additional parameters that will be passed to `message.ack(**extra_options)` method.
"""
- def __init__(self, **extra_options: Any):
+ def __init__(self, **extra_options: Any) -> None:
self.extra_options = extra_options
super().__init__()
@@ -64,7 +66,7 @@ class NackMessage(HandlerException):
kwargs (Any): Additional parameters that will be passed to `message.nack(**extra_options)` method.
"""
- def __init__(self, **kwargs: Any):
+ def __init__(self, **kwargs: Any) -> None:
self.extra_options = kwargs
super().__init__()
@@ -83,7 +85,7 @@ class RejectMessage(HandlerException):
kwargs (Any): Additional parameters that will be passed to `message.reject(**extra_options)` method.
"""
- def __init__(self, **kwargs: Any):
+ def __init__(self, **kwargs: Any) -> None:
self.extra_options = kwargs
super().__init__()
@@ -95,14 +97,25 @@ class SetupError(FastStreamException, ValueError):
"""Exception to raise at wrong method usage."""
-class ValidationError(FastStreamException, ValueError):
+class StartupValidationError(FastStreamException, ValueError):
"""Exception to raise at startup hook validation error."""
- def __init__(self, fields: Iterable[str] = ()) -> None:
- self.fields = fields
+ def __init__(
+ self,
+ missed_fields: Iterable[str] = (),
+ invalid_fields: Iterable[str] = (),
+ ) -> None:
+ self.missed_fields = missed_fields
+ self.invalid_fields = invalid_fields
+
+ def __str__(self) -> str:
+ return (
+ f"\n Incorrect options `{' / '.join(f'--{i}' for i in (*self.missed_fields, *self.invalid_fields))}`"
+ "\n You registered extra options in your application `lifespan/on_startup` hook, but set them wrong in CLI."
+ )
-class OperationForbiddenError(FastStreamException, NotImplementedError):
+class FeatureNotSupportedException(FastStreamException, NotImplementedError): # noqa: N818
"""Raises at planned NotImplemented operation call."""
@@ -110,9 +123,29 @@ class SubscriberNotFound(FastStreamException):
"""Raises as a service message or in tests."""
+class IncorrectState(FastStreamException):
+ """Raises in FSM at wrong state calling."""
+
+
+class ContextError(FastStreamException, KeyError):
+ """Raises if context exception occurred."""
+
+ def __init__(self, context: Any, field: str) -> None:
+ self.context = context
+ self.field = field
+
+ def __str__(self) -> str:
+ return "".join(
+ (
+ f"\n Key `{self.field}` not found in the context\n ",
+ pformat(self.context),
+ ),
+ )
+
+
WRONG_PUBLISH_ARGS = SetupError(
"You should use `reply_to` to send response to long-living queue "
- "and `rpc` to get response in sync mode."
+ "and `rpc` to get response in sync mode.",
)
@@ -128,3 +161,5 @@ class SubscriberNotFound(FastStreamException):
To use restart feature, please install dependencies:\n
pip install watchfiles
"""
+
+SCHEMA_NOT_SUPPORTED = "`{schema_filename}` not supported. Make sure that your schema is valid and schema version supported by FastStream"
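`StartupValidationError` now separates options that were never passed from options that failed casting, so error reporting can name each group. A consumption sketch (the hook-runner call is an illustrative placeholder):

    from faststream.exceptions import StartupValidationError

    try:
        ...  # run the application's lifespan/on_startup hooks with CLI extras
    except StartupValidationError as e:
        # The two field groups arrive separately, letting a CLI distinguish
        # "option missing" from "option has an invalid value".
        print("missing:", ", ".join(e.missed_fields))
        print("invalid:", ", ".join(e.invalid_fields))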
diff --git a/faststream/kafka/__init__.py b/faststream/kafka/__init__.py
index 12e349b08b..c52fa57508 100644
--- a/faststream/kafka/__init__.py
+++ b/faststream/kafka/__init__.py
@@ -1,11 +1,11 @@
from aiokafka import TopicPartition
+from faststream._internal.testing.app import TestApp
from faststream.kafka.annotations import KafkaMessage
from faststream.kafka.broker import KafkaBroker
from faststream.kafka.response import KafkaResponse
from faststream.kafka.router import KafkaPublisher, KafkaRoute, KafkaRouter
from faststream.kafka.testing import TestKafkaBroker
-from faststream.testing.app import TestApp
__all__ = (
"KafkaBroker",
diff --git a/faststream/kafka/annotations.py b/faststream/kafka/annotations.py
index fc735bd439..1f5c70d524 100644
--- a/faststream/kafka/annotations.py
+++ b/faststream/kafka/annotations.py
@@ -1,11 +1,13 @@
+from typing import Annotated
+
from aiokafka import AIOKafkaConsumer
-from typing_extensions import Annotated
-from faststream.annotations import ContextRepo, Logger, NoCast
+from faststream._internal.context import Context
+from faststream.annotations import ContextRepo, Logger
from faststream.kafka.broker import KafkaBroker as KB
from faststream.kafka.message import KafkaMessage as KM
from faststream.kafka.publisher.producer import AioKafkaFastProducer
-from faststream.utils.context import Context
+from faststream.params import NoCast
__all__ = (
"ContextRepo",
diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py
index 86123612f5..9a04650711 100644
--- a/faststream/kafka/broker/broker.py
+++ b/faststream/kafka/broker/broker.py
@@ -1,62 +1,66 @@
import logging
+from collections.abc import Iterable, Sequence
from functools import partial
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
Literal,
Optional,
- Sequence,
- Tuple,
- Type,
TypeVar,
Union,
+ overload,
)
import aiokafka
import anyio
from aiokafka.partitioner import DefaultPartitioner
from aiokafka.producer.producer import _missing
-from typing_extensions import Annotated, Doc, override
+from typing_extensions import Doc, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.message import gen_cor_id
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.constants import EMPTY
+from faststream._internal.utils.data import filter_by_dict
from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.kafka.broker.logging import KafkaLoggingBroker
-from faststream.kafka.broker.registrator import KafkaRegistrator
from faststream.kafka.publisher.producer import AioKafkaFastProducer
+from faststream.kafka.response import KafkaPublishCommand
from faststream.kafka.schemas.params import ConsumerConnectionParams
from faststream.kafka.security import parse_security
-from faststream.types import EMPTY
-from faststream.utils.data import filter_by_dict
+from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
+
+from .logging import make_kafka_logger_state
+from .registrator import KafkaRegistrator
Partition = TypeVar("Partition")
if TYPE_CHECKING:
- from asyncio import AbstractEventLoop
+ import asyncio
from types import TracebackType
from aiokafka import ConsumerRecord
from aiokafka.abc import AbstractTokenProvider
- from fast_depends.dependencies import Depends
+ from aiokafka.structs import RecordMetadata
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from typing_extensions import TypedDict, Unpack
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.security import BaseSecurity
- from faststream.types import (
+ from faststream._internal.basic_types import (
AnyDict,
- AsyncFunc,
Decorator,
LoggerProto,
SendableMessage,
)
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.kafka.message import KafkaMessage
+ from faststream.security import BaseSecurity
+ from faststream.specification.schema.extra import Tag, TagDict
class KafkaInitKwargs(TypedDict, total=False):
request_timeout_ms: Annotated[
@@ -75,7 +79,7 @@ class KafkaInitKwargs(TypedDict, total=False):
which we force a refresh of metadata even if we haven't seen any
partition leadership changes to proactively discover any new
brokers or partitions.
- """
+ """,
),
]
connections_max_idle_ms: Annotated[
@@ -85,7 +89,7 @@ class KafkaInitKwargs(TypedDict, total=False):
Close idle connections after the number
of milliseconds specified by this config. Specifying `None` will
disable idle checks.
- """
+ """,
),
]
sasl_kerberos_service_name: str
@@ -94,7 +98,7 @@ class KafkaInitKwargs(TypedDict, total=False):
Optional[AbstractTokenProvider],
Doc("OAuthBearer token provider instance."),
]
- loop: Optional[AbstractEventLoop]
+ loop: Optional[asyncio.AbstractEventLoop]
client_id: Annotated[
Optional[str],
Doc(
@@ -104,7 +108,7 @@ class KafkaInitKwargs(TypedDict, total=False):
server-side log entries that correspond to this client. Also
submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`
for logging with respect to consumer group administration.
- """
+ """,
),
]
# publisher args
@@ -136,7 +140,7 @@ class KafkaInitKwargs(TypedDict, total=False):
If unset, defaults to ``acks=1``. If `enable_idempotence` is
:data:`True` defaults to ``acks=all``.
- """
+ """,
),
]
key_serializer: Annotated[
@@ -155,7 +159,7 @@ class KafkaInitKwargs(TypedDict, total=False):
Compression is of full batches of data, so the efficacy of batching
will also impact the compression ratio (more batching means better
compression).
- """
+ """,
),
]
max_batch_size: Annotated[
@@ -164,12 +168,12 @@ class KafkaInitKwargs(TypedDict, total=False):
"""
Maximum size of buffered data per partition.
After this amount `send` coroutine will block until batch is drained.
- """
+ """,
),
]
partitioner: Annotated[
Callable[
- [bytes, List[Partition], List[Partition]],
+ [bytes, list[Partition], list[Partition]],
Partition,
],
Doc(
@@ -182,7 +186,7 @@ class KafkaInitKwargs(TypedDict, total=False):
messages with the same key are assigned to the same partition.
When a key is :data:`None`, the message is delivered to a random partition
(filtered to partitions with available leaders only, if possible).
- """
+ """,
),
]
max_request_size: Annotated[
@@ -194,7 +198,7 @@ class KafkaInitKwargs(TypedDict, total=False):
has its own cap on record size which may be different from this.
This setting will limit the number of record batches the producer
will send in a single request to avoid sending huge requests.
- """
+ """,
),
]
linger_ms: Annotated[
@@ -209,7 +213,7 @@ class KafkaInitKwargs(TypedDict, total=False):
This setting accomplishes this by adding a small amount of
artificial delay; that is, if first request is processed faster,
than `linger_ms`, producer will wait ``linger_ms - process_time``.
- """
+ """,
),
]
enable_idempotence: Annotated[
@@ -222,7 +226,7 @@ class KafkaInitKwargs(TypedDict, total=False):
etc., may write duplicates of the retried message in the stream.
            Note that enabling idempotence requires ``acks`` to be set to ``all``. If it is not
explicitly set by the user it will be chosen.
- """
+ """,
),
]
transactional_id: Optional[str]
@@ -231,10 +235,13 @@ class KafkaInitKwargs(TypedDict, total=False):
class KafkaBroker(
KafkaRegistrator,
- KafkaLoggingBroker,
+ BrokerUsecase[
+ Union[aiokafka.ConsumerRecord, tuple[aiokafka.ConsumerRecord, ...]],
+ Callable[..., aiokafka.AIOKafkaConsumer],
+ ],
):
- url: List[str]
- _producer: Optional["AioKafkaFastProducer"]
+ url: list[str]
+ _producer: "AioKafkaFastProducer"
def __init__(
self,
@@ -248,7 +255,7 @@ def __init__(
This does not have to be the full node list.
It just needs to have at least one broker that will respond to a
Metadata API Request. Default port is 9092.
- """
+ """,
),
] = "localhost",
*,
@@ -269,7 +276,7 @@ def __init__(
which we force a refresh of metadata even if we haven't seen any
partition leadership changes to proactively discover any new
brokers or partitions.
- """
+ """,
),
] = 5 * 60 * 1000,
connections_max_idle_ms: Annotated[
@@ -279,7 +286,7 @@ def __init__(
Close idle connections after the number
of milliseconds specified by this config. Specifying `None` will
disable idle checks.
- """
+ """,
),
] = 9 * 60 * 1000,
sasl_kerberos_service_name: str = "kafka",
@@ -288,7 +295,7 @@ def __init__(
Optional["AbstractTokenProvider"],
Doc("OAuthBearer token provider instance."),
] = None,
- loop: Optional["AbstractEventLoop"] = None,
+ loop: Optional["asyncio.AbstractEventLoop"] = None,
client_id: Annotated[
Optional[str],
Doc(
@@ -298,7 +305,7 @@ def __init__(
server-side log entries that correspond to this client. Also
submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`
for logging with respect to consumer group administration.
- """
+ """,
),
] = SERVICE_NAME,
# publisher args
@@ -330,7 +337,7 @@ def __init__(
If unset, defaults to ``acks=1``. If `enable_idempotence` is
:data:`True` defaults to ``acks=all``.
- """
+ """,
),
] = _missing,
key_serializer: Annotated[
@@ -349,7 +356,7 @@ def __init__(
Compression is of full batches of data, so the efficacy of batching
will also impact the compression ratio (more batching means better
compression).
- """
+ """,
),
] = None,
max_batch_size: Annotated[
@@ -358,12 +365,12 @@ def __init__(
"""
Maximum size of buffered data per partition.
After this amount `send` coroutine will block until batch is drained.
- """
+ """,
),
] = 16 * 1024,
partitioner: Annotated[
Callable[
- [bytes, List[Partition], List[Partition]],
+ [bytes, list[Partition], list[Partition]],
Partition,
],
Doc(
@@ -376,7 +383,7 @@ def __init__(
messages with the same key are assigned to the same partition.
When a key is :data:`None`, the message is delivered to a random partition
(filtered to partitions with available leaders only, if possible).
- """
+ """,
),
] = DefaultPartitioner(),
max_request_size: Annotated[
@@ -388,7 +395,7 @@ def __init__(
has its own cap on record size which may be different from this.
This setting will limit the number of record batches the producer
will send in a single request to avoid sending huge requests.
- """
+ """,
),
] = 1024 * 1024,
linger_ms: Annotated[
@@ -403,7 +410,7 @@ def __init__(
This setting accomplishes this by adding a small amount of
artificial delay; that is, if first request is processed faster,
than `linger_ms`, producer will wait ``linger_ms - process_time``.
- """
+ """,
),
] = 0,
enable_idempotence: Annotated[
@@ -416,7 +423,7 @@ def __init__(
etc., may write duplicates of the retried message in the stream.
            Note that enabling idempotence requires ``acks`` to be set to ``all``. If it is not
explicitly set by the user it will be chosen.
- """
+ """,
),
] = False,
transactional_id: Optional[str] = None,
@@ -425,7 +432,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -437,26 +444,27 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
Sequence[
- Union[
- "BrokerMiddleware[ConsumerRecord]",
- "BrokerMiddleware[Tuple[ConsumerRecord, ...]]",
- ]
+ "BrokerMiddleware[Union[ConsumerRecord, tuple[ConsumerRecord, ...]]]"
],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[ConsumerRecord]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
# AsyncAPI args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Union[str, Iterable[str], None],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -473,9 +481,9 @@ def __init__(
Doc("AsyncAPI server description."),
] = None,
tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -494,10 +502,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -519,13 +524,13 @@ def __init__(
else list(bootstrap_servers)
)
- if asyncapi_url is not None:
- if isinstance(asyncapi_url, str):
- asyncapi_url = [asyncapi_url]
+ if specification_url is not None:
+ if isinstance(specification_url, str):
+ specification_url = [specification_url]
else:
- asyncapi_url = list(asyncapi_url)
+ specification_url = list(specification_url)
else:
- asyncapi_url = servers
+ specification_url = servers
super().__init__(
bootstrap_servers=servers,
@@ -558,38 +563,46 @@ def __init__(
decoder=decoder,
parser=parser,
middlewares=middlewares,
+ routers=routers,
# AsyncAPI args
description=description,
- asyncapi_url=asyncapi_url,
+ specification_url=specification_url,
protocol=protocol,
protocol_version=protocol_version,
security=security,
tags=tags,
# Logging args
- logger=logger,
- log_level=log_level,
- log_fmt=log_fmt,
+ logger_state=make_kafka_logger_state(
+ logger=logger,
+ log_level=log_level,
+ log_fmt=log_fmt,
+ ),
# FastDepends args
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
)
self.client_id = client_id
- self._producer = None
+ self._state.patch_value(
+ producer=AioKafkaFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
- async def _close(
+ async def close(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> None:
- if self._producer is not None: # pragma: no branch
- await self._producer.stop()
- self._producer = None
+ await super().close(exc_type, exc_val, exc_tb)
+
+ await self._producer.disconnect()
- await super()._close(exc_type, exc_val, exc_tb)
+ self._connection = None
@override
async def connect( # type: ignore[override]
@@ -630,29 +643,17 @@ async def _connect( # type: ignore[override]
client_id=client_id,
)
- await producer.start()
- self._producer = AioKafkaFastProducer(
- producer=producer,
- parser=self._parser,
- decoder=self._decoder,
- )
+ await self._producer.connect(producer)
- return partial(
- aiokafka.AIOKafkaConsumer,
- **filter_by_dict(ConsumerConnectionParams, kwargs),
- )
+ connection_kwargs, _ = filter_by_dict(ConsumerConnectionParams, kwargs)
+ return partial(aiokafka.AIOKafkaConsumer, **connection_kwargs)
async def start(self) -> None:
"""Connect broker to Kafka and startup all subscribers."""
+ await self.connect()
+ self._setup()
await super().start()
- for handler in self._subscribers.values():
- self._log(
- f"`{handler.call_name}` waiting for messages",
- extra=handler.get_log_context(None),
- )
- await handler.start()
-
@property
def _subscriber_setup_extra(self) -> "AnyDict":
return {
@@ -661,94 +662,103 @@ def _subscriber_setup_extra(self) -> "AnyDict":
"builder": self._connection,
}
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ topic: str = "",
+ *,
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: Literal[True],
+ ) -> "asyncio.Future[RecordMetadata]": ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ topic: str = "",
+ *,
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: Literal[False] = False,
+ ) -> "RecordMetadata": ...
+
@override
- async def publish( # type: ignore[override]
+ async def publish(
self,
- message: Annotated[
- "SendableMessage",
- Doc("Message body to send."),
- ],
- topic: Annotated[
- str,
- Doc("Topic where the message will be published."),
- ],
+ message: "SendableMessage",
+ topic: str = "",
*,
- key: Annotated[
- Union[bytes, Any, None],
- Doc(
- """
- A key to associate with the message. Can be used to
- determine which partition to send the message to. If partition
- is `None` (and producer's partitioner config is left as default),
- then messages with the same key will be delivered to the same
- partition (but if key is `None`, partition is chosen randomly).
- Must be type `bytes`, or be serializable to bytes via configured
- `key_serializer`.
- """
- ),
- ] = None,
- partition: Annotated[
- Optional[int],
- Doc(
- """
- Specify a partition. If not set, the partition will be
- selected using the configured `partitioner`.
- """
- ),
- ] = None,
- timestamp_ms: Annotated[
- Optional[int],
- Doc(
- """
- Epoch milliseconds (from Jan 1 1970 UTC) to use as
- the message timestamp. Defaults to current time.
- """
- ),
- ] = None,
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc("Message headers to store metainformation."),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- reply_to: Annotated[
- str,
- Doc("Reply message topic name to send response."),
- ] = "",
- no_confirm: Annotated[
- bool,
- Doc("Do not wait for Kafka publish confirmation."),
- ] = False,
- # extra options to be compatible with test client
- **kwargs: Any,
- ) -> Optional[Any]:
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: bool = False,
+ ) -> Union["asyncio.Future[RecordMetadata]", "RecordMetadata"]:
"""Publish message directly.
        This method allows you to publish a message in a way not documented by AsyncAPI. You can use it from other
        frameworks' applications or to publish messages occasionally.
        Please use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead for regular use.
- """
- correlation_id = correlation_id or gen_cor_id()
- return await super().publish(
+ Args:
+ message:
+ Message body to send.
+ topic:
+ Topic where the message will be published.
+ key:
+ A key to associate with the message. Can be used to
+ determine which partition to send the message to. If partition
+ is `None` (and producer's partitioner config is left as default),
+ then messages with the same key will be delivered to the same
+ partition (but if key is `None`, partition is chosen randomly).
+ Must be type `bytes`, or be serializable to bytes via configured
+ `key_serializer`
+ partition:
+ Specify a partition. If not set, the partition will be
+ selected using the configured `partitioner`
+ timestamp_ms:
+ Epoch milliseconds (from Jan 1 1970 UTC) to use as
+ the message timestamp. Defaults to current time.
+ headers:
+ Message headers to store metainformation.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ reply_to:
+ Reply message topic name to send response.
+ no_confirm:
+ Do not wait for Kafka publish confirmation.
+
+ Returns:
+            `asyncio.Future[RecordMetadata]` if `no_confirm=True`.
+            `RecordMetadata` if `no_confirm=False`.
+ """
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
reply_to=reply_to,
no_confirm=no_confirm,
- **kwargs,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
@override
async def request( # type: ignore[override]
@@ -773,7 +783,7 @@ async def request( # type: ignore[override]
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -782,7 +792,7 @@ async def request( # type: ignore[override]
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
timestamp_ms: Annotated[
@@ -791,120 +801,133 @@ async def request( # type: ignore[override]
"""
Epoch milliseconds (from Jan 1 1970 UTC) to use as
the message timestamp. Defaults to current time.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc("Message headers to store metainformation."),
] = None,
correlation_id: Annotated[
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
timeout: Annotated[
float,
Doc("Timeout to send RPC request."),
] = 0.5,
- ) -> Optional[Any]:
- correlation_id = correlation_id or gen_cor_id()
-
- return await super().request(
+ ) -> "KafkaMessage":
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
timeout=timeout,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
)
+ msg: KafkaMessage = await super()._basic_request(cmd, producer=self._producer)
+ return msg
+
+ @overload
async def publish_batch(
self,
- *msgs: Annotated[
- "SendableMessage",
- Doc("Messages bodies to send."),
- ],
- topic: Annotated[
- str,
- Doc("Topic where the message will be published."),
- ],
- partition: Annotated[
- Optional[int],
- Doc(
- """
- Specify a partition. If not set, the partition will be
- selected using the configured `partitioner`.
- """
- ),
- ] = None,
- timestamp_ms: Annotated[
- Optional[int],
- Doc(
- """
- Epoch milliseconds (from Jan 1 1970 UTC) to use as
- the message timestamp. Defaults to current time.
- """
- ),
- ] = None,
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc("Messages headers to store metainformation."),
- ] = None,
- reply_to: Annotated[
- str,
- Doc("Reply message topic name to send response."),
- ] = "",
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- no_confirm: Annotated[
- bool,
- Doc("Do not wait for Kafka publish confirmation."),
- ] = False,
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
+ *messages: "SendableMessage",
+ topic: str = "",
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ no_confirm: Literal[True],
+ ) -> "asyncio.Future[RecordMetadata]": ...
+
+ @overload
+ async def publish_batch(
+ self,
+ *messages: "SendableMessage",
+ topic: str = "",
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ no_confirm: Literal[False] = False,
+ ) -> "RecordMetadata": ...
- for m in self._middlewares[::-1]:
- call = partial(m(None).publish_scope, call)
+ async def publish_batch(
+ self,
+ *messages: "SendableMessage",
+ topic: str = "",
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ no_confirm: bool = False,
+ ) -> Union["asyncio.Future[RecordMetadata]", "RecordMetadata"]:
+ """Publish a message batch as a single request to broker.
+
+ Args:
+ *messages:
+ Messages bodies to send.
+ topic:
+ Topic where the message will be published.
+ partition:
+ Specify a partition. If not set, the partition will be
+                selected using the configured `partitioner`.
+ timestamp_ms:
+ Epoch milliseconds (from Jan 1 1970 UTC) to use as
+ the message timestamp. Defaults to current time.
+ headers:
+ Message headers to store metainformation.
+ reply_to:
+ Reply message topic name to send response.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ no_confirm:
+ Do not wait for Kafka publish confirmation.
+
+ Returns:
+            `asyncio.Future[RecordMetadata]` if `no_confirm=True`.
+            `RecordMetadata` if `no_confirm=False`.
+ """
+ assert self._producer, NOT_CONNECTED_YET # nosec B101
- await call(
- *msgs,
+ cmd = KafkaPublishCommand(
+ *messages,
topic=topic,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
reply_to=reply_to,
- correlation_id=correlation_id,
no_confirm=no_confirm,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish_batch(cmd, producer=self._producer)
+
@override
async def ping(self, timeout: Optional[float]) -> bool:
sleep_time = (timeout or 10) / 10
with anyio.move_on_after(timeout) as cancel_scope:
- if self._producer is None:
+ if not self._producer:
return False
while True:
if cancel_scope.cancel_called:
return False
- if not self._producer._producer._closed:
+ if not self._producer.closed:
return True
await anyio.sleep(sleep_time)
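The `Literal` overloads above let type checkers infer the `publish` return type from `no_confirm`. A usage sketch against a local broker (address and topic are illustrative):

    import asyncio

    from faststream.kafka import KafkaBroker

    async def main() -> None:
        broker = KafkaBroker("localhost:9092")
        await broker.start()

        # Default no_confirm=False: awaits delivery and yields RecordMetadata.
        meta = await broker.publish("payload", topic="in")

        # no_confirm=True: returns an asyncio.Future immediately; await it
        # later (or not at all) to observe the delivery confirmation.
        fut = await broker.publish("payload", topic="in", no_confirm=True)
        await fut

        await broker.close()

    asyncio.run(main())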
diff --git a/faststream/kafka/broker/logging.py b/faststream/kafka/broker/logging.py
index e7e534e98b..72a1420325 100644
--- a/faststream/kafka/broker/logging.py
+++ b/faststream/kafka/broker/logging.py
@@ -1,71 +1,76 @@
import logging
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Tuple, Union
+from functools import partial
+from typing import TYPE_CHECKING, Optional
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.log.logging import get_broker_logger
-from faststream.types import EMPTY
+from faststream._internal.log.logging import get_broker_logger
+from faststream._internal.state.logger import (
+ DefaultLoggerStorage,
+ make_logger_state,
+)
if TYPE_CHECKING:
- import aiokafka
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
- from faststream.types import LoggerProto
-
-
-class KafkaLoggingBroker(
- BrokerUsecase[
- Union["aiokafka.ConsumerRecord", Tuple["aiokafka.ConsumerRecord", ...]],
- Callable[..., "aiokafka.AIOKafkaConsumer"],
- ]
-):
- """A class that extends the LoggingMixin class and adds additional functionality for logging Kafka related information."""
-
- _max_topic_len: int
- _max_group_len: int
- __max_msg_id_ln: ClassVar[int] = 10
+class KafkaParamsStorage(DefaultLoggerStorage):
def __init__(
self,
- *args: Any,
- logger: Optional["LoggerProto"] = EMPTY,
- log_level: int = logging.INFO,
- log_fmt: Optional[str] = None,
- **kwargs: Any,
+ log_fmt: Optional[str],
) -> None:
- """Initialize the class."""
- super().__init__(
- *args,
- logger=logger,
- # TODO: generate unique logger names to not share between brokers
- default_logger=get_broker_logger(
- name="kafka",
- default_context={
- "topic": "",
- "group_id": "",
- },
- message_id_ln=self.__max_msg_id_ln,
- ),
- log_level=log_level,
- log_fmt=log_fmt,
- **kwargs,
- )
+ super().__init__(log_fmt)
+
self._max_topic_len = 4
self._max_group_len = 0
- def get_fmt(self) -> str:
- return (
- "%(asctime)s %(levelname)-8s - "
- + f"%(topic)-{self._max_topic_len}s | "
- + (f"%(group_id)-{self._max_group_len}s | " if self._max_group_len else "")
- + f"%(message_id)-{self.__max_msg_id_ln}s "
- + "- %(message)s"
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ self._max_topic_len = max(
+ (
+ self._max_topic_len,
+ len(params.get("topic", "")),
+ ),
+ )
+ self._max_group_len = max(
+ (
+ self._max_group_len,
+ len(params.get("group_id", "")),
+ ),
)
- def _setup_log_context(
- self,
- *,
- topic: str = "",
- group_id: Optional[str] = None,
- ) -> None:
- """Set up log context."""
- self._max_topic_len = max((self._max_topic_len, len(topic)))
- self._max_group_len = max((self._max_group_len, len(group_id or "")))
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
+ message_id_ln = 10
+
+ # TODO: generate unique logger names to not share between brokers
+ return get_broker_logger(
+ name="kafka",
+ default_context={
+ "topic": "",
+ "group_id": "",
+ },
+ message_id_ln=message_id_ln,
+ fmt=self._log_fmt
+ or "".join((
+ "%(asctime)s %(levelname)-8s - ",
+ f"%(topic)-{self._max_topic_len}s | ",
+ (
+ f"%(group_id)-{self._max_group_len}s | "
+ if self._max_group_len
+ else ""
+ ),
+ f"%(message_id)-{message_id_ln}s ",
+ "- %(message)s",
+ )),
+ context=context,
+ log_level=self.logger_log_level,
+ )
+
+
+make_kafka_logger_state = partial(
+ make_logger_state,
+ default_storage_cls=KafkaParamsStorage,
+)
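Given the initial column widths tracked by `KafkaParamsStorage` (topic 4, message id 10, no group seen yet), the computed default format reduces to the string below; a `%(group_id)` column appears only after a subscriber with a `group_id` has been registered:

    # Default record layout produced by get_logger() before any group_id is seen:
    fmt = (
        "%(asctime)s %(levelname)-8s - "
        "%(topic)-4s | "
        "%(message_id)-10s "
        "- %(message)s"
    )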
diff --git a/faststream/kafka/broker/registrator.py b/faststream/kafka/broker/registrator.py
index 45ef5b5a64..6f804bda02 100644
--- a/faststream/kafka/broker/registrator.py
+++ b/faststream/kafka/broker/registrator.py
@@ -1,13 +1,11 @@
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
Literal,
Optional,
- Sequence,
- Tuple,
Union,
cast,
overload,
@@ -15,34 +13,34 @@
from aiokafka import ConsumerRecord
from aiokafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.utils import default_filter
-from faststream.kafka.publisher.asyncapi import AsyncAPIPublisher
+from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.kafka.publisher.factory import create_publisher
from faststream.kafka.subscriber.factory import create_subscriber
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from aiokafka import TopicPartition
from aiokafka.abc import ConsumerRebalanceListener
from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.types import (
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.kafka.message import KafkaMessage
- from faststream.kafka.publisher.asyncapi import (
- AsyncAPIBatchPublisher,
- AsyncAPIDefaultPublisher,
+ from faststream.kafka.publisher.specified import (
+ SpecificationBatchPublisher,
+ SpecificationDefaultPublisher,
)
- from faststream.kafka.subscriber.asyncapi import (
- AsyncAPIBatchSubscriber,
- AsyncAPIConcurrentDefaultSubscriber,
- AsyncAPIDefaultSubscriber,
+ from faststream.kafka.subscriber.specified import (
+ SpecificationBatchSubscriber,
+ SpecificationConcurrentDefaultSubscriber,
+ SpecificationDefaultSubscriber,
)
@@ -50,23 +48,21 @@ class KafkaRegistrator(
ABCBroker[
Union[
ConsumerRecord,
- Tuple[ConsumerRecord, ...],
+ tuple[ConsumerRecord, ...],
]
- ]
+ ],
):
"""Includable to KafkaBroker router."""
- _subscribers: Dict[
- int,
+ _subscribers: list[
Union[
- "AsyncAPIBatchSubscriber",
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
- ],
+ "SpecificationBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
+ ]
]
- _publishers: Dict[
- int,
- Union["AsyncAPIBatchPublisher", "AsyncAPIDefaultPublisher"],
+ _publishers: list[
+ Union["SpecificationBatchPublisher", "SpecificationDefaultPublisher"],
]
@overload # type: ignore[override]
@@ -88,21 +84,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -117,7 +113,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -127,7 +123,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -138,7 +134,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -153,7 +149,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -165,7 +161,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -174,15 +170,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+            This option is deprecated and will be removed in the 0.7.0 release.
+            Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -193,7 +195,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -209,7 +211,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -222,7 +224,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -236,7 +238,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
            `ConsumerRebalanceListener` to delay rebalancing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -251,7 +253,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -267,7 +269,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -277,7 +279,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -286,7 +288,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -297,7 +299,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -327,7 +329,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -338,7 +340,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -370,7 +372,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -378,7 +380,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -387,13 +389,13 @@ def subscriber(
"""
An explicit partitions list to assign.
            You can't use 'topics' and 'partitions' at the same time.
- """
+ """,
),
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -405,50 +407,44 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIDefaultSubscriber": ...
+ ) -> "SpecificationDefaultSubscriber": ...
@overload
def subscriber(
@@ -469,21 +465,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -498,7 +494,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -508,7 +504,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -519,7 +515,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -534,7 +530,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -546,7 +542,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -555,15 +551,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -574,7 +576,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -590,7 +592,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -603,7 +605,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -617,7 +619,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalacing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -632,7 +634,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -648,7 +650,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -658,7 +660,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -667,7 +669,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -678,7 +680,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -708,7 +710,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -719,7 +721,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -751,7 +753,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -759,7 +761,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -768,13 +770,13 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' in the same time.
- """
+ """,
),
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -786,50 +788,44 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIBatchSubscriber": ...
+ ) -> "SpecificationBatchSubscriber": ...
@overload
def subscriber(
@@ -850,21 +846,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -879,7 +875,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -889,7 +885,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -900,7 +896,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -915,7 +911,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -927,7 +923,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -936,15 +932,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -955,7 +957,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -971,7 +973,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -984,7 +986,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -998,7 +1000,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalacing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -1013,7 +1015,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1029,7 +1031,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -1039,7 +1041,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -1048,7 +1050,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -1059,7 +1061,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -1089,7 +1091,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -1100,7 +1102,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -1132,7 +1134,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -1140,7 +1142,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -1149,13 +1151,13 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' in the same time.
- """
+ """,
),
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -1167,52 +1169,46 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
]: ...
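
The three overloads above differ only in their return type: `batch=True` selects the batch subscriber, `max_workers > 1` the concurrent one, and the plain default otherwise. For handler code the practical difference is the payload shape; a rough sketch of the batch case, with a hypothetical topic and handler name (defaults as in the overloads above):

from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")

# batch=True delivers a whole fetched batch per call, bounded by
# batch_timeout_ms / max_records, instead of one record at a time.
@broker.subscriber("in-topic", batch=True, batch_timeout_ms=200, max_records=100)
async def handle_batch(msgs: list[str]) -> None:
    for msg in msgs:  # each element is one decoded record
        print(msg)
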
@override
@@ -1234,21 +1230,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -1263,7 +1259,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -1273,7 +1269,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -1284,7 +1280,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -1299,7 +1295,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1311,7 +1307,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1320,15 +1316,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1339,7 +1341,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1355,7 +1357,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -1368,7 +1370,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -1382,7 +1384,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalacing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -1397,7 +1399,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1413,7 +1415,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -1423,7 +1425,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -1432,7 +1434,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -1443,7 +1445,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -1473,7 +1475,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -1484,7 +1486,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -1516,7 +1518,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -1524,7 +1526,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -1533,13 +1535,13 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' in the same time.
- """
+ """,
),
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -1551,130 +1553,111 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
max_workers: Annotated[
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]:
- subscriber = super().subscriber(
- create_subscriber(
- *topics,
- batch=batch,
- max_workers=max_workers,
- batch_timeout_ms=batch_timeout_ms,
- max_records=max_records,
- group_id=group_id,
- listener=listener,
- pattern=pattern,
- connection_args={
- "key_deserializer": key_deserializer,
- "value_deserializer": value_deserializer,
- "fetch_max_wait_ms": fetch_max_wait_ms,
- "fetch_max_bytes": fetch_max_bytes,
- "fetch_min_bytes": fetch_min_bytes,
- "max_partition_fetch_bytes": max_partition_fetch_bytes,
- "auto_offset_reset": auto_offset_reset,
- "enable_auto_commit": auto_commit,
- "auto_commit_interval_ms": auto_commit_interval_ms,
- "check_crcs": check_crcs,
- "partition_assignment_strategy": partition_assignment_strategy,
- "max_poll_interval_ms": max_poll_interval_ms,
- "rebalance_timeout_ms": rebalance_timeout_ms,
- "session_timeout_ms": session_timeout_ms,
- "heartbeat_interval_ms": heartbeat_interval_ms,
- "consumer_timeout_ms": consumer_timeout_ms,
- "max_poll_records": max_poll_records,
- "exclude_internal_topics": exclude_internal_topics,
- "isolation_level": isolation_level,
- },
- partitions=partitions,
- is_manual=not auto_commit,
- # subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
- broker_dependencies=self._dependencies,
- # AsyncAPI
- title_=title,
- description_=description,
- include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
+ sub = create_subscriber(
+ *topics,
+ batch=batch,
+ max_workers=max_workers,
+ batch_timeout_ms=batch_timeout_ms,
+ max_records=max_records,
+ group_id=group_id,
+ listener=listener,
+ pattern=pattern,
+ connection_args={
+ "key_deserializer": key_deserializer,
+ "value_deserializer": value_deserializer,
+ "fetch_max_wait_ms": fetch_max_wait_ms,
+ "fetch_max_bytes": fetch_max_bytes,
+ "fetch_min_bytes": fetch_min_bytes,
+ "max_partition_fetch_bytes": max_partition_fetch_bytes,
+ "auto_offset_reset": auto_offset_reset,
+ "auto_commit_interval_ms": auto_commit_interval_ms,
+ "check_crcs": check_crcs,
+ "partition_assignment_strategy": partition_assignment_strategy,
+ "max_poll_interval_ms": max_poll_interval_ms,
+ "rebalance_timeout_ms": rebalance_timeout_ms,
+ "session_timeout_ms": session_timeout_ms,
+ "heartbeat_interval_ms": heartbeat_interval_ms,
+ "consumer_timeout_ms": consumer_timeout_ms,
+ "max_poll_records": max_poll_records,
+ "exclude_internal_topics": exclude_internal_topics,
+ "isolation_level": isolation_level,
+ },
+ partitions=partitions,
+ # acknowledgement args
+ ack_policy=ack_policy,
+ no_ack=no_ack,
+ auto_commit=auto_commit,
+ # subscriber args
+ no_reply=no_reply,
+ broker_middlewares=self.middlewares,
+ broker_dependencies=self._dependencies,
+ # Specification
+ title_=title,
+ description_=description,
+ include_in_schema=self._solve_include_in_schema(include_in_schema),
)
- if batch:
- return cast("AsyncAPIBatchSubscriber", subscriber).add_call(
- filter_=filter,
- parser_=parser or self._parser,
- decoder_=decoder or self._decoder,
- dependencies_=dependencies,
- middlewares_=middlewares,
- )
+ subscriber = super().subscriber(sub)
+ if batch:
+ subscriber = cast("SpecificationBatchSubscriber", subscriber)
+ elif max_workers > 1:
+ subscriber = cast("SpecificationConcurrentDefaultSubscriber", subscriber)
else:
- if max_workers > 1:
- return cast("AsyncAPIConcurrentDefaultSubscriber", subscriber).add_call(
- filter_=filter,
- parser_=parser or self._parser,
- decoder_=decoder or self._decoder,
- dependencies_=dependencies,
- middlewares_=middlewares,
- )
- else:
- return cast("AsyncAPIDefaultSubscriber", subscriber).add_call(
- filter_=filter,
- parser_=parser or self._parser,
- decoder_=decoder or self._decoder,
- dependencies_=dependencies,
- middlewares_=middlewares,
- )
+ subscriber = cast("SpecificationDefaultSubscriber", subscriber)
+
+ return subscriber.add_call(
+ parser_=parser or self._parser,
+ decoder_=decoder or self._decoder,
+ dependencies_=dependencies,
+ middlewares_=middlewares,
+ )
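
The body rewrite above removes the `filter` and `retry` code paths entirely and funnels acknowledgement through the single `ack_policy` argument, keeping `no_ack` and `auto_commit` only as deprecated aliases. A minimal migration sketch, using the mappings stated in the deprecation messages (illustrative topic and group names):

from faststream.kafka import KafkaBroker
from faststream.middlewares import AckPolicy

broker = KafkaBroker("localhost:9092")

# Per the deprecation messages above:
#   auto_commit=True  ->  ack_policy=AckPolicy.ACK_FIRST
#   no_ack=True       ->  ack_policy=AckPolicy.DO_NOTHING
@broker.subscriber("in-topic", group_id="demo", ack_policy=AckPolicy.ACK_FIRST)
async def handle(msg: str) -> None:
    ...
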
@overload # type: ignore[override]
def publisher(
@@ -1695,7 +1678,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1704,15 +1687,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1726,29 +1709,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIDefaultPublisher": ...
+ ) -> "SpecificationDefaultPublisher": ...
@overload
def publisher(
@@ -1769,7 +1756,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1778,15 +1765,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1800,29 +1787,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIBatchPublisher": ...
+ ) -> "SpecificationBatchPublisher": ...
@overload
def publisher(
@@ -1843,7 +1834,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1852,15 +1843,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1874,31 +1865,35 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]: ...
@override
@@ -1920,7 +1915,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -1929,15 +1924,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -1951,40 +1946,44 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]:
- """Creates long-living and AsyncAPI-documented publisher object.
+ """Creates long-living and Specification-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
In such case publisher will publish your handler return value.
Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`.
"""
- publisher = AsyncAPIPublisher.create(
+ publisher = create_publisher(
# batch flag
batch=batch,
# default args
@@ -1995,9 +1994,9 @@ def publisher(
headers=headers,
reply_to=reply_to,
# publisher-specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
- # AsyncAPI
+ # Specification
title_=title,
description_=description,
schema_=schema,
@@ -2005,6 +2004,5 @@ def publisher(
)
if batch:
- return cast("AsyncAPIBatchPublisher", super().publisher(publisher))
- else:
- return cast("AsyncAPIDefaultPublisher", super().publisher(publisher))
+ return cast("SpecificationBatchPublisher", super().publisher(publisher))
+ return cast("SpecificationDefaultPublisher", super().publisher(publisher))
diff --git a/faststream/kafka/exceptions.py b/faststream/kafka/exceptions.py
index bf51f6a401..443d2cfdc6 100644
--- a/faststream/kafka/exceptions.py
+++ b/faststream/kafka/exceptions.py
@@ -9,6 +9,6 @@ def __init__(self, message_position: int) -> None:
def __str__(self) -> str:
return (
- f"The batch buffer is full. The position of the message"
+ "The batch buffer is full. The position of the message"
f" in the transferred collection at which the overflow occurred: {self.message_position}"
)
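
The `exceptions.py` hunk simply drops a stray `f` prefix: the first fragment contains no placeholders, so only the second literal needs interpolation. The concatenated result is unchanged:

message_position = 7  # example value
text = (
    "The batch buffer is full. The position of the message"
    f" in the transferred collection at which the overflow occurred: {message_position}"
)
print(text)  # ...at which the overflow occurred: 7
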
diff --git a/faststream/kafka/fastapi/__init__.py b/faststream/kafka/fastapi/__init__.py
index 88c88f215f..9fda6d07d3 100644
--- a/faststream/kafka/fastapi/__init__.py
+++ b/faststream/kafka/fastapi/__init__.py
@@ -1,11 +1,12 @@
-from typing_extensions import Annotated
+from typing import Annotated
-from faststream.broker.fastapi.context import Context, ContextRepo, Logger
+from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.kafka.broker import KafkaBroker as KB
-from faststream.kafka.fastapi.fastapi import KafkaRouter
from faststream.kafka.message import KafkaMessage as KM
from faststream.kafka.publisher.producer import AioKafkaFastProducer
+from .fastapi import KafkaRouter
+
__all__ = (
"Context",
"ContextRepo",
diff --git a/faststream/kafka/fastapi/fastapi.py b/faststream/kafka/fastapi/fastapi.py
index 0ce8e3ac3a..b7f1bf0ec7 100644
--- a/faststream/kafka/fastapi/fastapi.py
+++ b/faststream/kafka/fastapi/fastapi.py
@@ -1,16 +1,12 @@
import logging
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
Literal,
Optional,
- Sequence,
- Tuple,
- Type,
TypeVar,
Union,
cast,
@@ -26,13 +22,13 @@
from fastapi.utils import generate_unique_id
from starlette.responses import JSONResponse, Response
from starlette.routing import BaseRoute
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.utils import default_filter
+from faststream._internal.constants import EMPTY
+from faststream._internal.fastapi.router import StreamRouter
from faststream.kafka.broker.broker import KafkaBroker as KB
-from faststream.types import EMPTY
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from asyncio import AbstractEventLoop
@@ -45,31 +41,30 @@
from fastapi.types import IncEx
from starlette.types import ASGIApp, Lifespan
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.kafka.message import KafkaMessage
- from faststream.kafka.publisher.asyncapi import (
- AsyncAPIBatchPublisher,
- AsyncAPIDefaultPublisher,
+ from faststream.kafka.publisher.specified import (
+ SpecificationBatchPublisher,
+ SpecificationDefaultPublisher,
)
- from faststream.kafka.subscriber.asyncapi import (
- AsyncAPIBatchSubscriber,
- AsyncAPIConcurrentDefaultSubscriber,
- AsyncAPIDefaultSubscriber,
+ from faststream.kafka.subscriber.specified import (
+ SpecificationBatchSubscriber,
+ SpecificationConcurrentDefaultSubscriber,
+ SpecificationDefaultSubscriber,
)
from faststream.security import BaseSecurity
- from faststream.types import AnyDict, LoggerProto
+ from faststream.specification.schema.extra import Tag, TagDict
Partition = TypeVar("Partition")
-class KafkaRouter(StreamRouter[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]):
+class KafkaRouter(StreamRouter[Union[ConsumerRecord, tuple[ConsumerRecord, ...]]]):
"""A class to represent a Kafka router."""
broker_class = KB
@@ -87,7 +82,7 @@ def __init__(
This does not have to be the full node list.
It just needs to have at least one broker that will respond to a
Metadata API Request. Default port is 9092.
- """
+ """,
),
] = "localhost",
*,
@@ -108,7 +103,7 @@ def __init__(
which we force a refresh of metadata even if we haven't seen any
partition leadership changes to proactively discover any new
brokers or partitions.
- """
+ """,
),
] = 5 * 60 * 1000,
connections_max_idle_ms: Annotated[
@@ -118,7 +113,7 @@ def __init__(
Close idle connections after the number
of milliseconds specified by this config. Specifying `None` will
disable idle checks.
- """
+ """,
),
] = 9 * 60 * 1000,
sasl_kerberos_service_name: str = "kafka",
@@ -137,7 +132,7 @@ def __init__(
server-side log entries that correspond to this client. Also
submitted to :class:`~.consumer.group_coordinator.GroupCoordinator`
for logging with respect to consumer group administration.
- """
+ """,
),
] = SERVICE_NAME,
# publisher args
@@ -169,7 +164,7 @@ def __init__(
If unset, defaults to ``acks=1``. If `enable_idempotence` is
:data:`True` defaults to ``acks=all``.
- """
+ """,
),
] = _missing,
key_serializer: Annotated[
@@ -188,7 +183,7 @@ def __init__(
Compression is of full batches of data, so the efficacy of batching
will also impact the compression ratio (more batching means better
compression).
- """
+ """,
),
] = None,
max_batch_size: Annotated[
@@ -197,12 +192,12 @@ def __init__(
"""
Maximum size of buffered data per partition.
After this amount `send` coroutine will block until batch is drained.
- """
+ """,
),
] = 16 * 1024,
partitioner: Annotated[
Callable[
- [bytes, List[Partition], List[Partition]],
+ [bytes, list[Partition], list[Partition]],
Partition,
],
Doc(
@@ -215,7 +210,7 @@ def __init__(
messages with the same key are assigned to the same partition.
When a key is :data:`None`, the message is delivered to a random partition
(filtered to partitions with available leaders only, if possible).
- """
+ """,
),
] = DefaultPartitioner(),
max_request_size: Annotated[
@@ -227,7 +222,7 @@ def __init__(
has its own cap on record size which may be different from this.
This setting will limit the number of record batches the producer
will send in a single request to avoid sending huge requests.
- """
+ """,
),
] = 1024 * 1024,
linger_ms: Annotated[
@@ -242,7 +237,7 @@ def __init__(
This setting accomplishes this by adding a small amount of
artificial delay; that is, if first request is processed faster,
than `linger_ms`, producer will wait ``linger_ms - process_time``.
- """
+ """,
),
] = 0,
enable_idempotence: Annotated[
@@ -255,7 +250,7 @@ def __init__(
etc., may write duplicates of the retried message in the stream.
Note that enabling idempotence acks to set to ``all``. If it is not
explicitly set by the user it will be chosen.
- """
+ """,
),
] = False,
transactional_id: Optional[str] = None,
@@ -264,7 +259,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -279,38 +274,40 @@ def __init__(
Sequence[
Union[
"BrokerMiddleware[ConsumerRecord]",
- "BrokerMiddleware[Tuple[ConsumerRecord, ...]]",
+ "BrokerMiddleware[tuple[ConsumerRecord, ...]]",
]
],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
- # AsyncAPI args
+ # Specification args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate Specification server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Optional[str],
- Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
+ Doc(
+ "Specification hardcoded server addresses. Use `servers` if not specified.",
+ ),
] = None,
protocol: Annotated[
Optional[str],
- Doc("AsyncAPI server protocol."),
+ Doc("Specification server protocol."),
] = None,
protocol_version: Annotated[
Optional[str],
- Doc("AsyncAPI server protocol version."),
+ Doc("Specification server protocol version."),
] = "auto",
description: Annotated[
Optional[str],
- Doc("AsyncAPI server description."),
- ] = None,
- asyncapi_tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
- Doc("AsyncAPI server tags."),
+ Doc("Specification server description."),
] = None,
+ specification_tags: Annotated[
+ Iterable[Union["Tag", "TagDict"]],
+ Doc("Specification server tags."),
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -329,13 +326,13 @@ def __init__(
bool,
Doc(
"Whether to add broker to app scope in lifespan. "
- "You should disable this option at old ASGI servers."
+ "You should disable this option at old ASGI servers.",
),
] = True,
schema_url: Annotated[
Optional[str],
Doc(
- "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all."
+ "Specification schema url. You should set this option to `None` to disable Specification routes at all.",
),
] = "/asyncapi",
# FastAPI args
@@ -344,7 +341,7 @@ def __init__(
Doc("An optional path prefix for the router."),
] = "",
tags: Annotated[
- Optional[List[Union[str, "Enum"]]],
+ Optional[list[Union[str, "Enum"]]],
Doc(
"""
A list of tags to be applied to all the *path operations* in this
@@ -354,7 +351,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
dependencies: Annotated[
@@ -366,22 +363,22 @@ def __init__(
Read more about it in the
[FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
default_response_class: Annotated[
- Type["Response"],
+ type["Response"],
Doc(
"""
The default response class to be used.
Read more in the
[FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
- """
+ """,
),
] = Default(JSONResponse),
responses: Annotated[
- Optional[Dict[Union[int, str], "AnyDict"]],
+ Optional[dict[Union[int, str], "AnyDict"]],
Doc(
"""
Additional responses to be shown in OpenAPI.
@@ -393,11 +390,11 @@ def __init__(
And in the
[FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
callbacks: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
OpenAPI callbacks that should apply to all *path operations* in this
@@ -407,11 +404,11 @@ def __init__(
Read more about it in the
[FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
- """
+ """,
),
] = None,
routes: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
**Note**: you probably shouldn't use this parameter, it is inherited
@@ -420,7 +417,7 @@ def __init__(
---
A list of routes to serve incoming HTTP and WebSocket requests.
- """
+ """,
),
deprecated(
"""
@@ -429,7 +426,7 @@ def __init__(
In FastAPI, you normally would use the *path operation methods*,
like `router.get()`, `router.post()`, etc.
- """
+ """,
),
] = None,
redirect_slashes: Annotated[
@@ -438,7 +435,7 @@ def __init__(
"""
Whether to detect and redirect slashes in URLs when the client doesn't
use the same format.
- """
+ """,
),
] = True,
default: Annotated[
@@ -447,7 +444,7 @@ def __init__(
"""
Default function handler for this router. Used to handle
404 Not Found errors.
- """
+ """,
),
] = None,
dependency_overrides_provider: Annotated[
@@ -458,18 +455,18 @@ def __init__(
You shouldn't need to use it. It normally points to the `FastAPI` app
object.
- """
+ """,
),
] = None,
route_class: Annotated[
- Type["APIRoute"],
+ type["APIRoute"],
Doc(
"""
Custom route (*path operation*) class to be used by this router.
Read more about it in the
[FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router).
- """
+ """,
),
] = APIRoute,
on_startup: Annotated[
@@ -481,7 +478,7 @@ def __init__(
You should instead use the `lifespan` handlers.
Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
on_shutdown: Annotated[
@@ -494,7 +491,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
lifespan: Annotated[
@@ -506,7 +503,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
deprecated: Annotated[
@@ -519,7 +516,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
include_in_schema: Annotated[
@@ -533,7 +530,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi).
- """
+ """,
),
] = True,
generate_unique_id_function: Annotated[
@@ -548,7 +545,7 @@ def __init__(
Read more about it in the
[FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
- """
+ """,
),
] = Default(generate_unique_id),
) -> None:
@@ -587,13 +584,13 @@ def __init__(
logger=logger,
log_level=log_level,
log_fmt=log_fmt,
- # AsyncAPI args
+ # Specification args
security=security,
protocol=protocol,
description=description,
protocol_version=protocol_version,
- asyncapi_tags=asyncapi_tags,
- asyncapi_url=asyncapi_url,
+ specification_tags=specification_tags,
+ specification_url=specification_url,
# FastAPI args
prefix=prefix,
tags=tags,
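
With the rename applied end to end (`asyncapi_url` -> `specification_url`, `asyncapi_tags` -> `specification_tags`), router construction looks roughly like this (a sketch; the dict tag shape is assumed from the `TagDict` import above):

from faststream.kafka.fastapi import KafkaRouter

router = KafkaRouter(
    "localhost:9092",
    specification_url="kafka://localhost:9092",  # hypothetical address
    specification_tags=[{"name": "kafka"}],      # TagDict form
    schema_url="/asyncapi",                      # unchanged default
)
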
@@ -626,21 +623,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -655,7 +652,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -665,7 +662,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -676,7 +673,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -691,7 +688,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -703,7 +700,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -712,15 +709,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -731,7 +734,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -747,7 +750,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -760,7 +763,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -774,7 +777,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalacing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -789,7 +792,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -805,7 +808,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -815,7 +818,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -824,7 +827,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -835,7 +838,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -865,7 +868,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -876,7 +879,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -912,7 +915,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -920,7 +923,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -929,7 +932,7 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' at the same time.
- """
+ """,
),
] = (),
# broker args
@@ -947,48 +950,42 @@ def subscriber(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI information
+ # Specification information
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -1022,7 +1019,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -1034,7 +1031,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -1046,7 +1043,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -1058,7 +1055,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -1076,7 +1073,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -1093,7 +1090,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -1110,10 +1107,10 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
- ) -> "AsyncAPIDefaultSubscriber": ...
+ ) -> "SpecificationDefaultSubscriber": ...
@overload
def subscriber(
@@ -1127,21 +1124,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -1156,7 +1153,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -1166,7 +1163,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -1177,7 +1174,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -1192,7 +1189,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1204,7 +1201,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1213,15 +1210,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1232,7 +1235,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1248,7 +1251,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -1261,7 +1264,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -1275,7 +1278,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalancing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -1290,7 +1293,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1306,7 +1309,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -1316,7 +1319,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -1325,7 +1328,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -1336,7 +1339,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -1366,7 +1369,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -1377,7 +1380,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -1413,7 +1416,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -1421,7 +1424,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -1430,7 +1433,7 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' at the same time.
- """
+ """,
),
] = (),
# broker args
@@ -1447,49 +1450,43 @@ def subscriber(
Doc("Function to decode FastStream msg bytes body to python objects."),
] = None,
middlewares: Annotated[
- Iterable["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
+ Sequence["SubscriberMiddleware[KafkaMessage]"],
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI information
+ # Specification information
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -1523,7 +1520,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -1535,7 +1532,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -1547,7 +1544,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -1559,7 +1556,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -1577,7 +1574,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -1594,7 +1591,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -1611,10 +1608,10 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
- ) -> "AsyncAPIBatchSubscriber": ...
+ ) -> "SpecificationBatchSubscriber": ...
@overload
def subscriber(
@@ -1628,21 +1625,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -1657,7 +1654,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -1667,7 +1664,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -1678,7 +1675,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -1693,7 +1690,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -1705,7 +1702,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -1714,15 +1711,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -1733,7 +1736,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -1749,7 +1752,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -1762,7 +1765,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -1776,7 +1779,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalancing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -1791,7 +1794,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -1807,7 +1810,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -1817,7 +1820,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -1826,7 +1829,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -1837,7 +1840,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -1867,7 +1870,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -1878,7 +1881,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -1914,7 +1917,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -1922,7 +1925,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -1931,7 +1934,7 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' at the same time.
- """
+ """,
),
] = (),
# broker args
@@ -1948,49 +1951,43 @@ def subscriber(
Doc("Function to decode FastStream msg bytes body to python objects."),
] = None,
middlewares: Annotated[
- Iterable["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
+ Sequence["SubscriberMiddleware[KafkaMessage]"],
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI information
+ # Specification information
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -2024,7 +2021,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -2036,7 +2033,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -2048,7 +2045,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -2060,7 +2057,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -2078,7 +2075,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -2095,7 +2092,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -2112,12 +2109,12 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
) -> Union[
- "AsyncAPIBatchSubscriber",
- "AsyncAPIDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationDefaultSubscriber",
]: ...
@override
@@ -2132,21 +2129,21 @@ def subscriber(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -2161,7 +2158,7 @@ def subscriber(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -2171,7 +2168,7 @@ def subscriber(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -2182,7 +2179,7 @@ def subscriber(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -2197,7 +2194,7 @@ def subscriber(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -2209,7 +2206,7 @@ def subscriber(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -2218,15 +2215,21 @@ def subscriber(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -2237,7 +2240,7 @@ def subscriber(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -2253,7 +2256,7 @@ def subscriber(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -2266,7 +2269,7 @@ def subscriber(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -2280,7 +2283,7 @@ def subscriber(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalancing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -2295,7 +2298,7 @@ def subscriber(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -2311,7 +2314,7 @@ def subscriber(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -2321,7 +2324,7 @@ def subscriber(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -2330,7 +2333,7 @@ def subscriber(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -2341,7 +2344,7 @@ def subscriber(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -2371,7 +2374,7 @@ def subscriber(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -2382,7 +2385,7 @@ def subscriber(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -2418,7 +2421,7 @@ def subscriber(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -2426,7 +2429,7 @@ def subscriber(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -2435,7 +2438,7 @@ def subscriber(
"""
An explicit partitions list to assign.
You can't use 'topics' and 'partitions' at the same time.
- """
+ """,
),
] = (),
# broker args
@@ -2452,49 +2455,43 @@ def subscriber(
Doc("Function to decode FastStream msg bytes body to python objects."),
] = None,
middlewares: Annotated[
- Iterable["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
+ Sequence["SubscriberMiddleware[KafkaMessage]"],
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
- # AsyncAPI information
+ # Specification information
title: Annotated[
Optional[str],
- Doc("AsyncAPI subscriber object title."),
+ Doc("Specification subscriber object title."),
] = None,
description: Annotated[
Optional[str],
Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Specification subscriber object description. "
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
# FastAPI args
response_model: Annotated[
@@ -2528,7 +2525,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -2540,7 +2537,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -2552,7 +2549,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -2564,7 +2561,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -2582,7 +2579,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -2599,7 +2596,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -2616,7 +2613,7 @@ def subscriber(
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
max_workers: Annotated[
@@ -2624,9 +2621,9 @@ def subscriber(
Doc("Number of workers to process messages concurrently."),
] = 1,
) -> Union[
- "AsyncAPIBatchSubscriber",
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]:
subscriber = super().subscriber(
*topics,
@@ -2662,8 +2659,7 @@ def subscriber(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
title=title,
@@ -2680,12 +2676,10 @@ def subscriber(
)
if batch:
- return cast("AsyncAPIBatchSubscriber", subscriber)
- else:
- if max_workers > 1:
- return cast("AsyncAPIConcurrentDefaultSubscriber", subscriber)
- else:
- return cast("AsyncAPIDefaultSubscriber", subscriber)
+ return cast("SpecificationBatchSubscriber", subscriber)
+ if max_workers > 1:
+ return cast("SpecificationConcurrentDefaultSubscriber", subscriber)
+ return cast("SpecificationDefaultSubscriber", subscriber)
@overload # type: ignore[override]
def publisher(
@@ -2706,7 +2700,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2715,15 +2709,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2737,29 +2731,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIDefaultPublisher": ...
+ ) -> "SpecificationDefaultPublisher": ...
@overload
def publisher(
@@ -2780,7 +2778,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2789,15 +2787,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2811,29 +2809,33 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
- ) -> "AsyncAPIBatchPublisher": ...
+ ) -> "SpecificationBatchPublisher": ...
@overload
def publisher(
@@ -2854,7 +2856,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2863,15 +2865,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2885,31 +2887,35 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]: ...
@override
@@ -2931,7 +2937,7 @@ def publisher(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -2940,15 +2946,15 @@ def publisher(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -2962,31 +2968,35 @@ def publisher(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
- # AsyncAPI args
+ # Specification args
title: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object title."),
+ Doc("Specification publisher object title."),
] = None,
description: Annotated[
Optional[str],
- Doc("AsyncAPI publisher object description."),
+ Doc("Specification publisher object description."),
] = None,
schema: Annotated[
Optional[Any],
Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Specification publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
+ Doc("Whetever to include operation in Specification schema or not."),
] = True,
) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
]:
return self.broker.publisher(
topic=topic,
@@ -2997,7 +3007,7 @@ def publisher(
reply_to=reply_to,
# broker options
middlewares=middlewares,
- # AsyncAPI options
+ # Specification options
title=title,
description=description,
schema=schema,
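Since per-publisher `middlewares` are deprecated in favor of router/broker-level registration, the replacement wiring looks roughly like this (`my_middleware` is hypothetical):

    from faststream.kafka import KafkaBroker

    # Before (deprecated):
    #   broker.publisher("out-topic", middlewares=[my_middleware])
    # After: register the middleware once, on the broker (or on a KafkaRouter).
    broker = KafkaBroker("localhost:9092", middlewares=[my_middleware])
    to_out = broker.publisher("out-topic")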
diff --git a/faststream/kafka/message.py b/faststream/kafka/message.py
index bde7669787..20fe0d0edd 100644
--- a/faststream/kafka/message.py
+++ b/faststream/kafka/message.py
@@ -1,8 +1,8 @@
-from typing import TYPE_CHECKING, Any, Protocol, Tuple, Union
+from typing import TYPE_CHECKING, Any, Protocol, Union
from aiokafka import TopicPartition as AIOKafkaTopicPartition
-from faststream.broker.message import StreamMessage
+from faststream.message import AckStatus, StreamMessage
if TYPE_CHECKING:
from aiokafka import ConsumerRecord
@@ -42,15 +42,30 @@ class KafkaMessage(
StreamMessage[
Union[
"ConsumerRecord",
- Tuple["ConsumerRecord", ...],
+ tuple["ConsumerRecord", ...],
]
- ]
+ ],
):
"""Represents a Kafka message in the FastStream framework.
This class extends `StreamMessage` and is specialized for handling Kafka ConsumerRecord objects.
"""
+ def __init__(self, *args: Any, consumer: ConsumerProtocol, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+ self.consumer = consumer
+ self.committed = AckStatus.ACKED
+
+
+class KafkaAckableMessage(
+ StreamMessage[
+ Union[
+ "ConsumerRecord",
+ tuple["ConsumerRecord", ...],
+ ]
+ ]
+):
def __init__(
self,
*args: Any,
@@ -61,6 +76,12 @@ def __init__(
self.consumer = consumer
+ async def ack(self) -> None:
+ """Acknowledge the Kafka message."""
+ if not self.committed:
+ await self.consumer.commit()
+ await super().ack()
+
async def nack(self) -> None:
"""Reject the Kafka message."""
if not self.committed:
@@ -78,11 +99,3 @@ async def nack(self) -> None:
offset=raw_message.offset,
)
await super().nack()
-
-
-class KafkaAckableMessage(KafkaMessage):
- async def ack(self) -> None:
- """Acknowledge the Kafka message."""
- if not self.committed:
- await self.consumer.commit()
- await super().ack()
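After this split, `KafkaMessage` is born pre-acked (auto-commit mode), while `KafkaAckableMessage` commits on `ack()` and seeks back on `nack()`. A sketch of a handler driving commits manually, assuming `AckPolicy.DO_NOTHING` wires up the ackable message class:

    from faststream.kafka import KafkaMessage

    @broker.subscriber("orders", group_id="demo", ack_policy=AckPolicy.DO_NOTHING)
    async def handle(body: str, msg: KafkaMessage) -> None:
        try:
            ...  # process the record
            await msg.ack()   # commits the offset once; `committed` guards repeats
        except Exception:
            await msg.nack()  # seeks back so the record is redelivered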
diff --git a/faststream/kafka/opentelemetry/middleware.py b/faststream/kafka/opentelemetry/middleware.py
index 2f06486c33..6b845e8705 100644
--- a/faststream/kafka/opentelemetry/middleware.py
+++ b/faststream/kafka/opentelemetry/middleware.py
@@ -6,10 +6,11 @@
from faststream.kafka.opentelemetry.provider import (
telemetry_attributes_provider_factory,
)
+from faststream.kafka.response import KafkaPublishCommand
from faststream.opentelemetry.middleware import TelemetryMiddleware
-class KafkaTelemetryMiddleware(TelemetryMiddleware):
+class KafkaTelemetryMiddleware(TelemetryMiddleware[KafkaPublishCommand]):
def __init__(
self,
*,
diff --git a/faststream/kafka/opentelemetry/provider.py b/faststream/kafka/opentelemetry/provider.py
index b90d82c9fd..6eedfafdf7 100644
--- a/faststream/kafka/opentelemetry/provider.py
+++ b/faststream/kafka/opentelemetry/provider.py
@@ -1,16 +1,19 @@
-from typing import TYPE_CHECKING, Sequence, Tuple, Union, cast
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union, cast
from opentelemetry.semconv.trace import SpanAttributes
-from faststream.broker.types import MsgType
+from faststream._internal.types import MsgType
from faststream.opentelemetry import TelemetrySettingsProvider
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
if TYPE_CHECKING:
from aiokafka import ConsumerRecord
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.kafka.response import KafkaPublishCommand
+ from faststream.message import StreamMessage
+ from faststream.response import PublishCommand
class BaseKafkaTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
@@ -19,33 +22,33 @@ class BaseKafkaTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "kafka"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "KafkaPublishCommand",
) -> "AnyDict":
- attrs = {
+ attrs: AnyDict = {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["topic"],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
- if (partition := kwargs.get("partition")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = partition
+ if cmd.partition is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = cmd.partition
- if (key := kwargs.get("key")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key
+ if cmd.key is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = cmd.key
return attrs
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- return cast(str, kwargs["topic"])
+ return cmd.destination
class KafkaTelemetrySettingsProvider(
- BaseKafkaTelemetrySettingsProvider["ConsumerRecord"]
+ BaseKafkaTelemetrySettingsProvider["ConsumerRecord"],
):
def get_consume_attrs_from_message(
self,
@@ -70,37 +73,35 @@ def get_consume_destination_name(
self,
msg: "StreamMessage[ConsumerRecord]",
) -> str:
- return cast(str, msg.raw_message.topic)
+ return cast("str", msg.raw_message.topic)
class BatchKafkaTelemetrySettingsProvider(
- BaseKafkaTelemetrySettingsProvider[Tuple["ConsumerRecord", ...]]
+ BaseKafkaTelemetrySettingsProvider[tuple["ConsumerRecord", ...]],
):
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[Tuple[ConsumerRecord, ...]]",
+ msg: "StreamMessage[tuple[ConsumerRecord, ...]]",
) -> "AnyDict":
raw_message = msg.raw_message[0]
- attrs = {
+ return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id,
SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id,
SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(
- bytearray().join(cast(Sequence[bytes], msg.body))
+ bytearray().join(cast("Sequence[bytes]", msg.body)),
),
SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT: len(msg.raw_message),
SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION: raw_message.partition,
MESSAGING_DESTINATION_PUBLISH_NAME: raw_message.topic,
}
- return attrs
-
def get_consume_destination_name(
self,
- msg: "StreamMessage[Tuple[ConsumerRecord, ...]]",
+ msg: "StreamMessage[tuple[ConsumerRecord, ...]]",
) -> str:
- return cast(str, msg.raw_message[0].topic)
+ return cast("str", msg.raw_message[0].topic)
def telemetry_attributes_provider_factory(
@@ -111,5 +112,4 @@ def telemetry_attributes_provider_factory(
]:
if isinstance(msg, Sequence):
return BatchKafkaTelemetrySettingsProvider()
- else:
- return KafkaTelemetrySettingsProvider()
+ return KafkaTelemetrySettingsProvider()
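The telemetry provider now reads publish attributes from a typed `KafkaPublishCommand` instead of raw kwargs; a sketch of the non-batch branch (the command object is built by the broker at publish time, so it is only referenced in comments here):

    from faststream.kafka.opentelemetry.provider import (
        telemetry_attributes_provider_factory,
    )

    provider = telemetry_attributes_provider_factory(None)  # non-Sequence -> single-record provider
    # attrs = provider.get_publish_attrs_from_cmd(cmd)   # cmd: KafkaPublishCommand
    # name = provider.get_publish_destination_name(cmd)  # cmd.destination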
diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py
index bea29db170..936abbc7b0 100644
--- a/faststream/kafka/parser.py
+++ b/faststream/kafka/parser.py
@@ -1,17 +1,15 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, cast
+from typing import TYPE_CHECKING, Any, Optional, cast
-from faststream.broker.message import decode_message, gen_cor_id
-from faststream.kafka.message import FAKE_CONSUMER, KafkaMessage
-from faststream.utils.context.repository import context
+from faststream.kafka.message import FAKE_CONSUMER, ConsumerProtocol, KafkaMessage
+from faststream.message import decode_message
if TYPE_CHECKING:
from re import Pattern
from aiokafka import ConsumerRecord
- from faststream.broker.message import StreamMessage
- from faststream.kafka.subscriber.usecase import LogicSubscriber
- from faststream.types import DecodedMessage
+ from faststream._internal.basic_types import DecodedMessage
+ from faststream.message import StreamMessage
class AioKafkaParser:
@@ -19,19 +17,23 @@ class AioKafkaParser:
def __init__(
self,
- msg_class: Type[KafkaMessage],
+ msg_class: type[KafkaMessage],
regex: Optional["Pattern[str]"],
) -> None:
self.msg_class = msg_class
self.regex = regex
+ self._consumer: ConsumerProtocol = FAKE_CONSUMER
+
+ def _setup(self, consumer: ConsumerProtocol) -> None:
+ self._consumer = consumer
+
async def parse_message(
self,
message: "ConsumerRecord",
) -> "StreamMessage[ConsumerRecord]":
"""Parses a Kafka message."""
headers = {i: j.decode() for i, j in message.headers}
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
return self.msg_class(
body=message.value or b"",
@@ -39,10 +41,10 @@ async def parse_message(
reply_to=headers.get("reply_to", ""),
content_type=headers.get("content-type"),
message_id=f"{message.offset}-{message.timestamp}",
- correlation_id=headers.get("correlation_id", gen_cor_id()),
+ correlation_id=headers.get("correlation_id"),
raw_message=message,
path=self.get_path(message.topic),
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
+ consumer=self._consumer,
)
async def decode_message(
@@ -52,21 +54,20 @@ async def decode_message(
"""Decodes a message."""
return decode_message(msg)
- def get_path(self, topic: str) -> Dict[str, str]:
+ def get_path(self, topic: str) -> dict[str, str]:
if self.regex and (match := self.regex.match(topic)):
return match.groupdict()
- else:
- return {}
+ return {}
class AioKafkaBatchParser(AioKafkaParser):
async def parse_message(
self,
- message: Tuple["ConsumerRecord", ...],
- ) -> "StreamMessage[Tuple[ConsumerRecord, ...]]":
+ message: tuple["ConsumerRecord", ...],
+ ) -> "StreamMessage[tuple[ConsumerRecord, ...]]":
"""Parses a batch of messages from a Kafka consumer."""
- body: List[Any] = []
- batch_headers: List[Dict[str, str]] = []
+ body: list[Any] = []
+ batch_headers: list[dict[str, str]] = []
first = message[0]
last = message[-1]
@@ -77,8 +78,6 @@ async def parse_message(
headers = next(iter(batch_headers), {})
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
-
return self.msg_class(
body=body,
headers=headers,
@@ -86,19 +85,19 @@ async def parse_message(
reply_to=headers.get("reply_to", ""),
content_type=headers.get("content-type"),
message_id=f"{first.offset}-{last.offset}-{first.timestamp}",
- correlation_id=headers.get("correlation_id", gen_cor_id()),
+ correlation_id=headers.get("correlation_id"),
raw_message=message,
path=self.get_path(first.topic),
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
+ consumer=self._consumer,
)
async def decode_message(
self,
- msg: "StreamMessage[Tuple[ConsumerRecord, ...]]",
+ msg: "StreamMessage[tuple[ConsumerRecord, ...]]",
) -> "DecodedMessage":
"""Decode a batch of messages."""
# super() must be bound to a name here because Python can't resolve zero-argument super() inside a comprehension
- super_obj = cast(AioKafkaParser, super())
+ super_obj = cast("AioKafkaParser", super())
return [
decode_message(await super_obj.parse_message(m)) for m in msg.raw_message
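The parser now gets its consumer injected explicitly through `_setup()` instead of a `context.get_local("handler_")` lookup; a sketch of the expected wiring:

    from faststream.kafka.message import KafkaMessage
    from faststream.kafka.parser import AioKafkaParser

    parser = AioKafkaParser(msg_class=KafkaMessage, regex=None)
    # Messages parsed before setup fall back to FAKE_CONSUMER; at subscriber
    # start-up the real consumer is attached (assumed call site):
    # parser._setup(consumer)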
diff --git a/faststream/kafka/prometheus/middleware.py b/faststream/kafka/prometheus/middleware.py
index 3fd41edeba..0e068bc2f2 100644
--- a/faststream/kafka/prometheus/middleware.py
+++ b/faststream/kafka/prometheus/middleware.py
@@ -1,14 +1,23 @@
-from typing import TYPE_CHECKING, Optional, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional, Union
+from aiokafka import ConsumerRecord
+
+from faststream._internal.constants import EMPTY
from faststream.kafka.prometheus.provider import settings_provider_factory
-from faststream.prometheus.middleware import BasePrometheusMiddleware
-from faststream.types import EMPTY
+from faststream.kafka.response import KafkaPublishCommand
+from faststream.prometheus.middleware import PrometheusMiddleware
if TYPE_CHECKING:
from prometheus_client import CollectorRegistry
-class KafkaPrometheusMiddleware(BasePrometheusMiddleware):
+class KafkaPrometheusMiddleware(
+ PrometheusMiddleware[
+ KafkaPublishCommand,
+ Union[ConsumerRecord, Sequence[ConsumerRecord]],
+ ],
+):
def __init__(
self,
*,
@@ -18,7 +27,7 @@ def __init__(
received_messages_size_buckets: Optional[Sequence[float]] = None,
) -> None:
super().__init__(
- settings_provider_factory=settings_provider_factory,
+ settings_provider_factory=settings_provider_factory, # type: ignore[arg-type]
registry=registry,
app_name=app_name,
metrics_prefix=metrics_prefix,
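
A minimal usage sketch of the renamed middleware, assuming a local broker on `localhost:9092` and the documented Prometheus wiring; server/topic details are placeholders:

```python
from prometheus_client import CollectorRegistry

from faststream.kafka import KafkaBroker
from faststream.kafka.prometheus import KafkaPrometheusMiddleware

registry = CollectorRegistry()

# The middleware now subclasses the generic PrometheusMiddleware, but the
# public construction API is unchanged.
broker = KafkaBroker(
    "localhost:9092",
    middlewares=(KafkaPrometheusMiddleware(registry=registry),),
)
```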
diff --git a/faststream/kafka/prometheus/provider.py b/faststream/kafka/prometheus/provider.py
index 9caf118e1f..25f324decb 100644
--- a/faststream/kafka/prometheus/provider.py
+++ b/faststream/kafka/prometheus/provider.py
@@ -1,15 +1,14 @@
-from typing import TYPE_CHECKING, Sequence, Tuple, Union, cast
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union, cast
-from faststream.broker.message import MsgType, StreamMessage
-from faststream.prometheus import (
- MetricsSettingsProvider,
-)
+from faststream.message.message import MsgType, StreamMessage
+from faststream.prometheus import MetricsSettingsProvider
if TYPE_CHECKING:
from aiokafka import ConsumerRecord
from faststream.prometheus import ConsumeAttrs
- from faststream.types import AnyDict
+ from faststream.response import PublishCommand
class BaseKafkaMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
@@ -18,11 +17,11 @@ class BaseKafkaMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "kafka"
- def get_publish_destination_name_from_kwargs(
+ def get_publish_destination_name_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- return cast(str, kwargs["topic"])
+ return cmd.destination
class KafkaMetricsSettingsProvider(BaseKafkaMetricsSettingsProvider["ConsumerRecord"]):
@@ -38,16 +37,16 @@ def get_consume_attrs_from_message(
class BatchKafkaMetricsSettingsProvider(
- BaseKafkaMetricsSettingsProvider[Tuple["ConsumerRecord", ...]]
+ BaseKafkaMetricsSettingsProvider[tuple["ConsumerRecord", ...]]
):
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[Tuple[ConsumerRecord, ...]]",
+ msg: "StreamMessage[tuple[ConsumerRecord, ...]]",
) -> "ConsumeAttrs":
raw_message = msg.raw_message[0]
return {
"destination_name": raw_message.topic,
- "message_size": len(bytearray().join(cast(Sequence[bytes], msg.body))),
+ "message_size": len(bytearray().join(cast("Sequence[bytes]", msg.body))),
"messages_count": len(msg.raw_message),
}
@@ -60,5 +59,4 @@ def settings_provider_factory(
]:
if isinstance(msg, Sequence):
return BatchKafkaMetricsSettingsProvider()
- else:
- return KafkaMetricsSettingsProvider()
+ return KafkaMetricsSettingsProvider()
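
The contract change above is easiest to see with a stand-in object. `FakeCommand` below is hypothetical and only mirrors the `destination` attribute the provider now reads instead of `kwargs["topic"]`:

```python
from dataclasses import dataclass

# Hypothetical stand-in for PublishCommand: the destination now travels on
# the command object rather than in loose publish kwargs.
@dataclass
class FakeCommand:
    destination: str

def get_publish_destination_name_from_cmd(cmd: FakeCommand) -> str:
    return cmd.destination

assert get_publish_destination_name_from_cmd(FakeCommand("orders")) == "orders"
```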
diff --git a/faststream/kafka/publisher/asyncapi.py b/faststream/kafka/publisher/asyncapi.py
deleted file mode 100644
index 7830c01807..0000000000
--- a/faststream/kafka/publisher/asyncapi.py
+++ /dev/null
@@ -1,196 +0,0 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- Literal,
- Optional,
- Sequence,
- Tuple,
- Union,
- overload,
-)
-
-from typing_extensions import override
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import kafka
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.broker.types import MsgType
-from faststream.exceptions import SetupError
-from faststream.kafka.publisher.usecase import (
- BatchPublisher,
- DefaultPublisher,
- LogicPublisher,
-)
-
-if TYPE_CHECKING:
- from aiokafka import ConsumerRecord
-
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
-
-
-class AsyncAPIPublisher(LogicPublisher[MsgType]):
- """A class representing a publisher."""
-
- def get_name(self) -> str:
- return f"{self.topic}:Publisher"
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- publish=Operation(
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads, "Publisher"),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(kafka=kafka.ChannelBinding(topic=self.topic)),
- )
- }
-
- @overload # type: ignore[override]
- @staticmethod
- def create(
- *,
- batch: Literal[True],
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "AsyncAPIBatchPublisher": ...
-
- @overload
- @staticmethod
- def create(
- *,
- batch: Literal[False],
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[ConsumerRecord]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "AsyncAPIDefaultPublisher": ...
-
- @overload
- @staticmethod
- def create(
- *,
- batch: bool,
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Sequence[
- "BrokerMiddleware[Union[Tuple[ConsumerRecord, ...], ConsumerRecord]]"
- ],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
- ]: ...
-
- @override
- @staticmethod
- def create(
- *,
- batch: bool,
- key: Optional[bytes],
- topic: str,
- partition: Optional[int],
- headers: Optional[Dict[str, str]],
- reply_to: str,
- # Publisher args
- broker_middlewares: Sequence[
- "BrokerMiddleware[Union[Tuple[ConsumerRecord, ...], ConsumerRecord]]"
- ],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> Union[
- "AsyncAPIBatchPublisher",
- "AsyncAPIDefaultPublisher",
- ]:
- if batch:
- if key:
- raise SetupError("You can't setup `key` with batch publisher")
-
- return AsyncAPIBatchPublisher(
- topic=topic,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
- else:
- return AsyncAPIDefaultPublisher(
- key=key,
- # basic args
- topic=topic,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
-
-class AsyncAPIBatchPublisher(
- BatchPublisher,
- AsyncAPIPublisher[Tuple["ConsumerRecord", ...]],
-):
- pass
-
-
-class AsyncAPIDefaultPublisher(
- DefaultPublisher,
- AsyncAPIPublisher["ConsumerRecord"],
-):
- pass
diff --git a/faststream/kafka/publisher/factory.py b/faststream/kafka/publisher/factory.py
new file mode 100644
index 0000000000..808dfc135d
--- /dev/null
+++ b/faststream/kafka/publisher/factory.py
@@ -0,0 +1,138 @@
+from collections.abc import Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Literal,
+ Optional,
+ Union,
+ overload,
+)
+
+from faststream.exceptions import SetupError
+
+from .specified import SpecificationBatchPublisher, SpecificationDefaultPublisher
+
+if TYPE_CHECKING:
+ from aiokafka import ConsumerRecord
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[True],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Sequence["BrokerMiddleware[tuple[ConsumerRecord, ...]]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationBatchPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[False],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Sequence["BrokerMiddleware[ConsumerRecord]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationDefaultPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Sequence[
+ "BrokerMiddleware[Union[tuple[ConsumerRecord, ...], ConsumerRecord]]"
+ ],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]: ...
+
+
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Sequence[
+ "BrokerMiddleware[Union[tuple[ConsumerRecord, ...], ConsumerRecord]]"
+ ],
+ middlewares: Sequence["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]:
+ if batch:
+ if key:
+ msg = "You can't setup `key` with batch publisher"
+ raise SetupError(msg)
+
+ return SpecificationBatchPublisher(
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+ return SpecificationDefaultPublisher(
+ key=key,
+ # basic args
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
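
A sketch of the guard in the new factory, using the keyword-only signature defined above; the topic name is arbitrary:

```python
from faststream.exceptions import SetupError
from faststream.kafka.publisher.factory import create_publisher

# A batch publisher can't carry a per-record `key`, so this raises SetupError.
try:
    create_publisher(
        batch=True,
        key=b"k",
        topic="test",
        partition=None,
        headers=None,
        reply_to="",
        broker_middlewares=(),
        middlewares=(),
        schema_=None,
        title_=None,
        description_=None,
        include_in_schema=True,
    )
except SetupError as exc:
    print(exc)  # You can't set up `key` with batch publisher
```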
diff --git a/faststream/kafka/publisher/fake.py b/faststream/kafka/publisher/fake.py
new file mode 100644
index 0000000000..ea9a321816
--- /dev/null
+++ b/faststream/kafka/publisher/fake.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.kafka.response import KafkaPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class KafkaFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ topic: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.topic = topic
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "KafkaPublishCommand"]
+ ) -> "KafkaPublishCommand":
+ cmd = super().patch_command(cmd)
+ real_cmd = KafkaPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.topic
+ return real_cmd
diff --git a/faststream/kafka/publisher/producer.py b/faststream/kafka/publisher/producer.py
index 78493dbf2d..5569ab9877 100644
--- a/faststream/kafka/publisher/producer.py
+++ b/faststream/kafka/publisher/producer.py
@@ -1,20 +1,25 @@
-from typing import TYPE_CHECKING, Any, Dict, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
from typing_extensions import override
-from faststream.broker.message import encode_message
-from faststream.broker.publisher.proto import ProducerProto
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import OperationForbiddenError
+from faststream._internal.publisher.proto import ProducerProto
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream.exceptions import FeatureNotSupportedException
from faststream.kafka.exceptions import BatchBufferOverflowException
from faststream.kafka.message import KafkaMessage
from faststream.kafka.parser import AioKafkaParser
+from faststream.message import encode_message
+
+from .state import EmptyProducerState, ProducerState, RealProducer
if TYPE_CHECKING:
+ import asyncio
+
from aiokafka import AIOKafkaProducer
+ from aiokafka.structs import RecordMetadata
- from faststream.broker.types import CustomCallable
- from faststream.types import SendableMessage
+ from faststream._internal.types import CustomCallable
+ from faststream.kafka.response import KafkaPublishCommand
class AioKafkaFastProducer(ProducerProto):
@@ -22,87 +27,72 @@ class AioKafkaFastProducer(ProducerProto):
def __init__(
self,
- producer: "AIOKafkaProducer",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._producer = producer
+ self._producer: ProducerState = EmptyProducerState()
# NOTE: register default parser to be compatible with request
default = AioKafkaParser(
msg_class=KafkaMessage,
regex=None,
)
+
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ async def connect(self, producer: "AIOKafkaProducer") -> None:
+ await producer.start()
+ self._producer = RealProducer(producer)
+
+ async def disconnect(self) -> None:
+ await self._producer.stop()
+ self._producer = EmptyProducerState()
+
+ def __bool__(self) -> bool:
+ return bool(self._producer)
+
+ @property
+ def closed(self) -> bool:
+ return self._producer.closed
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- *,
- correlation_id: str,
- key: Union[bytes, Any, None] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- no_confirm: bool = False,
- ) -> None:
+ cmd: "KafkaPublishCommand",
+ ) -> Union["asyncio.Future[RecordMetadata]", "RecordMetadata"]:
"""Publish a message to a topic."""
- message, content_type = encode_message(message)
+ message, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- send_future = await self._producer.send(
- topic=topic,
+ send_future = await self._producer.producer.send(
+ topic=cmd.destination,
value=message,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
headers=[(i, (j or "").encode()) for i, j in headers_to_send.items()],
)
- if not no_confirm:
- await send_future
- async def stop(self) -> None:
- await self._producer.stop()
+ if not cmd.no_confirm:
+ return await send_future
+ return send_future
async def publish_batch(
self,
- *msgs: "SendableMessage",
- correlation_id: str,
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- no_confirm: bool = False,
- ) -> None:
+ cmd: "KafkaPublishCommand",
+ ) -> Union["asyncio.Future[RecordMetadata]", "RecordMetadata"]:
"""Publish a batch of messages to a topic."""
- batch = self._producer.create_batch()
+ batch = self._producer.producer.create_batch()
- headers_to_send = {"correlation_id": correlation_id, **(headers or {})}
+ headers_to_send = cmd.headers_to_publish()
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- for message_position, msg in enumerate(msgs):
- message, content_type = encode_message(msg)
+ for message_position, body in enumerate(cmd.batch_bodies):
+ message, content_type = encode_message(body)
if content_type:
final_headers = {
@@ -115,18 +105,25 @@ async def publish_batch(
metadata = batch.append(
key=None,
value=message,
- timestamp=timestamp_ms,
+ timestamp=cmd.timestamp_ms,
headers=[(i, j.encode()) for i, j in final_headers.items()],
)
if metadata is None:
raise BatchBufferOverflowException(message_position=message_position)
- send_future = await self._producer.send_batch(batch, topic, partition=partition)
- if not no_confirm:
- await send_future
+ send_future = await self._producer.producer.send_batch(
+ batch,
+ cmd.destination,
+ partition=cmd.partition,
+ )
+ if not cmd.no_confirm:
+ return await send_future
+ return send_future
@override
- async def request(self, *args: Any, **kwargs: Any) -> Optional[Any]:
- raise OperationForbiddenError(
- "Kafka doesn't support `request` method without test client."
- )
+ async def request(
+ self,
+ cmd: "KafkaPublishCommand",
+ ) -> Any:
+ msg = "Kafka doesn't support `request` method without test client."
+ raise FeatureNotSupportedException(msg)
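
A lifecycle sketch for the reworked producer, assuming a Kafka broker reachable on `localhost:9092`: the producer starts disconnected and only gains a usable `producer` after `connect()` swaps in a `RealProducer`:

```python
import asyncio

from aiokafka import AIOKafkaProducer

from faststream.kafka.publisher.producer import AioKafkaFastProducer

async def main() -> None:
    producer = AioKafkaFastProducer(parser=None, decoder=None)
    assert not producer  # EmptyProducerState until `connect()` is called

    # Assumes a running broker; `connect()` also calls `producer.start()`.
    await producer.connect(AIOKafkaProducer(bootstrap_servers="localhost:9092"))
    assert producer and not producer.closed

    await producer.disconnect()
    assert producer.closed  # back to EmptyProducerState

asyncio.run(main())
```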
diff --git a/faststream/kafka/publisher/specified.py b/faststream/kafka/publisher/specified.py
new file mode 100644
index 0000000000..b23eef8d92
--- /dev/null
+++ b/faststream/kafka/publisher/specified.py
@@ -0,0 +1,43 @@
+from faststream._internal.publisher.specified import (
+ SpecificationPublisher as SpecificationPublisherMixin,
+)
+from faststream.kafka.publisher.usecase import BatchPublisher, DefaultPublisher
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, PublisherSpec
+from faststream.specification.schema.bindings import ChannelBinding, kafka
+
+
+class SpecificationPublisher(SpecificationPublisherMixin):
+ """A class representing a publisher."""
+
+ def get_default_name(self) -> str:
+ return f"{self.topic}:Publisher"
+
+ def get_schema(self) -> dict[str, PublisherSpec]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: PublisherSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ kafka=kafka.ChannelBinding(
+ topic=self.topic, partitions=None, replicas=None
+ )
+ ),
+ ),
+ }
+
+
+class SpecificationBatchPublisher(SpecificationPublisher, BatchPublisher):
+ pass
+
+
+class SpecificationDefaultPublisher(SpecificationPublisher, DefaultPublisher):
+ pass
diff --git a/faststream/kafka/publisher/state.py b/faststream/kafka/publisher/state.py
new file mode 100644
index 0000000000..397967c696
--- /dev/null
+++ b/faststream/kafka/publisher/state.py
@@ -0,0 +1,53 @@
+from abc import abstractmethod
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from aiokafka import AIOKafkaProducer
+
+
+class ProducerState(Protocol):
+ producer: "AIOKafkaProducer"
+
+ @property
+ @abstractmethod
+ def closed(self) -> bool: ...
+
+ def __bool__(self) -> bool: ...
+
+ async def stop(self) -> None: ...
+
+
+class EmptyProducerState(ProducerState):
+ __slots__ = ()
+
+ closed = True
+
+ @property
+ def producer(self) -> "AIOKafkaProducer":
+ msg = "You can't use producer here, please connect broker first."
+ raise IncorrectState(msg)
+
+ def __bool__(self) -> bool:
+ return False
+
+ async def stop(self) -> None:
+ pass
+
+
+class RealProducer(ProducerState):
+ __slots__ = ("producer",)
+
+ def __init__(self, producer: "AIOKafkaProducer") -> None:
+ self.producer = producer
+
+ def __bool__(self) -> bool:
+ return True
+
+ async def stop(self) -> None:
+ await self.producer.stop()
+
+ @property
+ def closed(self) -> bool:
+ return self.producer._closed or False
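
A minimal sketch of the empty state's guard behavior, runnable without a broker:

```python
from faststream.exceptions import IncorrectState
from faststream.kafka.publisher.state import EmptyProducerState

state = EmptyProducerState()
assert not state and state.closed

try:
    # Touching the producer before the broker is connected is an error.
    _ = state.producer
except IncorrectState as exc:
    print(exc)  # You can't use producer here, please connect broker first.
```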
diff --git a/faststream/kafka/publisher/usecase.py b/faststream/kafka/publisher/usecase.py
index 5c5f34efe1..a715342e9d 100644
--- a/faststream/kafka/publisher/usecase.py
+++ b/faststream/kafka/publisher/usecase.py
@@ -1,79 +1,56 @@
-from contextlib import AsyncExitStack
-from functools import partial
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Dict,
- Iterable,
- Optional,
- Sequence,
- Tuple,
- Union,
- cast,
-)
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Union, overload
from aiokafka import ConsumerRecord
-from typing_extensions import Annotated, Doc, override
+from typing_extensions import Doc, override
-from faststream.broker.message import SourceType, gen_cor_id
-from faststream.broker.publisher.usecase import PublisherUsecase
-from faststream.broker.types import MsgType
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.utils.functions import return_input
+from faststream._internal.publisher.usecase import PublisherUsecase
+from faststream._internal.types import MsgType
+from faststream.kafka.message import KafkaMessage
+from faststream.kafka.response import KafkaPublishCommand
+from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
+ import asyncio
+
+ from aiokafka.structs import RecordMetadata
+
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.kafka.message import KafkaMessage
from faststream.kafka.publisher.producer import AioKafkaFastProducer
- from faststream.types import AsyncFunc, SendableMessage
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[MsgType]):
"""A class to publish messages to a Kafka topic."""
- _producer: Optional["AioKafkaFastProducer"]
+ _producer: "AioKafkaFastProducer"
def __init__(
self,
*,
topic: str,
partition: Optional[int],
- headers: Optional[Dict[str, str]],
+ headers: Optional[dict[str, str]],
reply_to: str,
# Publisher args
broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.topic = topic
self.partition = partition
self.reply_to = reply_to
- self.headers = headers
-
- self._producer = None
-
- def __hash__(self) -> int:
- return hash(self.topic)
+ self.headers = headers or {}
def add_prefix(self, prefix: str) -> None:
- self.topic = "".join((prefix, self.topic))
+ self.topic = f"{prefix}{self.topic}"
@override
async def request(
@@ -98,7 +75,7 @@ async def request(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -107,7 +84,7 @@ async def request(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
timestamp_ms: Annotated[
@@ -116,91 +93,53 @@ async def request(
"""
Epoch milliseconds (from Jan 1 1970 UTC) to use as
the message timestamp. Defaults to current time.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc("Message headers to store metainformation."),
] = None,
correlation_id: Annotated[
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
timeout: Annotated[
float,
Doc("Timeout to send RPC request."),
] = 0.5,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "KafkaMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- topic = topic or self.topic
- partition = partition or self.partition
- headers = headers or self.headers
- correlation_id = correlation_id or gen_cor_id()
-
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = KafkaPublishCommand(
message,
- topic=topic,
+ topic=topic or self.topic,
key=key,
- partition=partition,
- headers=headers,
- timeout=timeout,
- correlation_id=correlation_id,
+ partition=partition or self.partition,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
timestamp_ms=timestamp_ms,
+ timeout=timeout,
+ _publish_type=PublishType.REQUEST,
)
- async with AsyncExitStack() as stack:
- return_msg: Callable[[KafkaMessage], Awaitable[KafkaMessage]] = return_input
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
-
- raise AssertionError("unreachable")
+ msg: KafkaMessage = await self._basic_request(cmd)
+ return msg
class DefaultPublisher(LogicPublisher[ConsumerRecord]):
def __init__(
self,
*,
- key: Optional[bytes],
+ key: Union[bytes, str, None],
topic: str,
partition: Optional[int],
- headers: Optional[Dict[str, str]],
+ headers: Optional[dict[str, str]],
reply_to: str,
# Publisher args
broker_middlewares: Sequence["BrokerMiddleware[ConsumerRecord]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
topic=topic,
@@ -210,115 +149,121 @@ def __init__(
# publisher args
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.key = key
- @override
+ @overload
async def publish(
self,
- message: Annotated[
- "SendableMessage",
- Doc("Message body to send."),
- ],
- topic: Annotated[
- str,
- Doc("Topic where the message will be published."),
- ] = "",
+ message: "SendableMessage",
+ topic: str = "",
*,
- key: Annotated[
- Union[bytes, Any, None],
- Doc(
- """
- A key to associate with the message. Can be used to
- determine which partition to send the message to. If partition
- is `None` (and producer's partitioner config is left as default),
- then messages with the same key will be delivered to the same
- partition (but if key is `None`, partition is chosen randomly).
- Must be type `bytes`, or be serializable to bytes via configured
- `key_serializer`.
- """
- ),
- ] = None,
- partition: Annotated[
- Optional[int],
- Doc(
- """
- Specify a partition. If not set, the partition will be
- selected using the configured `partitioner`.
- """
- ),
- ] = None,
- timestamp_ms: Annotated[
- Optional[int],
- Doc(
- """
- Epoch milliseconds (from Jan 1 1970 UTC) to use as
- the message timestamp. Defaults to current time.
- """
- ),
- ] = None,
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc("Message headers to store metainformation."),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- reply_to: Annotated[
- str,
- Doc("Reply message topic name to send response."),
- ] = "",
- no_confirm: Annotated[
- bool,
- Doc("Do not wait for Kafka publish confirmation."),
- ] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- ) -> Optional[Any]:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- topic = topic or self.topic
- key = key or self.key
- partition = partition or self.partition
- headers = headers or self.headers
- reply_to = reply_to or self.reply_to
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: Literal[True],
+ ) -> "asyncio.Future[RecordMetadata]": ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ topic: str = "",
+ *,
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: Literal[False] = False,
+ ) -> "RecordMetadata": ...
- return await call(
+ @override
+ async def publish(
+ self,
+ message: "SendableMessage",
+ topic: str = "",
+ *,
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: bool = False,
+ ) -> Union["asyncio.Future[RecordMetadata]", "RecordMetadata"]:
+ """Publishes a message to Kafka.
+
+ Args:
+ message:
+ Message body to send.
+ topic:
+ Topic where the message will be published.
+ key:
+ A key to associate with the message. Can be used to
+ determine which partition to send the message to. If partition
+ is `None` (and producer's partitioner config is left as default),
+ then messages with the same key will be delivered to the same
+ partition (but if key is `None`, partition is chosen randomly).
+ Must be type `bytes`, or be serializable to bytes via configured
+ `key_serializer`
+ partition:
+ Specify a partition. If not set, the partition will be
+ selected using the configured `partitioner`
+ timestamp_ms:
+ Epoch milliseconds (from Jan 1 1970 UTC) to use as
+ the message timestamp. Defaults to current time.
+ headers:
+ Message headers to store metainformation.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ reply_to:
+ Reply message topic name to send response.
+ no_confirm:
+ Do not wait for Kafka publish confirmation.
+
+ Returns:
+ `asyncio.Future[RecordMetadata]` if no_confirm = True.
+ `RecordMetadata` if no_confirm = False.
+ """
+ cmd = KafkaPublishCommand(
message,
- topic=topic,
- key=key,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- correlation_id=correlation_id,
+ topic=topic or self.topic,
+ key=key or self.key,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
timestamp_ms=timestamp_ms,
no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd)
+
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+
+ cmd.partition = cmd.partition or self.partition
+ cmd.key = cmd.key or self.key
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -343,7 +288,7 @@ async def request(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -352,7 +297,7 @@ async def request(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
timestamp_ms: Annotated[
@@ -361,32 +306,27 @@ async def request(
"""
Epoch milliseconds (from Jan 1 1970 UTC) to use as
the message timestamp. Defaults to current time.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc("Message headers to store metainformation."),
] = None,
correlation_id: Annotated[
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
timeout: Annotated[
float,
Doc("Timeout to send RPC request."),
] = 0.5,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "KafkaMessage":
return await super().request(
- message=message,
+ message,
topic=topic,
key=key or self.key,
partition=partition,
@@ -394,101 +334,104 @@ async def request(
headers=headers,
correlation_id=correlation_id,
timeout=timeout,
- _extra_middlewares=_extra_middlewares,
)
-class BatchPublisher(LogicPublisher[Tuple["ConsumerRecord", ...]]):
- @override
+class BatchPublisher(LogicPublisher[tuple["ConsumerRecord", ...]]):
+ @overload
async def publish(
self,
- message: Annotated[
- Union["SendableMessage", Iterable["SendableMessage"]],
- Doc("One message or iterable messages bodies to send."),
- ],
- *extra_messages: Annotated[
- "SendableMessage",
- Doc("Messages bodies to send."),
- ],
- topic: Annotated[
- str,
- Doc("Topic where the message will be published."),
- ] = "",
- partition: Annotated[
- Optional[int],
- Doc(
- """
- Specify a partition. If not set, the partition will be
- selected using the configured `partitioner`.
- """
- ),
- ] = None,
- timestamp_ms: Annotated[
- Optional[int],
- Doc(
- """
- Epoch milliseconds (from Jan 1 1970 UTC) to use as
- the message timestamp. Defaults to current time.
- """
- ),
- ] = None,
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc("Messages headers to store metainformation."),
- ] = None,
- reply_to: Annotated[
- str,
- Doc("Reply message topic name to send response."),
- ] = "",
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- no_confirm: Annotated[
- bool,
- Doc("Do not wait for Kafka publish confirmation."),
- ] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- msgs: Iterable[SendableMessage]
- if extra_messages:
- msgs = (cast("SendableMessage", message), *extra_messages)
- else:
- msgs = cast(Iterable["SendableMessage"], message)
-
- topic = topic or self.topic
- partition = partition or self.partition
- headers = headers or self.headers
- reply_to = reply_to or self.reply_to
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
+ *messages: "SendableMessage",
+ topic: str = "",
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ no_confirm: Literal[True],
+ ) -> "asyncio.Future[RecordMetadata]": ...
+
+ @overload
+ async def publish(
+ self,
+ *messages: "SendableMessage",
+ topic: str = "",
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ no_confirm: Literal[False] = False,
+ ) -> "RecordMetadata": ...
- await call(
- *msgs,
- topic=topic,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- correlation_id=correlation_id,
+ @override
+ async def publish(
+ self,
+ *messages: "SendableMessage",
+ topic: str = "",
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ no_confirm: bool = False,
+ ) -> Union["asyncio.Future[RecordMetadata]", "RecordMetadata"]:
+ """Publish a message batch as a single request to broker.
+
+ Args:
+ *messages:
+ Messages bodies to send.
+ topic:
+ Topic where the message will be published.
+ partition:
+ Specify a partition. If not set, the partition will be
+ selected using the configured `partitioner`
+ timestamp_ms:
+ Epoch milliseconds (from Jan 1 1970 UTC) to use as
+ the message timestamp. Defaults to current time.
+ headers:
+ Message headers to store metainformation.
+ reply_to:
+ Reply message topic name to send response.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ no_confirm:
+ Do not wait for Kafka publish confirmation.
+
+ Returns:
+ `asyncio.Future[RecordMetadata]` if no_confirm = True.
+ `RecordMetadata` if no_confirm = False.
+ """
+ cmd = KafkaPublishCommand(
+ *messages,
+ key=None,
+ topic=topic or self.topic,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
timestamp_ms=timestamp_ms,
no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
)
+
+ return await self._basic_publish_batch(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd, batch=True)
+
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+
+ cmd.partition = cmd.partition or self.partition
+
+ await self._basic_publish_batch(cmd, _extra_middlewares=_extra_middlewares)
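
A sketch of the new overload contract, assuming a connected broker (topic name hypothetical): `no_confirm` now selects the return type instead of the call always returning `None`:

```python
from faststream.kafka import KafkaBroker

broker = KafkaBroker()
publisher = broker.publisher("test-topic")

async def produce() -> None:
    # no_confirm=False: waits for the broker ack, returns RecordMetadata.
    metadata = await publisher.publish("hi", no_confirm=False)

    # no_confirm=True: returns an asyncio.Future immediately; await it later.
    future = await publisher.publish("hi", no_confirm=True)
    await future
```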
diff --git a/faststream/kafka/response.py b/faststream/kafka/response.py
index da420aa286..3ff1a908d1 100644
--- a/faststream/kafka/response.py
+++ b/faststream/kafka/response.py
@@ -1,11 +1,12 @@
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Any, Optional, Union
from typing_extensions import override
-from faststream.broker.response import Response
+from faststream.response.publish_type import PublishType
+from faststream.response.response import BatchPublishCommand, PublishCommand, Response
if TYPE_CHECKING:
- from faststream.types import AnyDict, SendableMessage
+ from faststream._internal.basic_types import AnyDict, SendableMessage
class KafkaResponse(Response):
@@ -28,10 +29,84 @@ def __init__(
self.key = key
@override
- def as_publish_kwargs(self) -> "AnyDict":
- publish_options = {
- **super().as_publish_kwargs(),
- "timestamp_ms": self.timestamp_ms,
- "key": self.key,
- }
- return publish_options
+ def as_publish_command(self) -> "KafkaPublishCommand":
+ return KafkaPublishCommand(
+ self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.PUBLISH,
+ # Kafka specific
+ topic="",
+ key=self.key,
+ timestamp_ms=self.timestamp_ms,
+ )
+
+
+class KafkaPublishCommand(BatchPublishCommand):
+ def __init__(
+ self,
+ message: "SendableMessage",
+ /,
+ *messages: "SendableMessage",
+ topic: str,
+ _publish_type: PublishType,
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: bool = False,
+ timeout: float = 0.5,
+ ) -> None:
+ super().__init__(
+ message,
+ *messages,
+ destination=topic,
+ reply_to=reply_to,
+ correlation_id=correlation_id,
+ headers=headers,
+ _publish_type=_publish_type,
+ )
+
+ self.key = key
+ self.partition = partition
+ self.timestamp_ms = timestamp_ms
+ self.no_confirm = no_confirm
+
+ # request option
+ self.timeout = timeout
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ batch: bool = False,
+ ) -> "KafkaPublishCommand":
+ if isinstance(cmd, KafkaPublishCommand):
+ # NOTE: this should probably return a copy to avoid mutating the original command.
+ return cmd
+
+ body, extra_bodies = cls._parse_bodies(cmd.body, batch=batch)
+
+ return cls(
+ body,
+ *extra_bodies,
+ topic=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
+
+ def headers_to_publish(self) -> dict[str, str]:
+ headers = {}
+
+ if self.correlation_id:
+ headers["correlation_id"] = self.correlation_id
+
+ if self.reply_to:
+ headers["reply_to"] = self.reply_to
+
+ return headers | self.headers
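
A usage sketch of the response class with the documented subscriber/publisher decorators (topic names hypothetical):

```python
from faststream.kafka import KafkaBroker, KafkaResponse

broker = KafkaBroker()

@broker.subscriber("input")
@broker.publisher("output")
async def handle(msg: str) -> KafkaResponse:
    # Kafka-specific options (key, timestamp_ms) ride along with the body and
    # are converted into a KafkaPublishCommand by `as_publish_command()`.
    return KafkaResponse(msg, key=b"1")
```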
diff --git a/faststream/kafka/router.py b/faststream/kafka/router.py
index 71638c140e..f5d3f43496 100644
--- a/faststream/kafka/router.py
+++ b/faststream/kafka/router.py
@@ -1,39 +1,41 @@
+from collections.abc import Awaitable, Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
- Awaitable,
Callable,
- Dict,
- Iterable,
Literal,
Optional,
- Sequence,
- Tuple,
Union,
)
from aiokafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor
-from typing_extensions import Annotated, Doc, deprecated
+from typing_extensions import Doc, deprecated
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream._internal.constants import EMPTY
from faststream.kafka.broker.registrator import KafkaRegistrator
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from aiokafka import ConsumerRecord, TopicPartition
from aiokafka.abc import ConsumerRebalanceListener
from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.kafka.message import KafkaMessage
- from faststream.types import SendableMessage
class KafkaPublisher(ArgsContainer):
@@ -60,7 +62,7 @@ def __init__(
partition (but if key is `None`, partition is chosen randomly).
Must be type `bytes`, or be serializable to bytes via configured
`key_serializer`.
- """
+ """,
),
] = None,
partition: Annotated[
@@ -69,15 +71,15 @@ def __init__(
"""
Specify a partition. If not set, the partition will be
selected using the configured `partitioner`.
- """
+ """,
),
] = None,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -91,6 +93,10 @@ def __init__(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI args
@@ -106,7 +112,7 @@ def __init__(
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
@@ -143,7 +149,7 @@ def __init__(
],
Doc(
"Message handler function "
- "to wrap the same with `@broker.subscriber(...)` way."
+ "to wrap the same with `@broker.subscriber(...)` way.",
),
],
*topics: Annotated[
@@ -166,21 +172,21 @@ def __init__(
partition assignment (if enabled), and to use for fetching and
committing offsets. If `None`, auto-partition assignment (via
group coordinator) and offset commits are disabled.
- """
+ """,
),
] = None,
key_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "key and returns a deserialized one."
+ "key and returns a deserialized one.",
),
] = None,
value_deserializer: Annotated[
Optional[Callable[[bytes], Any]],
Doc(
"Any callable that takes a raw message `bytes` "
- "value and returns a deserialized value."
+ "value and returns a deserialized value.",
),
] = None,
fetch_max_bytes: Annotated[
@@ -195,7 +201,7 @@ def __init__(
performs fetches to multiple brokers in parallel so memory
usage will depend on the number of brokers containing
partitions for the topic.
- """
+ """,
),
] = 50 * 1024 * 1024,
fetch_min_bytes: Annotated[
@@ -205,7 +211,7 @@ def __init__(
Minimum amount of data the server should
return for a fetch request, otherwise wait up to
`fetch_max_wait_ms` for more data to accumulate.
- """
+ """,
),
] = 1,
fetch_max_wait_ms: Annotated[
@@ -216,7 +222,7 @@ def __init__(
the server will block before answering the fetch request if
there isn't sufficient data to immediately satisfy the
requirement given by `fetch_min_bytes`.
- """
+ """,
),
] = 500,
max_partition_fetch_bytes: Annotated[
@@ -231,7 +237,7 @@ def __init__(
send messages larger than the consumer can fetch. If that
happens, the consumer can get stuck trying to fetch a large
message on a certain partition.
- """
+ """,
),
] = 1 * 1024 * 1024,
auto_offset_reset: Annotated[
@@ -243,7 +249,7 @@ def __init__(
* `earliest` will move to the oldest available message
* `latest` will move to the most recent
* `none` will raise an exception so you can handle this case
- """
+ """,
),
] = "latest",
auto_commit: Annotated[
@@ -252,15 +258,21 @@ def __init__(
"""
If `True` the consumer's offset will be
periodically committed in the background.
- """
+ """,
),
- ] = True,
+ deprecated(
+ """
+ This option is deprecated and will be removed in 0.7.0 release.
+ Please, use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
auto_commit_interval_ms: Annotated[
int,
Doc(
"""
Milliseconds between automatic
- offset commits, if `auto_commit` is `True`."""
+ offset commits, if `auto_commit` is `True`.""",
),
] = 5 * 1000,
check_crcs: Annotated[
@@ -271,7 +283,7 @@ def __init__(
consumed. This ensures no on-the-wire or on-disk corruption to
the messages occurred. This check adds some overhead, so it may
be disabled in cases seeking extreme performance.
- """
+ """,
),
] = True,
partition_assignment_strategy: Annotated[
@@ -287,7 +299,7 @@ def __init__(
one. The coordinator will choose the old assignment strategy until
all members have been updated. Then it will choose the new
strategy.
- """
+ """,
),
] = (RoundRobinPartitionAssignor,),
max_poll_interval_ms: Annotated[
@@ -300,7 +312,7 @@ def __init__(
rebalance in order to reassign the partitions to another consumer
group member. If API methods block waiting for messages, that time
does not count against this timeout.
- """
+ """,
),
] = 5 * 60 * 1000,
rebalance_timeout_ms: Annotated[
@@ -314,7 +326,7 @@ def __init__(
decouple this setting to allow finer tuning by users that use
`ConsumerRebalanceListener` to delay rebalancing. Defaults
to ``session_timeout_ms``
- """
+ """,
),
] = None,
session_timeout_ms: Annotated[
@@ -329,7 +341,7 @@ def __init__(
group and trigger a rebalance. The allowed range is configured with
the **broker** configuration properties
`group.min.session.timeout.ms` and `group.max.session.timeout.ms`.
- """
+ """,
),
] = 10 * 1000,
heartbeat_interval_ms: Annotated[
@@ -345,7 +357,7 @@ def __init__(
should be set no higher than 1/3 of that value. It can be
adjusted even lower to control the expected time for normal
rebalances.
- """
+ """,
),
] = 3 * 1000,
consumer_timeout_ms: Annotated[
@@ -355,7 +367,7 @@ def __init__(
Maximum wait timeout for background fetching
routine. Mostly defines how fast the system will see rebalance and
request new data for new partitions.
- """
+ """,
),
] = 200,
max_poll_records: Annotated[
@@ -364,7 +376,7 @@ def __init__(
"""
The maximum number of records returned in a
single call by batch consumer. Has no limit by default.
- """
+ """,
),
] = None,
exclude_internal_topics: Annotated[
@@ -375,7 +387,7 @@ def __init__(
(such as offsets) should be exposed to the consumer. If set to True
the only way to receive records from an internal topic is
subscribing to it.
- """
+ """,
),
] = True,
isolation_level: Annotated[
@@ -405,7 +417,7 @@ def __init__(
to the high watermark when there are in flight transactions.
Further, when in `read_committed` the seek_to_end method will
return the LSO. See method docs below.
- """
+ """,
),
] = "read_uncommitted",
batch_timeout_ms: Annotated[
@@ -416,7 +428,7 @@ def __init__(
data is not available in the buffer. If 0, returns immediately
with any records that are available currently in the buffer,
else returns empty.
- """
+ """,
),
] = 200,
max_records: Annotated[
@@ -448,7 +460,7 @@ def __init__(
to subscribe. It is guaranteed, however, that the partitions
revoked/assigned
through this interface are from topics subscribed in this call.
- """
+ """,
),
] = None,
pattern: Annotated[
@@ -456,7 +468,7 @@ def __init__(
Doc(
"""
Pattern to match available topics. You must provide either topics or pattern, but not both.
- """
+ """,
),
] = None,
partitions: Annotated[
@@ -464,13 +476,13 @@ def __init__(
Doc(
"""
A topic and partition tuple. You can't use 'topics' and 'partitions' at the same time.
- """
+ """,
),
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -482,31 +494,25 @@ def __init__(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[KafkaMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[KafkaMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI args
@@ -518,7 +524,7 @@ def __init__(
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
@@ -566,15 +572,13 @@ def __init__(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
no_reply=no_reply,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
# AsyncAPI args
title=title,
description=description,
include_in_schema=include_in_schema,
- # FastDepends args
- retry=retry,
- no_ack=no_ack,
)
@@ -583,7 +587,7 @@ class KafkaRouter(
BrokerRouter[
Union[
"ConsumerRecord",
- Tuple["ConsumerRecord", ...],
+ tuple["ConsumerRecord", ...],
]
],
):
@@ -601,20 +605,24 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers."
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
Sequence[
Union[
"BrokerMiddleware[ConsumerRecord]",
- "BrokerMiddleware[Tuple[ConsumerRecord, ...]]",
+ "BrokerMiddleware[tuple[ConsumerRecord, ...]]",
]
],
Doc("Router middlewares to apply to all routers' publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[ConsumerRecord]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc("Parser to map original **ConsumerRecord** object to FastStream one."),
@@ -634,6 +642,7 @@ def __init__(
prefix=prefix,
dependencies=dependencies,
middlewares=middlewares,
+ routers=routers,
parser=parser,
decoder=decoder,
include_in_schema=include_in_schema,
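
A sketch of the new `routers` constructor argument introduced above, with hypothetical prefix and topic names:

```python
from faststream.kafka import KafkaBroker, KafkaRouter

nested = KafkaRouter(prefix="v1-")

@nested.subscriber("events")
async def handle(msg: str) -> None:
    print(msg)

# New in this refactor: nested routers can be attached at construction time
# instead of calling `include_router` for each one.
root = KafkaRouter(routers=(nested,))

broker = KafkaBroker()
broker.include_router(root)
```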
diff --git a/faststream/kafka/schemas/params.py b/faststream/kafka/schemas/params.py
index 9943e7e13c..64a301614e 100644
--- a/faststream/kafka/schemas/params.py
+++ b/faststream/kafka/schemas/params.py
@@ -1,6 +1,6 @@
import ssl
from asyncio import AbstractEventLoop
-from typing import List, Literal, Optional, Union
+from typing import Literal, Optional, Union
from aiokafka.abc import AbstractTokenProvider
from typing_extensions import TypedDict
@@ -25,7 +25,7 @@ class ConsumerConnectionParams(TypedDict, total=False):
sasl_kerberos_service_name : The service
"""
- bootstrap_servers: Union[str, List[str]]
+ bootstrap_servers: Union[str, list[str]]
loop: Optional[AbstractEventLoop]
client_id: str
request_timeout_ms: int
diff --git a/faststream/kafka/security.py b/faststream/kafka/security.py
index 1f08878c1c..cd8359901b 100644
--- a/faststream/kafka/security.py
+++ b/faststream/kafka/security.py
@@ -10,26 +10,26 @@
)
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
def parse_security(security: Optional[BaseSecurity]) -> "AnyDict":
if security is None:
return {}
- elif isinstance(security, SASLPlaintext):
+ if isinstance(security, SASLPlaintext):
return _parse_sasl_plaintext(security)
- elif isinstance(security, SASLScram256):
+ if isinstance(security, SASLScram256):
return _parse_sasl_scram256(security)
- elif isinstance(security, SASLScram512):
+ if isinstance(security, SASLScram512):
return _parse_sasl_scram512(security)
- elif isinstance(security, SASLOAuthBearer):
+ if isinstance(security, SASLOAuthBearer):
return _parse_sasl_oauthbearer(security)
- elif isinstance(security, SASLGSSAPI):
+ if isinstance(security, SASLGSSAPI):
return _parse_sasl_gssapi(security)
- elif isinstance(security, BaseSecurity):
+ if isinstance(security, BaseSecurity):
return _parse_base_security(security)
- else:
- raise NotImplementedError(f"KafkaBroker does not support `{type(security)}`.")
+ msg = f"KafkaBroker does not support `{type(security)}`."
+ raise NotImplementedError(msg)
def _parse_base_security(security: BaseSecurity) -> "AnyDict":
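
A usage sketch of the flattened dispatch above, assuming the documented `SASLPlaintext` scheme (credentials are placeholders):

```python
from faststream.kafka.security import parse_security
from faststream.security import SASLPlaintext

security = SASLPlaintext(username="admin", password="secret")

# Each scheme maps onto aiokafka connection kwargs; for SASL/PLAIN this
# includes the security protocol, SASL mechanism, and the credentials.
# Unknown scheme types now raise NotImplementedError with a prebuilt message.
print(parse_security(security))
```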
diff --git a/faststream/kafka/subscriber/asyncapi.py b/faststream/kafka/subscriber/asyncapi.py
deleted file mode 100644
index 1c3ad53ce7..0000000000
--- a/faststream/kafka/subscriber/asyncapi.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Dict,
- Tuple,
-)
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import kafka
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.broker.types import MsgType
-from faststream.kafka.subscriber.usecase import (
- BatchSubscriber,
- ConcurrentDefaultSubscriber,
- DefaultSubscriber,
- LogicSubscriber,
-)
-
-if TYPE_CHECKING:
- from aiokafka import ConsumerRecord
-
-
-class AsyncAPISubscriber(LogicSubscriber[MsgType]):
- """A class to handle logic and async API operations."""
-
- def get_name(self) -> str:
- return f'{",".join(self.topics)}:{self.call_name}'
-
- def get_schema(self) -> Dict[str, Channel]:
- channels = {}
-
- payloads = self.get_payloads()
-
- topics = chain(self.topics, {part.topic for part in self.partitions})
-
- for t in topics:
- handler_name = self.title_ or f"{t}:{self.call_name}"
-
- channels[handler_name] = Channel(
- description=self.description,
- subscribe=Operation(
- message=Message(
- title=f"{handler_name}:Message",
- payload=resolve_payloads(payloads),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- kafka=kafka.ChannelBinding(topic=t),
- ),
- )
-
- return channels
-
-
-class AsyncAPIDefaultSubscriber(
- DefaultSubscriber,
- AsyncAPISubscriber["ConsumerRecord"],
-):
- pass
-
-
-class AsyncAPIBatchSubscriber(
- BatchSubscriber,
- AsyncAPISubscriber[Tuple["ConsumerRecord", ...]],
-):
- pass
-
-
-class AsyncAPIConcurrentDefaultSubscriber(
- AsyncAPISubscriber["ConsumerRecord"],
- ConcurrentDefaultSubscriber,
-):
- pass
diff --git a/faststream/kafka/subscriber/factory.py b/faststream/kafka/subscriber/factory.py
index cdc2b35a7d..45edbefea3 100644
--- a/faststream/kafka/subscriber/factory.py
+++ b/faststream/kafka/subscriber/factory.py
@@ -1,28 +1,23 @@
-from typing import (
- TYPE_CHECKING,
- Iterable,
- Literal,
- Optional,
- Sequence,
- Tuple,
- Union,
- overload,
-)
+import warnings
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Literal, Optional, Union, overload
+from faststream._internal.constants import EMPTY
from faststream.exceptions import SetupError
-from faststream.kafka.subscriber.asyncapi import (
- AsyncAPIBatchSubscriber,
- AsyncAPIConcurrentDefaultSubscriber,
- AsyncAPIDefaultSubscriber,
+from faststream.kafka.subscriber.specified import (
+ SpecificationBatchSubscriber,
+ SpecificationConcurrentDefaultSubscriber,
+ SpecificationDefaultSubscriber,
)
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from aiokafka import ConsumerRecord, TopicPartition
from aiokafka.abc import ConsumerRebalanceListener
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import BrokerMiddleware
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
@overload
@@ -37,19 +32,23 @@ def create_subscriber(
pattern: Optional[str],
connection_args: "AnyDict",
partitions: Iterable["TopicPartition"],
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
max_workers: int,
no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"],
- # AsyncAPI args
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[tuple[ConsumerRecord, ...]]"],
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
-) -> "AsyncAPIBatchSubscriber": ...
+) -> Union[
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
+]: ...
@overload
@@ -64,21 +63,22 @@ def create_subscriber(
pattern: Optional[str],
connection_args: "AnyDict",
partitions: Iterable["TopicPartition"],
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
max_workers: int,
no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[ConsumerRecord]"],
- # AsyncAPI args
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]: ...
@@ -94,24 +94,24 @@ def create_subscriber(
pattern: Optional[str],
connection_args: "AnyDict",
partitions: Iterable["TopicPartition"],
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
max_workers: int,
no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence[
- "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]"
+ "BrokerMiddleware[Union[ConsumerRecord, tuple[ConsumerRecord, ...]]]"
],
- # AsyncAPI args
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]: ...
@@ -126,44 +126,51 @@ def create_subscriber(
pattern: Optional[str],
connection_args: "AnyDict",
partitions: Iterable["TopicPartition"],
- is_manual: bool,
+ auto_commit: bool,
# Subscriber args
+ ack_policy: "AckPolicy",
max_workers: int,
no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence[
- "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]"
+ "BrokerMiddleware[Union[ConsumerRecord, tuple[ConsumerRecord, ...]]]"
],
- # AsyncAPI args
+ # Specification args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPIDefaultSubscriber",
- "AsyncAPIBatchSubscriber",
- "AsyncAPIConcurrentDefaultSubscriber",
+ "SpecificationDefaultSubscriber",
+ "SpecificationBatchSubscriber",
+ "SpecificationConcurrentDefaultSubscriber",
]:
- if is_manual and not group_id:
- raise SetupError("You must use `group_id` with manual commit mode.")
+ _validate_input_for_misconfigure(
+ *topics,
+ pattern=pattern,
+ partitions=partitions,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
+ auto_commit=auto_commit,
+ max_workers=max_workers,
+ group_id=group_id,
+ )
- if is_manual and max_workers > 1:
- raise SetupError("Max workers not work with manual commit mode.")
+ if auto_commit is not EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST if auto_commit else AckPolicy.REJECT_ON_ERROR
- if not topics and not partitions and not pattern:
- raise SetupError(
- "You should provide either `topics` or `partitions` or `pattern`."
- )
- elif topics and partitions:
- raise SetupError("You can't provide both `topics` and `partitions`.")
- elif topics and pattern:
- raise SetupError("You can't provide both `topics` and `pattern`.")
- elif partitions and pattern:
- raise SetupError("You can't provide both `partitions` and `pattern`.")
+ if no_ack is not EMPTY:
+ ack_policy = AckPolicy.DO_NOTHING if no_ack else EMPTY
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST
+
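+    # ACK_FIRST is implemented by enabling aiokafka's auto-commit, after which
+    # the subscriber itself has nothing left to ack (hence DO_NOTHING below).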
+ if ack_policy is AckPolicy.ACK_FIRST:
+ connection_args["enable_auto_commit"] = True
+ ack_policy = AckPolicy.DO_NOTHING
if batch:
- return AsyncAPIBatchSubscriber(
+ return SpecificationBatchSubscriber(
*topics,
batch_timeout_ms=batch_timeout_ms,
max_records=max_records,
@@ -172,10 +179,26 @@ def create_subscriber(
pattern=pattern,
connection_args=connection_args,
partitions=partitions,
- is_manual=is_manual,
- no_ack=no_ack,
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ if max_workers > 1:
+ return SpecificationConcurrentDefaultSubscriber(
+ *topics,
+ max_workers=max_workers,
+ group_id=group_id,
+ listener=listener,
+ pattern=pattern,
+ connection_args=connection_args,
+ partitions=partitions,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
@@ -183,41 +206,79 @@ def create_subscriber(
include_in_schema=include_in_schema,
)
- else:
- if max_workers > 1:
- return AsyncAPIConcurrentDefaultSubscriber(
- *topics,
- max_workers=max_workers,
- group_id=group_id,
- listener=listener,
- pattern=pattern,
- connection_args=connection_args,
- partitions=partitions,
- is_manual=is_manual,
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
- else:
- return AsyncAPIDefaultSubscriber(
- *topics,
- group_id=group_id,
- listener=listener,
- pattern=pattern,
- connection_args=connection_args,
- partitions=partitions,
- is_manual=is_manual,
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
+ return SpecificationDefaultSubscriber(
+ *topics,
+ group_id=group_id,
+ listener=listener,
+ pattern=pattern,
+ connection_args=connection_args,
+ partitions=partitions,
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+
+def _validate_input_for_misconfigure(
+ *topics: str,
+ partitions: Iterable["TopicPartition"],
+ pattern: Optional[str],
+ ack_policy: "AckPolicy",
+ auto_commit: bool,
+ no_ack: bool,
+ group_id: Optional[str],
+ max_workers: int,
+) -> None:
+ if auto_commit is not EMPTY:
+ warnings.warn(
+ "`auto_commit` option was deprecated in prior to `ack_policy=AckPolicy.ACK_FIRST`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use deprecated `auto_commit` and `ack_policy` simultaneously. Please, use `ack_policy` only."
+ raise SetupError(msg)
+
+ ack_policy = AckPolicy.ACK_FIRST if auto_commit else AckPolicy.REJECT_ON_ERROR
+
+ if no_ack is not EMPTY:
+ warnings.warn(
+ "`no_ack` option was deprecated in prior to `ack_policy=AckPolicy.DO_NOTHING`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use deprecated `no_ack` and `ack_policy` simultaneously. Please, use `ack_policy` only."
+ raise SetupError(msg)
+
+ ack_policy = AckPolicy.DO_NOTHING if no_ack else EMPTY
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST
+
+ if max_workers > 1 and ack_policy is not AckPolicy.ACK_FIRST:
+ msg = "You can't use `max_workers` option with manual commit mode."
+ raise SetupError(msg)
+
+ if not group_id and ack_policy is not AckPolicy.ACK_FIRST:
+ msg = "You must use `group_id` with manual commit mode."
+ raise SetupError(msg)
+
+ if not topics and not partitions and not pattern:
+ msg = "You should provide either `topics` or `partitions` or `pattern`."
+ raise SetupError(msg)
+ if topics and partitions:
+ msg = "You can't provide both `topics` and `partitions`."
+ raise SetupError(msg)
+ if topics and pattern:
+ msg = "You can't provide both `topics` and `pattern`."
+ raise SetupError(msg)
+ if partitions and pattern:
+ msg = "You can't provide both `partitions` and `pattern`."
+ raise SetupError(msg)
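+
+
+# Illustrative outcomes of the checks above (hypothetical arguments):
+#
+#     create_subscriber("a", partitions=[tp], ...)  # SetupError: topics + partitions
+#     create_subscriber("a", no_ack=True, ...)      # DeprecationWarning -> AckPolicy.DO_NOTHING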
diff --git a/faststream/kafka/subscriber/specified.py b/faststream/kafka/subscriber/specified.py
new file mode 100644
index 0000000000..f2d5f70a3a
--- /dev/null
+++ b/faststream/kafka/subscriber/specified.py
@@ -0,0 +1,74 @@
+from collections.abc import Iterable
+from itertools import chain
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.subscriber.specified import (
+ SpecificationSubscriber as SpecificationSubscriberMixin,
+)
+from faststream.kafka.subscriber.usecase import (
+ BatchSubscriber,
+ ConcurrentDefaultSubscriber,
+ DefaultSubscriber,
+)
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, SubscriberSpec
+from faststream.specification.schema.bindings import ChannelBinding, kafka
+
+if TYPE_CHECKING:
+ from aiokafka import TopicPartition
+
+
+class SpecificationSubscriber(SpecificationSubscriberMixin):
+ """A class to handle logic and async API operations."""
+
+ topics: Iterable[str]
+ partitions: Iterable["TopicPartition"] # TODO: support partitions
+ _pattern: Optional[str] # TODO: support pattern schema
+
+ def get_default_name(self) -> str:
+ return f"{','.join(self.topics)}:{self.call_name}"
+
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ channels = {}
+
+ payloads = self.get_payloads()
+
+ for t in chain(self.topics, {p.topic for p in self.partitions}):
+ handler_name = self.title_ or f"{t}:{self.call_name}"
+
+ channels[handler_name] = SubscriberSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{handler_name}:Message",
+ payload=resolve_payloads(payloads),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ kafka=kafka.ChannelBinding(topic=t, partitions=None, replicas=None),
+ ),
+ )
+
+ return channels
+
+
+class SpecificationDefaultSubscriber(
+ SpecificationSubscriber,
+ DefaultSubscriber,
+):
+ pass
+
+
+class SpecificationBatchSubscriber(
+ SpecificationSubscriber,
+ BatchSubscriber,
+):
+ pass
+
+
+class SpecificationConcurrentDefaultSubscriber(
+ SpecificationSubscriber,
+ ConcurrentDefaultSubscriber,
+):
+ pass
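+
+
+# Each concrete class above pairs the shared Specification schema logic with
+# exactly one runtime use case (default / batch / concurrent) from usecase.py.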
diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py
index 9b682f21ae..e96489c1e4 100644
--- a/faststream/kafka/subscriber/usecase.py
+++ b/faststream/kafka/subscriber/usecase.py
@@ -1,47 +1,40 @@
-from abc import ABC, abstractmethod
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Iterable,
- List,
- Optional,
- Sequence,
- Tuple,
-)
+from typing import TYPE_CHECKING, Any, Callable, Optional
import anyio
from aiokafka import ConsumerRecord, TopicPartition
from aiokafka.errors import ConsumerStoppedError, KafkaError
from typing_extensions import override
-from faststream.broker.publisher.fake import FakePublisher
-from faststream.broker.subscriber.mixins import ConcurrentMixin, TasksMixin
-from faststream.broker.subscriber.usecase import SubscriberUsecase
-from faststream.broker.types import (
+from faststream._internal.subscriber.mixins import ConcurrentMixin, TasksMixin
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream._internal.subscriber.utils import process_msg
+from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
CustomCallable,
MsgType,
)
-from faststream.broker.utils import process_msg
+from faststream._internal.utils.path import compile_path
from faststream.kafka.message import KafkaAckableMessage, KafkaMessage
from faststream.kafka.parser import AioKafkaBatchParser, AioKafkaParser
-from faststream.utils.path import compile_path
+from faststream.kafka.publisher.fake import KafkaFakePublisher
if TYPE_CHECKING:
from aiokafka import AIOKafkaConsumer
from aiokafka.abc import ConsumerRebalanceListener
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import ProducerProto
- from faststream.types import AnyDict, Decorator, LoggerProto
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
+ from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
-class LogicSubscriber(ABC, TasksMixin, SubscriberUsecase[MsgType]):
+class LogicSubscriber(TasksMixin, SubscriberUsecase[MsgType]):
"""A class to handle logic for consuming messages from Kafka."""
topics: Sequence[str]
@@ -52,6 +45,7 @@ class LogicSubscriber(ABC, TasksMixin, SubscriberUsecase[MsgType]):
client_id: Optional[str]
batch: bool
+ parser: AioKafkaParser
def __init__(
self,
@@ -65,29 +59,19 @@ def __init__(
# Subscriber args
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.topics = topics
@@ -105,39 +89,27 @@ def __init__(
self.consumer = None
@override
- def setup( # type: ignore[override]
+ def _setup( # type: ignore[override]
self,
*,
client_id: Optional[str],
builder: Callable[..., "AIOKafkaConsumer"],
# basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
+ state: "BrokerState",
) -> None:
self.client_id = client_id
self.builder = builder
- super().setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
+ super()._setup(
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
- apply_types=apply_types,
- is_validate=is_validate,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
+ state=state,
)
async def start(self) -> None:
@@ -150,6 +122,8 @@ async def start(self) -> None:
**self.__connection_args,
)
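+        # Hand the live consumer to the parser so the messages it builds can
+        # commit offsets when acked manually.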
+ self.parser._setup(consumer)
+
if self.topics or self._pattern:
consumer.subscribe(
topics=self.topics,
@@ -185,7 +159,8 @@ async def get_one(
), "You can't use `get_one` method if subscriber has registered handlers."
raw_messages = await self.consumer.getmany(
- timeout_ms=timeout * 1000, max_records=1
+ timeout_ms=timeout * 1000,
+ max_records=1,
)
if not raw_messages:
@@ -193,9 +168,13 @@ async def get_one(
((raw_message,),) = raw_messages.values()
+ context = self._state.get().di_state.context
+
return await process_msg(
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -203,22 +182,17 @@ async def get_one(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence[FakePublisher]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "topic": message.reply_to,
- },
+ KafkaFakePublisher(
+ self._state.get().producer,
+ topic=message.reply_to,
),
)
@abstractmethod
async def get_msg(self) -> MsgType:
- raise NotImplementedError()
+ raise NotImplementedError
async def _consume(self) -> None:
assert self.consumer, "You should start subscriber at first." # nosec B101
@@ -247,34 +221,20 @@ async def _consume(self) -> None:
async def consume_one(self, msg: MsgType) -> None:
await self.consume(msg)
- @staticmethod
- def get_routing_hash(
- topics: Iterable[str],
- group_id: Optional[str] = None,
- ) -> int:
- return hash("".join((*topics, group_id or "")))
-
@property
- def topic_names(self) -> List[str]:
+ def topic_names(self) -> list[str]:
if self._pattern:
return [self._pattern]
- elif self.topics:
+ if self.topics:
return list(self.topics)
- else:
- return [f"{p.topic}-{p.partition}" for p in self.partitions]
-
- def __hash__(self) -> int:
- return self.get_routing_hash(
- topics=self.topic_names,
- group_id=self.group_id,
- )
+ return [f"{p.topic}-{p.partition}" for p in self.partitions]
@staticmethod
def build_log_context(
message: Optional["StreamMessage[Any]"],
topic: str,
group_id: Optional[str] = None,
- ) -> Dict[str, str]:
+ ) -> dict[str, str]:
return {
"topic": topic,
"group_id": group_id or "",
@@ -282,11 +242,11 @@ def build_log_context(
}
def add_prefix(self, prefix: str) -> None:
- self.topics = tuple("".join((prefix, t)) for t in self.topics)
+ self.topics = tuple(f"{prefix}{t}" for t in self.topics)
self.partitions = [
TopicPartition(
- topic="".join((prefix, p.topic)),
+ topic=f"{prefix}{p.topic}",
partition=p.partition,
)
for p in self.partitions
@@ -303,17 +263,11 @@ def __init__(
pattern: Optional[str],
connection_args: "AnyDict",
partitions: Iterable["TopicPartition"],
- is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[ConsumerRecord]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
if pattern:
reg, pattern = compile_path(
@@ -325,8 +279,10 @@ def __init__(
else:
reg = None
- parser = AioKafkaParser(
- msg_class=KafkaAckableMessage if is_manual else KafkaMessage,
+ self.parser = AioKafkaParser(
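+            # `AckPolicy` is imported only under TYPE_CHECKING here, so the
+            # member is looked up through the instance to avoid a runtime import.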
+ msg_class=KafkaMessage
+ if ack_policy is ack_policy.ACK_FIRST
+ else KafkaAckableMessage,
regex=reg,
)
@@ -338,18 +294,13 @@ def __init__(
connection_args=connection_args,
partitions=partitions,
# subscriber args
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
+ default_parser=self.parser.parse_message,
+ default_decoder=self.parser.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
async def get_msg(self) -> "ConsumerRecord":
@@ -359,7 +310,7 @@ async def get_msg(self) -> "ConsumerRecord":
def get_log_context(
self,
message: Optional["StreamMessage[ConsumerRecord]"],
- ) -> Dict[str, str]:
+ ) -> dict[str, str]:
if message is None:
topic = ",".join(self.topic_names)
else:
@@ -372,7 +323,16 @@ def get_log_context(
)
-class BatchSubscriber(LogicSubscriber[Tuple["ConsumerRecord", ...]]):
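+# Handles records through the ConcurrentMixin worker queue instead of inline,
+# so multiple records can be processed in parallel by `max_workers` tasks.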
+class ConcurrentDefaultSubscriber(ConcurrentMixin["ConsumerRecord"], DefaultSubscriber):
+ async def start(self) -> None:
+ await super().start()
+ self.start_consume_task()
+
+ async def consume_one(self, msg: "ConsumerRecord") -> None:
+ await self._put_msg(msg)
+
+
+class BatchSubscriber(LogicSubscriber[tuple["ConsumerRecord", ...]]):
def __init__(
self,
*topics: str,
@@ -384,19 +344,13 @@ def __init__(
pattern: Optional[str],
connection_args: "AnyDict",
partitions: Iterable["TopicPartition"],
- is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence[
- "BrokerMiddleware[Sequence[Tuple[ConsumerRecord, ...]]]"
+ "BrokerMiddleware[Sequence[tuple[ConsumerRecord, ...]]]"
],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
self.batch_timeout_ms = batch_timeout_ms
self.max_records = max_records
@@ -411,8 +365,10 @@ def __init__(
else:
reg = None
- parser = AioKafkaBatchParser(
- msg_class=KafkaAckableMessage if is_manual else KafkaMessage,
+ self.parser = AioKafkaBatchParser(
+ msg_class=KafkaMessage
+ if ack_policy is ack_policy.ACK_FIRST
+ else KafkaAckableMessage,
regex=reg,
)
@@ -424,21 +380,16 @@ def __init__(
connection_args=connection_args,
partitions=partitions,
# subscriber args
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
+ default_parser=self.parser.parse_message,
+ default_decoder=self.parser.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
- async def get_msg(self) -> Tuple["ConsumerRecord", ...]:
+ async def get_msg(self) -> tuple["ConsumerRecord", ...]:
assert self.consumer, "You should setup subscriber at first." # nosec B101
messages = await self.consumer.getmany(
@@ -454,8 +405,8 @@ async def get_msg(self) -> Tuple["ConsumerRecord", ...]:
def get_log_context(
self,
- message: Optional["StreamMessage[Tuple[ConsumerRecord, ...]]"],
- ) -> Dict[str, str]:
+ message: Optional["StreamMessage[tuple[ConsumerRecord, ...]]"],
+ ) -> dict[str, str]:
if message is None:
topic = ",".join(self.topic_names)
else:
@@ -466,55 +417,3 @@ def get_log_context(
topic=topic,
group_id=self.group_id,
)
-
-
-class ConcurrentDefaultSubscriber(ConcurrentMixin[ConsumerRecord], DefaultSubscriber):
- def __init__(
- self,
- *topics: str,
- # Kafka information
- group_id: Optional[str],
- listener: Optional["ConsumerRebalanceListener"],
- pattern: Optional[str],
- connection_args: "AnyDict",
- partitions: Iterable["TopicPartition"],
- is_manual: bool,
- # Subscriber args
- max_workers: int,
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[ConsumerRecord]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- *topics,
- group_id=group_id,
- listener=listener,
- pattern=pattern,
- connection_args=connection_args,
- partitions=partitions,
- is_manual=is_manual,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- max_workers=max_workers,
- )
-
- async def start(self) -> None:
- await super().start()
- self.start_consume_task()
-
- async def consume_one(self, msg: "ConsumerRecord") -> None:
- await self._put_msg(msg)
diff --git a/faststream/kafka/testing.py b/faststream/kafka/testing.py
index e538fbb70d..fc442f9ee6 100755
--- a/faststream/kafka/testing.py
+++ b/faststream/kafka/testing.py
@@ -1,29 +1,36 @@
import re
-from datetime import datetime
-from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple
+from collections.abc import Generator, Iterable, Iterator
+from contextlib import contextmanager
+from datetime import datetime, timezone
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+)
from unittest.mock import AsyncMock, MagicMock
import anyio
from aiokafka import ConsumerRecord
from typing_extensions import override
-from faststream.broker.message import encode_message, gen_cor_id
-from faststream.broker.utils import resolve_custom_func
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream._internal.testing.broker import TestBroker
from faststream.exceptions import SubscriberNotFound
from faststream.kafka import TopicPartition
from faststream.kafka.broker import KafkaBroker
from faststream.kafka.message import KafkaMessage
from faststream.kafka.parser import AioKafkaParser
-from faststream.kafka.publisher.asyncapi import AsyncAPIBatchPublisher
from faststream.kafka.publisher.producer import AioKafkaFastProducer
-from faststream.kafka.subscriber.asyncapi import AsyncAPIBatchSubscriber
-from faststream.testing.broker import TestBroker
-from faststream.utils.functions import timeout_scope
+from faststream.kafka.publisher.specified import SpecificationBatchPublisher
+from faststream.kafka.subscriber.usecase import BatchSubscriber
+from faststream.message import encode_message, gen_cor_id
if TYPE_CHECKING:
- from faststream.kafka.publisher.asyncapi import AsyncAPIPublisher
+ from faststream._internal.basic_types import SendableMessage
+ from faststream.kafka.publisher.specified import SpecificationPublisher
+ from faststream.kafka.response import KafkaPublishCommand
from faststream.kafka.subscriber.usecase import LogicSubscriber
- from faststream.types import SendableMessage
__all__ = ("TestKafkaBroker",)
@@ -31,22 +38,28 @@
class TestKafkaBroker(TestBroker[KafkaBroker]):
"""A class to test Kafka brokers."""
+    @contextmanager
+    def _patch_producer(self, broker: KafkaBroker) -> Iterator[None]:
+        old_producer = broker._state.get().producer
+        broker._state.patch_value(producer=FakeProducer(broker))
+        try:
+            yield
+        finally:
+            broker._state.patch_value(producer=old_producer)
+
@staticmethod
async def _fake_connect( # type: ignore[override]
broker: KafkaBroker,
*args: Any,
**kwargs: Any,
) -> Callable[..., AsyncMock]:
- broker._producer = FakeProducer(broker)
return _fake_connection
@staticmethod
def create_publisher_fake_subscriber(
broker: KafkaBroker,
- publisher: "AsyncAPIPublisher[Any]",
- ) -> Tuple["LogicSubscriber[Any]", bool]:
+ publisher: "SpecificationPublisher[Any, Any]",
+ ) -> tuple["LogicSubscriber[Any]", bool]:
sub: Optional[LogicSubscriber[Any]] = None
- for handler in broker._subscribers.values():
+ for handler in broker._subscribers:
if _is_handler_matches(handler, publisher.topic, publisher.partition):
sub = handler
break
@@ -56,16 +69,17 @@ def create_publisher_fake_subscriber(
if publisher.partition:
tp = TopicPartition(
- topic=publisher.topic, partition=publisher.partition
+ topic=publisher.topic,
+ partition=publisher.partition,
)
sub = broker.subscriber(
partitions=[tp],
- batch=isinstance(publisher, AsyncAPIBatchPublisher),
+ batch=isinstance(publisher, SpecificationBatchPublisher),
)
else:
sub = broker.subscriber(
publisher.topic,
- batch=isinstance(publisher, AsyncAPIBatchPublisher),
+ batch=isinstance(publisher, SpecificationBatchPublisher),
)
else:
is_real = True
@@ -90,126 +104,101 @@ def __init__(self, broker: KafkaBroker) -> None:
self._parser = resolve_custom_func(broker._parser, default.parse_message)
self._decoder = resolve_custom_func(broker._decoder, default.decode_message)
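+    # Present the fake producer as a live, open connection so broker readiness
+    # checks pass during tests.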
+    def __bool__(self) -> bool:
+        return True
+
+ @property
+ def closed(self) -> bool:
+ return False
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- reply_to: str = "",
- rpc: bool = False,
- rpc_timeout: Optional[float] = None,
- raise_timeout: bool = False,
- no_confirm: bool = False,
- ) -> Optional[Any]:
+ cmd: "KafkaPublishCommand",
+ ) -> None:
"""Publish a message to the Kafka broker."""
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id,
- reply_to=reply_to,
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
- return_value = None
-
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_matches(handler, topic, partition):
- msg_to_send = (
- [incoming]
- if isinstance(handler, AsyncAPIBatchSubscriber)
- else incoming
- )
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.partition,
+ ):
+ msg_to_send = (
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
+ )
- with timeout_scope(rpc_timeout, raise_timeout):
- response_msg = await self._execute_handler(
- msg_to_send, topic, handler
- )
- if rpc:
- return_value = return_value or await self._decoder(
- await self._parser(response_msg)
- )
-
- return return_value
+ await self._execute_handler(msg_to_send, cmd.destination, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- timeout: Optional[float] = 0.5,
+ cmd: "KafkaPublishCommand",
) -> "ConsumerRecord":
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id,
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_matches(handler, topic, partition):
- msg_to_send = (
- [incoming]
- if isinstance(handler, AsyncAPIBatchSubscriber)
- else incoming
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.partition,
+ ):
+ msg_to_send = (
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
+ )
+
+ with anyio.fail_after(cmd.timeout):
+ return await self._execute_handler(
+ msg_to_send, cmd.destination, handler
)
- with anyio.fail_after(timeout):
- return await self._execute_handler(msg_to_send, topic, handler)
-
raise SubscriberNotFound
async def publish_batch(
self,
- *msgs: "SendableMessage",
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- correlation_id: Optional[str] = None,
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a batch of messages to the Kafka broker."""
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_matches(handler, topic, partition):
- messages = (
- build_message(
- message=message,
- topic=topic,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id,
- reply_to=reply_to,
- )
- for message in msgs
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.partition,
+ ):
+ messages = (
+ build_message(
+ message=message,
+ topic=cmd.destination,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
+ for message in cmd.batch_bodies
+ )
- if isinstance(handler, AsyncAPIBatchSubscriber):
- await self._execute_handler(list(messages), topic, handler)
+ if isinstance(handler, BatchSubscriber):
+ await self._execute_handler(list(messages), cmd.destination, handler)
- else:
- for m in messages:
- await self._execute_handler(m, topic, handler)
- return None
+ else:
+ for m in messages:
+ await self._execute_handler(m, cmd.destination, handler)
async def _execute_handler(
self,
@@ -233,7 +222,7 @@ def build_message(
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
key: Optional[bytes] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
*,
reply_to: str = "",
@@ -256,7 +245,7 @@ def build_message(
value=msg,
topic=topic,
partition=partition or 0,
- timestamp=timestamp_ms or int(datetime.now().timestamp()),
+        timestamp=timestamp_ms or int(datetime.now(timezone.utc).timestamp() * 1000),  # epoch milliseconds
timestamp_type=0,
key=k,
serialized_key_size=len(k),
@@ -274,6 +263,22 @@ def _fake_connection(*args: Any, **kwargs: Any) -> AsyncMock:
return mock
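+# Mirrors Kafka consumer-group semantics in memory: within one group_id, only
+# the first matching subscriber receives each published message.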
+def _find_handler(
+ subscribers: Iterable["LogicSubscriber[Any]"],
+ topic: str,
+ partition: Optional[int],
+) -> Generator["LogicSubscriber[Any]", None, None]:
+ published_groups = set()
+ for handler in subscribers: # pragma: no branch
+ if _is_handler_matches(handler, topic, partition):
+ if handler.group_id:
+            if handler.group_id in published_groups:
+                continue
+            published_groups.add(handler.group_id)
+ yield handler
+
+
def _is_handler_matches(
handler: "LogicSubscriber[Any]",
topic: str,
@@ -285,5 +290,5 @@ def _is_handler_matches(
for p in handler.partitions
)
or topic in handler.topics
- or (handler._pattern and re.match(handler._pattern, topic))
+ or (handler._pattern and re.match(handler._pattern, topic)),
)
diff --git a/faststream/log/__init__.py b/faststream/log/__init__.py
deleted file mode 100644
index 0fc7042279..0000000000
--- a/faststream/log/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from faststream.log.logging import logger
-
-__all__ = ("logger",)
diff --git a/faststream/log/logging.py b/faststream/log/logging.py
deleted file mode 100644
index d10852c440..0000000000
--- a/faststream/log/logging.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import logging
-import sys
-from logging import LogRecord
-from typing import Mapping
-
-from faststream.log.formatter import ColourizedFormatter
-from faststream.utils.context.repository import context
-
-logger = logging.getLogger("faststream")
-logger.setLevel(logging.INFO)
-logger.propagate = False
-main_handler = logging.StreamHandler(stream=sys.stderr)
-main_handler.setFormatter(
- ColourizedFormatter(
- fmt="%(asctime)s %(levelname)8s - %(message)s",
- use_colors=True,
- )
-)
-logger.addHandler(main_handler)
-
-
-class ExtendedFilter(logging.Filter):
- def __init__(
- self,
- default_context: Mapping[str, str],
- message_id_ln: int,
- name: str = "",
- ) -> None:
- self.default_context = default_context
- self.message_id_ln = message_id_ln
- super().__init__(name)
-
- def filter(self, record: LogRecord) -> bool:
- if is_suitable := super().filter(record):
- log_context: Mapping[str, str] = context.get_local(
- "log_context", self.default_context
- )
-
- for k, v in log_context.items():
- value = getattr(record, k, v)
- setattr(record, k, value)
-
- record.message_id = getattr(record, "message_id", "")[: self.message_id_ln]
-
- return is_suitable
-
-
-def get_broker_logger(
- name: str,
- default_context: Mapping[str, str],
- message_id_ln: int,
-) -> logging.Logger:
- logger = logging.getLogger(f"faststream.access.{name}")
- logger.propagate = False
- logger.addFilter(ExtendedFilter(default_context, message_id_ln))
- logger.setLevel(logging.INFO)
- return logger
-
-
-def set_logger_fmt(
- logger: logging.Logger,
- fmt: str = "%(asctime)s %(levelname)s - %(message)s",
-) -> None:
- handler = logging.StreamHandler(stream=sys.stdout)
-
- formatter = ColourizedFormatter(
- fmt=fmt,
- use_colors=True,
- )
- handler.setFormatter(formatter)
-
- logger.addHandler(handler)
diff --git a/faststream/message/__init__.py b/faststream/message/__init__.py
new file mode 100644
index 0000000000..2dd53d6c4e
--- /dev/null
+++ b/faststream/message/__init__.py
@@ -0,0 +1,12 @@
+from .message import AckStatus, StreamMessage
+from .source_type import SourceType
+from .utils import decode_message, encode_message, gen_cor_id
+
+__all__ = (
+ "AckStatus",
+ "SourceType",
+ "StreamMessage",
+ "decode_message",
+ "encode_message",
+ "gen_cor_id",
+)
diff --git a/faststream/message/message.py b/faststream/message/message.py
new file mode 100644
index 0000000000..2037e7dc98
--- /dev/null
+++ b/faststream/message/message.py
@@ -0,0 +1,115 @@
+from enum import Enum
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Generic,
+ Optional,
+ TypeVar,
+ Union,
+)
+from uuid import uuid4
+
+from .source_type import SourceType
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import AsyncCallable
+
+# prevent circular imports
+MsgType = TypeVar("MsgType")
+
+
+class AckStatus(str, Enum):
+ ACKED = "ACKED"
+ NACKED = "NACKED"
+ REJECTED = "REJECTED"
+
+
+class StreamMessage(Generic[MsgType]):
+ """Generic class to represent a stream message."""
+
+ def __init__(
+ self,
+ raw_message: "MsgType",
+ body: Union[bytes, Any],
+ *,
+ headers: Optional["AnyDict"] = None,
+ reply_to: str = "",
+ batch_headers: Optional[list["AnyDict"]] = None,
+ path: Optional["AnyDict"] = None,
+ content_type: Optional[str] = None,
+ correlation_id: Optional[str] = None,
+ message_id: Optional[str] = None,
+ source_type: SourceType = SourceType.CONSUME,
+ ) -> None:
+ self.raw_message = raw_message
+ self.body = body
+ self.reply_to = reply_to
+ self.content_type = content_type
+ self._source_type = source_type
+
+ self.headers = headers or {}
+ self.batch_headers = batch_headers or []
+ self.path = path or {}
+ self.correlation_id = correlation_id or str(uuid4())
+ self.message_id = message_id or self.correlation_id
+
+ self.committed: Optional[AckStatus] = None
+ self.processed = False
+
+ # Setup later
+ self.__decoder: Optional[AsyncCallable] = None
+ self.__decoded_caches: dict[
+ Any, Any
+ ] = {} # Cache values between filters and tests
+
+ def set_decoder(self, decoder: "AsyncCallable") -> None:
+ self.__decoder = decoder
+
+ def clear_cache(self) -> None:
+ self.__decoded_caches.clear()
+
+ def __repr__(self) -> str:
+ inner = ", ".join(
+ filter(
+ bool,
+ (
+ f"body={self.body!r}",
+ f"content_type={self.content_type}",
+ f"message_id={self.message_id}",
+ f"correlation_id={self.correlation_id}",
+ f"reply_to={self.reply_to}" if self.reply_to else "",
+ f"headers={self.headers}",
+ f"path={self.path}",
+ f"committed={self.committed}",
+ f"raw_message={self.raw_message}",
+ ),
+ ),
+ )
+
+ return f"{self.__class__.__name__}({inner})"
+
+ async def decode(self) -> Optional["Any"]:
+ """Serialize the message by lazy decoder.
+
+ Returns a cache after first usage. To prevent such behavior, please call
+ `message.clear_cache()` after `message.body` changes.
+ """
+ assert self.__decoder, "You should call `set_decoder()` method first." # nosec B101
+
+ if (result := self.__decoded_caches.get(self.__decoder)) is None:
+ result = self.__decoded_caches[self.__decoder] = await self.__decoder(self)
+
+ return result
+
+ async def ack(self) -> None:
+ if self.committed is None:
+ self.committed = AckStatus.ACKED
+
+ async def nack(self) -> None:
+ if self.committed is None:
+ self.committed = AckStatus.NACKED
+
+ async def reject(self) -> None:
+ if self.committed is None:
+ self.committed = AckStatus.REJECTED
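+
+
+# Minimal usage sketch (hypothetical raw message and decoder; broker runtimes
+# normally call these hooks for you):
+#
+#     msg = StreamMessage(raw_message=raw, body=b'{"x": 1}')
+#     msg.set_decoder(decoder)     # decoder: AsyncCallable installed by a parser
+#     data = await msg.decode()    # decoded once, cached on repeat calls
+#     await msg.ack()              # msg.committed becomes AckStatus.ACKED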
diff --git a/faststream/message/source_type.py b/faststream/message/source_type.py
new file mode 100644
index 0000000000..b6e4f95fd9
--- /dev/null
+++ b/faststream/message/source_type.py
@@ -0,0 +1,9 @@
+from enum import Enum
+
+
+class SourceType(str, Enum):
+ CONSUME = "CONSUME"
+ """Message consumed by basic subscriber flow."""
+
+ RESPONSE = "RESPONSE"
+ """RPC response consumed."""
diff --git a/faststream/message/utils.py b/faststream/message/utils.py
new file mode 100644
index 0000000000..c06750f813
--- /dev/null
+++ b/faststream/message/utils.py
@@ -0,0 +1,74 @@
+import json
+from collections.abc import Sequence
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+ Union,
+ cast,
+)
+from uuid import uuid4
+
+from faststream._internal._compat import dump_json, json_loads
+from faststream._internal.constants import ContentTypes
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import DecodedMessage, SendableMessage
+
+ from .message import StreamMessage
+
+
+def gen_cor_id() -> str:
+ """Generate random string to use as ID."""
+ return str(uuid4())
+
+
+def decode_message(message: "StreamMessage[Any]") -> "DecodedMessage":
+ """Decodes a message."""
+ body: Any = getattr(message, "body", message)
+ m: DecodedMessage = body
+
+ if content_type := getattr(message, "content_type", False):
+ content_type = ContentTypes(cast("str", content_type))
+
+ if content_type is ContentTypes.TEXT:
+ m = body.decode()
+
+ elif content_type is ContentTypes.JSON:
+ m = json_loads(body)
+
+ else:
+ # content-type not set
+ with suppress(json.JSONDecodeError, UnicodeDecodeError):
+ m = json_loads(body)
+
+ return m
+
+
+def encode_message(
+ msg: Union[Sequence["SendableMessage"], "SendableMessage"],
+) -> tuple[bytes, Optional[str]]:
+ """Encodes a message."""
+ if msg is None:
+ return (
+ b"",
+ None,
+ )
+
+ if isinstance(msg, bytes):
+ return (
+ msg,
+ None,
+ )
+
+ if isinstance(msg, str):
+ return (
+ msg.encode(),
+ ContentTypes.TEXT.value,
+ )
+
+ return (
+ dump_json(msg),
+ ContentTypes.JSON.value,
+ )
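+
+
+# Expected outputs of `encode_message` (assuming ContentTypes.TEXT / JSON map
+# to "text/plain" / "application/json"):
+#
+#     encode_message(None)      -> (b"", None)
+#     encode_message(b"raw")    -> (b"raw", None)
+#     encode_message("hi")      -> (b"hi", "text/plain")
+#     encode_message({"x": 1})  -> (dump_json({"x": 1}), "application/json")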
diff --git a/faststream/middlewares/__init__.py b/faststream/middlewares/__init__.py
new file mode 100644
index 0000000000..abf9fb4e62
--- /dev/null
+++ b/faststream/middlewares/__init__.py
@@ -0,0 +1,11 @@
+from faststream._internal.middlewares import BaseMiddleware
+from faststream.middlewares.acknowledgement.conf import AckPolicy
+from faststream.middlewares.acknowledgement.middleware import AcknowledgementMiddleware
+from faststream.middlewares.exception import ExceptionMiddleware
+
+__all__ = (
+ "AckPolicy",
+ "AcknowledgementMiddleware",
+ "BaseMiddleware",
+ "ExceptionMiddleware",
+)
diff --git a/faststream/cli/docs/__init__.py b/faststream/middlewares/acknowledgement/__init__.py
similarity index 100%
rename from faststream/cli/docs/__init__.py
rename to faststream/middlewares/acknowledgement/__init__.py
diff --git a/faststream/middlewares/acknowledgement/conf.py b/faststream/middlewares/acknowledgement/conf.py
new file mode 100644
index 0000000000..c5cd759e10
--- /dev/null
+++ b/faststream/middlewares/acknowledgement/conf.py
@@ -0,0 +1,18 @@
+from enum import Enum
+
+
+class AckPolicy(str, Enum):
+ ACK_FIRST = "ack_first"
+ """Ack message on consume."""
+
+ ACK = "ack"
+ """Ack message after all process."""
+
+ REJECT_ON_ERROR = "reject_on_error"
+ """Reject message on unhandled exceptions."""
+
+ NACK_ON_ERROR = "nack_on_error"
+ """Nack message on unhandled exceptions."""
+
+ DO_NOTHING = "do_nothing"
+ """Disable default FastStream Acknowledgement logic. User should confirm all actions manually."""
diff --git a/faststream/middlewares/acknowledgement/middleware.py b/faststream/middlewares/acknowledgement/middleware.py
new file mode 100644
index 0000000000..be22b40e7c
--- /dev/null
+++ b/faststream/middlewares/acknowledgement/middleware.py
@@ -0,0 +1,129 @@
+import logging
+from typing import TYPE_CHECKING, Any, Optional
+
+from faststream._internal.middlewares import BaseMiddleware
+from faststream.exceptions import (
+ AckMessage,
+ HandlerException,
+ NackMessage,
+ RejectMessage,
+)
+from faststream.middlewares.acknowledgement.conf import AckPolicy
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.basic_types import AnyDict, AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream._internal.state import LoggerState
+ from faststream.message import StreamMessage
+
+
+class AcknowledgementMiddleware:
+ def __init__(
+ self, logger: "LoggerState", ack_policy: "AckPolicy", extra_options: "AnyDict"
+ ) -> None:
+ self.ack_policy = ack_policy
+ self.extra_options = extra_options
+ self.logger = logger
+
+ def __call__(
+ self, msg: Optional[Any], context: "ContextRepo"
+ ) -> "_AcknowledgementMiddleware":
+ return _AcknowledgementMiddleware(
+ msg,
+ logger=self.logger,
+ ack_policy=self.ack_policy,
+ extra_options=self.extra_options,
+ context=context,
+ )
+
+
+class _AcknowledgementMiddleware(BaseMiddleware):
+ def __init__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ logger: "LoggerState",
+ context: "ContextRepo",
+ extra_options: "AnyDict",
+ # can't be created with AckPolicy.DO_NOTHING
+ ack_policy: AckPolicy,
+ ) -> None:
+ super().__init__(msg, context=context)
+
+ self.ack_policy = ack_policy
+ self.extra_options = extra_options
+ self.logger = logger
+
+ self.message: Optional[StreamMessage[Any]] = None
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[Any]",
+ ) -> Any:
+ self.message = msg
+ if self.ack_policy is AckPolicy.ACK_FIRST:
+ await self.__ack()
+
+ return await call_next(msg)
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ if self.ack_policy is AckPolicy.ACK_FIRST:
+ return False
+
+ if not exc_type:
+ await self.__ack()
+
+ elif isinstance(exc_val, HandlerException):
+ if isinstance(exc_val, AckMessage):
+ await self.__ack(**exc_val.extra_options)
+
+ elif isinstance(exc_val, NackMessage):
+ await self.__nack(**exc_val.extra_options)
+
+ elif isinstance(exc_val, RejectMessage): # pragma: no branch
+ await self.__reject(**exc_val.extra_options)
+
+ # Exception was processed and suppressed
+ return True
+
+ elif self.ack_policy is AckPolicy.REJECT_ON_ERROR:
+ await self.__reject()
+
+ elif self.ack_policy is AckPolicy.NACK_ON_ERROR:
+ await self.__nack()
+
+ # Exception was not processed
+ return False
+
+ async def __ack(self, **exc_extra_options: Any) -> None:
+ if self.message:
+ try:
+ await self.message.ack(**exc_extra_options, **self.extra_options)
+ except Exception as er:
+ if self.logger is not None:
+ self.logger.log(repr(er), logging.CRITICAL, exc_info=er)
+
+ async def __nack(self, **exc_extra_options: Any) -> None:
+ if self.message:
+ try:
+ await self.message.nack(**exc_extra_options, **self.extra_options)
+ except Exception as er:
+ if self.logger is not None:
+ self.logger.log(repr(er), logging.CRITICAL, exc_info=er)
+
+ async def __reject(self, **exc_extra_options: Any) -> None:
+ if self.message:
+ try:
+ await self.message.reject(**exc_extra_options, **self.extra_options)
+ except Exception as er:
+ if self.logger is not None:
+ self.logger.log(repr(er), logging.CRITICAL, exc_info=er)
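+
+
+# Behavior summary (see __aexit__ above): a clean exit acks the message;
+# AckMessage / NackMessage / RejectMessage raised by a handler force the
+# matching action and suppress the exception; any other error nacks or rejects
+# it per the configured AckPolicy and is re-raised. ACK_FIRST acks on consume.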
diff --git a/faststream/middlewares/exception.py b/faststream/middlewares/exception.py
new file mode 100644
index 0000000000..dd28b8c324
--- /dev/null
+++ b/faststream/middlewares/exception.py
@@ -0,0 +1,220 @@
+from collections.abc import Awaitable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ NoReturn,
+ Optional,
+ Union,
+ cast,
+ overload,
+)
+
+from typing_extensions import Literal, TypeAlias
+
+from faststream._internal.middlewares import BaseMiddleware
+from faststream._internal.utils import apply_types
+from faststream._internal.utils.functions import sync_fake_context, to_async
+from faststream.exceptions import IgnoredException
+
+if TYPE_CHECKING:
+ from contextlib import AbstractContextManager
+ from types import TracebackType
+
+ from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream.message import StreamMessage
+
+
+GeneralExceptionHandler: TypeAlias = Union[
+ Callable[..., None],
+ Callable[..., Awaitable[None]],
+]
+PublishingExceptionHandler: TypeAlias = Callable[..., Any]
+
+CastedGeneralExceptionHandler: TypeAlias = Callable[..., Awaitable[None]]
+CastedPublishingExceptionHandler: TypeAlias = Callable[..., Awaitable[Any]]
+CastedHandlers: TypeAlias = list[
+ tuple[
+ type[Exception],
+ CastedGeneralExceptionHandler,
+ ]
+]
+CastedPublishingHandlers: TypeAlias = list[
+ tuple[
+ type[Exception],
+ CastedPublishingExceptionHandler,
+ ]
+]
+
+
+class ExceptionMiddleware:
+ __slots__ = ("_handlers", "_publish_handlers")
+
+ _handlers: CastedHandlers
+ _publish_handlers: CastedPublishingHandlers
+
+ def __init__(
+ self,
+ handlers: Optional[
+ dict[
+ type[Exception],
+ GeneralExceptionHandler,
+ ]
+ ] = None,
+ publish_handlers: Optional[
+ dict[
+ type[Exception],
+ PublishingExceptionHandler,
+ ]
+ ] = None,
+ ) -> None:
+ self._handlers: CastedHandlers = [
+ (IgnoredException, ignore_handler),
+ *(
+ (
+ exc_type,
+ apply_types(
+ cast("Callable[..., Awaitable[None]]", to_async(handler)),
+ ),
+ )
+ for exc_type, handler in (handlers or {}).items()
+ ),
+ ]
+
+ self._publish_handlers: CastedPublishingHandlers = [
+ (IgnoredException, ignore_handler),
+ *(
+ (exc_type, apply_types(to_async(handler)))
+ for exc_type, handler in (publish_handlers or {}).items()
+ ),
+ ]
+
+ @overload
+ def add_handler(
+ self,
+ exc: type[Exception],
+ publish: Literal[False] = False,
+ ) -> Callable[[GeneralExceptionHandler], GeneralExceptionHandler]: ...
+
+ @overload
+ def add_handler(
+ self,
+ exc: type[Exception],
+ publish: Literal[True],
+ ) -> Callable[[PublishingExceptionHandler], PublishingExceptionHandler]: ...
+
+ def add_handler(
+ self,
+ exc: type[Exception],
+ publish: bool = False,
+ ) -> Union[
+ Callable[[GeneralExceptionHandler], GeneralExceptionHandler],
+ Callable[[PublishingExceptionHandler], PublishingExceptionHandler],
+ ]:
+ if publish:
+
+ def pub_wrapper(
+ func: PublishingExceptionHandler,
+ ) -> PublishingExceptionHandler:
+ self._publish_handlers.append(
+ (
+ exc,
+ apply_types(to_async(func)),
+ ),
+ )
+ return func
+
+ return pub_wrapper
+
+ def default_wrapper(
+ func: GeneralExceptionHandler,
+ ) -> GeneralExceptionHandler:
+ self._handlers.append(
+ (
+ exc,
+ apply_types(to_async(func)),
+ ),
+ )
+ return func
+
+ return default_wrapper
+
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "_BaseExceptionMiddleware":
+ """Real middleware runtime constructor."""
+ return _BaseExceptionMiddleware(
+ handlers=self._handlers,
+ publish_handlers=self._publish_handlers,
+ context=context,
+ msg=msg,
+ )
+
+
+class _BaseExceptionMiddleware(BaseMiddleware):
+ def __init__(
+ self,
+ *,
+ handlers: CastedHandlers,
+ publish_handlers: CastedPublishingHandlers,
+ context: "ContextRepo",
+ msg: Optional[Any],
+ ) -> None:
+ super().__init__(msg, context=context)
+ self._handlers = handlers
+ self._publish_handlers = publish_handlers
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[Any]",
+ ) -> Any:
+ try:
+ return await call_next(await self.on_consume(msg))
+
+ except Exception as exc:
+ exc_type = type(exc)
+
+ for handler_type, handler in self._publish_handlers:
+ if issubclass(exc_type, handler_type):
+ return await handler(exc, context__=self.context)
+
+ raise
+
+ async def after_processed(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ if exc_type:
+ for handler_type, handler in self._handlers:
+ if issubclass(exc_type, handler_type):
+ # TODO: remove it after context will be moved to middleware
+ # In case parser/decoder error occurred
+ scope: AbstractContextManager[Any]
+ if not self.context.get_local("message"):
+ scope = self.context.scope("message", self.msg)
+ else:
+ scope = sync_fake_context()
+
+ with scope:
+ await handler(exc_val, context__=self.context)
+
+ return True
+
+ return False
+
+ return None
+
+
+async def ignore_handler(
+ exception: IgnoredException,
+ **kwargs: Any, # suppress context
+) -> NoReturn:
+ raise exception
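+
+
+# Usage sketch (assumed names; a `publish=True` handler's return value is used
+# as the consume result in place of the failed one):
+#
+#     exc_middleware = ExceptionMiddleware()
+#
+#     @exc_middleware.add_handler(ValueError, publish=True)
+#     async def on_value_error(exc: ValueError) -> str:
+#         return "fallback response"
+#
+#     broker = KafkaBroker(middlewares=[exc_middleware])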
diff --git a/faststream/middlewares/logging.py b/faststream/middlewares/logging.py
new file mode 100644
index 0000000000..e9c4d0b75b
--- /dev/null
+++ b/faststream/middlewares/logging.py
@@ -0,0 +1,97 @@
+import logging
+from typing import TYPE_CHECKING, Any, Optional
+
+from faststream._internal.middlewares import BaseMiddleware
+from faststream.exceptions import IgnoredException
+from faststream.message.source_type import SourceType
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream._internal.state.logger import LoggerState
+ from faststream.message import StreamMessage
+
+
+class CriticalLogMiddleware:
+ def __init__(self, logger: "LoggerState") -> None:
+ """Initialize the class."""
+ self.logger = logger
+
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "_LoggingMiddleware":
+ return _LoggingMiddleware(
+ logger=self.logger,
+ msg=msg,
+ context=context,
+ )
+
+
+class _LoggingMiddleware(BaseMiddleware):
+ """A middleware class for logging critical errors."""
+
+ def __init__(
+ self,
+ *,
+ logger: "LoggerState",
+ context: "ContextRepo",
+ msg: Optional[Any],
+ ) -> None:
+ super().__init__(msg, context=context)
+ self.logger = logger
+ self._source_type = SourceType.CONSUME
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[Any]",
+ ) -> Any:
+ source_type = self._source_type = msg._source_type
+
+ if source_type is not SourceType.RESPONSE:
+ self.logger.log(
+ "Received",
+ extra=self.context.get_local("log_context", {}),
+ )
+
+ return await call_next(msg)
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> bool:
+ """Asynchronously called after processing."""
+ if self._source_type is not SourceType.RESPONSE:
+ c = self.context.get_local("log_context", {})
+
+ if exc_type:
+ # TODO: move critical logging to `subscriber.consume()` method
+ if issubclass(exc_type, IgnoredException):
+ self.logger.log(
+ log_level=logging.INFO,
+ message=str(exc_val),
+ extra=c,
+ )
+
+ else:
+ self.logger.log(
+ log_level=logging.ERROR,
+ message=f"{exc_type.__name__}: {exc_val}",
+ exc_info=exc_val,
+ extra=c,
+ )
+
+ self.logger.log(message="Processed", extra=c)
+
+ await super().__aexit__(exc_type, exc_val, exc_tb)
+
+ # Exception was not processed
+ return False
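
The same two-phase pattern as the exception middleware applies here: `CriticalLogMiddleware` is a lightweight per-broker factory, and its `__call__(msg, context=...)` builds a fresh `_LoggingMiddleware` per message. A custom middleware following this convention might look like the sketch below; note the `BaseMiddleware` import path is the internal one introduced by this diff and may be re-exported publicly elsewhere:

```python
import time
from typing import Any, Optional

from faststream._internal.middlewares import BaseMiddleware


class TimingMiddleware:
    """Per-broker factory: holds only shared configuration."""

    def __call__(self, msg: Optional[Any], /, *, context: Any) -> "_TimingMiddleware":
        # Invoked once per incoming message, mirroring
        # CriticalLogMiddleware.__call__ above.
        return _TimingMiddleware(msg, context=context)


class _TimingMiddleware(BaseMiddleware):
    """Per-message runtime instance."""

    async def consume_scope(self, call_next: Any, msg: Any) -> Any:
        start = time.perf_counter()
        try:
            return await call_next(msg)
        finally:
            print(f"message processed in {time.perf_counter() - start:.3f}s")
```
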
diff --git a/faststream/nats/__init__.py b/faststream/nats/__init__.py
index 5ee2b42c61..55ed9abd15 100644
--- a/faststream/nats/__init__.py
+++ b/faststream/nats/__init__.py
@@ -13,13 +13,13 @@
StreamSource,
)
+from faststream._internal.testing.app import TestApp
from faststream.nats.annotations import NatsMessage
from faststream.nats.broker.broker import NatsBroker
from faststream.nats.response import NatsResponse
from faststream.nats.router import NatsPublisher, NatsRoute, NatsRouter
-from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
+from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PubAck, PullSub
from faststream.nats.testing import TestNatsBroker
-from faststream.testing.app import TestApp
__all__ = (
"AckPolicy",
@@ -38,6 +38,7 @@
"NatsRouter",
"ObjWatch",
"Placement",
+ "PubAck",
"PullSub",
"RePublish",
"ReplayPolicy",
diff --git a/faststream/nats/annotations.py b/faststream/nats/annotations.py
index b93ba4e6e0..203784d9d8 100644
--- a/faststream/nats/annotations.py
+++ b/faststream/nats/annotations.py
@@ -1,15 +1,17 @@
+from typing import Annotated
+
from nats.aio.client import Client as _NatsClient
from nats.js.client import JetStreamContext as _JetStream
from nats.js.object_store import ObjectStore as _ObjectStore
-from typing_extensions import Annotated
-from faststream.annotations import ContextRepo, Logger, NoCast
+from faststream._internal.context import Context
+from faststream.annotations import ContextRepo, Logger
from faststream.nats.broker import NatsBroker as _Broker
from faststream.nats.message import NatsMessage as _Message
-from faststream.nats.publisher.producer import NatsFastProducer as _CoreProducer
-from faststream.nats.publisher.producer import NatsJSFastProducer as _JsProducer
-from faststream.nats.subscriber.usecase import OBJECT_STORAGE_CONTEXT_KEY
-from faststream.utils.context import Context
+from faststream.nats.subscriber.usecases.object_storage_subscriber import (
+ OBJECT_STORAGE_CONTEXT_KEY,
+)
+from faststream.params import NoCast
__all__ = (
"Client",
@@ -27,5 +29,3 @@
NatsBroker = Annotated[_Broker, Context("broker")]
Client = Annotated[_NatsClient, Context("broker._connection")]
JsClient = Annotated[_JetStream, Context("broker._stream")]
-NatsProducer = Annotated[_CoreProducer, Context("broker._producer")]
-NatsJsProducer = Annotated[_JsProducer, Context("broker._js_producer")]
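
With the producer aliases removed, the remaining annotations are plain `Context` lookups usable directly as handler parameter types. A short sketch of a handler consuming them (subject name illustrative):

```python
from faststream.nats import NatsBroker
from faststream.nats.annotations import Client, NatsMessage

broker = NatsBroker()

@broker.subscriber("events")
async def handler(
    body: str,
    message: NatsMessage,  # Annotated[..., Context("message")]
    nc: Client,            # raw nats-py client, Context("broker._connection")
) -> None:
    print(message.correlation_id, len(body), nc.connected_url)
```
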
diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py
index bbf718f232..ef75a4768e 100644
--- a/faststream/nats/broker/broker.py
+++ b/faststream/nats/broker/broker.py
@@ -1,15 +1,11 @@
import logging
-import warnings
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
Optional,
- Sequence,
- Type,
Union,
)
@@ -25,57 +21,61 @@
DEFAULT_PENDING_SIZE,
DEFAULT_PING_INTERVAL,
DEFAULT_RECONNECT_TIME_WAIT,
+ Client,
)
+from nats.aio.msg import Msg
from nats.errors import Error
from nats.js.errors import BadRequestError
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, overload, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.message import gen_cor_id
-from faststream.nats.broker.logging import NatsLoggingBroker
-from faststream.nats.broker.registrator import NatsRegistrator
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.constants import EMPTY
+from faststream.message import gen_cor_id
from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer
+from faststream.nats.response import NatsPublishCommand
from faststream.nats.security import parse_security
-from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber
-from faststream.types import EMPTY
+from faststream.nats.subscriber.usecases.basic import LogicSubscriber
+from faststream.response.publish_type import PublishType
+
+from .logging import make_nats_logger_state
+from .registrator import NatsRegistrator
+from .state import BrokerState, ConnectedState, EmptyBrokerState
if TYPE_CHECKING:
- import ssl
from types import TracebackType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from nats.aio.client import (
Callback,
- Client,
Credentials,
ErrorCallback,
JWTCallback,
SignatureCallback,
)
- from nats.aio.msg import Msg
from nats.js.api import Placement, RePublish, StorageType
- from nats.js.client import JetStreamContext
from nats.js.kv import KeyValue
from nats.js.object_store import ObjectStore
from typing_extensions import TypedDict, Unpack
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.publisher.proto import ProducerProto
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.nats.message import NatsMessage
- from faststream.nats.publisher.asyncapi import AsyncAPIPublisher
- from faststream.security import BaseSecurity
- from faststream.types import (
+ from faststream._internal.basic_types import (
AnyDict,
- DecodedMessage,
Decorator,
LoggerProto,
SendableMessage,
)
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.nats.message import NatsMessage
+ from faststream.nats.publisher.usecase import LogicPublisher
+ from faststream.nats.schemas import PubAck
+ from faststream.security import BaseSecurity
+ from faststream.specification.schema.extra import Tag, TagDict
class NatsInitKwargs(TypedDict, total=False):
"""NatsBroker.connect() method type hints."""
@@ -97,7 +97,8 @@ class NatsInitKwargs(TypedDict, total=False):
Doc("Callback to report when a new server joins the cluster."),
]
reconnected_cb: Annotated[
- Optional["Callback"], Doc("Callback to report success reconnection.")
+ Optional["Callback"],
+ Doc("Callback to report success reconnection."),
]
name: Annotated[
Optional[str],
@@ -107,7 +108,7 @@ class NatsInitKwargs(TypedDict, total=False):
bool,
Doc(
"Turn on NATS server pedantic mode that performs extra checks on the protocol. "
- "https://docs.nats.io/using-nats/developer/connecting/misc#turn-on-pedantic-mode"
+ "https://docs.nats.io/using-nats/developer/connecting/misc#turn-on-pedantic-mode",
),
]
verbose: Annotated[
@@ -141,32 +142,21 @@ class NatsInitKwargs(TypedDict, total=False):
dont_randomize: Annotated[
bool,
Doc(
- "Boolean indicating should client randomly shuffle servers list for reconnection randomness."
+ "Boolean indicating should client randomly shuffle servers list for reconnection randomness.",
),
]
flusher_queue_size: Annotated[
- int, Doc("Max count of commands awaiting to be flushed to the socket")
+ int,
+ Doc("Max count of commands awaiting to be flushed to the socket"),
]
no_echo: Annotated[
bool,
Doc("Boolean indicating should commands be echoed."),
]
- tls: Annotated[
- Optional["ssl.SSLContext"],
- Doc("Some SSL context to make NATS connections secure."),
- ]
tls_hostname: Annotated[
Optional[str],
Doc("Hostname for TLS."),
]
- user: Annotated[
- Optional[str],
- Doc("Username for NATS auth."),
- ]
- password: Annotated[
- Optional[str],
- Doc("Username password for NATS auth."),
- ]
token: Annotated[
Optional[str],
Doc("Auth token for NATS auth."),
@@ -180,14 +170,14 @@ class NatsInitKwargs(TypedDict, total=False):
Doc(
"A callback used to sign a nonce from the server while "
"authenticating with nkeys. The user should sign the nonce and "
- "return the base64 encoded signature."
+ "return the base64 encoded signature.",
),
]
user_jwt_cb: Annotated[
Optional["JWTCallback"],
Doc(
"A callback used to fetch and return the account "
- "signed JWT for this user."
+ "signed JWT for this user.",
),
]
user_credentials: Annotated[
@@ -205,7 +195,7 @@ class NatsInitKwargs(TypedDict, total=False):
inbox_prefix: Annotated[
Union[str, bytes],
Doc(
- "Prefix for generating unique inboxes, subjects with that prefix and NUID.ß"
+ "Prefix for generating unique inboxes, subjects with that prefix and NUID.ß",
),
]
pending_size: Annotated[
@@ -220,17 +210,16 @@ class NatsInitKwargs(TypedDict, total=False):
class NatsBroker(
NatsRegistrator,
- NatsLoggingBroker,
+ BrokerUsecase[Msg, Client],
):
"""A class to represent a NATS broker."""
- url: List[str]
- stream: Optional["JetStreamContext"]
+ url: list[str]
- _producer: Optional["NatsFastProducer"]
- _js_producer: Optional["NatsJSFastProducer"]
- _kv_declarer: Optional["KVBucketDeclarer"]
- _os_declarer: Optional["OSBucketDeclarer"]
+ _producer: "NatsFastProducer"
+ _js_producer: "NatsJSFastProducer"
+ _kv_declarer: "KVBucketDeclarer"
+ _os_declarer: "OSBucketDeclarer"
def __init__(
self,
@@ -256,7 +245,8 @@ def __init__(
Doc("Callback to report when a new server joins the cluster."),
] = None,
reconnected_cb: Annotated[
- Optional["Callback"], Doc("Callback to report success reconnection.")
+ Optional["Callback"],
+ Doc("Callback to report success reconnection."),
] = None,
name: Annotated[
Optional[str],
@@ -266,7 +256,7 @@ def __init__(
bool,
Doc(
"Turn on NATS server pedantic mode that performs extra checks on the protocol. "
- "https://docs.nats.io/using-nats/developer/connecting/misc#turn-on-pedantic-mode"
+ "https://docs.nats.io/using-nats/developer/connecting/misc#turn-on-pedantic-mode",
),
] = False,
verbose: Annotated[
@@ -300,32 +290,21 @@ def __init__(
dont_randomize: Annotated[
bool,
Doc(
- "Boolean indicating should client randomly shuffle servers list for reconnection randomness."
+ "Boolean indicating should client randomly shuffle servers list for reconnection randomness.",
),
] = False,
flusher_queue_size: Annotated[
- int, Doc("Max count of commands awaiting to be flushed to the socket")
+ int,
+ Doc("Max count of commands awaiting to be flushed to the socket"),
] = DEFAULT_MAX_FLUSHER_QUEUE_SIZE,
no_echo: Annotated[
bool,
Doc("Boolean indicating should commands be echoed."),
] = False,
- tls: Annotated[
- Optional["ssl.SSLContext"],
- Doc("Some SSL context to make NATS connections secure."),
- ] = None,
tls_hostname: Annotated[
Optional[str],
Doc("Hostname for TLS."),
] = None,
- user: Annotated[
- Optional[str],
- Doc("Username for NATS auth."),
- ] = None,
- password: Annotated[
- Optional[str],
- Doc("Username password for NATS auth."),
- ] = None,
token: Annotated[
Optional[str],
Doc("Auth token for NATS auth."),
@@ -339,14 +318,14 @@ def __init__(
Doc(
"A callback used to sign a nonce from the server while "
"authenticating with nkeys. The user should sign the nonce and "
- "return the base64 encoded signature."
+ "return the base64 encoded signature.",
),
] = None,
user_jwt_cb: Annotated[
Optional["JWTCallback"],
Doc(
"A callback used to fetch and return the account "
- "signed JWT for this user."
+ "signed JWT for this user.",
),
] = None,
user_credentials: Annotated[
@@ -364,7 +343,7 @@ def __init__(
inbox_prefix: Annotated[
Union[str, bytes],
Doc(
- "Prefix for generating unique inboxes, subjects with that prefix and NUID.ß"
+ "Prefix for generating unique inboxes, subjects with that prefix and NUID.ß",
),
] = DEFAULT_INBOX_PREFIX,
pending_size: Annotated[
@@ -379,7 +358,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = None,
decoder: Annotated[
@@ -391,21 +370,25 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
Sequence["BrokerMiddleware[Msg]"],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[Msg]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
# AsyncAPI args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Union[str, Iterable[str], None],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -422,9 +405,9 @@ def __init__(
Doc("AsyncAPI server description."),
] = None,
tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -443,10 +426,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -457,42 +437,17 @@ def __init__(
] = (),
) -> None:
"""Initialize the NatsBroker object."""
- if tls: # pragma: no cover
- warnings.warn(
- (
- "\nNATS `tls` option was deprecated and will be removed in 0.6.0"
- "\nPlease, use `security` with `BaseSecurity` or `SASLPlaintext` instead"
- ),
- DeprecationWarning,
- stacklevel=2,
- )
-
- if user or password:
- warnings.warn(
- (
- "\nNATS `user` and `password` options were deprecated and will be removed in 0.6.0"
- "\nPlease, use `security` with `SASLPlaintext` instead"
- ),
- DeprecationWarning,
- stacklevel=2,
- )
-
- secure_kwargs = {
- "tls": tls,
- "user": user,
- "password": password,
- **parse_security(security),
- }
+ secure_kwargs = parse_security(security)
servers = [servers] if isinstance(servers, str) else list(servers)
- if asyncapi_url is not None:
- if isinstance(asyncapi_url, str):
- asyncapi_url = [asyncapi_url]
+ if specification_url is not None:
+ if isinstance(specification_url, str):
+ specification_url = [specification_url]
else:
- asyncapi_url = list(asyncapi_url)
+ specification_url = list(specification_url)
else:
- asyncapi_url = servers
+ specification_url = servers
super().__init__(
# NATS options
@@ -535,45 +490,60 @@ def __init__(
decoder=decoder,
parser=parser,
middlewares=middlewares,
+ routers=routers,
# AsyncAPI
description=description,
- asyncapi_url=asyncapi_url,
+ specification_url=specification_url,
protocol=protocol,
protocol_version=protocol_version,
security=security,
tags=tags,
# logging
- logger=logger,
- log_level=log_level,
- log_fmt=log_fmt,
+ logger_state=make_nats_logger_state(
+ logger=logger,
+ log_level=log_level,
+ log_fmt=log_fmt,
+ ),
# FastDepends args
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
)
- self.__is_connected = False
- self._producer = None
+ self._state.patch_value(
+ producer=NatsFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
+
+ self._js_producer = NatsJSFastProducer(
+ decoder=self._decoder,
+ parser=self._parser,
+ )
+
+ self._kv_declarer = KVBucketDeclarer()
+ self._os_declarer = OSBucketDeclarer()
- # JS options
- self.stream = None
- self._js_producer = None
- self._kv_declarer = None
- self._os_declarer = None
+ self._connection_state: BrokerState = EmptyBrokerState()
@override
async def connect( # type: ignore[override]
self,
- servers: Annotated[
- Union[str, Iterable[str]],
- Doc("NATS cluster addresses to connect."),
- ] = EMPTY,
+ servers: Union[str, Iterable[str]] = EMPTY,
**kwargs: "Unpack[NatsInitKwargs]",
) -> "Client":
"""Connect broker object to NATS cluster.
To startup subscribers too you should use `broker.start()` after/instead this method.
+
+ Args:
+ servers: NATS cluster addresses to connect.
+ **kwargs: all other options from connection signature.
+
+ Returns:
+ `nats.aio.Client` connected object.
"""
if servers is not EMPTY:
connect_kwargs: AnyDict = {
@@ -586,314 +556,243 @@ async def connect( # type: ignore[override]
return await super().connect(**connect_kwargs)
async def _connect(self, **kwargs: Any) -> "Client":
- self.__is_connected = True
connection = await nats.connect(**kwargs)
- self._producer = NatsFastProducer(
- connection=connection,
- decoder=self._decoder,
- parser=self._parser,
- )
-
- stream = self.stream = connection.jetstream()
+ stream = connection.jetstream()
- self._js_producer = NatsJSFastProducer(
- connection=stream,
- decoder=self._decoder,
- parser=self._parser,
- )
+ self._producer.connect(connection)
+ self._js_producer.connect(stream)
- self._kv_declarer = KVBucketDeclarer(stream)
- self._os_declarer = OSBucketDeclarer(stream)
+ self._kv_declarer.connect(stream)
+ self._os_declarer.connect(stream)
+ self._connection_state = ConnectedState(connection, stream)
return connection
- async def _close(
+ async def close(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> None:
- self._producer = None
- self._js_producer = None
- self.stream = None
+ await super().close(exc_type, exc_val, exc_tb)
if self._connection is not None:
await self._connection.drain()
+ self._connection = None
- await super()._close(exc_type, exc_val, exc_tb)
- self.__is_connected = False
+ self._producer.disconnect()
+ self._js_producer.disconnect()
+ self._kv_declarer.disconnect()
+ self._os_declarer.disconnect()
+
+ self._connection_state = EmptyBrokerState()
async def start(self) -> None:
"""Connect broker to NATS cluster and startup all subscribers."""
- await super().start()
+ await self.connect()
+ self._setup()
- assert self._connection # nosec B101
- assert self.stream, "Broker should be started already" # nosec B101
- assert self._producer, "Broker should be started already" # nosec B101
+ stream_context = self._connection_state.stream
for stream in filter(
lambda x: x.declare,
self._stream_builder.objects.values(),
):
try:
- await self.stream.add_stream(
+ await stream_context.add_stream(
config=stream.config,
subjects=stream.subjects,
)
except BadRequestError as e: # noqa: PERF203
- log_context = AsyncAPISubscriber.build_log_context(
+ log_context = LogicSubscriber.build_log_context(
message=None,
subject="",
queue="",
stream=stream.name,
)
+ logger_state = self._state.get().logger_state
+
if (
e.description
== "stream name already in use with a different configuration"
):
- old_config = (await self.stream.stream_info(stream.name)).config
-
- self._log(str(e), logging.WARNING, log_context)
- await self.stream.update_stream(
- config=stream.config,
- subjects=tuple(
- set(old_config.subjects or ()).union(stream.subjects)
- ),
- )
+ old_config = (await stream_context.stream_info(stream.name)).config
+
+ logger_state.log(str(e), logging.WARNING, log_context)
+
+ for subject in old_config.subjects or ():
+ stream.add_subject(subject)
+
+ await stream_context.update_stream(config=stream.config)
else: # pragma: no cover
- self._log(str(e), logging.ERROR, log_context, exc_info=e)
+ logger_state.log(
+ str(e),
+ logging.ERROR,
+ log_context,
+ exc_info=e,
+ )
finally:
# prevent from double declaration
stream.declare = False
- # TODO: filter by already running handlers after TestClient refactor
- for handler in self._subscribers.values():
- self._log(
- f"`{handler.call_name}` waiting for messages",
- extra=handler.get_log_context(None),
- )
- await handler.start()
+ await super().start()
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: None = None,
+ timeout: Optional[float] = None,
+ ) -> None: ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> "PubAck": ...
@override
- async def publish( # type: ignore[override]
+ async def publish(
self,
- message: Annotated[
- "SendableMessage",
- Doc(
- "Message body to send. "
- "Can be any encodable object (native python types or `pydantic.BaseModel`)."
- ),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject to send message."),
- ],
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc(
- "Message headers to store metainformation. "
- "**content-type** and **correlation_id** will be set automatically by framework anyway."
- ),
- ] = None,
- reply_to: Annotated[
- str,
- Doc("NATS subject name to send response."),
- ] = "",
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- stream: Annotated[
- Optional[str],
- Doc(
- "This option validates that the target subject is in presented stream. "
- "Can be omitted without any effect."
- ),
- ] = None,
- timeout: Annotated[
- Optional[float],
- Doc("Timeout to send message to NATS."),
- ] = None,
- *,
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- ) -> Optional["DecodedMessage"]:
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> Optional["PubAck"]:
"""Publish message directly.
This method allows you to publish message in not AsyncAPI-documented way. You can use it in another frameworks
applications or to publish messages from time to time.
Please, use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead in a regular way.
- """
- publish_kwargs = {
- "subject": subject,
- "headers": headers,
- "reply_to": reply_to,
- "rpc": rpc,
- "rpc_timeout": rpc_timeout,
- "raise_timeout": raise_timeout,
- }
-
- producer: Optional[ProducerProto]
- if stream is None:
- producer = self._producer
- else:
- producer = self._js_producer
- publish_kwargs.update(
- {
- "stream": stream,
- "timeout": timeout,
- }
- )
- return await super().publish(
- message,
- producer=producer,
+ Args:
+ message:
+ Message body to send.
+ Can be any encodable object (native python types or `pydantic.BaseModel`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
+ **content-type** and **correlation_id** will be set automatically by the framework anyway.
+ reply_to:
+ NATS subject name to send response.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ stream:
+ This option validates that the target subject is in the presented stream.
+ Can be omitted without any effect if you don't want a PubAck frame.
+ timeout:
+ Timeout to send message to NATS.
+
+ Returns:
+ `None` if you publish a regular message.
+ `faststream.nats.PubAck` if you publish a message to a stream.
+ """
+ cmd = NatsPublishCommand(
+ message=message,
correlation_id=correlation_id or gen_cor_id(),
- **publish_kwargs,
+ subject=subject,
+ headers=headers,
+ reply_to=reply_to,
+ stream=stream,
+ timeout=timeout,
+ _publish_type=PublishType.PUBLISH,
)
+ producer = self._js_producer if stream is not None else self._producer
+
+ return await super()._basic_publish(cmd, producer=producer)
+
@override
async def request( # type: ignore[override]
self,
- message: Annotated[
- "SendableMessage",
- Doc(
- "Message body to send. "
- "Can be any encodable object (native python types or `pydantic.BaseModel`)."
- ),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject to send message."),
- ],
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc(
- "Message headers to store metainformation. "
- "**content-type** and **correlation_id** will be set automatically by framework anyway."
- ),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- stream: Annotated[
- Optional[str],
- Doc(
- "This option validates that the target subject is in presented stream. "
- "Can be omitted without any effect."
- ),
- ] = None,
- timeout: Annotated[
- float,
- Doc("Timeout to send message to NATS."),
- ] = 0.5,
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: float = 0.5,
) -> "NatsMessage":
- publish_kwargs = {
- "subject": subject,
- "headers": headers,
- "timeout": timeout,
- }
-
- producer: Optional[ProducerProto]
- if stream is None:
- producer = self._producer
-
- else:
- producer = self._js_producer
- publish_kwargs.update({"stream": stream})
-
- msg: NatsMessage = await super().request(
- message,
- producer=producer,
+ """Make a synchronous request to outer subscriber.
+
+ If the outer subscriber listens to the subject via a stream, you should set up the same **stream** explicitly.
+ Otherwise, you will receive a confirmation frame as the response.
+
+ Args:
+ message:
+ Message body to send.
+ Can be any encodable object (native python types or `pydantic.BaseModel`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
+ **content-type** and **correlation_id** will be set automatically by the framework anyway.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ stream:
+ JetStream name. This option is required if your target subscriber listens for events using JetStream.
+ timeout:
+ Timeout to send message to NATS.
+
+ Returns:
+ `faststream.nats.message.NatsMessage` object as an outer subscriber response.
+ """
+ cmd = NatsPublishCommand(
+ message=message,
correlation_id=correlation_id or gen_cor_id(),
- **publish_kwargs,
+ subject=subject,
+ headers=headers,
+ timeout=timeout,
+ stream=stream,
+ _publish_type=PublishType.REQUEST,
)
+
+ producer = self._js_producer if stream is not None else self._producer
+
+ msg: NatsMessage = await super()._basic_request(cmd, producer=producer)
return msg
@override
def setup_subscriber( # type: ignore[override]
self,
- subscriber: "AsyncAPISubscriber",
+ subscriber: "LogicSubscriber",
) -> None:
- connection: Union[
- Client,
- JetStreamContext,
- KVBucketDeclarer,
- OSBucketDeclarer,
- None,
- ] = None
-
- if getattr(subscriber, "kv_watch", None):
- connection = self._kv_declarer
-
- elif getattr(subscriber, "obj_watch", None):
- connection = self._os_declarer
-
- elif getattr(subscriber, "stream", None):
- connection = self.stream
-
- else:
- connection = self._connection
-
return super().setup_subscriber(
subscriber,
- connection=connection,
+ connection_state=self._connection_state,
+ kv_declarer=self._kv_declarer,
+ os_declarer=self._os_declarer,
)
@override
def setup_publisher( # type: ignore[override]
self,
- publisher: "AsyncAPIPublisher",
+ publisher: "LogicPublisher",
) -> None:
- producer: Optional[ProducerProto] = None
-
- if publisher.stream is not None:
- if self._js_producer is not None:
- producer = self._js_producer
-
- elif self._producer is not None:
- producer = self._producer
+ producer = self._js_producer if publisher.stream is not None else self._producer
super().setup_publisher(publisher, producer=producer)
@@ -914,8 +813,6 @@ async def key_value(
# custom
declare: bool = True,
) -> "KeyValue":
- assert self._kv_declarer, "Broker should be connected already." # nosec B101
-
return await self._kv_declarer.create_key_value(
bucket=bucket,
description=description,
@@ -944,8 +841,6 @@ async def object_storage(
# custom
declare: bool = True,
) -> "ObjectStore":
- assert self._os_declarer, "Broker should be connected already." # nosec B101
-
return await self._os_declarer.create_object_store(
bucket=bucket,
description=description,
@@ -961,17 +856,20 @@ def _log_connection_broken(
self,
error_cb: Optional["ErrorCallback"] = None,
) -> "ErrorCallback":
- c = AsyncAPISubscriber.build_log_context(None, "")
+ c = LogicSubscriber.build_log_context(None, "")
async def wrapper(err: Exception) -> None:
if error_cb is not None:
await error_cb(err)
- if isinstance(err, Error) and self.__is_connected:
- self._log(
- f"Connection broken with {err!r}", logging.WARNING, c, exc_info=err
+ if isinstance(err, Error) and self._connection_state:
+ self._state.get().logger_state.log(
+ f"Connection broken with {err!r}",
+ logging.WARNING,
+ c,
+ exc_info=err,
)
- self.__is_connected = False
+ self._connection_state = self._connection_state.brake()
return wrapper
@@ -979,15 +877,17 @@ def _log_reconnected(
self,
cb: Optional["Callback"] = None,
) -> "Callback":
- c = AsyncAPISubscriber.build_log_context(None, "")
+ c = LogicSubscriber.build_log_context(None, "")
async def wrapper() -> None:
if cb is not None:
await cb()
- if not self.__is_connected:
- self._log("Connection established", logging.INFO, c)
- self.__is_connected = True
+ if not self._connection_state:
+ self._state.get().logger_state.log(
+ "Connection established", logging.INFO, c
+ )
+ self._connection_state = self._connection_state.reconnect()
return wrapper
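
The reworked `request()` docstring above is worth restating with code: when the target subscriber consumes through JetStream, the same `stream` must be passed to `request()`, or the JetStream confirmation frame comes back instead of the handler's reply. A sketch with illustrative subject and stream names:

```python
import asyncio

from faststream.nats import NatsBroker

broker = NatsBroker()

@broker.subscriber("math.sum", stream="MATH")
async def add(numbers: list[int]) -> int:
    return sum(numbers)

async def main() -> None:
    await broker.start()
    try:
        # Same stream as the subscriber, so the real reply comes back
        # rather than the JetStream confirmation frame.
        response = await broker.request(
            [1, 2, 3],
            subject="math.sum",
            stream="MATH",
            timeout=3.0,
        )
        print(await response.decode())  # -> 6
    finally:
        await broker.close()

asyncio.run(main())
```
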
diff --git a/faststream/nats/broker/logging.py b/faststream/nats/broker/logging.py
index 5e2572ddcb..d67cb8e4bf 100644
--- a/faststream/nats/broker/logging.py
+++ b/faststream/nats/broker/logging.py
@@ -1,74 +1,85 @@
import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Optional
+from functools import partial
+from typing import TYPE_CHECKING, Optional
-from nats.aio.client import Client
-from nats.aio.msg import Msg
-
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.log.logging import get_broker_logger
-from faststream.types import EMPTY
+from faststream._internal.log.logging import get_broker_logger
+from faststream._internal.state.logger import (
+ DefaultLoggerStorage,
+ make_logger_state,
+)
if TYPE_CHECKING:
- from faststream.types import LoggerProto
-
-
-class NatsLoggingBroker(BrokerUsecase[Msg, Client]):
- """A class that extends the LoggingMixin class and adds additional functionality for logging NATS related information."""
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
- _max_queue_len: int
- _max_subject_len: int
- __max_msg_id_ln: ClassVar[int] = 10
+class NatsParamsStorage(DefaultLoggerStorage):
def __init__(
self,
- *args: Any,
- logger: Optional["LoggerProto"] = EMPTY,
- log_level: int = logging.INFO,
- log_fmt: Optional[str] = None,
- **kwargs: Any,
+ log_fmt: Optional[str],
) -> None:
- """Initialize the NATS logging mixin."""
- super().__init__(
- *args,
- logger=logger,
- # TODO: generate unique logger names to not share between brokers
- default_logger=get_broker_logger(
- name="nats",
- default_context={
- "subject": "",
- "stream": "",
- "queue": "",
- },
- message_id_ln=self.__max_msg_id_ln,
- ),
- log_level=log_level,
- log_fmt=log_fmt,
- **kwargs,
- )
+ super().__init__(log_fmt)
self._max_queue_len = 0
self._max_stream_len = 0
self._max_subject_len = 4
- def get_fmt(self) -> str:
- """Fallback method to get log format if `log_fmt` if not specified."""
- return (
- "%(asctime)s %(levelname)-8s - "
- + (f"%(stream)-{self._max_stream_len}s | " if self._max_stream_len else "")
- + (f"%(queue)-{self._max_queue_len}s | " if self._max_queue_len else "")
- + f"%(subject)-{self._max_subject_len}s | "
- + f"%(message_id)-{self.__max_msg_id_ln}s - "
- "%(message)s"
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ self._max_subject_len = max(
+ (
+ self._max_subject_len,
+ len(params.get("subject", "")),
+ ),
+ )
+ self._max_queue_len = max(
+ (
+ self._max_queue_len,
+ len(params.get("queue", "")),
+ ),
+ )
+ self._max_stream_len = max(
+ (
+ self._max_stream_len,
+ len(params.get("stream", "")),
+ ),
+ )
+
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
+ message_id_ln = 10
+
+ # TODO: generate unique logger names to not share between brokers
+ return get_broker_logger(
+ name="nats",
+ default_context={
+ "subject": "",
+ "stream": "",
+ "queue": "",
+ },
+ message_id_ln=message_id_ln,
+ fmt=self._log_fmt
+ or "".join((
+ "%(asctime)s %(levelname)-8s - ",
+ (
+ f"%(stream)-{self._max_stream_len}s | "
+ if self._max_stream_len
+ else ""
+ ),
+ (f"%(queue)-{self._max_queue_len}s | " if self._max_queue_len else ""),
+ f"%(subject)-{self._max_subject_len}s | ",
+ f"%(message_id)-{message_id_ln}s - ",
+ "%(message)s",
+ )),
+ context=context,
+ log_level=self.logger_log_level,
)
- def _setup_log_context(
- self,
- *,
- queue: Optional[str] = None,
- subject: Optional[str] = None,
- stream: Optional[str] = None,
- ) -> None:
- """Setup subscriber's information to generate default log format."""
- self._max_subject_len = max((self._max_subject_len, len(subject or "")))
- self._max_queue_len = max((self._max_queue_len, len(queue or "")))
- self._max_stream_len = max((self._max_stream_len, len(stream or "")))
+
+make_nats_logger_state = partial(
+ make_logger_state,
+ default_storage_cls=NatsParamsStorage,
+)
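
`NatsParamsStorage` widens its column counters in `setup_log_contest` as subscribers register, then bakes those widths into the default log format. The helper below reproduces, standalone, the format string that `get_logger` assembles (same logic, no FastStream imports needed):

```python
def build_default_fmt(
    max_stream: int,
    max_queue: int,
    max_subject: int,
    message_id_ln: int = 10,
) -> str:
    # Mirrors the "".join((...)) expression in NatsParamsStorage.get_logger:
    # the stream/queue columns only appear once a subscriber has used them.
    return "".join((
        "%(asctime)s %(levelname)-8s - ",
        f"%(stream)-{max_stream}s | " if max_stream else "",
        f"%(queue)-{max_queue}s | " if max_queue else "",
        f"%(subject)-{max_subject}s | ",
        f"%(message_id)-{message_id_ln}s - ",
        "%(message)s",
    ))

# Fresh storage defaults: no stream/queue columns, subject width 4.
print(build_default_fmt(0, 0, 4))
# %(asctime)s %(levelname)-8s - %(subject)-4s | %(message_id)-10s - %(message)s
```
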
diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py
index ad9cee7404..02ede616cf 100644
--- a/faststream/nats/broker/registrator.py
+++ b/faststream/nats/broker/registrator.py
@@ -1,35 +1,37 @@
-from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Sequence, Union, cast
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, cast
from nats.js import api
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.nats.helpers import StreamBuilder
-from faststream.nats.publisher.asyncapi import AsyncAPIPublisher
+from faststream.nats.publisher.factory import create_publisher
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
-from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber
from faststream.nats.subscriber.factory import create_subscriber
+from faststream.nats.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from nats.aio.msg import Msg
- from faststream.broker.types import (
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
- from faststream.nats.message import NatsBatchMessage, NatsMessage
+ from faststream.nats.message import NatsMessage
+ from faststream.nats.publisher.specified import SpecificationPublisher
class NatsRegistrator(ABCBroker["Msg"]):
"""Includable to NatsBroker router."""
- _subscribers: Dict[int, "AsyncAPISubscriber"]
- _publishers: Dict[int, "AsyncAPIPublisher"]
+ _subscribers: list["SpecificationSubscriber"]
+ _publishers: list["SpecificationPublisher"]
def __init__(self, **kwargs: Any) -> None:
self._stream_builder = StreamBuilder()
@@ -47,7 +49,7 @@ def subscriber( # type: ignore[override]
str,
Doc(
"Subscribers' NATS queue name. Subscribers with same queue name will be load balanced by the NATS "
- "server."
+ "server.",
),
] = "",
pending_msgs_limit: Annotated[
@@ -57,7 +59,7 @@ def subscriber( # type: ignore[override]
"been answered. In case of NATS Core, if that limits exceeds, you will receive NATS 'Slow Consumer' "
"error. "
"That's literally means that your worker can't handle the whole load. In case of NATS JetStream, "
- "you will no longer receive messages until some of delivered messages will be acked in any way."
+ "you will no longer receive messages until some of delivered messages will be acked in any way.",
),
] = None,
pending_bytes_limit: Annotated[
@@ -67,7 +69,7 @@ def subscriber( # type: ignore[override]
"been answered. In case of NATS Core, if that limit exceeds, you will receive NATS 'Slow Consumer' "
"error."
"That's literally means that your worker can't handle the whole load. In case of NATS JetStream, "
- "you will no longer receive messages until some of delivered messages will be acked in any way."
+ "you will no longer receive messages until some of delivered messages will be acked in any way.",
),
] = None,
# Core arguments
@@ -79,7 +81,7 @@ def subscriber( # type: ignore[override]
durable: Annotated[
Optional[str],
Doc(
- "Name of the durable consumer to which the the subscription should be bound."
+ "Name of the durable consumer to which the the subscription should be bound.",
),
] = None,
config: Annotated[
@@ -105,7 +107,7 @@ def subscriber( # type: ignore[override]
headers_only: Annotated[
Optional[bool],
Doc(
- "Should be message delivered without payload, only headers and metadata."
+ "Should be message delivered without payload, only headers and metadata.",
),
] = None,
# pull arguments
@@ -113,7 +115,7 @@ def subscriber( # type: ignore[override]
Union[bool, "PullSub"],
Doc(
"NATS Pull consumer parameters container. "
- "Should be used with `stream` only."
+ "Should be used with `stream` only.",
),
] = False,
kv_watch: Annotated[
@@ -127,22 +129,28 @@ def subscriber( # type: ignore[override]
inbox_prefix: Annotated[
bytes,
Doc(
- "Prefix for generating unique inboxes, subjects with that prefix and NUID."
+ "Prefix for generating unique inboxes, subjects with that prefix and NUID.",
),
] = api.INBOX_PREFIX,
# custom
ack_first: Annotated[
bool,
Doc("Whether to `ack` message at start of consuming or not."),
- ] = False,
+ deprecated(
+ """
+ This option is deprecated and will be removed in 0.7.0 release.
+ Please, use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
stream: Annotated[
Union[str, "JStream", None],
Doc("Subscribe to NATS Stream with `subject` filter."),
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -154,38 +162,29 @@ def subscriber( # type: ignore[override]
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[NatsMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- Union[
- "Filter[NatsMessage]",
- "Filter[NatsBatchMessage]",
- ],
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
max_workers: Annotated[
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -197,14 +196,14 @@ def subscriber( # type: ignore[override]
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> AsyncAPISubscriber:
+ ) -> SpecificationSubscriber:
"""Creates NATS subscriber object.
You can use it as a handler decorator `@broker.subscriber(...)`.
@@ -212,7 +211,7 @@ def subscriber( # type: ignore[override]
stream = self._stream_builder.create(stream)
subscriber = cast(
- AsyncAPISubscriber,
+ "SpecificationSubscriber",
super().subscriber(
create_subscriber(
subject=subject,
@@ -236,16 +235,16 @@ def subscriber( # type: ignore[override]
inbox_prefix=inbox_prefix,
ack_first=ack_first,
# subscriber args
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# AsyncAPI
title_=title,
description_=description,
include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
+ ),
),
)
@@ -253,7 +252,6 @@ def subscriber( # type: ignore[override]
stream.add_subject(subscriber.subject)
return subscriber.add_call(
- filter_=filter,
parser_=parser or self._parser,
decoder_=decoder or self._decoder,
dependencies_=dependencies,
@@ -269,11 +267,11 @@ def publisher( # type: ignore[override]
],
*,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -285,7 +283,7 @@ def publisher( # type: ignore[override]
Union[str, "JStream", None],
Doc(
"This option validates that the target `subject` is in presented stream. "
- "Can be omitted without any effect."
+ "Can be omitted without any effect.",
),
] = None,
timeout: Annotated[
@@ -295,6 +293,10 @@ def publisher( # type: ignore[override]
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -310,14 +312,14 @@ def publisher( # type: ignore[override]
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> "AsyncAPIPublisher":
+ ) -> "SpecificationPublisher":
"""Creates long-living and AsyncAPI-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
@@ -328,9 +330,9 @@ def publisher( # type: ignore[override]
stream = self._stream_builder.create(stream)
publisher = cast(
- AsyncAPIPublisher,
+ "SpecificationPublisher",
super().publisher(
- publisher=AsyncAPIPublisher.create(
+ publisher=create_publisher(
subject=subject,
headers=headers,
# Core
@@ -339,14 +341,14 @@ def publisher( # type: ignore[override]
timeout=timeout,
stream=stream,
# Specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# AsyncAPI
title_=title,
description_=description,
schema_=schema,
include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
+ ),
),
)
@@ -361,19 +363,19 @@ def include_router( # type: ignore[override]
router: "NatsRegistrator",
*,
prefix: str = "",
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
middlewares: Sequence["BrokerMiddleware[Msg]"] = (),
include_in_schema: Optional[bool] = None,
) -> None:
sub_streams = router._stream_builder.objects.copy()
- sub_router_subjects = [sub.subject for sub in router._subscribers.values()]
+ sub_router_subjects = [sub.subject for sub in router._subscribers]
for stream in sub_streams.values():
new_subjects = []
for subj in stream.subjects:
if subj in sub_router_subjects:
- new_subjects.append("".join((self.prefix, subj)))
+ new_subjects.append(f"{self.prefix}{subj}")
else:
new_subjects.append(subj)
stream.subjects = new_subjects
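
The tail of `include_router` above syncs JetStream subjects with the included router's own subscriber subjects. A sketch of typical usage, assuming the `prefix` argument propagates to the router's subjects as in earlier releases:

```python
from faststream.nats import NatsBroker, NatsRouter

router = NatsRouter()

@router.subscriber("orders.created", stream="ORDERS")
async def on_order_created(body: str) -> None:
    print("created:", body)

broker = NatsBroker()

# The router's subscribers are re-registered with the "v1." prefix,
# and the ORDERS stream subjects owned by the router are rewritten
# to match; subjects from other sources stay untouched.
broker.include_router(router, prefix="v1.")
```
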
diff --git a/faststream/nats/broker/state.py b/faststream/nats/broker/state.py
new file mode 100644
index 0000000000..08b5821597
--- /dev/null
+++ b/faststream/nats/broker/state.py
@@ -0,0 +1,78 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from nats.aio.client import Client
+ from nats.js import JetStreamContext
+
+
+class BrokerState(Protocol):
+ stream: "JetStreamContext"
+ connection: "Client"
+
+ def __bool__(self) -> bool: ...
+
+ def brake(self) -> "BrokerState": ...
+
+ def reconnect(self) -> "BrokerState": ...
+
+
+class EmptyBrokerState(BrokerState):
+ @property
+ def connection(self) -> "Client":
+ msg = "Connection is not available yet. Please, connect the broker first."
+ raise IncorrectState(msg)
+
+ @property
+ def stream(self) -> "JetStreamContext":
+ msg = "Stream is not available yet. Please, connect the broker first."
+ raise IncorrectState(msg)
+
+ def brake(self) -> "BrokerState":
+ return self
+
+ def reconnect(self) -> "BrokerState":
+ msg = "You can't reconnect an empty state. Please, connect the broker first."
+ raise IncorrectState(msg)
+
+ def __bool__(self) -> bool:
+ return False
+
+
+class ConnectedState(BrokerState):
+ def __init__(
+ self,
+ connection: "Client",
+ stream: "JetStreamContext",
+ ) -> None:
+ self.connection = connection
+ self.stream = stream
+
+ def __bool__(self) -> bool:
+ return True
+
+ def brake(self) -> "ConnectionBrokenState":
+ return ConnectionBrokenState(
+ connection=self.connection,
+ stream=self.stream,
+ )
+
+
+class ConnectionBrokenState(BrokerState):
+ def __init__(
+ self,
+ connection: "Client",
+ stream: "JetStreamContext",
+ ) -> None:
+ self.connection = connection
+ self.stream = stream
+
+ def __bool__(self) -> bool:
+ return False
+
+ def reconnect(self) -> "ConnectedState":
+ return ConnectedState(
+ connection=self.connection,
+ stream=self.stream,
+ )
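
The three states form a small connection-state machine: empty until `_connect()`, truthy while connected, falsy-but-recoverable after an error callback. A quick walkthrough using only the classes defined above (the `Client`/`JetStreamContext` arguments are stubbed out as comments since they require a live connection):

```python
from faststream.exceptions import IncorrectState
from faststream.nats.broker.state import EmptyBrokerState

state = EmptyBrokerState()
assert not state  # never connected: falsy

try:
    state.reconnect()  # cannot reconnect what was never connected
except IncorrectState as e:
    print(e)

# After NatsBroker._connect() (needs real nats-py objects):
#   state = ConnectedState(connection, stream)   -> truthy
# On the error callback:
#   state = state.brake()                        -> ConnectionBrokenState, falsy
# On the reconnected callback:
#   state = state.reconnect()                    -> ConnectedState again
```
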
diff --git a/faststream/nats/fastapi/__init__.py b/faststream/nats/fastapi/__init__.py
index 56b2eb0f05..2c0acf3c3c 100644
--- a/faststream/nats/fastapi/__init__.py
+++ b/faststream/nats/fastapi/__init__.py
@@ -1,19 +1,18 @@
+from typing import Annotated
+
from nats.aio.client import Client as NatsClient
from nats.js.client import JetStreamContext
-from typing_extensions import Annotated
-from faststream.broker.fastapi.context import Context, ContextRepo, Logger
+from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.nats.broker import NatsBroker as NB
-from faststream.nats.fastapi.fastapi import NatsRouter
from faststream.nats.message import NatsMessage as NM
-from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer
+
+from .fastapi import NatsRouter
NatsMessage = Annotated[NM, Context("message")]
NatsBroker = Annotated[NB, Context("broker")]
Client = Annotated[NatsClient, Context("broker._connection")]
JsClient = Annotated[JetStreamContext, Context("broker._stream")]
-NatsProducer = Annotated[NatsFastProducer, Context("broker._producer")]
-NatsJsProducer = Annotated[NatsJSFastProducer, Context("broker._js_producer")]
__all__ = (
"Client",
@@ -22,8 +21,7 @@
"JsClient",
"Logger",
"NatsBroker",
- "NatsJsProducer",
"NatsMessage",
- "NatsProducer",
+ "NatsMessage",
"NatsRouter",
)
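
With the producer aliases dropped, the FastAPI integration surface is the router plus the `Annotated` context aliases above. A minimal wiring sketch (the server URL is illustrative):

```python
from fastapi import FastAPI

from faststream.nats.fastapi import NatsBroker, NatsMessage, NatsRouter

router = NatsRouter("nats://localhost:4222")

@router.subscriber("events")
async def handle(
    body: str,
    message: NatsMessage,  # Context("message")
    broker: NatsBroker,    # Context("broker")
) -> None:
    print(message.correlation_id, body)

app = FastAPI(lifespan=router.lifespan_context)
app.include_router(router)
```
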
diff --git a/faststream/nats/fastapi/fastapi.py b/faststream/nats/fastapi/fastapi.py
index d1a8ce5846..3b21b2cccc 100644
--- a/faststream/nats/fastapi/fastapi.py
+++ b/faststream/nats/fastapi/fastapi.py
@@ -1,14 +1,11 @@
import logging
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
Optional,
- Sequence,
- Type,
Union,
cast,
)
@@ -30,18 +27,15 @@
from nats.js import api
from starlette.responses import JSONResponse
from starlette.routing import BaseRoute
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.utils import default_filter
+from faststream._internal.constants import EMPTY
+from faststream._internal.fastapi.router import StreamRouter
+from faststream.middlewares import AckPolicy
from faststream.nats.broker import NatsBroker
-from faststream.nats.publisher.asyncapi import AsyncAPIPublisher
-from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber
-from faststream.types import EMPTY
if TYPE_CHECKING:
- import ssl
from enum import Enum
from fastapi import params
@@ -57,18 +51,19 @@
from starlette.responses import Response
from starlette.types import ASGIApp, Lifespan
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
- from faststream.nats.message import NatsBatchMessage, NatsMessage
+ from faststream.nats.message import NatsMessage
+ from faststream.nats.publisher.specified import SpecificationPublisher
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
+ from faststream.nats.subscriber.specified import SpecificationSubscriber
from faststream.security import BaseSecurity
- from faststream.types import AnyDict, LoggerProto
+ from faststream.specification.schema.extra import Tag, TagDict
class NatsRouter(StreamRouter["Msg"]):
@@ -102,7 +97,8 @@ def __init__(
Doc("Callback to report when a new server joins the cluster."),
] = None,
reconnected_cb: Annotated[
- Optional["Callback"], Doc("Callback to report success reconnection.")
+ Optional["Callback"],
+ Doc("Callback to report success reconnection."),
] = None,
name: Annotated[
Optional[str],
@@ -112,7 +108,7 @@ def __init__(
bool,
Doc(
"Turn on NATS server pedantic mode that performs extra checks on the protocol. "
- "https://docs.nats.io/using-nats/developer/connecting/misc#turn-on-pedantic-mode"
+ "https://docs.nats.io/using-nats/developer/connecting/misc#turn-on-pedantic-mode",
),
] = False,
verbose: Annotated[
@@ -146,32 +142,21 @@ def __init__(
dont_randomize: Annotated[
bool,
Doc(
- "Boolean indicating should client randomly shuffle servers list for reconnection randomness."
+ "Boolean indicating should client randomly shuffle servers list for reconnection randomness.",
),
] = False,
flusher_queue_size: Annotated[
- int, Doc("Max count of commands awaiting to be flushed to the socket")
+ int,
+ Doc("Max count of commands awaiting to be flushed to the socket"),
] = DEFAULT_MAX_FLUSHER_QUEUE_SIZE,
no_echo: Annotated[
bool,
Doc("Boolean indicating should commands be echoed."),
] = False,
- tls: Annotated[
- Optional["ssl.SSLContext"],
- Doc("Some SSL context to make NATS connections secure."),
- ] = None,
tls_hostname: Annotated[
Optional[str],
Doc("Hostname for TLS."),
] = None,
- user: Annotated[
- Optional[str],
- Doc("Username for NATS auth."),
- ] = None,
- password: Annotated[
- Optional[str],
- Doc("Username password for NATS auth."),
- ] = None,
token: Annotated[
Optional[str],
Doc("Auth token for NATS auth."),
@@ -185,14 +170,14 @@ def __init__(
Doc(
"A callback used to sign a nonce from the server while "
"authenticating with nkeys. The user should sign the nonce and "
- "return the base64 encoded signature."
+ "return the base64 encoded signature.",
),
] = None,
user_jwt_cb: Annotated[
Optional["JWTCallback"],
Doc(
"A callback used to fetch and return the account "
- "signed JWT for this user."
+ "signed JWT for this user.",
),
] = None,
user_credentials: Annotated[
@@ -210,7 +195,7 @@ def __init__(
inbox_prefix: Annotated[
Union[str, bytes],
Doc(
- "Prefix for generating unique inboxes, subjects with that prefix and NUID.ß"
+ "Prefix for generating unique inboxes, subjects with that prefix and NUID.ß",
),
] = DEFAULT_INBOX_PREFIX,
pending_size: Annotated[
@@ -225,7 +210,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -244,10 +229,10 @@ def __init__(
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Union[str, Iterable[str], None],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -263,10 +248,10 @@ def __init__(
Optional[str],
Doc("AsyncAPI server description."),
] = None,
- asyncapi_tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ specification_tags: Annotated[
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -285,13 +270,13 @@ def __init__(
bool,
Doc(
"Whether to add broker to app scope in lifespan. "
- "You should disable this option at old ASGI servers."
+ "You should disable this option at old ASGI servers.",
),
] = True,
schema_url: Annotated[
Optional[str],
Doc(
- "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all."
+ "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all.",
),
] = "/asyncapi",
# FastAPI args
@@ -300,7 +285,7 @@ def __init__(
Doc("An optional path prefix for the router."),
] = "",
tags: Annotated[
- Optional[List[Union[str, "Enum"]]],
+ Optional[list[Union[str, "Enum"]]],
Doc(
"""
A list of tags to be applied to all the *path operations* in this
@@ -310,7 +295,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
dependencies: Annotated[
@@ -322,22 +307,22 @@ def __init__(
Read more about it in the
[FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
default_response_class: Annotated[
- Type["Response"],
+ type["Response"],
Doc(
"""
The default response class to be used.
Read more in the
[FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
- """
+ """,
),
] = Default(JSONResponse),
responses: Annotated[
- Optional[Dict[Union[int, str], "AnyDict"]],
+ Optional[dict[Union[int, str], "AnyDict"]],
Doc(
"""
Additional responses to be shown in OpenAPI.
@@ -349,11 +334,11 @@ def __init__(
And in the
[FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
callbacks: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
OpenAPI callbacks that should apply to all *path operations* in this
@@ -363,11 +348,11 @@ def __init__(
Read more about it in the
[FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
- """
+ """,
),
] = None,
routes: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
**Note**: you probably shouldn't use this parameter, it is inherited
@@ -376,7 +361,7 @@ def __init__(
---
A list of routes to serve incoming HTTP and WebSocket requests.
- """
+ """,
),
deprecated(
"""
@@ -385,7 +370,7 @@ def __init__(
In FastAPI, you normally would use the *path operation methods*,
like `router.get()`, `router.post()`, etc.
- """
+ """,
),
] = None,
redirect_slashes: Annotated[
@@ -394,7 +379,7 @@ def __init__(
"""
Whether to detect and redirect slashes in URLs when the client doesn't
use the same format.
- """
+ """,
),
] = True,
default: Annotated[
@@ -403,7 +388,7 @@ def __init__(
"""
Default function handler for this router. Used to handle
404 Not Found errors.
- """
+ """,
),
] = None,
dependency_overrides_provider: Annotated[
@@ -414,18 +399,18 @@ def __init__(
You shouldn't need to use it. It normally points to the `FastAPI` app
object.
- """
+ """,
),
] = None,
route_class: Annotated[
- Type["APIRoute"],
+ type["APIRoute"],
Doc(
"""
Custom route (*path operation*) class to be used by this router.
Read more about it in the
[FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router).
- """
+ """,
),
] = APIRoute,
on_startup: Annotated[
@@ -437,7 +422,7 @@ def __init__(
You should instead use the `lifespan` handlers.
Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
on_shutdown: Annotated[
@@ -450,7 +435,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
lifespan: Annotated[
@@ -462,7 +447,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
deprecated: Annotated[
@@ -475,7 +460,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
include_in_schema: Annotated[
@@ -489,7 +474,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi).
- """
+ """,
),
] = True,
generate_unique_id_function: Annotated[
@@ -504,7 +489,7 @@ def __init__(
Read more about it in the
[FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
- """
+ """,
),
] = Default(generate_unique_id),
) -> None:
@@ -527,10 +512,7 @@ def __init__(
dont_randomize=dont_randomize,
flusher_queue_size=flusher_queue_size,
no_echo=no_echo,
- tls=tls,
tls_hostname=tls_hostname,
- user=user,
- password=password,
token=token,
drain_timeout=drain_timeout,
signature_cb=signature_cb,
@@ -547,14 +529,14 @@ def __init__(
parser=parser,
middlewares=middlewares,
security=security,
- asyncapi_url=asyncapi_url,
+ specification_url=specification_url,
protocol=protocol,
protocol_version=protocol_version,
description=description,
logger=logger,
log_level=log_level,
log_fmt=log_fmt,
- asyncapi_tags=asyncapi_tags,
+ specification_tags=specification_tags,
schema_url=schema_url,
setup_state=setup_state,
# FastAPI kwargs
@@ -587,7 +569,7 @@ def subscriber( # type: ignore[override]
str,
Doc(
"Subscribers' NATS queue name. Subscribers with same queue name will be load balanced by the NATS "
- "server."
+ "server.",
),
] = "",
pending_msgs_limit: Annotated[
@@ -597,7 +579,7 @@ def subscriber( # type: ignore[override]
"been answered. In case of NATS Core, if that limits exceeds, you will receive NATS 'Slow Consumer' "
"error. "
"That's literally means that your worker can't handle the whole load. In case of NATS JetStream, "
- "you will no longer receive messages until some of delivered messages will be acked in any way."
+ "you will no longer receive messages until some of delivered messages will be acked in any way.",
),
] = None,
pending_bytes_limit: Annotated[
@@ -607,7 +589,7 @@ def subscriber( # type: ignore[override]
"been answered. In case of NATS Core, if that limit exceeds, you will receive NATS 'Slow Consumer' "
"error."
"That's literally means that your worker can't handle the whole load. In case of NATS JetStream, "
- "you will no longer receive messages until some of delivered messages will be acked in any way."
+ "you will no longer receive messages until some of delivered messages will be acked in any way.",
),
] = None,
# Core arguments
@@ -619,7 +601,7 @@ def subscriber( # type: ignore[override]
durable: Annotated[
Optional[str],
Doc(
- "Name of the durable consumer to which the the subscription should be bound."
+ "Name of the durable consumer to which the the subscription should be bound.",
),
] = None,
config: Annotated[
@@ -645,7 +627,7 @@ def subscriber( # type: ignore[override]
headers_only: Annotated[
Optional[bool],
Doc(
- "Should be message delivered without payload, only headers and metadata."
+ "Should be message delivered without payload, only headers and metadata.",
),
] = None,
# pull arguments
@@ -653,7 +635,7 @@ def subscriber( # type: ignore[override]
Optional["PullSub"],
Doc(
"NATS Pull consumer parameters container. "
- "Should be used with `stream` only."
+ "Should be used with `stream` only.",
),
] = None,
kv_watch: Annotated[
@@ -667,14 +649,20 @@ def subscriber( # type: ignore[override]
inbox_prefix: Annotated[
bytes,
Doc(
- "Prefix for generating unique inboxes, subjects with that prefix and NUID."
+ "Prefix for generating unique inboxes, subjects with that prefix and NUID.",
),
] = api.INBOX_PREFIX,
# custom
ack_first: Annotated[
bool,
Doc("Whether to `ack` message at start of consuming or not."),
- ] = False,
+ deprecated(
+ """
+ This option is deprecated and will be removed in the 0.7.0 release.
+ Please use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
stream: Annotated[
Union[str, "JStream", None],
Doc("Subscribe to NATS Stream with `subject` filter."),
@@ -694,38 +682,29 @@ def subscriber( # type: ignore[override]
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[NatsMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- Union[
- "Filter[NatsMessage]",
- "Filter[NatsBatchMessage]",
- ],
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
max_workers: Annotated[
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -737,7 +716,7 @@ def subscriber( # type: ignore[override]
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
@@ -776,7 +755,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -788,7 +767,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -800,7 +779,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -812,7 +791,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -830,7 +809,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -847,7 +826,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -864,12 +843,12 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
- ) -> "AsyncAPISubscriber":
+ ) -> "SpecificationSubscriber":
return cast(
- AsyncAPISubscriber,
+ "SpecificationSubscriber",
super().subscriber(
subject=subject,
queue=queue,
@@ -892,9 +871,8 @@ def subscriber( # type: ignore[override]
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
max_workers=max_workers,
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
title=title,
@@ -920,11 +898,11 @@ def publisher( # type: ignore[override]
Doc("NATS subject to send message."),
],
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -936,7 +914,7 @@ def publisher( # type: ignore[override]
Union[str, "JStream", None],
Doc(
"This option validates that the target `subject` is in presented stream. "
- "Can be omitted without any effect."
+ "Can be omitted without any effect.",
),
] = None,
timeout: Annotated[
@@ -946,6 +924,10 @@ def publisher( # type: ignore[override]
# specific
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -961,14 +943,14 @@ def publisher( # type: ignore[override]
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> AsyncAPIPublisher:
+ ) -> "SpecificationPublisher":
return self.broker.publisher(
subject,
headers=headers,
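
To make the argument renames above concrete, here is a hypothetical construction of the FastAPI-integration router. The server URLs are illustrative assumptions; only `specification_url` and `specification_tags` (with its new `()` default) come from this diff:

```python
# Hypothetical usage sketch -- server addresses are made up.
from faststream.nats.fastapi import NatsRouter

router = NatsRouter(
    "nats://localhost:4222",
    specification_url="nats://public.demo:4222",  # was: asyncapi_url
    specification_tags=(),                        # was: asyncapi_tags=None
    schema_url="/asyncapi",
)
```
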
diff --git a/faststream/nats/helpers/bucket_declarer.py b/faststream/nats/helpers/bucket_declarer.py
index 916b706254..617eb4edad 100644
--- a/faststream/nats/helpers/bucket_declarer.py
+++ b/faststream/nats/helpers/bucket_declarer.py
@@ -1,7 +1,9 @@
-from typing import TYPE_CHECKING, Dict, Optional
+from typing import TYPE_CHECKING, Optional
from nats.js.api import KeyValueConfig
+from .state import ConnectedState, ConnectionState, EmptyConnectionState
+
if TYPE_CHECKING:
from nats.js import JetStreamContext
from nats.js.api import Placement, RePublish, StorageType
@@ -9,12 +11,19 @@
class KVBucketDeclarer:
- buckets: Dict[str, "KeyValue"]
+ buckets: dict[str, "KeyValue"]
- def __init__(self, connection: "JetStreamContext") -> None:
- self._connection = connection
+ def __init__(self) -> None:
self.buckets = {}
+ self.__state: ConnectionState[JetStreamContext] = EmptyConnectionState()
+
+ def connect(self, connection: "JetStreamContext") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
async def create_key_value(
self,
bucket: str,
@@ -34,7 +43,7 @@ async def create_key_value(
) -> "KeyValue":
if (key_value := self.buckets.get(bucket)) is None:
if declare:
- key_value = await self._connection.create_key_value(
+ key_value = await self.__state.connection.create_key_value(
config=KeyValueConfig(
bucket=bucket,
description=description,
@@ -47,10 +56,10 @@ async def create_key_value(
placement=placement,
republish=republish,
direct=direct,
- )
+ ),
)
else:
- key_value = await self._connection.key_value(bucket)
+ key_value = await self.__state.connection.key_value(bucket)
self.buckets[bucket] = key_value
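
The declarer no longer takes the JetStream context in its constructor; it starts in an empty connection state and is wired up later. A minimal sketch, assuming `js` is an established `JetStreamContext` and the remaining `create_key_value()` keywords keep their defaults:

```python
from faststream.nats.helpers.bucket_declarer import KVBucketDeclarer

async def declare_demo_bucket(js) -> None:
    declarer = KVBucketDeclarer()  # no connection required at construction time
    declarer.connect(js)           # bind the JetStream context once available
    kv = await declarer.create_key_value(bucket="demo-bucket", declare=True)
    declarer.disconnect()          # subsequent calls would raise IncorrectState
```

`OSBucketDeclarer` in the next file follows the same connect/disconnect contract.
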
diff --git a/faststream/nats/helpers/obj_storage_declarer.py b/faststream/nats/helpers/obj_storage_declarer.py
index 1d2ae50715..f0f31918d7 100644
--- a/faststream/nats/helpers/obj_storage_declarer.py
+++ b/faststream/nats/helpers/obj_storage_declarer.py
@@ -1,7 +1,9 @@
-from typing import TYPE_CHECKING, Dict, Optional
+from typing import TYPE_CHECKING, Optional
from nats.js.api import ObjectStoreConfig
+from .state import ConnectedState, ConnectionState, EmptyConnectionState
+
if TYPE_CHECKING:
from nats.js import JetStreamContext
from nats.js.api import Placement, StorageType
@@ -9,12 +11,19 @@
class OSBucketDeclarer:
- buckets: Dict[str, "ObjectStore"]
+ buckets: dict[str, "ObjectStore"]
- def __init__(self, connection: "JetStreamContext") -> None:
- self._connection = connection
+ def __init__(self) -> None:
self.buckets = {}
+ self.__state: ConnectionState[JetStreamContext] = EmptyConnectionState()
+
+ def connect(self, connection: "JetStreamContext") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
async def create_object_store(
self,
bucket: str,
@@ -30,7 +39,7 @@ async def create_object_store(
) -> "ObjectStore":
if (object_store := self.buckets.get(bucket)) is None:
if declare:
- object_store = await self._connection.create_object_store(
+ object_store = await self.__state.connection.create_object_store(
bucket=bucket,
config=ObjectStoreConfig(
bucket=bucket,
@@ -43,7 +52,7 @@ async def create_object_store(
),
)
else:
- object_store = await self._connection.object_store(bucket)
+ object_store = await self.__state.connection.object_store(bucket)
self.buckets[bucket] = object_store
diff --git a/faststream/nats/helpers/object_builder.py b/faststream/nats/helpers/object_builder.py
index 5d40a44da6..7e9fb86f13 100644
--- a/faststream/nats/helpers/object_builder.py
+++ b/faststream/nats/helpers/object_builder.py
@@ -1,4 +1,4 @@
-from typing import Dict, Optional, Union
+from typing import Optional, Union
from faststream.nats.schemas import JStream
@@ -8,7 +8,7 @@ class StreamBuilder:
__slots__ = ("objects",)
- objects: Dict[str, "JStream"]
+ objects: dict[str, "JStream"]
def __init__(self) -> None:
"""Initialize the builder."""
diff --git a/faststream/nats/helpers/state.py b/faststream/nats/helpers/state.py
new file mode 100644
index 0000000000..91c9f84ff7
--- /dev/null
+++ b/faststream/nats/helpers/state.py
@@ -0,0 +1,27 @@
+from typing import Protocol, TypeVar
+
+from nats.aio.client import Client
+from nats.js import JetStreamContext
+
+from faststream.exceptions import IncorrectState
+
+ClientT = TypeVar("ClientT", Client, JetStreamContext)
+
+
+class ConnectionState(Protocol[ClientT]):
+ connection: ClientT
+
+
+class EmptyConnectionState(ConnectionState[ClientT]):
+ __slots__ = ()
+
+ @property
+ def connection(self) -> ClientT:
+ raise IncorrectState
+
+
+class ConnectedState(ConnectionState[ClientT]):
+ __slots__ = ("connection",)
+
+ def __init__(self, connection: ClientT) -> None:
+ self.connection = connection
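
A minimal behavioral sketch of the new state objects: the empty state turns "used before connect" bugs into an explicit `IncorrectState` error instead of an `AttributeError` on a missing connection attribute.

```python
from faststream.exceptions import IncorrectState
from faststream.nats.helpers.state import ConnectedState, EmptyConnectionState

state = EmptyConnectionState()
try:
    _ = state.connection  # the property raises until a connection is provided
except IncorrectState:
    print("used before broker connect")

state = ConnectedState(connection=object())  # real code passes a nats Client
assert state.connection is not None
```
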
diff --git a/faststream/nats/message.py b/faststream/nats/message.py
index 5e5d89fd86..ce7486aceb 100644
--- a/faststream/nats/message.py
+++ b/faststream/nats/message.py
@@ -1,10 +1,10 @@
-from typing import List, Union
+from typing import Optional
from nats.aio.msg import Msg
from nats.js.api import ObjectInfo
from nats.js.kv import KeyValue
-from faststream.broker.message import StreamMessage
+from faststream.message import StreamMessage
class NatsMessage(StreamMessage[Msg]):
@@ -24,7 +24,7 @@ async def ack_sync(self) -> None:
async def nack(
self,
- delay: Union[int, float, None] = None,
+ delay: Optional[float] = None,
) -> None:
if not self.raw_message._ackd:
await self.raw_message.nak(delay=delay)
@@ -40,7 +40,7 @@ async def in_progress(self) -> None:
await self.raw_message.in_progress()
-class NatsBatchMessage(StreamMessage[List[Msg]]):
+class NatsBatchMessage(StreamMessage[list[Msg]]):
"""A class to represent a NATS batch message."""
async def ack(self) -> None:
@@ -54,7 +54,7 @@ async def ack(self) -> None:
async def nack(
self,
- delay: Union[int, float, None] = None,
+ delay: Optional[float] = None,
) -> None:
for m in filter(
lambda m: not m._ackd,

diff --git a/faststream/nats/opentelemetry/middleware.py b/faststream/nats/opentelemetry/middleware.py
index cafd8787d8..ac97181ff8 100644
--- a/faststream/nats/opentelemetry/middleware.py
+++ b/faststream/nats/opentelemetry/middleware.py
@@ -4,10 +4,11 @@
from opentelemetry.trace import TracerProvider
from faststream.nats.opentelemetry.provider import telemetry_attributes_provider_factory
+from faststream.nats.response import NatsPublishCommand
from faststream.opentelemetry.middleware import TelemetryMiddleware
-class NatsTelemetryMiddleware(TelemetryMiddleware):
+class NatsTelemetryMiddleware(TelemetryMiddleware[NatsPublishCommand]):
def __init__(
self,
*,
diff --git a/faststream/nats/opentelemetry/provider.py b/faststream/nats/opentelemetry/provider.py
index a77ff0a2b3..d877c911e2 100644
--- a/faststream/nats/opentelemetry/provider.py
+++ b/faststream/nats/opentelemetry/provider.py
@@ -1,16 +1,17 @@
-from typing import TYPE_CHECKING, List, Optional, Sequence, Union, overload
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional, Union, overload
from nats.aio.msg import Msg
from opentelemetry.semconv.trace import SpanAttributes
-from faststream.__about__ import SERVICE_NAME
-from faststream.broker.types import MsgType
+from faststream._internal.types import MsgType
from faststream.opentelemetry import TelemetrySettingsProvider
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
if TYPE_CHECKING:
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message import StreamMessage
+ from faststream.response import PublishCommand
class BaseNatsTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
@@ -19,22 +20,21 @@ class BaseNatsTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "nats"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> "AnyDict":
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["subject"],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- subject: str = kwargs.get("subject", SERVICE_NAME)
- return subject
+ return cmd.destination
class NatsTelemetrySettingsProvider(BaseNatsTelemetrySettingsProvider["Msg"]):
@@ -58,11 +58,11 @@ def get_consume_destination_name(
class NatsBatchTelemetrySettingsProvider(
- BaseNatsTelemetrySettingsProvider[List["Msg"]]
+ BaseNatsTelemetrySettingsProvider[list["Msg"]],
):
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[List[Msg]]",
+ msg: "StreamMessage[list[Msg]]",
) -> "AnyDict":
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
@@ -75,7 +75,7 @@ def get_consume_attrs_from_message(
def get_consume_destination_name(
self,
- msg: "StreamMessage[List[Msg]]",
+ msg: "StreamMessage[list[Msg]]",
) -> str:
return msg.raw_message[0].subject
@@ -110,8 +110,7 @@ def telemetry_attributes_provider_factory(
]:
if isinstance(msg, Sequence):
return NatsBatchTelemetrySettingsProvider()
- elif isinstance(msg, Msg) or msg is None:
+ if isinstance(msg, Msg) or msg is None:
return NatsTelemetrySettingsProvider()
- else:
- # KeyValue and Object Storage watch cases
- return None
+ # KeyValue and Object Storage watch cases
+ return None
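
The factory's dispatch can be summarized with sentinel arguments (chosen only to hit each branch; real callers pass NATS messages):

```python
from faststream.nats.opentelemetry.provider import (
    NatsBatchTelemetrySettingsProvider,
    NatsTelemetrySettingsProvider,
    telemetry_attributes_provider_factory,
)

# a Sequence of Msg -> batch provider
assert isinstance(
    telemetry_attributes_provider_factory([]),
    NatsBatchTelemetrySettingsProvider,
)
# a single Msg or None -> core provider
assert isinstance(
    telemetry_attributes_provider_factory(None),
    NatsTelemetrySettingsProvider,
)
# KeyValue / Object Storage watch -> no provider
assert telemetry_attributes_provider_factory(object()) is None
```
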
diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py
index c3a1c32928..7553ef7882 100644
--- a/faststream/nats/parser.py
+++ b/faststream/nats/parser.py
@@ -1,6 +1,9 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Any, Optional
-from faststream.broker.message import StreamMessage, decode_message, gen_cor_id
+from faststream.message import (
+ StreamMessage,
+ decode_message,
+)
from faststream.nats.message import (
NatsBatchMessage,
NatsKvMessage,
@@ -14,7 +17,7 @@
from nats.js.api import ObjectInfo
from nats.js.kv import KeyValue
- from faststream.types import AnyDict, DecodedMessage
+ from faststream._internal.basic_types import AnyDict, DecodedMessage
class NatsBaseParser:
@@ -51,9 +54,10 @@ async def decode_message(
class NatsParser(NatsBaseParser):
"""A class to parse NATS core messages."""
- def __init__(self, *, pattern: str, no_ack: bool) -> None:
+ def __init__(self, *, pattern: str, is_ack_disabled: bool) -> None:
super().__init__(pattern=pattern)
- self.no_ack = no_ack
+
+ self.is_ack_disabled = is_ack_disabled
async def parse_message(
self,
@@ -66,8 +70,8 @@ async def parse_message(
headers = message.header or {}
- if not self.no_ack:
- message._ackd = True # prevent message from acking
+ if self.is_ack_disabled:
+ message._ackd = True
return NatsMessage(
raw_message=message,
@@ -76,8 +80,8 @@ async def parse_message(
reply_to=message.reply,
headers=headers,
content_type=headers.get("content-type", ""),
- message_id=headers.get("message_id", gen_cor_id()),
- correlation_id=headers.get("correlation_id", gen_cor_id()),
+ message_id=headers.get("message_id"),
+ correlation_id=headers.get("correlation_id"),
)
@@ -101,9 +105,9 @@ async def parse_message(
path=path or {},
reply_to=headers.get("reply_to", ""), # differ from core
headers=headers,
- content_type=headers.get("content-type", ""),
- message_id=headers.get("message_id", gen_cor_id()),
- correlation_id=headers.get("correlation_id", gen_cor_id()),
+ content_type=headers.get("content-type"),
+ message_id=headers.get("message_id"),
+ correlation_id=headers.get("correlation_id"),
)
@@ -112,10 +116,10 @@ class BatchParser(JsParser):
async def parse_batch(
self,
- message: List["Msg"],
- ) -> "StreamMessage[List[Msg]]":
- body: List[bytes] = []
- batch_headers: List[Dict[str, str]] = []
+ message: list["Msg"],
+ ) -> "StreamMessage[list[Msg]]":
+ body: list[bytes] = []
+ batch_headers: list[dict[str, str]] = []
if message:
path = self.get_path(message[0].subject)
@@ -139,9 +143,9 @@ async def parse_batch(
async def decode_batch(
self,
- msg: "StreamMessage[List[Msg]]",
- ) -> List["DecodedMessage"]:
- data: List[DecodedMessage] = []
+ msg: "StreamMessage[list[Msg]]",
+ ) -> list["DecodedMessage"]:
+ data: list[DecodedMessage] = []
path: Optional[AnyDict] = None
for m in msg.raw_message:
@@ -155,7 +159,8 @@ async def decode_batch(
class KvParser(NatsBaseParser):
async def parse_message(
- self, msg: "KeyValue.Entry"
+ self,
+ msg: "KeyValue.Entry",
) -> StreamMessage["KeyValue.Entry"]:
return NatsKvMessage(
raw_message=msg,
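
For clarity, the renamed flag is keyword-only; a minimal construction sketch:

```python
# With is_ack_disabled=True the parser sets msg._ackd = True on parse, so
# FastStream's auto-ack machinery will skip the message.
from faststream.nats.parser import NatsParser

parser = NatsParser(pattern="demo.*", is_ack_disabled=True)
```
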
diff --git a/faststream/nats/prometheus/middleware.py b/faststream/nats/prometheus/middleware.py
index 3aadeb61d1..7546fe5b6d 100644
--- a/faststream/nats/prometheus/middleware.py
+++ b/faststream/nats/prometheus/middleware.py
@@ -1,14 +1,20 @@
-from typing import TYPE_CHECKING, Optional, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional, Union
+from nats.aio.msg import Msg
+
+from faststream._internal.constants import EMPTY
from faststream.nats.prometheus.provider import settings_provider_factory
-from faststream.prometheus.middleware import BasePrometheusMiddleware
-from faststream.types import EMPTY
+from faststream.nats.response import NatsPublishCommand
+from faststream.prometheus.middleware import PrometheusMiddleware
if TYPE_CHECKING:
from prometheus_client import CollectorRegistry
-class NatsPrometheusMiddleware(BasePrometheusMiddleware):
+class NatsPrometheusMiddleware(
+ PrometheusMiddleware[NatsPublishCommand, Union[Msg, Sequence[Msg]]]
+):
def __init__(
self,
*,
@@ -18,7 +24,7 @@ def __init__(
received_messages_size_buckets: Optional[Sequence[float]] = None,
) -> None:
super().__init__(
- settings_provider_factory=settings_provider_factory,
+ settings_provider_factory=settings_provider_factory, # type: ignore[arg-type]
registry=registry,
app_name=app_name,
metrics_prefix=metrics_prefix,
diff --git a/faststream/nats/prometheus/provider.py b/faststream/nats/prometheus/provider.py
index e6ac0a4684..eefe8ea5be 100644
--- a/faststream/nats/prometheus/provider.py
+++ b/faststream/nats/prometheus/provider.py
@@ -1,15 +1,16 @@
-from typing import TYPE_CHECKING, List, Sequence, Union, cast
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union
from nats.aio.msg import Msg
-from faststream.broker.message import MsgType, StreamMessage
+from faststream.message.message import MsgType, StreamMessage
from faststream.prometheus import (
ConsumeAttrs,
MetricsSettingsProvider,
)
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream.response import PublishCommand
class BaseNatsMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
@@ -18,11 +19,11 @@ class BaseNatsMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "nats"
- def get_publish_destination_name_from_kwargs(
+ def get_publish_destination_name_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- return cast(str, kwargs["subject"])
+ return cmd.destination
class NatsMetricsSettingsProvider(BaseNatsMetricsSettingsProvider["Msg"]):
@@ -37,10 +38,10 @@ def get_consume_attrs_from_message(
}
-class BatchNatsMetricsSettingsProvider(BaseNatsMetricsSettingsProvider[List["Msg"]]):
+class BatchNatsMetricsSettingsProvider(BaseNatsMetricsSettingsProvider[list["Msg"]]):
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[List[Msg]]",
+ msg: "StreamMessage[list[Msg]]",
) -> ConsumeAttrs:
raw_message = msg.raw_message[0]
return {
@@ -59,8 +60,7 @@ def settings_provider_factory(
]:
if isinstance(msg, Sequence):
return BatchNatsMetricsSettingsProvider()
- elif isinstance(msg, Msg) or msg is None:
+ if isinstance(msg, Msg) or msg is None:
return NatsMetricsSettingsProvider()
- else:
- # KeyValue and Object Storage watch cases
- return None
+ # KeyValue and Object Storage watch cases
+ return None
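
The metrics factory mirrors the telemetry one above; the same sentinel-based sketch applies:

```python
from faststream.nats.prometheus.provider import (
    BatchNatsMetricsSettingsProvider,
    NatsMetricsSettingsProvider,
    settings_provider_factory,
)

assert isinstance(settings_provider_factory([]), BatchNatsMetricsSettingsProvider)
assert isinstance(settings_provider_factory(None), NatsMetricsSettingsProvider)
assert settings_provider_factory(object()) is None  # KV / Object Storage watch
```
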
diff --git a/faststream/nats/publisher/asyncapi.py b/faststream/nats/publisher/asyncapi.py
deleted file mode 100644
index 7ce50295d7..0000000000
--- a/faststream/nats/publisher/asyncapi.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence
-
-from typing_extensions import override
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import nats
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.nats.publisher.usecase import LogicPublisher
-
-if TYPE_CHECKING:
- from nats.aio.msg import Msg
-
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
- from faststream.nats.schemas.js_stream import JStream
-
-
-class AsyncAPIPublisher(LogicPublisher):
- """A class to represent a NATS publisher."""
-
- def get_name(self) -> str:
- return f"{self.subject}:Publisher"
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- publish=Operation(
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads, "Publisher"),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- nats=nats.ChannelBinding(
- subject=self.subject,
- )
- ),
- )
- }
-
- @override
- @classmethod
- def create( # type: ignore[override]
- cls,
- *,
- subject: str,
- reply_to: str,
- headers: Optional[Dict[str, str]],
- stream: Optional["JStream"],
- timeout: Optional[float],
- # Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "AsyncAPIPublisher":
- return cls(
- subject=subject,
- reply_to=reply_to,
- headers=headers,
- stream=stream,
- timeout=timeout,
- # Publisher args
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
diff --git a/faststream/nats/publisher/factory.py b/faststream/nats/publisher/factory.py
new file mode 100644
index 0000000000..3191f956ac
--- /dev/null
+++ b/faststream/nats/publisher/factory.py
@@ -0,0 +1,43 @@
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Any, Optional
+
+from .specified import SpecificationPublisher
+
+if TYPE_CHECKING:
+ from nats.aio.msg import Msg
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.nats.schemas.js_stream import JStream
+
+
+def create_publisher(
+ *,
+ subject: str,
+ reply_to: str,
+ headers: Optional[dict[str, str]],
+ stream: Optional["JStream"],
+ timeout: Optional[float],
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # AsyncAPI args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> SpecificationPublisher:
+ return SpecificationPublisher(
+ subject=subject,
+ reply_to=reply_to,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ # Publisher args
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
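
A sketch of calling the new factory with the same arguments the deleted `AsyncAPIPublisher.create()` classmethod used to accept. All names are taken from this file; the final assertion relies on `get_default_name()` from `publisher/specified.py` below:

```python
from faststream.nats.publisher.factory import create_publisher

publisher = create_publisher(
    subject="logs.info",
    reply_to="",
    headers=None,
    stream=None,
    timeout=None,
    broker_middlewares=(),
    middlewares=(),
    schema_=None,
    title_=None,
    description_=None,
    include_in_schema=True,
)
assert publisher.get_default_name() == "logs.info:Publisher"
```
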
diff --git a/faststream/nats/publisher/fake.py b/faststream/nats/publisher/fake.py
new file mode 100644
index 0000000000..1a0a95f18f
--- /dev/null
+++ b/faststream/nats/publisher/fake.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.nats.response import NatsPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class NatsFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ subject: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.subject = subject
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "NatsPublishCommand"]
+ ) -> "NatsPublishCommand":
+ cmd = super().patch_command(cmd)
+ real_cmd = NatsPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.subject
+ return real_cmd
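
A behavioral sketch of the destination rewrite, assuming `producer` satisfies `ProducerProto` and that the base `FakePublisher.patch_command()` returns the command unchanged:

```python
from faststream.nats.publisher.fake import NatsFakePublisher
from faststream.nats.response import NatsPublishCommand
from faststream.response.publish_type import PublishType

def reply_to_sketch(producer) -> None:
    fake = NatsFakePublisher(producer=producer, subject="_INBOX.reply.1")
    cmd = NatsPublishCommand(
        "pong",
        subject="will-be-overridden",
        _publish_type=PublishType.PUBLISH,
    )
    patched = fake.patch_command(cmd)
    assert patched.destination == "_INBOX.reply.1"  # subject always wins
```
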
diff --git a/faststream/nats/publisher/producer.py b/faststream/nats/publisher/producer.py
index eedb27932f..7feefe3a59 100644
--- a/faststream/nats/publisher/producer.py
+++ b/faststream/nats/publisher/producer.py
@@ -1,27 +1,32 @@
import asyncio
-from typing import TYPE_CHECKING, Any, Dict, Optional
+from typing import TYPE_CHECKING, Optional
import anyio
import nats
from typing_extensions import override
-from faststream.broker.message import encode_message
-from faststream.broker.publisher.proto import ProducerProto
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import WRONG_PUBLISH_ARGS
+from faststream._internal.publisher.proto import ProducerProto
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream.exceptions import FeatureNotSupportedException
+from faststream.message import encode_message
+from faststream.nats.helpers.state import (
+ ConnectedState,
+ ConnectionState,
+ EmptyConnectionState,
+)
from faststream.nats.parser import NatsParser
-from faststream.utils.functions import timeout_scope
if TYPE_CHECKING:
from nats.aio.client import Client
from nats.aio.msg import Msg
from nats.js import JetStreamContext
- from faststream.broker.types import (
+ from faststream._internal.types import (
AsyncCallable,
CustomCallable,
)
- from faststream.types import SendableMessage
+ from faststream.nats.response import NatsPublishCommand
+ from faststream.nats.schemas import PubAck
class NatsFastProducer(ProducerProto):
@@ -32,99 +37,67 @@ class NatsFastProducer(ProducerProto):
def __init__(
self,
- *,
- connection: "Client",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._connection = connection
-
- default = NatsParser(pattern="", no_ack=False)
+ default = NatsParser(pattern="", is_ack_disabled=True)
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ self.__state: ConnectionState[Client] = EmptyConnectionState()
+
+ def connect(self, connection: "Client") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- **kwargs: Any, # suprress stream option
- ) -> Optional[Any]:
- payload, content_type = encode_message(message)
+ cmd: "NatsPublishCommand",
+ ) -> None:
+ payload, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- client = self._connection
-
- if rpc:
- if reply_to:
- raise WRONG_PUBLISH_ARGS
-
- reply_to = client.new_inbox()
-
- future: asyncio.Future[Msg] = asyncio.Future()
- sub = await client.subscribe(reply_to, future=future, max_msgs=1)
- await sub.unsubscribe(limit=1)
-
- await client.publish(
- subject=subject,
+ return await self.__state.connection.publish(
+ subject=cmd.destination,
payload=payload,
- reply=reply_to,
+ reply=cmd.reply_to,
headers=headers_to_send,
)
- if rpc:
- msg: Any = None
- with timeout_scope(rpc_timeout, raise_timeout):
- msg = await future
-
- if msg: # pragma: no branch
- if msg.headers: # pragma: no cover # noqa: SIM102
- if (
- msg.headers.get(nats.js.api.Header.STATUS)
- == nats.aio.client.NO_RESPONDERS_STATUS
- ):
- raise nats.errors.NoRespondersError
- return await self._decoder(await self._parser(msg))
-
- return None
-
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[Dict[str, str]] = None,
- timeout: float = 0.5,
+ cmd: "NatsPublishCommand",
) -> "Msg":
- payload, content_type = encode_message(message)
+ payload, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- return await self._connection.request(
- subject=subject,
+ return await self.__state.connection.request(
+ subject=cmd.destination,
payload=payload,
headers=headers_to_send,
- timeout=timeout,
+ timeout=cmd.timeout,
)
+ @override
+ async def publish_batch(
+ self,
+ cmd: "NatsPublishCommand",
+ ) -> None:
+ msg = "NATS doesn't support publishing in batches."
+ raise FeatureNotSupportedException(msg)
+
class NatsJSFastProducer(ProducerProto):
"""A class to represent a NATS JetStream producer."""
@@ -135,108 +108,70 @@ class NatsJSFastProducer(ProducerProto):
def __init__(
self,
*,
- connection: "JetStreamContext",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._connection = connection
-
- default = NatsParser(pattern="", no_ack=False)
+ default = NatsParser(
+ pattern="", is_ack_disabled=True
+ ) # core parser to serialize responses
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ self.__state: ConnectionState[JetStreamContext] = EmptyConnectionState()
+
+ def connect(self, connection: "Client") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[Dict[str, str]] = None,
- reply_to: str = "",
- stream: Optional[str] = None,
- timeout: Optional[float] = None,
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- ) -> Optional[Any]:
- payload, content_type = encode_message(message)
+ cmd: "NatsPublishCommand",
+ ) -> "PubAck":
+ payload, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(js=True),
}
- if rpc:
- if reply_to:
- raise WRONG_PUBLISH_ARGS
- reply_to = self._connection._nc.new_inbox()
- future: asyncio.Future[Msg] = asyncio.Future()
- sub = await self._connection._nc.subscribe(
- reply_to, future=future, max_msgs=1
- )
- await sub.unsubscribe(limit=1)
-
- if reply_to:
- headers_to_send.update({"reply_to": reply_to})
-
- await self._connection.publish(
- subject=subject,
+ return await self.__state.connection.publish(
+ subject=cmd.destination,
payload=payload,
headers=headers_to_send,
- stream=stream,
- timeout=timeout,
+ stream=cmd.stream,
+ timeout=cmd.timeout,
)
- if rpc:
- msg: Any = None
- with timeout_scope(rpc_timeout, raise_timeout):
- msg = await future
-
- if msg: # pragma: no branch
- if msg.headers: # pragma: no cover # noqa: SIM102
- if (
- msg.headers.get(nats.js.api.Header.STATUS)
- == nats.aio.client.NO_RESPONDERS_STATUS
- ):
- raise nats.errors.NoRespondersError
- return await self._decoder(await self._parser(msg))
-
- return None
-
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[Dict[str, str]] = None,
- stream: Optional[str] = None,
- timeout: float = 0.5,
+ cmd: "NatsPublishCommand",
) -> "Msg":
- payload, content_type = encode_message(message)
+ payload, content_type = encode_message(cmd.body)
- reply_to = self._connection._nc.new_inbox()
+ reply_to = self.__state.connection._nc.new_inbox()
future: asyncio.Future[Msg] = asyncio.Future()
- sub = await self._connection._nc.subscribe(reply_to, future=future, max_msgs=1)
+ sub = await self.__state.connection._nc.subscribe(
+ reply_to, future=future, max_msgs=1
+ )
await sub.unsubscribe(limit=1)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
"reply_to": reply_to,
- **(headers or {}),
+ **cmd.headers_to_publish(js=False),
}
- with anyio.fail_after(timeout):
- await self._connection.publish(
- subject=subject,
+ with anyio.fail_after(cmd.timeout):
+ await self.__state.connection.publish(
+ subject=cmd.destination,
payload=payload,
headers=headers_to_send,
- stream=stream,
- timeout=timeout,
+ stream=cmd.stream,
+ timeout=cmd.timeout,
)
msg = await future
@@ -251,3 +186,11 @@ async def request( # type: ignore[override]
raise nats.errors.NoRespondersError
return msg
+
+ @override
+ async def publish_batch(
+ self,
+ cmd: "NatsPublishCommand",
+ ) -> None:
+ msg = "NATS doesn't support publishing in batches."
+ raise FeatureNotSupportedException(msg)
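
The producer now receives one command object instead of a dozen keyword arguments, and its connection is injected after construction. A minimal sketch, assuming `nc` is a connected `nats.aio.client.Client`:

```python
from faststream.nats.publisher.producer import NatsFastProducer
from faststream.nats.response import NatsPublishCommand
from faststream.response.publish_type import PublishType

async def send_sketch(nc) -> None:
    producer = NatsFastProducer(parser=None, decoder=None)
    producer.connect(nc)  # EmptyConnectionState -> ConnectedState

    cmd = NatsPublishCommand(
        "hello",
        subject="demo.subject",
        correlation_id="trace-1",
        _publish_type=PublishType.PUBLISH,
    )
    await producer.publish(cmd)
    # await producer.publish_batch(cmd)  # raises FeatureNotSupportedException
```
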
diff --git a/faststream/nats/publisher/specified.py b/faststream/nats/publisher/specified.py
new file mode 100644
index 0000000000..029c62b344
--- /dev/null
+++ b/faststream/nats/publisher/specified.py
@@ -0,0 +1,39 @@
+from faststream._internal.publisher.specified import (
+ SpecificationPublisher as SpecificationPublisherMixin,
+)
+from faststream.nats.publisher.usecase import LogicPublisher
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, PublisherSpec
+from faststream.specification.schema.bindings import ChannelBinding, nats
+
+
+class SpecificationPublisher(
+ SpecificationPublisherMixin,
+ LogicPublisher,
+):
+ """A class to represent a NATS publisher."""
+
+ def get_default_name(self) -> str:
+ return f"{self.subject}:Publisher"
+
+ def get_schema(self) -> dict[str, PublisherSpec]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: PublisherSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ nats=nats.ChannelBinding(
+ subject=self.subject,
+ queue=None,
+ ),
+ ),
+ ),
+ }
diff --git a/faststream/nats/publisher/usecase.py b/faststream/nats/publisher/usecase.py
index eaa014ce75..6f21bafd6e 100644
--- a/faststream/nats/publisher/usecase.py
+++ b/faststream/nats/publisher/usecase.py
@@ -1,222 +1,194 @@
-from contextlib import AsyncExitStack
-from functools import partial
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Dict,
- Iterable,
- Optional,
- Sequence,
- Union,
-)
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Optional, Union
from nats.aio.msg import Msg
-from typing_extensions import Annotated, Doc, override
+from typing_extensions import overload, override
-from faststream.broker.message import SourceType, gen_cor_id
-from faststream.broker.publisher.usecase import PublisherUsecase
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.utils.functions import return_input
+from faststream._internal.publisher.usecase import PublisherUsecase
+from faststream.message import gen_cor_id
+from faststream.nats.response import NatsPublishCommand
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.nats.message import NatsMessage
from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer
- from faststream.nats.schemas import JStream
- from faststream.types import AnyDict, AsyncFunc, SendableMessage
+ from faststream.nats.schemas import JStream, PubAck
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[Msg]):
"""A class to represent a NATS publisher."""
- _producer: Union["NatsFastProducer", "NatsJSFastProducer", None]
+ _producer: Union["NatsFastProducer", "NatsJSFastProducer"]
def __init__(
self,
*,
subject: str,
reply_to: str,
- headers: Optional[Dict[str, str]],
+ headers: Optional[dict[str, str]],
stream: Optional["JStream"],
timeout: Optional[float],
# Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
"""Initialize NATS publisher object."""
super().__init__(
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.subject = subject
self.stream = stream
self.timeout = timeout
- self.headers = headers
+ self.headers = headers or {}
self.reply_to = reply_to
- def __hash__(self) -> int:
- return hash(self.subject)
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str = "",
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: None = None,
+ timeout: Optional[float] = None,
+ ) -> None: ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str = "",
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> "PubAck": ...
@override
async def publish(
self,
message: "SendableMessage",
subject: str = "",
- *,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
reply_to: str = "",
correlation_id: Optional[str] = None,
stream: Optional[str] = None,
timeout: Optional[float] = None,
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
- ) -> Optional[Any]:
+ ) -> Optional["PubAck"]:
"""Publish message directly.
Args:
- message (SendableMessage): Message body to send.
+ message:
+ Message body to send.
Can be any encodable object (native python types or `pydantic.BaseModel`).
- subject (str): NATS subject to send message (default is `''`).
- headers (:obj:`dict` of :obj:`str`: :obj:`str`, optional): Message headers to store metainformation (default is `None`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
**content-type** and **correlation_id** will be set automatically by framework anyway.
-
- reply_to (str): NATS subject name to send response (default is `None`).
- correlation_id (str, optional): Manual message **correlation_id** setter (default is `None`).
+ reply_to:
+ NATS subject name to send response.
+ correlation_id:
+ Manual message **correlation_id** setter.
**correlation_id** is a useful option to trace messages.
+ stream:
+ This option validates that the target subject is in the presented stream.
+ Can be omitted without any effect if you don't want a PubAck frame.
+ timeout:
+ Timeout to send message to NATS.
+
+ Returns:
+ `None` if you publish a regular message.
+ `faststream.nats.PubAck` if you publish a message to a stream.
+ """
+ cmd = NatsPublishCommand(
+ message,
+ subject=subject or self.subject,
+ headers=self.headers | (headers or {}),
+ reply_to=reply_to or self.reply_to,
+ correlation_id=correlation_id or gen_cor_id(),
+ stream=stream or getattr(self.stream, "name", None),
+ timeout=timeout or self.timeout,
+ _publish_type=PublishType.PUBLISH,
+ )
+ return await self._basic_publish(cmd, _extra_middlewares=())
- stream (str, optional): This option validates that the target subject is in presented stream (default is `None`).
- Can be omitted without any effect.
- timeout (float, optional): Timeout to send message to NATS in seconds (default is `None`).
- rpc (bool): Whether to wait for reply in blocking mode (default is `False`).
- rpc_timeout (float, optional): RPC reply waiting time (default is `30.0`).
- raise_timeout (bool): Whetever to raise `TimeoutError` or return `None` at **rpc_timeout** (default is `False`).
- RPC request returns `None` at timeout by default.
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "NatsPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = NatsPublishCommand.from_cmd(cmd)
- _extra_middlewares (:obj:`Iterable` of :obj:`PublisherMiddleware`): Extra middlewares to wrap publishing process (default is `()`).
- """
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "subject": subject or self.subject,
- "headers": headers or self.headers,
- "reply_to": reply_to or self.reply_to,
- "correlation_id": correlation_id or gen_cor_id(),
- # specific args
- "rpc": rpc,
- "rpc_timeout": rpc_timeout,
- "raise_timeout": raise_timeout,
- }
-
- if stream := stream or getattr(self.stream, "name", None):
- kwargs.update({"stream": stream, "timeout": timeout or self.timeout})
-
- call: AsyncFunc = self._producer.publish
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
-
- return await call(message, **kwargs)
+ cmd.destination = self.subject
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+
+ if self.stream:
+ cmd.stream = self.stream.name
+ cmd.timeout = self.timeout
+
+ return await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
self,
- message: Annotated[
- "SendableMessage",
- Doc(
- "Message body to send. "
- "Can be any encodable object (native python types or `pydantic.BaseModel`)."
- ),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject to send message."),
- ] = "",
- *,
- headers: Annotated[
- Optional[Dict[str, str]],
- Doc(
- "Message headers to store metainformation. "
- "**content-type** and **correlation_id** will be set automatically by framework anyway."
- ),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- timeout: Annotated[
- float,
- Doc("Timeout to send message to NATS."),
- ] = 0.5,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
+ message: "SendableMessage",
+ subject: str = "",
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ timeout: float = 0.5,
) -> "NatsMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "subject": subject or self.subject,
- "headers": headers or self.headers,
- "timeout": timeout or self.timeout,
- "correlation_id": correlation_id or gen_cor_id(),
- }
-
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
- message,
- **kwargs,
- )
+ """Make a synchronous request to outer subscriber.
+
+ If out subscriber listens subject by stream, you should setup the same **stream** explicitly.
+ Another way you will reseave confirmation frame as a response.
- async with AsyncExitStack() as stack:
- return_msg: Callable[[NatsMessage], Awaitable[NatsMessage]] = return_input
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
+ Note:
+ To set up the **stream** option, please use the `__init__` method.
+
+ Args:
+ message:
+ Message body to send.
+ Can be any encodable object (native python types or `pydantic.BaseModel`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
+ **content-type** and **correlation_id** will be set automatically by framework anyway.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ timeout:
+ Timeout to send message to NATS.
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
+ Returns:
+ `faststream.nats.message.NatsMessage` object as an outer subscriber response.
+ """
+ cmd = NatsPublishCommand(
+ message=message,
+ subject=subject or self.subject,
+ headers=self.headers | (headers or {}),
+ timeout=timeout or self.timeout,
+ correlation_id=correlation_id or gen_cor_id(),
+ stream=getattr(self.stream, "name", None),
+ _publish_type=PublishType.REQUEST,
+ )
- raise AssertionError("unreachable")
+ msg: NatsMessage = await self._basic_request(cmd)
+ return msg
def add_prefix(self, prefix: str) -> None:
self.subject = prefix + self.subject
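
The overloads above encode the return-type contract; in use it looks like this sketch (assuming `publisher` was created from a connected broker, e.g. `broker.publisher("demo.subject")`):

```python
async def publish_sketch(publisher) -> None:
    await publisher.publish("plain message")                    # core NATS -> None
    ack = await publisher.publish("js message", stream="logs")  # JetStream -> PubAck
    msg = await publisher.request("ping", timeout=0.5)          # reply as NatsMessage
```
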
diff --git a/faststream/nats/response.py b/faststream/nats/response.py
index b3813131ff..f66b1c6ef4 100644
--- a/faststream/nats/response.py
+++ b/faststream/nats/response.py
@@ -1,11 +1,12 @@
-from typing import TYPE_CHECKING, Dict, Optional
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import override
-from faststream.broker.response import Response
+from faststream.response.publish_type import PublishType
+from faststream.response.response import PublishCommand, Response
if TYPE_CHECKING:
- from faststream.types import AnyDict, SendableMessage
+ from faststream._internal.basic_types import SendableMessage
class NatsResponse(Response):
@@ -13,7 +14,7 @@ def __init__(
self,
body: "SendableMessage",
*,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
stream: Optional[str] = None,
) -> None:
@@ -25,9 +26,81 @@ def __init__(
self.stream = stream
@override
- def as_publish_kwargs(self) -> "AnyDict":
- publish_options = {
- **super().as_publish_kwargs(),
- "stream": self.stream,
- }
- return publish_options
+ def as_publish_command(self) -> "NatsPublishCommand":
+ return NatsPublishCommand(
+ message=self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.PUBLISH,
+ # Nats specific
+ subject="",
+ stream=self.stream,
+ )
+
+
+class NatsPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ message: "SendableMessage",
+ *,
+ subject: str = "",
+ correlation_id: Optional[str] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ _publish_type: PublishType,
+ ) -> None:
+ super().__init__(
+ body=message,
+ destination=subject,
+ correlation_id=correlation_id,
+ headers=headers,
+ reply_to=reply_to,
+ _publish_type=_publish_type,
+ )
+
+ self.stream = stream
+ self.timeout = timeout
+
+ def headers_to_publish(self, *, js: bool = False) -> dict[str, str]:
+ headers = {}
+
+ if self.correlation_id:
+ headers["correlation_id"] = self.correlation_id
+
+ if js and self.reply_to:
+ headers["reply_to"] = self.reply_to
+
+ return headers | self.headers
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "NatsPublishCommand"],
+ ) -> "NatsPublishCommand":
+ if isinstance(cmd, NatsPublishCommand):
+ # NOTE: Should return a copy probably.
+ return cmd
+
+ return cls(
+ message=cmd.body,
+ subject=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
+
+ def __repr__(self) -> str:
+ body = [f"body='{self.body}'", f"subject='{self.destination}'"]
+ if self.stream:
+ body.append(f"stream={self.stream}")
+ if self.reply_to:
+ body.append(f"reply_to='{self.reply_to}'")
+ body.extend((
+ f"headers={self.headers}",
+ f"correlation_id='{self.correlation_id}'",
+ f"publish_type={self.publish_type}",
+ ))
+ return f"{self.__class__.__name__}({', '.join(body)})"
diff --git a/faststream/nats/router.py b/faststream/nats/router.py
index b9e029c594..44816f591e 100644
--- a/faststream/nats/router.py
+++ b/faststream/nats/router.py
@@ -1,36 +1,39 @@
+from collections.abc import Awaitable, Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
- Awaitable,
Callable,
- Dict,
- Iterable,
Optional,
- Sequence,
Union,
)
from nats.js import api
-from typing_extensions import Annotated, Doc, deprecated
+from typing_extensions import Doc, deprecated
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.nats.broker.registrator import NatsRegistrator
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from nats.aio.msg import Msg
- from faststream.broker.types import (
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
- from faststream.nats.message import NatsBatchMessage, NatsMessage
+ from faststream.nats.message import NatsMessage
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
- from faststream.types import SendableMessage
class NatsPublisher(ArgsContainer):
@@ -47,11 +50,11 @@ def __init__(
],
*,
headers: Annotated[
- Optional[Dict[str, str]],
+ Optional[dict[str, str]],
Doc(
"Message headers to store metainformation. "
"**content-type** and **correlation_id** will be set automatically by framework anyway. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -63,7 +66,7 @@ def __init__(
Union[str, "JStream", None],
Doc(
"This option validates that the target `subject` is in presented stream. "
- "Can be omitted without any effect."
+ "Can be omitted without any effect.",
),
] = None,
timeout: Annotated[
@@ -73,6 +76,10 @@ def __init__(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -88,7 +95,7 @@ def __init__(
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
@@ -122,7 +129,7 @@ def __init__(
],
Doc(
"Message handler function "
- "to wrap the same with `@broker.subscriber(...)` way."
+ "to wrap the same with `@broker.subscriber(...)` way.",
),
],
subject: Annotated[
@@ -137,7 +144,7 @@ def __init__(
str,
Doc(
"Subscribers' NATS queue name. Subscribers with same queue name will be load balanced by the NATS "
- "server."
+ "server.",
),
] = "",
pending_msgs_limit: Annotated[
@@ -147,7 +154,7 @@ def __init__(
"been answered. In case of NATS Core, if that limits exceeds, you will receive NATS 'Slow Consumer' "
"error. "
"That's literally means that your worker can't handle the whole load. In case of NATS JetStream, "
- "you will no longer receive messages until some of delivered messages will be acked in any way."
+ "you will no longer receive messages until some of delivered messages will be acked in any way.",
),
] = None,
pending_bytes_limit: Annotated[
@@ -157,7 +164,7 @@ def __init__(
"been answered. In case of NATS Core, if that limit exceeds, you will receive NATS 'Slow Consumer' "
"error."
"That's literally means that your worker can't handle the whole load. In case of NATS JetStream, "
- "you will no longer receive messages until some of delivered messages will be acked in any way."
+ "you will no longer receive messages until some of delivered messages will be acked in any way.",
),
] = None,
# Core arguments
@@ -169,7 +176,7 @@ def __init__(
durable: Annotated[
Optional[str],
Doc(
- "Name of the durable consumer to which the the subscription should be bound."
+ "Name of the durable consumer to which the the subscription should be bound.",
),
] = None,
config: Annotated[
@@ -195,7 +202,7 @@ def __init__(
headers_only: Annotated[
Optional[bool],
Doc(
- "Should be message delivered without payload, only headers and metadata."
+ "Should be message delivered without payload, only headers and metadata.",
),
] = None,
# pull arguments
@@ -203,7 +210,7 @@ def __init__(
Optional["PullSub"],
Doc(
"NATS Pull consumer parameters container. "
- "Should be used with `stream` only."
+ "Should be used with `stream` only.",
),
] = None,
kv_watch: Annotated[
@@ -217,22 +224,28 @@ def __init__(
inbox_prefix: Annotated[
bytes,
Doc(
- "Prefix for generating unique inboxes, subjects with that prefix and NUID."
+ "Prefix for generating unique inboxes, subjects with that prefix and NUID.",
),
] = api.INBOX_PREFIX,
# custom
ack_first: Annotated[
bool,
Doc("Whether to `ack` message at start of consuming or not."),
- ] = False,
+ deprecated(
+ """
+ This option is deprecated and will be removed in 0.7.0 release.
+ Please, use `ack_policy=AckPolicy.ACK_FIRST` instead.
+ """,
+ ),
+ ] = EMPTY,
stream: Annotated[
Union[str, "JStream", None],
Doc("Subscribe to NATS Stream with `subject` filter."),
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -244,38 +257,29 @@ def __init__(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[NatsMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- Union[
- "Filter[NatsMessage]",
- "Filter[NatsBatchMessage]",
- ],
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
max_workers: Annotated[
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -287,7 +291,7 @@ def __init__(
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
@@ -321,8 +325,7 @@ def __init__(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
title=title,
@@ -349,15 +352,19 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers."
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
Sequence["BrokerMiddleware[Msg]"],
Doc("Router middlewares to apply to all routers' publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[Msg]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc("Parser to map original **IncomingMessage** Msg to FastStream one."),
@@ -377,6 +384,7 @@ def __init__(
prefix=prefix,
dependencies=dependencies,
middlewares=middlewares,
+ routers=routers,
parser=parser,
decoder=decoder,
include_in_schema=include_in_schema,
diff --git a/faststream/nats/schemas/__init__.py b/faststream/nats/schemas/__init__.py
index 1edd51bcbe..accadfc731 100644
--- a/faststream/nats/schemas/__init__.py
+++ b/faststream/nats/schemas/__init__.py
@@ -1,3 +1,5 @@
+from nats.js.api import PubAck
+
from faststream.nats.schemas.js_stream import JStream
from faststream.nats.schemas.kv_watch import KvWatch
from faststream.nats.schemas.obj_watch import ObjWatch
@@ -7,5 +9,6 @@
"JStream",
"KvWatch",
"ObjWatch",
+ "PubAck",
"PullSub",
)
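
`PubAck` (the JetStream publish acknowledgement type from `nats-py`) is now
re-exported for typing convenience. It is a plain alias, not a wrapper, so
annotations written against either import path stay compatible:

from nats.js.api import PubAck as NativePubAck

from faststream.nats.schemas import PubAck

# Same object under both names.
assert PubAck is NativePubAck
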
diff --git a/faststream/nats/schemas/js_stream.py b/faststream/nats/schemas/js_stream.py
index 017691da23..62e97124f6 100644
--- a/faststream/nats/schemas/js_stream.py
+++ b/faststream/nats/schemas/js_stream.py
@@ -1,11 +1,11 @@
from itertools import zip_longest
-from typing import TYPE_CHECKING, List, Optional, Tuple
+from typing import TYPE_CHECKING, Annotated, Optional
from nats.js.api import DiscardPolicy, StreamConfig
-from typing_extensions import Annotated, Doc
+from typing_extensions import Doc
-from faststream.broker.schemas import NameRequired
-from faststream.utils.path import compile_path
+from faststream._internal.proto import NameRequired
+from faststream._internal.utils.path import compile_path
if TYPE_CHECKING:
from re import Pattern
@@ -40,12 +40,12 @@ def __init__(
Doc("Stream description if needed."),
] = None,
subjects: Annotated[
- Optional[List[str]],
+ Optional[list[str]],
Doc(
"Subjects, used by stream to grab messages from them. Any message sent by NATS Core will be consumed "
"by stream. Also, stream acknowledge message publisher with message, sent on reply subject of "
"publisher. Can be single string or list of them. Dots separate tokens of subjects, every token may "
- "be matched with exact same token or wildcards."
+ "be matched with exact same token or wildcards.",
),
] = None,
retention: Annotated[
@@ -60,24 +60,25 @@ def __init__(
"which guarantees message to be consumed only once. Since message acked, it will be deleted from the "
"stream immediately. Note: Message will be deleted only if limit is reached or message acked "
"successfully. Message that reached MaxDelivery limit will remain in the stream and should be "
- "manually deleted! Note: All policies will be responsive to Limits."
+ "manually deleted! Note: All policies will be responsive to Limits.",
),
] = None,
max_consumers: Annotated[
- Optional[int], Doc("Max number of consumers to be bound with this stream.")
+ Optional[int],
+ Doc("Max number of consumers to be bound with this stream."),
] = None,
max_msgs: Annotated[
Optional[int],
Doc(
"Max number of messages to be stored in the stream. Stream can automatically delete old messages or "
- "stop receiving new messages, look for 'DiscardPolicy'"
+ "stop receiving new messages, look for 'DiscardPolicy'",
),
] = None,
max_bytes: Annotated[
Optional[int],
Doc(
"Max bytes of all messages to be stored in the stream. Stream can automatically delete old messages or "
- "stop receiving new messages, look for 'DiscardPolicy'"
+ "stop receiving new messages, look for 'DiscardPolicy'",
),
] = None,
discard: Annotated[
@@ -88,27 +89,27 @@ def __init__(
Optional[float],
Doc(
"TTL in seconds for messages. Since message arrive, TTL begun. As soon as TTL exceeds, message will be "
- "deleted."
+ "deleted.",
),
] = None, # in seconds
max_msgs_per_subject: Annotated[
int,
Doc(
- "Limit message count per every unique subject. Stream index subjects to it's pretty fast tho.-"
+ "Limit message count per every unique subject. Stream index subjects to it's pretty fast tho.-",
),
] = -1,
max_msg_size: Annotated[
Optional[int],
Doc(
"Limit message size to be received. Note: the whole message can't be larger than NATS Core message "
- "limit."
+ "limit.",
),
] = -1,
storage: Annotated[
Optional["StorageType"],
Doc(
"Storage type, disk or memory. Disk is more durable, memory is faster. Memory can be better choice "
- "for systems, where new value overrides previous."
+ "for systems, where new value overrides previous.",
),
] = None,
num_replicas: Annotated[
@@ -116,7 +117,7 @@ def __init__(
Doc(
"Replicas of stream to be used. All replicas create RAFT group with leader. In case of losing lesser "
"than half, cluster will be available to reads and writes. In case of losing slightly more than half, "
- "cluster may be available but for reads only."
+ "cluster may be available but for reads only.",
),
] = None,
no_ack: Annotated[
@@ -132,25 +133,25 @@ def __init__(
Doc(
"A TTL for keys in implicit TTL-based hashmap of stream. That hashmap allows to early drop duplicate "
"messages. Essential feature for idempotent writes. Note: disabled by default. Look for 'Nats-Msg-Id' "
- "in NATS documentation for more information."
+ "in NATS documentation for more information.",
),
] = 0,
placement: Annotated[
Optional["Placement"],
Doc(
- "NATS Cluster for stream to be deployed in. Value is name of that cluster."
+ "NATS Cluster for stream to be deployed in. Value is name of that cluster.",
),
] = None,
mirror: Annotated[
Optional["StreamSource"],
Doc(
- "Should stream be read-only replica of another stream, if so, value is name of that stream."
+ "Should stream be read-only replica of another stream, if so, value is name of that stream.",
),
] = None,
sources: Annotated[
- Optional[List["StreamSource"]],
+ Optional[list["StreamSource"]],
Doc(
- "Should stream mux multiple streams into single one, if so, values is names of those streams."
+ "Should stream mux multiple streams into single one, if so, values is names of those streams.",
),
] = None,
sealed: Annotated[
@@ -193,6 +194,7 @@ def __init__(
self.subjects = subjects
self.declare = declare
+
self.config = StreamConfig(
name=name,
description=description,
@@ -245,14 +247,14 @@ def is_subject_match_wildcard(subject: str, wildcard: str) -> bool:
if base == ">":
break
- if base != "*" and current != base:
+ if base not in {"*", current}:
call = False
break
return call
-def compile_nats_wildcard(pattern: str) -> Tuple[Optional["Pattern[str]"], str]:
+def compile_nats_wildcard(pattern: str) -> tuple[Optional["Pattern[str]"], str]:
return compile_path(
pattern,
replace_symbol="*",
diff --git a/faststream/nats/schemas/kv_watch.py b/faststream/nats/schemas/kv_watch.py
index e99a5f5084..1bc3f1374f 100644
--- a/faststream/nats/schemas/kv_watch.py
+++ b/faststream/nats/schemas/kv_watch.py
@@ -1,6 +1,6 @@
from typing import Optional
-from faststream.broker.schemas import NameRequired
+from faststream._internal.proto import NameRequired
class KvWatch(NameRequired):
@@ -50,6 +50,3 @@ def __init__(
self.timeout = timeout
self.declare = declare
-
- def __hash__(self) -> int:
- return hash(self.name)
diff --git a/faststream/nats/schemas/obj_watch.py b/faststream/nats/schemas/obj_watch.py
index a1f11d4667..c8a3b9f245 100644
--- a/faststream/nats/schemas/obj_watch.py
+++ b/faststream/nats/schemas/obj_watch.py
@@ -56,7 +56,6 @@ def validate(cls, value: Union[bool, "ObjWatch"]) -> Optional["ObjWatch"]: ...
def validate(cls, value: Union[bool, "ObjWatch"]) -> Optional["ObjWatch"]:
if value is True:
return ObjWatch()
- elif value is False:
+ if value is False:
return None
- else:
- return value
+ return value
diff --git a/faststream/nats/schemas/pull_sub.py b/faststream/nats/schemas/pull_sub.py
index b38b48ebdb..2075b31834 100644
--- a/faststream/nats/schemas/pull_sub.py
+++ b/faststream/nats/schemas/pull_sub.py
@@ -47,7 +47,6 @@ def validate(cls, value: Union[bool, "PullSub"]) -> Optional["PullSub"]: ...
def validate(cls, value: Union[bool, "PullSub"]) -> Optional["PullSub"]:
if value is True:
return PullSub()
- elif value is False:
+ if value is False:
return None
- else:
- return value
+ return value
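
Both `ObjWatch.validate` and `PullSub.validate` above implement the same
bool-or-object coercion, so subscriber options can accept `True`/`False` as
shorthand for a default or disabled configuration. A quick illustration with
`PullSub` (the `batch_size` value is arbitrary):

from faststream.nats import PullSub

assert isinstance(PullSub.validate(True), PullSub)  # True -> default config
assert PullSub.validate(False) is None              # False -> feature disabled
custom = PullSub(batch_size=10)
assert PullSub.validate(custom) is custom           # instances pass through unchanged
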
diff --git a/faststream/nats/security.py b/faststream/nats/security.py
index c80931055b..8aa20e6acc 100644
--- a/faststream/nats/security.py
+++ b/faststream/nats/security.py
@@ -6,18 +6,18 @@
)
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
def parse_security(security: Optional[BaseSecurity]) -> "AnyDict":
if security is None:
return {}
- elif isinstance(security, SASLPlaintext):
+ if isinstance(security, SASLPlaintext):
return _parse_sasl_plaintext(security)
- elif isinstance(security, BaseSecurity):
+ if isinstance(security, BaseSecurity):
return _parse_base_security(security)
- else:
- raise NotImplementedError(f"NatsBroker does not support {type(security)}")
+ msg = f"NatsBroker does not support {type(security)}"
+ raise NotImplementedError(msg)
def _parse_base_security(security: BaseSecurity) -> "AnyDict":
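
The `isinstance` order in `parse_security` above is load-bearing: `SASLPlaintext`
subclasses `BaseSecurity`, so the more specific branch must be checked first or
the plaintext credentials would be dropped. A hedged usage sketch with demo
credentials only:

from faststream.nats.security import parse_security
from faststream.security import SASLPlaintext

assert parse_security(None) == {}

plain = SASLPlaintext(username="demo", password="demo")  # illustrative credentials
connect_kwargs = parse_security(plain)  # auth/TLS kwargs for the NATS client connect
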
diff --git a/faststream/nats/subscriber/subscription.py b/faststream/nats/subscriber/adapters.py
similarity index 100%
rename from faststream/nats/subscriber/subscription.py
rename to faststream/nats/subscriber/adapters.py
diff --git a/faststream/nats/subscriber/asyncapi.py b/faststream/nats/subscriber/asyncapi.py
deleted file mode 100644
index 402aa0b114..0000000000
--- a/faststream/nats/subscriber/asyncapi.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from typing import Any, Dict
-
-from typing_extensions import override
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import nats
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.nats.subscriber.usecase import (
- BatchPullStreamSubscriber,
- ConcurrentCoreSubscriber,
- ConcurrentPullStreamSubscriber,
- ConcurrentPushStreamSubscriber,
- CoreSubscriber,
- KeyValueWatchSubscriber,
- LogicSubscriber,
- ObjStoreWatchSubscriber,
- PullStreamSubscriber,
- PushStreamSubscription,
-)
-
-
-class AsyncAPISubscriber(LogicSubscriber[Any, Any]):
- """A class to represent a NATS handler."""
-
- def get_name(self) -> str:
- return f"{self.subject}:{self.call_name}"
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- subscribe=Operation(
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- nats=nats.ChannelBinding(
- subject=self.subject,
- queue=getattr(self, "queue", "") or None,
- )
- ),
- )
- }
-
-
-class AsyncAPICoreSubscriber(AsyncAPISubscriber, CoreSubscriber):
- """One-message core consumer with AsyncAPI methods."""
-
-
-class AsyncAPIConcurrentCoreSubscriber(AsyncAPISubscriber, ConcurrentCoreSubscriber):
- """One-message core concurrent consumer with AsyncAPI methods."""
-
-
-class AsyncAPIStreamSubscriber(AsyncAPISubscriber, PushStreamSubscription):
- """One-message JS Push consumer with AsyncAPI methods."""
-
-
-class AsyncAPIConcurrentPushStreamSubscriber(
- AsyncAPISubscriber, ConcurrentPushStreamSubscriber
-):
- """One-message JS Push concurrent consumer with AsyncAPI methods."""
-
-
-class AsyncAPIPullStreamSubscriber(AsyncAPISubscriber, PullStreamSubscriber):
- """One-message JS Pull consumer with AsyncAPI methods."""
-
-
-class AsyncAPIConcurrentPullStreamSubscriber(
- AsyncAPISubscriber, ConcurrentPullStreamSubscriber
-):
- """One-message JS Pull concurrent consumer with AsyncAPI methods."""
-
-
-class AsyncAPIBatchPullStreamSubscriber(AsyncAPISubscriber, BatchPullStreamSubscriber):
- """Batch-message Pull consumer with AsyncAPI methods."""
-
-
-class AsyncAPIKeyValueWatchSubscriber(AsyncAPISubscriber, KeyValueWatchSubscriber):
- """KeyValueWatch consumer with AsyncAPI methods."""
-
- @override
- def get_name(self) -> str:
- return ""
-
- @override
- def get_schema(self) -> Dict[str, Channel]:
- return {}
-
-
-class AsyncAPIObjStoreWatchSubscriber(AsyncAPISubscriber, ObjStoreWatchSubscriber):
- """ObjStoreWatch consumer with AsyncAPI methods."""
-
- @override
- def get_name(self) -> str:
- return ""
-
- @override
- def get_schema(self) -> Dict[str, Channel]:
- return {}
diff --git a/faststream/nats/subscriber/factory.py b/faststream/nats/subscriber/factory.py
index 4d42041743..4256601f41 100644
--- a/faststream/nats/subscriber/factory.py
+++ b/faststream/nats/subscriber/factory.py
@@ -1,5 +1,6 @@
import warnings
-from typing import TYPE_CHECKING, Any, Iterable, Optional, Sequence, Union
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
from nats.aio.subscription import (
DEFAULT_SUB_PENDING_BYTES_LIMIT,
@@ -11,26 +12,28 @@
DEFAULT_JS_SUB_PENDING_MSGS_LIMIT,
)
+from faststream._internal.constants import EMPTY
from faststream.exceptions import SetupError
-from faststream.nats.subscriber.asyncapi import (
- AsyncAPIBatchPullStreamSubscriber,
- AsyncAPIConcurrentCoreSubscriber,
- AsyncAPIConcurrentPullStreamSubscriber,
- AsyncAPIConcurrentPushStreamSubscriber,
- AsyncAPICoreSubscriber,
- AsyncAPIKeyValueWatchSubscriber,
- AsyncAPIObjStoreWatchSubscriber,
- AsyncAPIPullStreamSubscriber,
- AsyncAPIStreamSubscriber,
+from faststream.middlewares import AckPolicy
+from faststream.nats.subscriber.specified import (
+ SpecificationBatchPullStreamSubscriber,
+ SpecificationConcurrentCoreSubscriber,
+ SpecificationConcurrentPullStreamSubscriber,
+ SpecificationConcurrentPushStreamSubscriber,
+ SpecificationCoreSubscriber,
+ SpecificationKeyValueWatchSubscriber,
+ SpecificationObjStoreWatchSubscriber,
+ SpecificationPullStreamSubscriber,
+ SpecificationPushStreamSubscriber,
)
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from nats.js import api
- from faststream.broker.types import BrokerMiddleware
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
- from faststream.types import AnyDict
def create_subscriber(
@@ -59,25 +62,25 @@ def create_subscriber(
max_workers: int,
stream: Optional["JStream"],
# Subscriber args
+ ack_policy: "AckPolicy",
no_ack: bool,
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[Any]"],
- # AsyncAPI information
+ # Specification information
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> Union[
- "AsyncAPICoreSubscriber",
- "AsyncAPIConcurrentCoreSubscriber",
- "AsyncAPIStreamSubscriber",
- "AsyncAPIConcurrentPushStreamSubscriber",
- "AsyncAPIPullStreamSubscriber",
- "AsyncAPIConcurrentPullStreamSubscriber",
- "AsyncAPIBatchPullStreamSubscriber",
- "AsyncAPIKeyValueWatchSubscriber",
- "AsyncAPIObjStoreWatchSubscriber",
+ "SpecificationCoreSubscriber",
+ "SpecificationConcurrentCoreSubscriber",
+ "SpecificationPushStreamSubscriber",
+ "SpecificationConcurrentPushStreamSubscriber",
+ "SpecificationPullStreamSubscriber",
+ "SpecificationConcurrentPullStreamSubscriber",
+ "SpecificationBatchPullStreamSubscriber",
+ "SpecificationKeyValueWatchSubscriber",
+ "SpecificationObjStoreWatchSubscriber",
]:
_validate_input_for_misconfigure(
subject=subject,
@@ -93,6 +96,8 @@ def create_subscriber(
deliver_policy=deliver_policy,
headers_only=headers_only,
pull_sub=pull_sub,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
kv_watch=kv_watch,
obj_watch=obj_watch,
ack_first=ack_first,
@@ -100,6 +105,15 @@ def create_subscriber(
stream=stream,
)
+ if ack_first is not EMPTY:
+ ack_policy = AckPolicy.ACK_FIRST if ack_first else AckPolicy.REJECT_ON_ERROR
+
+ if no_ack is not EMPTY:
+ no_ack = AckPolicy.DO_NOTHING if no_ack else EMPTY
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
+
config = config or ConsumerConfig(filter_subjects=[])
if config.durable_name is None:
config.durable_name = durable
@@ -127,6 +141,12 @@ def create_subscriber(
else:
# JS Push Subscriber
+ if ack_policy is AckPolicy.ACK_FIRST:
+ manual_ack = False
+ ack_policy = AckPolicy.DO_NOTHING
+ else:
+ manual_ack = True
+
extra_options.update(
{
"ordered_consumer": ordered_consumer,
@@ -134,8 +154,8 @@ def create_subscriber(
"flow_control": flow_control,
"deliver_policy": deliver_policy,
"headers_only": headers_only,
- "manual_ack": not ack_first,
- }
+ "manual_ack": manual_ack,
+ },
)
else:
@@ -148,7 +168,7 @@ def create_subscriber(
}
if obj_watch is not None:
- return AsyncAPIObjStoreWatchSubscriber(
+ return SpecificationObjStoreWatchSubscriber(
subject=subject,
config=config,
obj_watch=obj_watch,
@@ -160,7 +180,7 @@ def create_subscriber(
)
if kv_watch is not None:
- return AsyncAPIKeyValueWatchSubscriber(
+ return SpecificationKeyValueWatchSubscriber(
subject=subject,
config=config,
kv_watch=kv_watch,
@@ -171,9 +191,9 @@ def create_subscriber(
include_in_schema=include_in_schema,
)
- elif stream is None:
+ if stream is None:
if max_workers > 1:
- return AsyncAPIConcurrentCoreSubscriber(
+ return SpecificationConcurrentCoreSubscriber(
max_workers=max_workers,
subject=subject,
config=config,
@@ -181,144 +201,131 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
+ ack_policy=ack_policy,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
- # AsyncAPI information
+ # Specification
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
- else:
- return AsyncAPICoreSubscriber(
+ return SpecificationCoreSubscriber(
+ subject=subject,
+ config=config,
+ queue=queue,
+ # basic args
+ extra_options=extra_options,
+ # Subscriber args
+ no_reply=no_reply,
+ ack_policy=ack_policy,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # Specification
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ if max_workers > 1:
+ if pull_sub is not None:
+ return SpecificationConcurrentPullStreamSubscriber(
+ max_workers=max_workers,
+ pull_sub=pull_sub,
+ stream=stream,
subject=subject,
config=config,
- queue=queue,
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
- # AsyncAPI information
+ # Specification
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
- else:
- if max_workers > 1:
- if pull_sub is not None:
- return AsyncAPIConcurrentPullStreamSubscriber(
- max_workers=max_workers,
- pull_sub=pull_sub,
- stream=stream,
- subject=subject,
- config=config,
- # basic args
- extra_options=extra_options,
- # Subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- # AsyncAPI information
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
+ return SpecificationConcurrentPushStreamSubscriber(
+ max_workers=max_workers,
+ stream=stream,
+ subject=subject,
+ config=config,
+ queue=queue,
+ # basic args
+ extra_options=extra_options,
+ # Subscriber args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # Specification
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
- else:
- return AsyncAPIConcurrentPushStreamSubscriber(
- max_workers=max_workers,
- stream=stream,
- subject=subject,
- config=config,
- queue=queue,
- # basic args
- extra_options=extra_options,
- # Subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- # AsyncAPI information
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
+ if pull_sub is not None:
+ if pull_sub.batch:
+ return SpecificationBatchPullStreamSubscriber(
+ pull_sub=pull_sub,
+ stream=stream,
+ subject=subject,
+ config=config,
+ # basic args
+ extra_options=extra_options,
+ # Subscriber args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # Specification
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
- else:
- if pull_sub is not None:
- if pull_sub.batch:
- return AsyncAPIBatchPullStreamSubscriber(
- pull_sub=pull_sub,
- stream=stream,
- subject=subject,
- config=config,
- # basic args
- extra_options=extra_options,
- # Subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- # AsyncAPI information
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- else:
- return AsyncAPIPullStreamSubscriber(
- pull_sub=pull_sub,
- stream=stream,
- subject=subject,
- config=config,
- # basic args
- extra_options=extra_options,
- # Subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- # AsyncAPI information
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
+ return SpecificationPullStreamSubscriber(
+ pull_sub=pull_sub,
+ stream=stream,
+ subject=subject,
+ config=config,
+ # basic args
+ extra_options=extra_options,
+ # Subscriber args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # Specification
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
- else:
- return AsyncAPIStreamSubscriber(
- stream=stream,
- subject=subject,
- queue=queue,
- config=config,
- # basic args
- extra_options=extra_options,
- # Subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_dependencies=broker_dependencies,
- broker_middlewares=broker_middlewares,
- # AsyncAPI information
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
+ return SpecificationPushStreamSubscriber(
+ stream=stream,
+ subject=subject,
+ queue=queue,
+ config=config,
+ # basic args
+ extra_options=extra_options,
+ # Subscriber args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # Specification information
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
-def _validate_input_for_misconfigure(
+def _validate_input_for_misconfigure( # noqa: PLR0915
subject: str,
queue: str, # default ""
pending_msgs_limit: Optional[int],
@@ -334,39 +341,94 @@ def _validate_input_for_misconfigure(
pull_sub: Optional["PullSub"],
kv_watch: Optional["KvWatch"],
obj_watch: Optional["ObjWatch"],
- ack_first: bool, # default False
+ ack_policy: "AckPolicy", # default EMPTY
+ no_ack: bool, # default EMPTY
+ ack_first: bool, # default EMPTY
max_workers: int, # default 1
stream: Optional["JStream"],
) -> None:
+ if ack_policy is not EMPTY:
+ if obj_watch is not None:
+ warnings.warn(
+ "You can't use acknowledgement policy with ObjectStorage watch subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ elif kv_watch is not None:
+ warnings.warn(
+ "You can't use acknowledgement policy with KeyValue watch subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ elif stream is None and ack_policy is not AckPolicy.DO_NOTHING:
+ warnings.warn(
+ (
+ "Core subscriber supports only `ack_policy=AckPolicy.DO_NOTHING` option for very specific cases. "
+ "If you are using different option, probably, you should use JetStream Subscriber instead."
+ ),
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if max_msgs > 0 and any((stream, kv_watch, obj_watch)):
+ warnings.warn(
+ "The `max_msgs` option can be used only with a NATS Core Subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if ack_first is not EMPTY:
+ warnings.warn(
+ "`ack_first` option was deprecated in prior to `ack_policy=AckPolicy.ACK_FIRST`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use deprecated `ack_first` and `ack_policy` simultaneously. Please, use `ack_policy` only."
+ raise SetupError(msg)
+
+ ack_policy = AckPolicy.ACK_FIRST if ack_first else AckPolicy.REJECT_ON_ERROR
+
+ if no_ack is not EMPTY:
+ warnings.warn(
+ "`no_ack` option was deprecated in prior to `ack_policy=AckPolicy.DO_NOTHING`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use deprecated `no_ack` and `ack_policy` simultaneously. Please, use `ack_policy` only."
+ raise SetupError(msg)
+
+ no_ack = AckPolicy.DO_NOTHING if no_ack else EMPTY
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
+
if not subject and not config:
- raise SetupError("You must provide either the `subject` or `config` option.")
+ msg = "You must provide either the `subject` or `config` option."
+ raise SetupError(msg)
if stream and kv_watch:
- raise SetupError(
- "You can't use both the `stream` and `kv_watch` options simultaneously."
- )
+ msg = "You can't use both the `stream` and `kv_watch` options simultaneously."
+ raise SetupError(msg)
if stream and obj_watch:
- raise SetupError(
- "You can't use both the `stream` and `obj_watch` options simultaneously."
- )
+ msg = "You can't use both the `stream` and `obj_watch` options simultaneously."
+ raise SetupError(msg)
if kv_watch and obj_watch:
- raise SetupError(
+ msg = (
"You can't use both the `kv_watch` and `obj_watch` options simultaneously."
)
+ raise SetupError(msg)
if pull_sub and not stream:
- raise SetupError(
- "The pull subscriber can only be used with the `stream` option."
- )
-
- if max_msgs > 0 and any((stream, kv_watch, obj_watch)):
- warnings.warn(
- "The `max_msgs` option can be used only with a NATS Core Subscriber.",
- RuntimeWarning,
- stacklevel=4,
- )
+ msg = "JetStream Pull Subscriber can only be used with the `stream` option."
+ raise SetupError(msg)
if not stream:
if obj_watch or kv_watch:
@@ -449,49 +511,47 @@ def _validate_input_for_misconfigure(
stacklevel=4,
)
- if ack_first:
+ if ack_policy is AckPolicy.ACK_FIRST:
warnings.warn(
- message="The `ack_first` option can be used only with JetStream Push Subscription.",
+ message="The `ack_policy=AckPolicy.ACK_FIRST:` option can be used only with JetStream Push Subscription.",
category=RuntimeWarning,
stacklevel=4,
)
- else:
- # JetStream Subscribers
- if pull_sub:
- if queue:
- warnings.warn(
- message="The `queue` option has no effect with JetStream Pull Subscription. You probably wanted to use the `durable` option instead.",
- category=RuntimeWarning,
- stacklevel=4,
- )
+ # JetStream Subscribers
+ elif pull_sub:
+ if queue:
+ warnings.warn(
+ message="The `queue` option has no effect with JetStream Pull Subscription. You probably wanted to use the `durable` option instead.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
- if ordered_consumer:
- warnings.warn(
- "The `ordered_consumer` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
- RuntimeWarning,
- stacklevel=4,
- )
+ if ordered_consumer:
+ warnings.warn(
+ "The `ordered_consumer` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
- if ack_first:
- warnings.warn(
- message="The `ack_first` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
- category=RuntimeWarning,
- stacklevel=4,
- )
+ if ack_policy is AckPolicy.ACK_FIRST:
+ warnings.warn(
+ message="The `ack_policy=AckPolicy.ACK_FIRST` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
- if flow_control:
- warnings.warn(
- message="The `flow_control` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
- category=RuntimeWarning,
- stacklevel=4,
- )
+ if flow_control:
+ warnings.warn(
+ message="The `flow_control` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
- else:
- # JS PushSub
- if durable is not None:
- warnings.warn(
- message="The JetStream Push consumer with the `durable` option can't be scaled horizontally across multiple instances. You probably wanted to use the `queue` option instead. Also, we strongly recommend using the Jetstream PullSubsriber with the `durable` option as the default.",
- category=RuntimeWarning,
- stacklevel=4,
- )
+ # JS PushSub
+ elif durable is not None:
+ warnings.warn(
+ message="The JetStream Push consumer with the `durable` option can't be scaled horizontally across multiple instances. You probably wanted to use the `queue` option instead. Also, we strongly recommend using the Jetstream PullSubsriber with the `durable` option as the default.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
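
To summarize the normalization that `create_subscriber` and
`_validate_input_for_misconfigure` now perform, here is a condensed sketch of the
mapping from the deprecated flags onto `AckPolicy`, following the intent stated in
the deprecation messages (combining an explicit `ack_policy` with a deprecated
flag raises `SetupError`, which the sketch omits):

from faststream._internal.constants import EMPTY
from faststream.middlewares import AckPolicy


def resolve_ack_policy(ack_policy=EMPTY, ack_first=EMPTY, no_ack=EMPTY):
    # Deprecated `ack_first` maps onto ACK_FIRST / REJECT_ON_ERROR.
    if ack_first is not EMPTY:
        ack_policy = AckPolicy.ACK_FIRST if ack_first else AckPolicy.REJECT_ON_ERROR
    # Deprecated `no_ack=True` maps onto DO_NOTHING.
    if no_ack is not EMPTY and no_ack:
        ack_policy = AckPolicy.DO_NOTHING
    # Final fallback when nothing was provided.
    if ack_policy is EMPTY:
        ack_policy = AckPolicy.REJECT_ON_ERROR
    return ack_policy


assert resolve_ack_policy() is AckPolicy.REJECT_ON_ERROR
assert resolve_ack_policy(ack_first=True) is AckPolicy.ACK_FIRST
assert resolve_ack_policy(no_ack=True) is AckPolicy.DO_NOTHING
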
diff --git a/faststream/nats/subscriber/specified.py b/faststream/nats/subscriber/specified.py
new file mode 100644
index 0000000000..dc8c6720ef
--- /dev/null
+++ b/faststream/nats/subscriber/specified.py
@@ -0,0 +1,129 @@
+from typing_extensions import override
+
+from faststream._internal.subscriber.specified import (
+ SpecificationSubscriber as SpecificationSubscriberMixin,
+)
+from faststream.nats.subscriber.usecases import (
+ BatchPullStreamSubscriber,
+ ConcurrentCoreSubscriber,
+ ConcurrentPullStreamSubscriber,
+ ConcurrentPushStreamSubscriber,
+ CoreSubscriber,
+ KeyValueWatchSubscriber,
+ ObjStoreWatchSubscriber,
+ PullStreamSubscriber,
+ PushStreamSubscription,
+)
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, SubscriberSpec
+from faststream.specification.schema.bindings import ChannelBinding, nats
+
+
+class SpecificationSubscriber(SpecificationSubscriberMixin):
+ """A class to represent a NATS handler."""
+
+ subject: str
+
+ def get_default_name(self) -> str:
+ return f"{self.subject}:{self.call_name}"
+
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: SubscriberSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ nats=nats.ChannelBinding(
+ subject=self.subject,
+ queue=getattr(self, "queue", "") or None,
+ ),
+ ),
+ ),
+ }
+
+
+class SpecificationCoreSubscriber(
+ SpecificationSubscriber,
+ CoreSubscriber,
+):
+ """One-message core consumer with Specification methods."""
+
+
+class SpecificationConcurrentCoreSubscriber(
+ SpecificationSubscriber,
+ ConcurrentCoreSubscriber,
+):
+ """One-message core concurrent consumer with Specification methods."""
+
+
+class SpecificationPushStreamSubscriber(
+ SpecificationSubscriber,
+ PushStreamSubscription,
+):
+ """One-message JS Push consumer with Specification methods."""
+
+
+class SpecificationConcurrentPushStreamSubscriber(
+ SpecificationSubscriber,
+ ConcurrentPushStreamSubscriber,
+):
+ """One-message JS Push concurrent consumer with Specification methods."""
+
+
+class SpecificationPullStreamSubscriber(
+ SpecificationSubscriber,
+ PullStreamSubscriber,
+):
+ """One-message JS Pull consumer with Specification methods."""
+
+
+class SpecificationConcurrentPullStreamSubscriber(
+ SpecificationSubscriber,
+ ConcurrentPullStreamSubscriber,
+):
+ """One-message JS Pull concurrent consumer with Specification methods."""
+
+
+class SpecificationBatchPullStreamSubscriber(
+ SpecificationSubscriber,
+ BatchPullStreamSubscriber,
+):
+ """Batch-message Pull consumer with Specification methods."""
+
+
+class SpecificationKeyValueWatchSubscriber(
+ SpecificationSubscriber,
+ KeyValueWatchSubscriber,
+):
+ """KeyValueWatch consumer with Specification methods."""
+
+ @override
+ def get_default_name(self) -> str:
+ return ""
+
+ @override
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ return {}
+
+
+class SpecificationObjStoreWatchSubscriber(
+ SpecificationSubscriber,
+ ObjStoreWatchSubscriber,
+):
+ """ObjStoreWatch consumer with Specification methods."""
+
+ @override
+ def get_default_name(self) -> str:
+ return ""
+
+ @override
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ return {}
diff --git a/faststream/nats/subscriber/state.py b/faststream/nats/subscriber/state.py
new file mode 100644
index 0000000000..d8e2825d83
--- /dev/null
+++ b/faststream/nats/subscriber/state.py
@@ -0,0 +1,60 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from nats.aio.client import Client
+ from nats.js import JetStreamContext
+
+ from faststream.nats.broker.state import BrokerState
+ from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
+
+
+class SubscriberState(Protocol):
+ client: "Client"
+ js: "JetStreamContext"
+ kv_declarer: "KVBucketDeclarer"
+ os_declarer: "OSBucketDeclarer"
+
+
+class EmptySubscriberState(SubscriberState):
+ @property
+ def client(self) -> "Client":
+ msg = "Connection is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+ @property
+ def js(self) -> "JetStreamContext":
+ msg = "Stream is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+ @property
+ def kv_declarer(self) -> "KVBucketDeclarer":
+ msg = "KeyValue is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+ @property
+ def os_declarer(self) -> "OSBucketDeclarer":
+ msg = "ObjectStorage is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+
+class ConnectedSubscriberState(SubscriberState):
+ def __init__(
+ self,
+ *,
+ parent_state: "BrokerState",
+ kv_declarer: "KVBucketDeclarer",
+ os_declarer: "OSBucketDeclarer",
+ ) -> None:
+ self._parent_state = parent_state
+ self.kv_declarer = kv_declarer
+ self.os_declarer = os_declarer
+
+ @property
+ def client(self) -> "Client":
+ return self._parent_state.connection
+
+ @property
+ def js(self) -> "JetStreamContext":
+ return self._parent_state.stream
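
The new state objects above turn "used before setup" mistakes into explicit
`IncorrectState` errors instead of opaque `AttributeError`s. A tiny sketch of the
failure mode:

from faststream.exceptions import IncorrectState
from faststream.nats.subscriber.state import EmptySubscriberState

state = EmptySubscriberState()
try:
    state.client  # accessed before the subscriber was set up
except IncorrectState as e:
    print(e)  # Connection is not available yet. Please set up the subscriber first.
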
diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py
deleted file mode 100644
index af90d57d16..0000000000
--- a/faststream/nats/subscriber/usecase.py
+++ /dev/null
@@ -1,1269 +0,0 @@
-from abc import abstractmethod
-from contextlib import suppress
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Dict,
- Generic,
- Iterable,
- List,
- Optional,
- Sequence,
- TypeVar,
- Union,
- cast,
-)
-
-import anyio
-from fast_depends.dependencies import Depends
-from nats.aio.msg import Msg
-from nats.errors import ConnectionClosedError, TimeoutError
-from nats.js.api import ConsumerConfig, ObjectInfo
-from typing_extensions import Annotated, Doc, override
-
-from faststream.broker.publisher.fake import FakePublisher
-from faststream.broker.subscriber.mixins import ConcurrentMixin, TasksMixin
-from faststream.broker.subscriber.usecase import SubscriberUsecase
-from faststream.broker.types import MsgType
-from faststream.broker.utils import process_msg
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.nats.message import NatsMessage
-from faststream.nats.parser import (
- BatchParser,
- JsParser,
- KvParser,
- NatsParser,
- ObjParser,
-)
-from faststream.nats.schemas.js_stream import compile_nats_wildcard
-from faststream.nats.subscriber.subscription import (
- UnsubscribeAdapter,
- Unsubscriptable,
-)
-from faststream.utils.context.repository import context
-
-if TYPE_CHECKING:
- from nats.aio.client import Client
- from nats.aio.subscription import Subscription
- from nats.js import JetStreamContext
- from nats.js.kv import KeyValue
- from nats.js.object_store import ObjectStore
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.publisher.proto import ProducerProto
- from faststream.broker.types import (
- AsyncCallable,
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
- from faststream.nats.message import NatsKvMessage, NatsObjMessage
- from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
- from faststream.types import AnyDict, Decorator, LoggerProto, SendableMessage
-
-
-ConnectionType = TypeVar("ConnectionType")
-
-
-class LogicSubscriber(Generic[ConnectionType, MsgType], SubscriberUsecase[MsgType]):
- """A class to represent a NATS handler."""
-
- subscription: Optional[Unsubscriptable]
- _fetch_sub: Optional[Unsubscriptable]
- producer: Optional["ProducerProto"]
- _connection: Optional[ConnectionType]
-
- def __init__(
- self,
- *,
- subject: str,
- config: "ConsumerConfig",
- extra_options: Optional["AnyDict"],
- # Subscriber args
- default_parser: "AsyncCallable",
- default_decoder: "AsyncCallable",
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- self.subject = subject
- self.config = config
-
- self.extra_options = extra_options or {}
-
- super().__init__(
- default_parser=default_parser,
- default_decoder=default_decoder,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- self._connection = None
- self._fetch_sub = None
- self.subscription = None
- self.producer = None
-
- @override
- def setup( # type: ignore[override]
- self,
- *,
- connection: ConnectionType,
- # basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
- extra_context: "AnyDict",
- # broker options
- broker_parser: Optional["CustomCallable"],
- broker_decoder: Optional["CustomCallable"],
- # dependant args
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
- ) -> None:
- self._connection = connection
-
- super().setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
- extra_context=extra_context,
- broker_parser=broker_parser,
- broker_decoder=broker_decoder,
- apply_types=apply_types,
- is_validate=is_validate,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
- )
-
- @property
- def clear_subject(self) -> str:
- """Compile `test.{name}` to `test.*` subject."""
- _, path = compile_nats_wildcard(self.subject)
- return path
-
- async def start(self) -> None:
- """Create NATS subscription and start consume tasks."""
- assert self._connection, NOT_CONNECTED_YET # nosec B101
-
- await super().start()
-
- if self.calls:
- await self._create_subscription(connection=self._connection)
-
- async def close(self) -> None:
- """Clean up handler subscription, cancel consume task in graceful mode."""
- await super().close()
-
- if self.subscription is not None:
- await self.subscription.unsubscribe()
- self.subscription = None
-
- if self._fetch_sub is not None:
- await self._fetch_sub.unsubscribe()
- self.subscription = None
-
- @abstractmethod
- async def _create_subscription(
- self,
- *,
- connection: ConnectionType,
- ) -> None:
- """Create NATS subscription object to consume messages."""
- raise NotImplementedError()
-
- @staticmethod
- def build_log_context(
- message: Annotated[
- Optional["StreamMessage[MsgType]"],
- Doc("Message which we are building context for"),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject we are listening"),
- ],
- *,
- queue: Annotated[
- str,
- Doc("Using queue group name"),
- ] = "",
- stream: Annotated[
- str,
- Doc("Stream object we are listening"),
- ] = "",
- ) -> Dict[str, str]:
- """Static method to build log context out of `self.consume` scope."""
- return {
- "subject": subject,
- "queue": queue,
- "stream": stream,
- "message_id": getattr(message, "message_id", ""),
- }
-
- def add_prefix(self, prefix: str) -> None:
- """Include Subscriber in router."""
- if self.subject:
- self.subject = "".join((prefix, self.subject))
- else:
- self.config.filter_subjects = [
- "".join((prefix, subject))
- for subject in (self.config.filter_subjects or ())
- ]
-
- @property
- def _resolved_subject_string(self) -> str:
- return self.subject or ", ".join(self.config.filter_subjects or ())
-
- def __hash__(self) -> int:
- return self.get_routing_hash(self._resolved_subject_string)
-
- @staticmethod
- def get_routing_hash(
- subject: Annotated[
- str,
- Doc("NATS subject to consume messages"),
- ],
- ) -> int:
- """Get handler hash by outer data.
-
- Using to find handler in `broker.handlers` dictionary.
- """
- return hash(subject)
-
-
-class _DefaultSubscriber(LogicSubscriber[ConnectionType, MsgType]):
- def __init__(
- self,
- *,
- subject: str,
- config: "ConsumerConfig",
- # default args
- extra_options: Optional["AnyDict"],
- # Subscriber args
- default_parser: "AsyncCallable",
- default_decoder: "AsyncCallable",
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[MsgType]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- subject=subject,
- config=config,
- extra_options=extra_options,
- # subscriber args
- default_parser=default_parser,
- default_decoder=default_decoder,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- def _make_response_publisher(
- self,
- message: "StreamMessage[Any]",
- ) -> Sequence[FakePublisher]:
- """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope."""
- if self._producer is None:
- return ()
-
- return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "subject": message.reply_to,
- },
- ),
- )
-
- def get_log_context(
- self,
- message: Annotated[
- Optional["StreamMessage[MsgType]"],
- Doc("Message which we are building context for"),
- ],
- ) -> Dict[str, str]:
- """Log context factory using in `self.consume` scope."""
- return self.build_log_context(
- message=message,
- subject=self.subject,
- )
-
-
-class CoreSubscriber(_DefaultSubscriber["Client", "Msg"]):
- subscription: Optional["Subscription"]
- _fetch_sub: Optional["Subscription"]
-
- def __init__(
- self,
- *,
- # default args
- subject: str,
- config: "ConsumerConfig",
- queue: str,
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser_ = NatsParser(pattern=subject, no_ack=no_ack)
-
- self.queue = queue
-
- super().__init__(
- subject=subject,
- config=config,
- extra_options=extra_options,
- # subscriber args
- default_parser=parser_.parse_message,
- default_decoder=parser_.decode_message,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def get_one(
- self,
- *,
- timeout: float = 5.0,
- ) -> "Optional[NatsMessage]":
- assert self._connection, "Please, start() subscriber first" # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- if self._fetch_sub is None:
- fetch_sub = self._fetch_sub = await self._connection.subscribe(
- subject=self.clear_subject,
- queue=self.queue,
- **self.extra_options,
- )
- else:
- fetch_sub = self._fetch_sub
-
- try:
- raw_message = await fetch_sub.next_msg(timeout=timeout)
- except TimeoutError:
- return None
-
- msg: NatsMessage = await process_msg( # type: ignore[assignment]
- msg=raw_message,
- middlewares=self._broker_middlewares,
- parser=self._parser,
- decoder=self._decoder,
- )
- return msg
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "Client",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.subscription = await connection.subscribe(
- subject=self.clear_subject,
- queue=self.queue,
- cb=self.consume,
- **self.extra_options,
- )
-
- def get_log_context(
- self,
- message: Annotated[
- Optional["StreamMessage[Msg]"],
- Doc("Message which we are building context for"),
- ],
- ) -> Dict[str, str]:
- """Log context factory using in `self.consume` scope."""
- return self.build_log_context(
- message=message,
- subject=self.subject,
- queue=self.queue,
- )
-
-
-class ConcurrentCoreSubscriber(
- ConcurrentMixin[Msg],
- CoreSubscriber,
-):
- def __init__(
- self,
- *,
- max_workers: int,
- # default args
- subject: str,
- config: "ConsumerConfig",
- queue: str,
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- max_workers=max_workers,
- # basic args
- subject=subject,
- config=config,
- queue=queue,
- extra_options=extra_options,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "Client",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.start_consume_task()
-
- self.subscription = await connection.subscribe(
- subject=self.clear_subject,
- queue=self.queue,
- cb=self._put_msg,
- **self.extra_options,
- )
-
-
-class _StreamSubscriber(_DefaultSubscriber["JetStreamContext", "Msg"]):
- _fetch_sub: Optional["JetStreamContext.PullSubscription"]
-
- def __init__(
- self,
- *,
- stream: "JStream",
- # default args
- subject: str,
- config: "ConsumerConfig",
- queue: str,
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser_ = JsParser(pattern=subject)
-
- self.queue = queue
- self.stream = stream
-
- super().__init__(
- subject=subject,
- config=config,
- extra_options=extra_options,
- # subscriber args
- default_parser=parser_.parse_message,
- default_decoder=parser_.decode_message,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- def get_log_context(
- self,
- message: Annotated[
- Optional["StreamMessage[Msg]"],
- Doc("Message which we are building context for"),
- ],
- ) -> Dict[str, str]:
- """Log context factory using in `self.consume` scope."""
- return self.build_log_context(
- message=message,
- subject=self._resolved_subject_string,
- queue=self.queue,
- stream=self.stream.name,
- )
-
- @override
- async def get_one(
- self,
- *,
- timeout: float = 5,
- ) -> Optional["NatsMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- if not self._fetch_sub:
- extra_options = {
- "pending_bytes_limit": self.extra_options["pending_bytes_limit"],
- "pending_msgs_limit": self.extra_options["pending_msgs_limit"],
- "durable": self.extra_options["durable"],
- "stream": self.extra_options["stream"],
- }
- if inbox_prefix := self.extra_options.get("inbox_prefix"):
- extra_options["inbox_prefix"] = inbox_prefix
-
- self._fetch_sub = await self._connection.pull_subscribe(
- subject=self.clear_subject,
- config=self.config,
- **extra_options,
- )
-
- try:
- raw_message = (
- await self._fetch_sub.fetch(
- batch=1,
- timeout=timeout,
- )
- )[0]
- except (TimeoutError, ConnectionClosedError):
- return None
-
- msg: NatsMessage = await process_msg( # type: ignore[assignment]
- msg=raw_message,
- middlewares=self._broker_middlewares,
- parser=self._parser,
- decoder=self._decoder,
- )
- return msg
-
-
-class PushStreamSubscription(_StreamSubscriber):
- subscription: Optional["JetStreamContext.PushSubscription"]
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.subscription = await connection.subscribe(
- subject=self.clear_subject,
- queue=self.queue,
- cb=self.consume,
- config=self.config,
- **self.extra_options,
- )
-
-
-class ConcurrentPushStreamSubscriber(
- ConcurrentMixin[Msg],
- _StreamSubscriber,
-):
- subscription: Optional["JetStreamContext.PushSubscription"]
-
- def __init__(
- self,
- *,
- max_workers: int,
- stream: "JStream",
- # default args
- subject: str,
- config: "ConsumerConfig",
- queue: str,
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- max_workers=max_workers,
- # basic args
- stream=stream,
- subject=subject,
- config=config,
- queue=queue,
- extra_options=extra_options,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.start_consume_task()
-
- self.subscription = await connection.subscribe(
- subject=self.clear_subject,
- queue=self.queue,
- cb=self._put_msg,
- config=self.config,
- **self.extra_options,
- )
-
-
-class PullStreamSubscriber(TasksMixin, _StreamSubscriber):
- subscription: Optional["JetStreamContext.PullSubscription"]
-
- def __init__(
- self,
- *,
- pull_sub: "PullSub",
- stream: "JStream",
- # default args
- subject: str,
- config: "ConsumerConfig",
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- self.pull_sub = pull_sub
-
- super().__init__(
- # basic args
- stream=stream,
- subject=subject,
- config=config,
- extra_options=extra_options,
- queue="",
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.subscription = await connection.pull_subscribe(
- subject=self.clear_subject,
- config=self.config,
- **self.extra_options,
- )
- self.add_task(self._consume_pull(cb=self.consume))
-
- async def _consume_pull(
- self,
- cb: Callable[["Msg"], Awaitable["SendableMessage"]],
- ) -> None:
- """Endless task consuming messages using NATS Pull subscriber."""
- assert self.subscription # nosec B101
-
- while self.running: # pragma: no branch
- messages = []
- with suppress(TimeoutError, ConnectionClosedError):
- messages = await self.subscription.fetch(
- batch=self.pull_sub.batch_size,
- timeout=self.pull_sub.timeout,
- )
-
- if messages:
- async with anyio.create_task_group() as tg:
- for msg in messages:
- tg.start_soon(cb, msg)
-
-
-class ConcurrentPullStreamSubscriber(
- ConcurrentMixin[Msg],
- PullStreamSubscriber,
-):
- def __init__(
- self,
- *,
- max_workers: int,
- # default args
- pull_sub: "PullSub",
- stream: "JStream",
- subject: str,
- config: "ConsumerConfig",
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[Msg]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- max_workers=max_workers,
- # basic args
- pull_sub=pull_sub,
- stream=stream,
- subject=subject,
- config=config,
- extra_options=extra_options,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.start_consume_task()
-
- self.subscription = await connection.pull_subscribe(
- subject=self.clear_subject,
- config=self.config,
- **self.extra_options,
- )
- self.add_task(self._consume_pull(cb=self._put_msg))
-
-
-class BatchPullStreamSubscriber(
- TasksMixin,
- _DefaultSubscriber["JetStreamContext", List["Msg"]],
-):
- """Batch-message consumer class."""
-
- subscription: Optional["JetStreamContext.PullSubscription"]
- _fetch_sub: Optional["JetStreamContext.PullSubscription"]
-
- def __init__(
- self,
- *,
- # default args
- subject: str,
- config: "ConsumerConfig",
- stream: "JStream",
- pull_sub: "PullSub",
- extra_options: Optional["AnyDict"],
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[List[Msg]]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = BatchParser(pattern=subject)
-
- self.stream = stream
- self.pull_sub = pull_sub
-
- super().__init__(
- subject=subject,
- config=config,
- extra_options=extra_options,
- # subscriber args
- default_parser=parser.parse_batch,
- default_decoder=parser.decode_batch,
- # Propagated args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def get_one(
- self,
- *,
- timeout: float = 5,
- ) -> Optional["NatsMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- if not self._fetch_sub:
- fetch_sub = self._fetch_sub = await self._connection.pull_subscribe(
- subject=self.clear_subject,
- config=self.config,
- **self.extra_options,
- )
- else:
- fetch_sub = self._fetch_sub
-
- try:
- raw_message = await fetch_sub.fetch(
- batch=1,
- timeout=timeout,
- )
- except TimeoutError:
- return None
-
- msg = cast(
- NatsMessage,
- await process_msg(
- msg=raw_message,
- middlewares=self._broker_middlewares,
- parser=self._parser,
- decoder=self._decoder,
- ),
- )
- return msg
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
- """Create NATS subscription and start consume task."""
- if self.subscription:
- return
-
- self.subscription = await connection.pull_subscribe(
- subject=self.clear_subject,
- config=self.config,
- **self.extra_options,
- )
- self.add_task(self._consume_pull())
-
- async def _consume_pull(self) -> None:
- """Endless task consuming messages using NATS Pull subscriber."""
- assert self.subscription, "You should call `create_subscription` at first." # nosec B101
-
- while self.running: # pragma: no branch
- with suppress(TimeoutError, ConnectionClosedError):
- messages = await self.subscription.fetch(
- batch=self.pull_sub.batch_size,
- timeout=self.pull_sub.timeout,
- )
-
- if messages:
- await self.consume(messages)
-
-
-class KeyValueWatchSubscriber(
- TasksMixin,
- LogicSubscriber["KVBucketDeclarer", "KeyValue.Entry"],
-):
- subscription: Optional["UnsubscribeAdapter[KeyValue.KeyWatcher]"]
- _fetch_sub: Optional[UnsubscribeAdapter["KeyValue.KeyWatcher"]]
-
- def __init__(
- self,
- *,
- subject: str,
- config: "ConsumerConfig",
- kv_watch: "KvWatch",
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[KeyValue.Entry]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = KvParser(pattern=subject)
- self.kv_watch = kv_watch
-
- super().__init__(
- subject=subject,
- config=config,
- extra_options=None,
- no_ack=True,
- no_reply=True,
- retry=False,
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def get_one(
- self,
- *,
- timeout: float = 5,
- ) -> Optional["NatsKvMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- if not self._fetch_sub:
- bucket = await self._connection.create_key_value(
- bucket=self.kv_watch.name,
- declare=self.kv_watch.declare,
- )
-
- fetch_sub = self._fetch_sub = UnsubscribeAdapter["KeyValue.KeyWatcher"](
- await bucket.watch(
- keys=self.clear_subject,
- headers_only=self.kv_watch.headers_only,
- include_history=self.kv_watch.include_history,
- ignore_deletes=self.kv_watch.ignore_deletes,
- meta_only=self.kv_watch.meta_only,
- )
- )
- else:
- fetch_sub = self._fetch_sub
-
- raw_message: Optional[KeyValue.Entry] = None
- sleep_interval = timeout / 10
- with anyio.move_on_after(timeout):
- while ( # noqa: ASYNC110
- raw_message := await fetch_sub.obj.updates(timeout) # type: ignore[no-untyped-call]
- ) is None:
- await anyio.sleep(sleep_interval)
-
- return await process_msg( # type: ignore[return-value]
- msg=raw_message,
- middlewares=self._broker_middlewares,
- parser=self._parser,
- decoder=self._decoder,
- )
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "KVBucketDeclarer",
- ) -> None:
- if self.subscription:
- return
-
- bucket = await connection.create_key_value(
- bucket=self.kv_watch.name,
- declare=self.kv_watch.declare,
- )
-
- self.subscription = UnsubscribeAdapter["KeyValue.KeyWatcher"](
- await bucket.watch(
- keys=self.clear_subject,
- headers_only=self.kv_watch.headers_only,
- include_history=self.kv_watch.include_history,
- ignore_deletes=self.kv_watch.ignore_deletes,
- meta_only=self.kv_watch.meta_only,
- )
- )
-
- self.add_task(self._consume_watch())
-
- async def _consume_watch(self) -> None:
- assert self.subscription, "You should call `create_subscription` at first." # nosec B101
-
- key_watcher = self.subscription.obj
-
- while self.running:
- with suppress(ConnectionClosedError, TimeoutError):
- message = cast(
- Optional["KeyValue.Entry"],
- await key_watcher.updates(self.kv_watch.timeout), # type: ignore[no-untyped-call]
- )
-
- if message:
- await self.consume(message)
-
- def _make_response_publisher(
- self,
- message: Annotated[
- "StreamMessage[KeyValue.Entry]",
- Doc("Message requiring reply"),
- ],
- ) -> Sequence[FakePublisher]:
- """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope."""
- return ()
-
- def __hash__(self) -> int:
- return hash(self.kv_watch) + hash(self.subject)
-
- def get_log_context(
- self,
- message: Annotated[
- Optional["StreamMessage[KeyValue.Entry]"],
- Doc("Message which we are building context for"),
- ],
- ) -> Dict[str, str]:
- """Log context factory using in `self.consume` scope."""
- return self.build_log_context(
- message=message,
- subject=self.subject,
- stream=self.kv_watch.name,
- )
-
-
-OBJECT_STORAGE_CONTEXT_KEY = "__object_storage"
-
-
-class ObjStoreWatchSubscriber(
- TasksMixin,
- LogicSubscriber["OSBucketDeclarer", ObjectInfo],
-):
- subscription: Optional["UnsubscribeAdapter[ObjectStore.ObjectWatcher]"]
- _fetch_sub: Optional[UnsubscribeAdapter["ObjectStore.ObjectWatcher"]]
-
- def __init__(
- self,
- *,
- subject: str,
- config: "ConsumerConfig",
- obj_watch: "ObjWatch",
- broker_dependencies: Iterable[Depends],
- broker_middlewares: Sequence["BrokerMiddleware[List[Msg]]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = ObjParser(pattern="")
-
- self.obj_watch = obj_watch
- self.obj_watch_conn = None
-
- super().__init__(
- subject=subject,
- config=config,
- extra_options=None,
- no_ack=True,
- no_reply=True,
- retry=False,
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI args
- description_=description_,
- title_=title_,
- include_in_schema=include_in_schema,
- )
-
- @override
- async def get_one(
- self,
- *,
- timeout: float = 5,
- ) -> Optional["NatsObjMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- if not self._fetch_sub:
- self.bucket = await self._connection.create_object_store(
- bucket=self.subject,
- declare=self.obj_watch.declare,
- )
-
- obj_watch = await self.bucket.watch(
- ignore_deletes=self.obj_watch.ignore_deletes,
- include_history=self.obj_watch.include_history,
- meta_only=self.obj_watch.meta_only,
- )
- fetch_sub = self._fetch_sub = UnsubscribeAdapter[
- "ObjectStore.ObjectWatcher"
- ](obj_watch)
- else:
- fetch_sub = self._fetch_sub
-
- raw_message: Optional[ObjectInfo] = None
- sleep_interval = timeout / 10
- with anyio.move_on_after(timeout):
- while ( # noqa: ASYNC110
- raw_message := await fetch_sub.obj.updates(timeout) # type: ignore[no-untyped-call]
- ) is None:
- await anyio.sleep(sleep_interval)
-
- return await process_msg( # type: ignore[return-value]
- msg=raw_message,
- middlewares=self._broker_middlewares,
- parser=self._parser,
- decoder=self._decoder,
- )
-
- @override
- async def _create_subscription(
- self,
- *,
- connection: "OSBucketDeclarer",
- ) -> None:
- if self.subscription:
- return
-
- self.bucket = await connection.create_object_store(
- bucket=self.subject,
- declare=self.obj_watch.declare,
- )
-
- self.add_task(self._consume_watch())
-
- async def _consume_watch(self) -> None:
- assert self.bucket, "You should call `create_subscription` at first." # nosec B101
-
- # Should be created inside task to avoid nats-py lock
- obj_watch = await self.bucket.watch(
- ignore_deletes=self.obj_watch.ignore_deletes,
- include_history=self.obj_watch.include_history,
- meta_only=self.obj_watch.meta_only,
- )
-
- self.subscription = UnsubscribeAdapter["ObjectStore.ObjectWatcher"](obj_watch)
-
- while self.running:
- with suppress(TimeoutError):
- message = cast(
- Optional["ObjectInfo"],
- await obj_watch.updates(self.obj_watch.timeout), # type: ignore[no-untyped-call]
- )
-
- if message:
- with context.scope(OBJECT_STORAGE_CONTEXT_KEY, self.bucket):
- await self.consume(message)
-
- def _make_response_publisher(
- self,
- message: Annotated[
- "StreamMessage[ObjectInfo]",
- Doc("Message requiring reply"),
- ],
- ) -> Sequence[FakePublisher]:
- """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope."""
- return ()
-
- def __hash__(self) -> int:
- return hash(self.subject)
-
- def get_log_context(
- self,
- message: Annotated[
- Optional["StreamMessage[ObjectInfo]"],
- Doc("Message which we are building context for"),
- ],
- ) -> Dict[str, str]:
- """Log context factory using in `self.consume` scope."""
- return self.build_log_context(
- message=message,
- subject=self.subject,
- )
diff --git a/faststream/nats/subscriber/usecases/__init__.py b/faststream/nats/subscriber/usecases/__init__.py
new file mode 100644
index 0000000000..040a9f9680
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/__init__.py
@@ -0,0 +1,26 @@
+from .basic import LogicSubscriber
+from .core_subscriber import ConcurrentCoreSubscriber, CoreSubscriber
+from .key_value_subscriber import KeyValueWatchSubscriber
+from .object_storage_subscriber import ObjStoreWatchSubscriber
+from .stream_pull_subscriber import (
+ BatchPullStreamSubscriber,
+ ConcurrentPullStreamSubscriber,
+ PullStreamSubscriber,
+)
+from .stream_push_subscriber import (
+ ConcurrentPushStreamSubscriber,
+ PushStreamSubscription,
+)
+
+__all__ = (
+ "BatchPullStreamSubscriber",
+ "ConcurrentCoreSubscriber",
+ "ConcurrentPullStreamSubscriber",
+ "ConcurrentPushStreamSubscriber",
+ "CoreSubscriber",
+ "KeyValueWatchSubscriber",
+ "LogicSubscriber",
+ "ObjStoreWatchSubscriber",
+ "PullStreamSubscriber",
+ "PushStreamSubscription",
+)
diff --git a/faststream/nats/subscriber/usecases/basic.py b/faststream/nats/subscriber/usecases/basic.py
new file mode 100644
index 0000000000..bee03746b3
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/basic.py
@@ -0,0 +1,230 @@
+from abc import abstractmethod
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+from typing_extensions import override
+
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream._internal.types import MsgType
+from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
+from faststream.nats.publisher.fake import NatsFakePublisher
+from faststream.nats.schemas.js_stream import compile_nats_wildcard
+from faststream.nats.subscriber.adapters import (
+ Unsubscriptable,
+)
+from faststream.nats.subscriber.state import (
+ ConnectedSubscriberState,
+ EmptySubscriberState,
+ SubscriberState,
+)
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ )
+ from faststream._internal.publisher.proto import BasePublisherProto, ProducerProto
+ from faststream._internal.state import (
+ BrokerState as BasicState,
+ Pointer,
+ )
+ from faststream._internal.types import (
+ AsyncCallable,
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.broker.state import BrokerState
+
+
+class LogicSubscriber(SubscriberUsecase[MsgType]):
+ """Basic class for all NATS Subscriber types (KeyValue, ObjectStorage, Core & JetStream)."""
+
+ subscription: Optional[Unsubscriptable]
+ _fetch_sub: Optional[Unsubscriptable]
+ producer: Optional["ProducerProto"]
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ ) -> None:
+ self.subject = subject
+ self.config = config
+
+ self.extra_options = extra_options or {}
+
+ super().__init__(
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ self._fetch_sub = None
+ self.subscription = None
+ self.producer = None
+
+ self._connection_state: SubscriberState = EmptySubscriberState()
+
+ @override
+ def _setup( # type: ignore[override]
+ self,
+ *,
+ connection_state: "BrokerState",
+ os_declarer: "OSBucketDeclarer",
+ kv_declarer: "KVBucketDeclarer",
+ # basic args
+ extra_context: "AnyDict",
+ # broker options
+ broker_parser: Optional["CustomCallable"],
+ broker_decoder: Optional["CustomCallable"],
+ # dependant args
+ state: "Pointer[BasicState]",
+ ) -> None:
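+        # Bind the subscriber to the live connection state; the bucket
+        # declarers are what the KV / Object Store subscribers use to
+        # create their buckets lazily.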
+ self._connection_state = ConnectedSubscriberState(
+ parent_state=connection_state,
+ os_declarer=os_declarer,
+ kv_declarer=kv_declarer,
+ )
+
+ super()._setup(
+ extra_context=extra_context,
+ broker_parser=broker_parser,
+ broker_decoder=broker_decoder,
+ state=state,
+ )
+
+ @property
+ def clear_subject(self) -> str:
+ """Compile `test.{name}` to `test.*` subject."""
+ _, path = compile_nats_wildcard(self.subject)
+ return path
+
+ async def start(self) -> None:
+ """Create NATS subscription and start consume tasks."""
+ await super().start()
+
+ if self.calls:
+ await self._create_subscription()
+
+ async def close(self) -> None:
+ """Clean up handler subscription, cancel consume task in graceful mode."""
+ await super().close()
+
+ if self.subscription is not None:
+ await self.subscription.unsubscribe()
+ self.subscription = None
+
+ if self._fetch_sub is not None:
+ await self._fetch_sub.unsubscribe()
+            self._fetch_sub = None
+
+ @abstractmethod
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription object to consume messages."""
+ raise NotImplementedError
+
+ @staticmethod
+ def build_log_context(
+ message: Optional["StreamMessage[MsgType]"],
+ subject: str,
+ *,
+ queue: str = "",
+ stream: str = "",
+ ) -> dict[str, str]:
+ """Static method to build log context out of `self.consume` scope."""
+ return {
+ "subject": subject,
+ "queue": queue,
+ "stream": stream,
+ "message_id": getattr(message, "message_id", ""),
+ }
+
+ def add_prefix(self, prefix: str) -> None:
+ """Include Subscriber in router."""
+ if self.subject:
+ self.subject = f"{prefix}{self.subject}"
+ else:
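+            # Multi-subject (JetStream `filter_subjects`) consumers keep
+            # `subject` empty, so the prefix is applied to every filter.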
+ self.config.filter_subjects = [
+ f"{prefix}{subject}" for subject in (self.config.filter_subjects or ())
+ ]
+
+ @property
+ def _resolved_subject_string(self) -> str:
+ return self.subject or ", ".join(self.config.filter_subjects or ())
+
+
+class DefaultSubscriber(LogicSubscriber[MsgType]):
+ """Basic class for Core & JetStream Subscribers."""
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ # default args
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ ) -> None:
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ def _make_response_publisher(
+ self,
+ message: "StreamMessage[Any]",
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
+ return (
+ NatsFakePublisher(
+ producer=self._state.get().producer,
+ subject=message.reply_to,
+ ),
+ )
+
+ def get_log_context(
+ self,
+ message: Optional["StreamMessage[MsgType]"],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ )
diff --git a/faststream/nats/subscriber/usecases/core_subscriber.py b/faststream/nats/subscriber/usecases/core_subscriber.py
new file mode 100644
index 0000000000..bf307093b9
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/core_subscriber.py
@@ -0,0 +1,147 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+)
+
+from nats.errors import TimeoutError
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.nats.parser import NatsParser
+
+from .basic import DefaultSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.aio.subscription import Subscription
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.message import StreamMessage
+ from faststream.nats.message import NatsMessage
+
+
+class CoreSubscriber(DefaultSubscriber["Msg"]):
+ subscription: Optional["Subscription"]
+ _fetch_sub: Optional["Subscription"]
+
+ def __init__(
+ self,
+ *,
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ queue: str,
+ extra_options: Optional["AnyDict"],
+ ack_policy: AckPolicy,
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ ) -> None:
+ parser_ = NatsParser(
+ pattern=subject,
+ is_ack_disabled=ack_policy is not AckPolicy.DO_NOTHING,
+ )
+
+ self.queue = queue
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=parser_.parse_message,
+ default_decoder=parser_.decode_message,
+ # Propagated args
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5.0,
+ ) -> "Optional[NatsMessage]":
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if self._fetch_sub is None:
+ fetch_sub = self._fetch_sub = await self._connection_state.client.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ **self.extra_options,
+ )
+ else:
+ fetch_sub = self._fetch_sub
+
+ try:
+ raw_message = await fetch_sub.next_msg(timeout=timeout)
+ except TimeoutError:
+ return None
+
+ context = self._state.get().di_state.context
+
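+        # Instantiate the broker middlewares against the raw message so
+        # `process_msg` can run them with the DI context attached.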
+ msg: NatsMessage = await process_msg( # type: ignore[assignment]
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.client.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self.consume,
+ **self.extra_options,
+ )
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[Msg]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ queue=self.queue,
+ )
+
+
+class ConcurrentCoreSubscriber(ConcurrentMixin["Msg"], CoreSubscriber):
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.start_consume_task()
+
+ self.subscription = await self._connection_state.client.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self._put_msg,
+ **self.extra_options,
+ )
diff --git a/faststream/nats/subscriber/usecases/key_value_subscriber.py b/faststream/nats/subscriber/usecases/key_value_subscriber.py
new file mode 100644
index 0000000000..20827bd6bf
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/key_value_subscriber.py
@@ -0,0 +1,180 @@
+from collections.abc import Iterable
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+ cast,
+)
+
+import anyio
+from nats.errors import ConnectionClosedError, TimeoutError
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.mixins import TasksMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.nats.parser import (
+ KvParser,
+)
+from faststream.nats.subscriber.adapters import (
+ UnsubscribeAdapter,
+)
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.js.api import ConsumerConfig
+ from nats.js.kv import KeyValue
+
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.nats.message import NatsKvMessage
+ from faststream.nats.schemas import KvWatch
+
+
+class KeyValueWatchSubscriber(
+ TasksMixin,
+ LogicSubscriber["KeyValue.Entry"],
+):
+ subscription: Optional["UnsubscribeAdapter[KeyValue.KeyWatcher]"]
+ _fetch_sub: Optional[UnsubscribeAdapter["KeyValue.KeyWatcher"]]
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ kv_watch: "KvWatch",
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[KeyValue.Entry]"],
+ ) -> None:
+ parser = KvParser(pattern=subject)
+ self.kv_watch = kv_watch
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=None,
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=True,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsKvMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
+ bucket = await self._connection_state.kv_declarer.create_key_value(
+ bucket=self.kv_watch.name,
+ declare=self.kv_watch.declare,
+ )
+
+ fetch_sub = self._fetch_sub = UnsubscribeAdapter["KeyValue.KeyWatcher"](
+ await bucket.watch(
+ keys=self.clear_subject,
+ headers_only=self.kv_watch.headers_only,
+ include_history=self.kv_watch.include_history,
+ ignore_deletes=self.kv_watch.ignore_deletes,
+ meta_only=self.kv_watch.meta_only,
+ ),
+ )
+ else:
+ fetch_sub = self._fetch_sub
+
+ raw_message = None
+ sleep_interval = timeout / 10
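+        # Poll the watcher until an entry arrives or the deadline set by
+        # `move_on_after` expires; `updates()` may return None before the
+        # deadline, hence the sleep-and-retry loop.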
+ with anyio.move_on_after(timeout):
+ while ( # noqa: ASYNC110
+                raw_message := await fetch_sub.obj.updates(timeout)  # type: ignore[no-untyped-call]
+ ) is None:
+ await anyio.sleep(sleep_interval)
+
+ context = self._state.get().di_state.context
+
+ msg: NatsKvMessage = await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ @override
+ async def _create_subscription(self) -> None:
+ if self.subscription:
+ return
+
+ bucket = await self._connection_state.kv_declarer.create_key_value(
+ bucket=self.kv_watch.name,
+ declare=self.kv_watch.declare,
+ )
+
+ self.subscription = UnsubscribeAdapter["KeyValue.KeyWatcher"](
+ await bucket.watch(
+ keys=self.clear_subject,
+ headers_only=self.kv_watch.headers_only,
+ include_history=self.kv_watch.include_history,
+ ignore_deletes=self.kv_watch.ignore_deletes,
+ meta_only=self.kv_watch.meta_only,
+ ),
+ )
+
+ self.add_task(self.__consume_watch())
+
+ async def __consume_watch(self) -> None:
+        assert self.subscription, "You should call `create_subscription` first."  # nosec B101
+
+ key_watcher = self.subscription.obj
+
+ while self.running:
+ with suppress(ConnectionClosedError, TimeoutError):
+ message = cast(
+ "Optional[KeyValue.Entry]",
+                    await key_watcher.updates(self.kv_watch.timeout),  # type: ignore[no-untyped-call]
+ )
+
+ if message:
+ await self.consume(message)
+
+ def _make_response_publisher(
+ self,
+ message: Annotated[
+ "StreamMessage[KeyValue.Entry]",
+ Doc("Message requiring reply"),
+ ],
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
+ return ()
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[KeyValue.Entry]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ stream=self.kv_watch.name,
+ )
diff --git a/faststream/nats/subscriber/usecases/object_storage_subscriber.py b/faststream/nats/subscriber/usecases/object_storage_subscriber.py
new file mode 100644
index 0000000000..326d8fe2a6
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/object_storage_subscriber.py
@@ -0,0 +1,183 @@
+from collections.abc import Iterable
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+ cast,
+)
+
+import anyio
+from nats.errors import TimeoutError
+from nats.js.api import ConsumerConfig, ObjectInfo
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.mixins import TasksMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.nats.parser import (
+ ObjParser,
+)
+from faststream.nats.subscriber.adapters import (
+ UnsubscribeAdapter,
+)
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js.object_store import ObjectStore
+
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.nats.message import NatsObjMessage
+ from faststream.nats.schemas import ObjWatch
+
+
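+# Context key under which the active ObjectStore bucket is exposed to
+# handlers while a watch event is being consumed (see `__consume_watch`).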
+OBJECT_STORAGE_CONTEXT_KEY = "__object_storage"
+
+
+class ObjStoreWatchSubscriber(
+ TasksMixin,
+ LogicSubscriber[ObjectInfo],
+):
+ subscription: Optional["UnsubscribeAdapter[ObjectStore.ObjectWatcher]"]
+ _fetch_sub: Optional[UnsubscribeAdapter["ObjectStore.ObjectWatcher"]]
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ obj_watch: "ObjWatch",
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[list[Msg]]"],
+ ) -> None:
+ parser = ObjParser(pattern="")
+
+ self.obj_watch = obj_watch
+ self.obj_watch_conn = None
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=None,
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=True,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsObjMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
+ self.bucket = await self._connection_state.os_declarer.create_object_store(
+ bucket=self.subject,
+ declare=self.obj_watch.declare,
+ )
+
+ obj_watch = await self.bucket.watch(
+ ignore_deletes=self.obj_watch.ignore_deletes,
+ include_history=self.obj_watch.include_history,
+ meta_only=self.obj_watch.meta_only,
+ )
+ fetch_sub = self._fetch_sub = UnsubscribeAdapter[
+ "ObjectStore.ObjectWatcher"
+ ](obj_watch)
+ else:
+ fetch_sub = self._fetch_sub
+
+ raw_message = None
+ sleep_interval = timeout / 10
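+        # Same polling pattern as the KV subscriber: retry `updates()` until
+        # a message arrives or `move_on_after` cancels the scope.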
+ with anyio.move_on_after(timeout):
+ while ( # noqa: ASYNC110
+                raw_message := await fetch_sub.obj.updates(timeout)  # type: ignore[no-untyped-call]
+ ) is None:
+ await anyio.sleep(sleep_interval)
+
+ context = self._state.get().di_state.context
+
+ msg: NatsObjMessage = await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ @override
+ async def _create_subscription(self) -> None:
+ if self.subscription:
+ return
+
+ self.bucket = await self._connection_state.os_declarer.create_object_store(
+ bucket=self.subject,
+ declare=self.obj_watch.declare,
+ )
+
+ self.add_task(self.__consume_watch())
+
+ async def __consume_watch(self) -> None:
+        assert self.bucket, "You should call `create_subscription` first."  # nosec B101
+
+        # The watcher must be created inside the task to avoid a nats-py lock issue.
+ obj_watch = await self.bucket.watch(
+ ignore_deletes=self.obj_watch.ignore_deletes,
+ include_history=self.obj_watch.include_history,
+ meta_only=self.obj_watch.meta_only,
+ )
+
+ self.subscription = UnsubscribeAdapter["ObjectStore.ObjectWatcher"](obj_watch)
+
+ context = self._state.get().di_state.context
+
+ while self.running:
+ with suppress(TimeoutError):
+ message = cast(
+ "Optional[ObjectInfo]",
+ await obj_watch.updates(self.obj_watch.timeout), # type: ignore[no-untyped-call]
+ )
+
+ if message:
+ with context.scope(OBJECT_STORAGE_CONTEXT_KEY, self.bucket):
+ await self.consume(message)
+
+ def _make_response_publisher(
+ self,
+ message: Annotated[
+ "StreamMessage[ObjectInfo]",
+ Doc("Message requiring reply"),
+ ],
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
+ return ()
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[ObjectInfo]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ )
diff --git a/faststream/nats/subscriber/usecases/stream_basic.py b/faststream/nats/subscriber/usecases/stream_basic.py
new file mode 100644
index 0000000000..80de14d278
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/stream_basic.py
@@ -0,0 +1,134 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+)
+
+from nats.errors import ConnectionClosedError, TimeoutError
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.utils import process_msg
+from faststream.nats.parser import (
+ JsParser,
+)
+
+from .basic import DefaultSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js import JetStreamContext
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ )
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.message import NatsMessage
+ from faststream.nats.schemas import JStream
+
+
+class StreamSubscriber(DefaultSubscriber["Msg"]):
+ _fetch_sub: Optional["JetStreamContext.PullSubscription"]
+
+ def __init__(
+ self,
+ *,
+ stream: "JStream",
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ queue: str,
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ ) -> None:
+ parser_ = JsParser(pattern=subject)
+
+ self.queue = queue
+ self.stream = stream
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=parser_.parse_message,
+ default_decoder=parser_.decode_message,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[Msg]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self._resolved_subject_string,
+ queue=self.queue,
+ stream=self.stream.name,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
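+            # `pull_subscribe` accepts only a subset of the subscription
+            # options, so pick out just the keys it understands.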
+ extra_options = {
+ "pending_bytes_limit": self.extra_options["pending_bytes_limit"],
+ "pending_msgs_limit": self.extra_options["pending_msgs_limit"],
+ "durable": self.extra_options["durable"],
+ "stream": self.extra_options["stream"],
+ }
+ if inbox_prefix := self.extra_options.get("inbox_prefix"):
+ extra_options["inbox_prefix"] = inbox_prefix
+
+ self._fetch_sub = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **extra_options,
+ )
+
+ try:
+ raw_message = (
+ await self._fetch_sub.fetch(
+ batch=1,
+ timeout=timeout,
+ )
+ )[0]
+ except (TimeoutError, ConnectionClosedError):
+ return None
+
+ context = self._state.get().di_state.context
+
+ msg: NatsMessage = await process_msg( # type: ignore[assignment]
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
diff --git a/faststream/nats/subscriber/usecases/stream_pull_subscriber.py b/faststream/nats/subscriber/usecases/stream_pull_subscriber.py
new file mode 100644
index 0000000000..04ed7f5377
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/stream_pull_subscriber.py
@@ -0,0 +1,240 @@
+from collections.abc import Awaitable, Callable, Iterable
+from contextlib import suppress
+from typing import (
+    TYPE_CHECKING,
+    Optional,
+    cast,
+)
+
+import anyio
+from nats.errors import ConnectionClosedError, TimeoutError
+from typing_extensions import override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin, TasksMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.nats.parser import (
+ BatchParser,
+)
+
+from .basic import DefaultSubscriber
+from .stream_basic import StreamSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js import JetStreamContext
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ SendableMessage,
+ )
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.message import NatsMessage
+ from faststream.nats.schemas import JStream, PullSub
+
+
+class PullStreamSubscriber(
+ TasksMixin,
+ StreamSubscriber,
+):
+ subscription: Optional["JetStreamContext.PullSubscription"]
+
+ def __init__(
+ self,
+ *,
+ pull_sub: "PullSub",
+ stream: "JStream",
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ ) -> None:
+ self.pull_sub = pull_sub
+
+ super().__init__(
+ # basic args
+ stream=stream,
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ queue="",
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ self.add_task(self._consume_pull(cb=self.consume))
+
+ async def _consume_pull(
+ self,
+ cb: Callable[["Msg"], Awaitable["SendableMessage"]],
+ ) -> None:
+ """Endless task consuming messages using NATS Pull subscriber."""
+ assert self.subscription # nosec B101
+
+ while self.running: # pragma: no branch
+ messages = []
+ with suppress(TimeoutError, ConnectionClosedError):
+ messages = await self.subscription.fetch(
+ batch=self.pull_sub.batch_size,
+ timeout=self.pull_sub.timeout,
+ )
+
+ if messages:
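+                # Fan the fetched batch out concurrently: one task per message.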
+ async with anyio.create_task_group() as tg:
+ for msg in messages:
+ tg.start_soon(cb, msg)
+
+
+class ConcurrentPullStreamSubscriber(ConcurrentMixin["Msg"], PullStreamSubscriber):
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.start_consume_task()
+
+ self.subscription = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ self.add_task(self._consume_pull(cb=self._put_msg))
+
+
+class BatchPullStreamSubscriber(
+ TasksMixin,
+ DefaultSubscriber[list["Msg"]],
+):
+ """Batch-message consumer class."""
+
+ subscription: Optional["JetStreamContext.PullSubscription"]
+ _fetch_sub: Optional["JetStreamContext.PullSubscription"]
+
+ def __init__(
+ self,
+ *,
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ stream: "JStream",
+ pull_sub: "PullSub",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[list[Msg]]"],
+ ) -> None:
+ parser = BatchParser(pattern=subject)
+
+ self.stream = stream
+ self.pull_sub = pull_sub
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=parser.parse_batch,
+ default_decoder=parser.decode_batch,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
+ fetch_sub = (
+ self._fetch_sub
+ ) = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ else:
+ fetch_sub = self._fetch_sub
+
+ try:
+ raw_message = await fetch_sub.fetch(
+ batch=1,
+ timeout=timeout,
+ )
+ except TimeoutError:
+ return None
+
+ context = self._state.get().di_state.context
+
+ return cast(
+ "NatsMessage",
+ await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ ),
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ self.add_task(self._consume_pull())
+
+ async def _consume_pull(self) -> None:
+ """Endless task consuming messages using NATS Pull subscriber."""
+        assert self.subscription, "You should call `create_subscription` first."  # nosec B101
+
+ while self.running: # pragma: no branch
+ with suppress(TimeoutError, ConnectionClosedError):
+ messages = await self.subscription.fetch(
+ batch=self.pull_sub.batch_size,
+ timeout=self.pull_sub.timeout,
+ )
+
+ if messages:
+ await self.consume(messages)
diff --git a/faststream/nats/subscriber/usecases/stream_push_subscriber.py b/faststream/nats/subscriber/usecases/stream_push_subscriber.py
new file mode 100644
index 0000000000..c21bc6f481
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/stream_push_subscriber.py
@@ -0,0 +1,51 @@
+from typing import (
+ TYPE_CHECKING,
+ Optional,
+)
+
+from typing_extensions import override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+
+from .stream_basic import StreamSubscriber
+
+if TYPE_CHECKING:
+    from nats.aio.msg import Msg
+    from nats.js import JetStreamContext
+
+
+class PushStreamSubscription(StreamSubscriber):
+ subscription: Optional["JetStreamContext.PushSubscription"]
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.js.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self.consume,
+ config=self.config,
+ **self.extra_options,
+ )
+
+
+class ConcurrentPushStreamSubscriber(ConcurrentMixin["Msg"], StreamSubscriber):
+ subscription: Optional["JetStreamContext.PushSubscription"]
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.start_consume_task()
+
+ self.subscription = await self._connection_state.js.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self._put_msg,
+ config=self.config,
+ **self.extra_options,
+ )
diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py
index 6d34547d04..0b0ce88302 100644
--- a/faststream/nats/testing.py
+++ b/faststream/nats/testing.py
@@ -1,24 +1,32 @@
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from collections.abc import Generator, Iterable, Iterator
+from contextlib import contextmanager
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+ Union,
+)
from unittest.mock import AsyncMock
import anyio
from nats.aio.msg import Msg
from typing_extensions import override
-from faststream.broker.message import encode_message, gen_cor_id
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import WRONG_PUBLISH_ARGS, SubscriberNotFound
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream._internal.testing.broker import TestBroker
+from faststream.exceptions import SubscriberNotFound
+from faststream.message import encode_message, gen_cor_id
from faststream.nats.broker import NatsBroker
+from faststream.nats.broker.state import ConnectedState
from faststream.nats.parser import NatsParser
from faststream.nats.publisher.producer import NatsFastProducer
from faststream.nats.schemas.js_stream import is_subject_match_wildcard
-from faststream.testing.broker import TestBroker
-from faststream.utils.functions import timeout_scope
if TYPE_CHECKING:
- from faststream.nats.publisher.asyncapi import AsyncAPIPublisher
- from faststream.nats.subscriber.usecase import LogicSubscriber
- from faststream.types import SendableMessage
+ from faststream._internal.basic_types import SendableMessage
+ from faststream.nats.publisher.specified import SpecificationPublisher
+ from faststream.nats.response import NatsPublishCommand
+ from faststream.nats.subscriber.usecases.basic import LogicSubscriber
__all__ = ("TestNatsBroker",)
@@ -29,12 +37,12 @@ class TestNatsBroker(TestBroker[NatsBroker]):
@staticmethod
def create_publisher_fake_subscriber(
broker: NatsBroker,
- publisher: "AsyncAPIPublisher",
- ) -> Tuple["LogicSubscriber[Any, Any]", bool]:
+ publisher: "SpecificationPublisher",
+ ) -> tuple["LogicSubscriber[Any, Any]", bool]:
sub: Optional[LogicSubscriber[Any, Any]] = None
publisher_stream = publisher.stream.name if publisher.stream else None
- for handler in broker._subscribers.values():
- if _is_handler_suitable(handler, publisher.subject, publisher_stream):
+ for handler in broker._subscribers:
+ if _is_handler_matches(handler, publisher.subject, publisher_stream):
sub = handler
break
@@ -46,100 +54,94 @@ def create_publisher_fake_subscriber(
return sub, is_real
- @staticmethod
- async def _fake_connect( # type: ignore[override]
+ @contextmanager
+ def _patch_producer(self, broker: NatsBroker) -> Iterator[None]:
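+        # Swap the real producers for a FakeProducer for the duration of the
+        # test, restoring the originals on exit.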
+ old_js_producer, old_producer = broker._js_producer, broker._producer
+ fake_producer = broker._js_producer = FakeProducer(broker)
+
+ broker._state.patch_value(producer=fake_producer)
+ try:
+ yield
+ finally:
+ broker._js_producer = old_js_producer
+ broker._state.patch_value(producer=old_producer)
+
+ async def _fake_connect(
+ self,
broker: NatsBroker,
*args: Any,
**kwargs: Any,
) -> AsyncMock:
- broker.stream = AsyncMock()
- broker._js_producer = broker._producer = FakeProducer( # type: ignore[assignment]
- broker,
- )
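+        # No real NATS connection is made; a stub connection state lets the
+        # subscriber setup code proceed.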
+ if not broker._connection_state:
+ broker._connection_state = ConnectedState(AsyncMock(), AsyncMock())
return AsyncMock()
+ def _fake_start(self, broker: NatsBroker, *args: Any, **kwargs: Any) -> None:
+ if not broker._connection_state:
+ broker._connection_state = ConnectedState(AsyncMock(), AsyncMock())
+ return super()._fake_start(broker, *args, **kwargs)
+
class FakeProducer(NatsFastProducer):
def __init__(self, broker: NatsBroker) -> None:
self.broker = broker
- default = NatsParser(pattern="", no_ack=False)
+ default = NatsParser(pattern="", is_ack_disabled=True)
self._parser = resolve_custom_func(broker._parser, default.parse_message)
self._decoder = resolve_custom_func(broker._decoder, default.decode_message)
@override
async def publish( # type: ignore[override]
- self,
- message: "SendableMessage",
- subject: str,
- reply_to: str = "",
- headers: Optional[Dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- # NatsJSFastProducer compatibility
- timeout: Optional[float] = None,
- stream: Optional[str] = None,
- *,
- rpc: bool = False,
- rpc_timeout: Optional[float] = None,
- raise_timeout: bool = False,
- ) -> Any:
- if rpc and reply_to:
- raise WRONG_PUBLISH_ARGS
-
+ self, cmd: "NatsPublishCommand"
+ ) -> None:
incoming = build_message(
- message=message,
- subject=subject,
- headers=headers,
- correlation_id=correlation_id,
- reply_to=reply_to,
+ message=cmd.body,
+ subject=cmd.destination,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_suitable(handler, subject, stream):
- msg: Union[List[PatchedMessage], PatchedMessage]
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.stream,
+ ):
+ msg: Union[list[PatchedMessage], PatchedMessage]
- if (pull := getattr(handler, "pull_sub", None)) and pull.batch:
- msg = [incoming]
- else:
- msg = incoming
-
- with timeout_scope(rpc_timeout, raise_timeout):
- response = await self._execute_handler(msg, subject, handler)
- if rpc:
- return await self._decoder(await self._parser(response))
+ if (pull := getattr(handler, "pull_sub", None)) and pull.batch:
+ msg = [incoming]
+ else:
+ msg = incoming
- return None
+ await self._execute_handler(msg, cmd.destination, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: Optional[str] = None,
- headers: Optional[Dict[str, str]] = None,
- timeout: float = 0.5,
- # NatsJSFastProducer compatibility
- stream: Optional[str] = None,
+ cmd: "NatsPublishCommand",
) -> "PatchedMessage":
incoming = build_message(
- message=message,
- subject=subject,
- headers=headers,
- correlation_id=correlation_id,
+ message=cmd.body,
+ subject=cmd.destination,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_suitable(handler, subject, stream):
- msg: Union[List[PatchedMessage], PatchedMessage]
+ for handler in _find_handler(
+ self.broker._subscribers,
+ cmd.destination,
+ cmd.stream,
+ ):
+ msg: Union[list[PatchedMessage], PatchedMessage]
- if (pull := getattr(handler, "pull_sub", None)) and pull.batch:
- msg = [incoming]
- else:
- msg = incoming
+ if (pull := getattr(handler, "pull_sub", None)) and pull.batch:
+ msg = [incoming]
+ else:
+ msg = incoming
- with anyio.fail_after(timeout):
- return await self._execute_handler(msg, subject, handler)
+ with anyio.fail_after(cmd.timeout):
+ return await self._execute_handler(msg, cmd.destination, handler)
raise SubscriberNotFound
@@ -159,7 +161,23 @@ async def _execute_handler(
)
-def _is_handler_suitable(
+def _find_handler(
+ subscribers: Iterable["LogicSubscriber[Any, Any]"],
+ subject: str,
+ stream: Optional[str] = None,
+) -> Generator["LogicSubscriber[Any, Any]", None, None]:
+ published_queues = set()
+ for handler in subscribers: # pragma: no branch
+ if _is_handler_matches(handler, subject, stream):
+ if queue := getattr(handler, "queue", None):
+ if queue in published_queues:
+ continue
+ else:
+ published_queues.add(queue)
+ yield handler
+
+
+def _is_handler_matches(
handler: "LogicSubscriber[Any, Any]",
subject: str,
stream: Optional[str] = None,
@@ -187,11 +205,11 @@ def build_message(
*,
reply_to: str = "",
correlation_id: Optional[str] = None,
- headers: Optional[Dict[str, str]] = None,
+ headers: Optional[dict[str, str]] = None,
) -> "PatchedMessage":
msg, content_type = encode_message(message)
return PatchedMessage(
- _client=None, # type: ignore
+ _client=None, # type: ignore[arg-type]
subject=subject,
reply=reply_to,
data=msg,
@@ -213,7 +231,7 @@ async def ack_sync(
) -> "PatchedMessage": # pragma: no cover
return self
- async def nak(self, delay: Union[int, float, None] = None) -> None:
+ async def nak(self, delay: Optional[float] = None) -> None:
pass
async def term(self) -> None:
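
The new `_find_handler` generator above replaces the old first-match lookup: every matching subscriber is considered, but at most one per NATS queue group, mirroring how a real NATS server delivers a message to a single member of each queue group. A minimal standalone sketch of that deduplication (the handler shape and names here are illustrative, not FastStream types):

from collections.abc import Iterator
from dataclasses import dataclass
from typing import Optional

@dataclass
class FakeHandler:
    subject: str
    queue: Optional[str] = None

def iter_matching(handlers: list[FakeHandler], subject: str) -> Iterator[FakeHandler]:
    # At most one handler per queue group receives the message,
    # emulating NATS queue-group delivery semantics in-memory.
    seen_queues: set[str] = set()
    for handler in handlers:
        if handler.subject != subject:
            continue
        if handler.queue:
            if handler.queue in seen_queues:
                continue
            seen_queues.add(handler.queue)
        yield handler

handlers = [
    FakeHandler("orders", queue="workers"),
    FakeHandler("orders", queue="workers"),  # same queue group -> skipped
    FakeHandler("orders"),                   # no queue group -> always delivered
]
assert len(list(iter_matching(handlers, "orders"))) == 2
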
diff --git a/faststream/opentelemetry/annotations.py b/faststream/opentelemetry/annotations.py
index cdf2378cc3..aeb4fe85f6 100644
--- a/faststream/opentelemetry/annotations.py
+++ b/faststream/opentelemetry/annotations.py
@@ -1,5 +1,6 @@
+from typing import Annotated
+
from opentelemetry.trace import Span
-from typing_extensions import Annotated
from faststream import Context
from faststream.opentelemetry.baggage import Baggage
diff --git a/faststream/opentelemetry/baggage.py b/faststream/opentelemetry/baggage.py
index d225714988..b29f24e1bc 100644
--- a/faststream/opentelemetry/baggage.py
+++ b/faststream/opentelemetry/baggage.py
@@ -1,12 +1,12 @@
-from typing import TYPE_CHECKING, Any, List, Optional, cast
+from typing import TYPE_CHECKING, Any, Optional, cast
from opentelemetry import baggage, context
from opentelemetry.baggage.propagation import W3CBaggagePropagator
from typing_extensions import Self
if TYPE_CHECKING:
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message import StreamMessage
_BAGGAGE_PROPAGATOR = W3CBaggagePropagator()
@@ -15,7 +15,9 @@ class Baggage:
__slots__ = ("_baggage", "_batch_baggage")
def __init__(
- self, payload: "AnyDict", batch_payload: Optional[List["AnyDict"]] = None
+ self,
+ payload: "AnyDict",
+ batch_payload: Optional[list["AnyDict"]] = None,
) -> None:
self._baggage = dict(payload)
self._batch_baggage = [dict(b) for b in batch_payload] if batch_payload else []
@@ -24,7 +26,7 @@ def get_all(self) -> "AnyDict":
"""Get a copy of the current baggage."""
return self._baggage.copy()
- def get_all_batch(self) -> List["AnyDict"]:
+ def get_all_batch(self) -> list["AnyDict"]:
"""Get a copy of all batch baggage if exists."""
return self._batch_baggage.copy()
@@ -60,11 +62,10 @@ def to_headers(self, headers: Optional["AnyDict"] = None) -> "AnyDict":
def from_msg(cls, msg: "StreamMessage[Any]") -> Self:
"""Create a Baggage instance from a StreamMessage."""
if len(msg.batch_headers) <= 1:
- payload = baggage.get_all(_BAGGAGE_PROPAGATOR.extract(msg.headers))
- return cls(cast("AnyDict", payload))
+ return cls.from_headers(msg.headers)
cumulative_baggage: AnyDict = {}
- batch_baggage: List[AnyDict] = []
+ batch_baggage: list[AnyDict] = []
for headers in msg.batch_headers:
payload = baggage.get_all(_BAGGAGE_PROPAGATOR.extract(headers))
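
The batch branch above now delegates the single-message case to `Baggage.from_headers`, which pairs with `to_headers` to round-trip the W3C baggage format. A brief usage sketch, assuming only the constructor and helpers visible in this diff:

from faststream.opentelemetry.baggage import Baggage

# Serialize a payload into W3C baggage headers and read it back.
bag = Baggage({"user_id": "42"})
headers = bag.to_headers()  # e.g. {"baggage": "user_id=42"}

restored = Baggage.from_headers(headers)
assert restored.get_all() == {"user_id": "42"}
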
diff --git a/faststream/opentelemetry/middleware.py b/faststream/opentelemetry/middleware.py
index 94b110e724..485b88acf3 100644
--- a/faststream/opentelemetry/middleware.py
+++ b/faststream/opentelemetry/middleware.py
@@ -1,7 +1,7 @@
import time
from collections import defaultdict
from copy import copy
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type, cast
+from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, cast
from opentelemetry import baggage, context, metrics, trace
from opentelemetry.baggage.propagation import W3CBaggagePropagator
@@ -10,8 +10,8 @@
from opentelemetry.trace import Link, Span
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
-from faststream import BaseMiddleware
-from faststream import context as fs_context
+from faststream._internal.middlewares import BaseMiddleware
+from faststream._internal.types import PublishCommandType
from faststream.opentelemetry.baggage import Baggage
from faststream.opentelemetry.consts import (
ERROR_TYPE,
@@ -22,7 +22,6 @@
WITH_BATCH,
MessageAction,
)
-from faststream.opentelemetry.provider import TelemetrySettingsProvider
if TYPE_CHECKING:
from contextvars import Token
@@ -32,14 +31,57 @@
from opentelemetry.trace import Tracer, TracerProvider
from opentelemetry.util.types import Attributes
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict, AsyncFunc, AsyncFuncAny
+ from faststream._internal.basic_types import AnyDict, AsyncFunc, AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream.message import StreamMessage
+ from faststream.opentelemetry.provider import TelemetrySettingsProvider
_BAGGAGE_PROPAGATOR = W3CBaggagePropagator()
_TRACE_PROPAGATOR = TraceContextTextMapPropagator()
+class TelemetryMiddleware(Generic[PublishCommandType]):
+ __slots__ = (
+ "_meter",
+ "_metrics",
+ "_settings_provider_factory",
+ "_tracer",
+ )
+
+ def __init__(
+ self,
+ *,
+ settings_provider_factory: Callable[
+ [Any],
+ Optional["TelemetrySettingsProvider[Any, PublishCommandType]"],
+ ],
+ tracer_provider: Optional["TracerProvider"] = None,
+ meter_provider: Optional["MeterProvider"] = None,
+ meter: Optional["Meter"] = None,
+ include_messages_counters: bool = False,
+ ) -> None:
+ self._tracer = _get_tracer(tracer_provider)
+ self._meter = _get_meter(meter_provider, meter)
+ self._metrics = _MetricsContainer(self._meter, include_messages_counters)
+ self._settings_provider_factory = settings_provider_factory
+
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "BaseTelemetryMiddleware[PublishCommandType]":
+ return BaseTelemetryMiddleware[PublishCommandType](
+ msg,
+ tracer=self._tracer,
+ metrics_container=self._metrics,
+ settings_provider_factory=self._settings_provider_factory,
+ context=context,
+ )
+
+
class _MetricsContainer:
__slots__ = (
"include_messages_counters",
@@ -76,7 +118,10 @@ def __init__(self, meter: "Meter", include_messages_counters: bool) -> None:
)
def observe_publish(
- self, attrs: "AnyDict", duration: float, msg_count: int
+ self,
+ attrs: "AnyDict",
+ duration: float,
+ msg_count: int,
) -> None:
self.publish_duration.record(
amount=duration,
@@ -91,7 +136,10 @@ def observe_publish(
)
def observe_consume(
- self, attrs: "AnyDict", duration: float, msg_count: int
+ self,
+ attrs: "AnyDict",
+ duration: float,
+ msg_count: int,
) -> None:
self.process_duration.record(
amount=duration,
@@ -106,61 +154,64 @@ def observe_consume(
)
-class BaseTelemetryMiddleware(BaseMiddleware):
+class BaseTelemetryMiddleware(BaseMiddleware[PublishCommandType]):
def __init__(
self,
+ msg: Optional[Any],
+ /,
*,
tracer: "Tracer",
settings_provider_factory: Callable[
- [Any], Optional[TelemetrySettingsProvider[Any]]
+ [Any],
+ Optional["TelemetrySettingsProvider[Any, PublishCommandType]"],
],
metrics_container: _MetricsContainer,
- msg: Optional[Any] = None,
+ context: "ContextRepo",
) -> None:
- self.msg = msg
+ super().__init__(msg, context=context)
self._tracer = tracer
self._metrics = metrics_container
self._current_span: Optional[Span] = None
self._origin_context: Optional[Context] = None
- self._scope_tokens: List[Tuple[str, Token[Any]]] = []
+ self._scope_tokens: list[tuple[str, Token[Any]]] = []
self.__settings_provider = settings_provider_factory(msg)
async def publish_scope(
self,
call_next: "AsyncFunc",
- msg: Any,
- *args: Any,
- **kwargs: Any,
+ msg: "PublishCommandType",
) -> Any:
if (provider := self.__settings_provider) is None:
- return await call_next(msg, *args, **kwargs)
+ return await call_next(msg)
- headers = kwargs.pop("headers", {}) or {}
+ headers = msg.headers
current_context = context.get_current()
- destination_name = provider.get_publish_destination_name(kwargs)
+ destination_name = provider.get_publish_destination_name(msg)
- current_baggage: Optional[Baggage] = fs_context.get_local("baggage")
+ current_baggage: Optional[Baggage] = self.context.get_local("baggage")
if current_baggage:
headers.update(current_baggage.to_headers())
- trace_attributes = provider.get_publish_attrs_from_kwargs(kwargs)
+ trace_attributes = provider.get_publish_attrs_from_cmd(msg)
metrics_attributes = {
SpanAttributes.MESSAGING_SYSTEM: provider.messaging_system,
SpanAttributes.MESSAGING_DESTINATION_NAME: destination_name,
}
# NOTE: if batch with single message?
- if (msg_count := len((msg, *args))) > 1:
+ if (msg_count := len(msg.batch_bodies)) > 1:
trace_attributes[SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT] = msg_count
current_context = _BAGGAGE_PROPAGATOR.extract(headers, current_context)
_BAGGAGE_PROPAGATOR.inject(
- headers, baggage.set_baggage(WITH_BATCH, True, context=current_context)
+ headers,
+ baggage.set_baggage(WITH_BATCH, True, context=current_context),
)
if self._current_span and self._current_span.is_recording():
current_context = trace.set_span_in_context(
- self._current_span, current_context
+ self._current_span,
+ current_context,
)
_TRACE_PROPAGATOR.inject(headers, context=self._origin_context)
@@ -184,9 +235,11 @@ async def publish_scope(
context=current_context,
) as span:
span.set_attribute(
- SpanAttributes.MESSAGING_OPERATION, MessageAction.PUBLISH
+ SpanAttributes.MESSAGING_OPERATION,
+ MessageAction.PUBLISH,
)
- result = await call_next(msg, *args, headers=headers, **kwargs)
+ msg.headers = headers
+ result = await call_next(msg)
except Exception as e:
metrics_attributes[ERROR_TYPE] = type(e).__name__
@@ -197,7 +250,7 @@ async def publish_scope(
self._metrics.observe_publish(metrics_attributes, duration, msg_count)
for key, token in self._scope_tokens:
- fs_context.reset_local(key, token)
+ self.context.reset_local(key, token)
return result
@@ -245,13 +298,20 @@ async def consume_scope(
end_on_exit=False,
) as span:
span.set_attribute(
- SpanAttributes.MESSAGING_OPERATION, MessageAction.PROCESS
+ SpanAttributes.MESSAGING_OPERATION,
+ MessageAction.PROCESS,
)
self._current_span = span
- self._scope_tokens.append(("span", fs_context.set_local("span", span)))
+ self._scope_tokens.append((
+ "span",
+ self.context.set_local("span", span),
+ ))
self._scope_tokens.append(
- ("baggage", fs_context.set_local("baggage", Baggage.from_msg(msg)))
+ (
+ "baggage",
+ self.context.set_local("baggage", Baggage.from_msg(msg)),
+ ),
)
new_context = trace.set_span_in_context(span, current_context)
@@ -266,7 +326,8 @@ async def consume_scope(
finally:
duration = time.perf_counter() - start_time
msg_count = trace_attributes.get(
- SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT, 1
+ SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT,
+ 1,
)
self._metrics.observe_consume(metrics_attributes, duration, msg_count)
@@ -274,7 +335,7 @@ async def consume_scope(
async def after_processed(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> Optional[bool]:
@@ -283,40 +344,6 @@ async def after_processed(
return False
-class TelemetryMiddleware:
- # NOTE: should it be class or function?
- __slots__ = (
- "_meter",
- "_metrics",
- "_settings_provider_factory",
- "_tracer",
- )
-
- def __init__(
- self,
- *,
- settings_provider_factory: Callable[
- [Any], Optional[TelemetrySettingsProvider[Any]]
- ],
- tracer_provider: Optional["TracerProvider"] = None,
- meter_provider: Optional["MeterProvider"] = None,
- meter: Optional["Meter"] = None,
- include_messages_counters: bool = False,
- ) -> None:
- self._tracer = _get_tracer(tracer_provider)
- self._meter = _get_meter(meter_provider, meter)
- self._metrics = _MetricsContainer(self._meter, include_messages_counters)
- self._settings_provider_factory = settings_provider_factory
-
- def __call__(self, msg: Optional[Any]) -> BaseMiddleware:
- return BaseTelemetryMiddleware(
- tracer=self._tracer,
- metrics_container=self._metrics,
- settings_provider_factory=self._settings_provider_factory,
- msg=msg,
- )
-
-
def _get_meter(
meter_provider: Optional["MeterProvider"] = None,
meter: Optional["Meter"] = None,
@@ -345,20 +372,20 @@ def _create_span_name(destination: str, action: str) -> str:
def _is_batch_message(msg: "StreamMessage[Any]") -> bool:
with_batch = baggage.get_baggage(
- WITH_BATCH, _BAGGAGE_PROPAGATOR.extract(msg.headers)
+ WITH_BATCH,
+ _BAGGAGE_PROPAGATOR.extract(msg.headers),
)
return bool(msg.batch_headers or with_batch)
-def _get_msg_links(msg: "StreamMessage[Any]") -> List[Link]:
+def _get_msg_links(msg: "StreamMessage[Any]") -> list[Link]:
if not msg.batch_headers:
if (span := _get_span_from_headers(msg.headers)) is not None:
return [Link(span.get_span_context())]
- else:
- return []
+ return []
links = {}
- counter: Dict[str, int] = defaultdict(lambda: 0)
+ counter: dict[str, int] = defaultdict(lambda: 0)
for headers in msg.batch_headers:
if (correlation_id := headers.get("correlation_id")) is None:
@@ -385,7 +412,7 @@ def _get_span_from_headers(headers: "AnyDict") -> Optional[Span]:
return None
return cast(
- Optional[Span],
+ "Optional[Span]",
next(iter(trace_context.values())),
)
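
The refactor inverts the old layout: `TelemetryMiddleware` is now the public, generic factory defined up front, and each incoming message gets a fresh `BaseTelemetryMiddleware` bound to an explicit `ContextRepo` instead of the global context. A hedged wiring sketch (the provider factory here is a placeholder; a protocol implementation is sketched after the provider diff below):

from opentelemetry.sdk.trace import TracerProvider

from faststream.opentelemetry.middleware import TelemetryMiddleware

# Returning None from the factory makes the middleware a no-op for that message.
middleware = TelemetryMiddleware(
    settings_provider_factory=lambda msg: None,
    tracer_provider=TracerProvider(),
)
# Typically passed to a broker: SomeBroker(middlewares=(middleware,)); the broker
# runtime then calls middleware(msg, context=...) to build per-message instances.
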
diff --git a/faststream/opentelemetry/provider.py b/faststream/opentelemetry/provider.py
index 90232d45ab..0cedf1bd8c 100644
--- a/faststream/opentelemetry/provider.py
+++ b/faststream/opentelemetry/provider.py
@@ -1,13 +1,24 @@
from typing import TYPE_CHECKING, Protocol
-from faststream.broker.types import MsgType
+from typing_extensions import TypeVar as TypeVar313
+
+from faststream._internal.types import MsgType
+from faststream.response import PublishCommand
if TYPE_CHECKING:
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message import StreamMessage
+
+
+PublishCommandType_contra = TypeVar313(
+ "PublishCommandType_contra",
+ bound=PublishCommand,
+ default=PublishCommand,
+ contravariant=True,
+)
-class TelemetrySettingsProvider(Protocol[MsgType]):
+class TelemetrySettingsProvider(Protocol[MsgType, PublishCommandType_contra]):
messaging_system: str
def get_consume_attrs_from_message(
@@ -20,12 +31,12 @@ def get_consume_destination_name(
msg: "StreamMessage[MsgType]",
) -> str: ...
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: PublishCommandType_contra,
) -> "AnyDict": ...
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: PublishCommandType_contra,
) -> str: ...
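
Provider implementations now receive the whole publish command rather than loose kwargs. A structural sketch for a hypothetical broker, assuming the `cmd.destination` attribute that `PublishCommand` exposes elsewhere in this diff:

from typing import Any

from opentelemetry.semconv.trace import SpanAttributes

from faststream.response import PublishCommand


class MySettingsProvider:  # structurally satisfies TelemetrySettingsProvider
    messaging_system = "my-broker"  # illustrative value

    def get_consume_attrs_from_message(self, msg: Any) -> dict:
        return {SpanAttributes.MESSAGING_SYSTEM: self.messaging_system}

    def get_consume_destination_name(self, msg: Any) -> str:
        return "my-subject"  # would be derived from the broker message

    def get_publish_attrs_from_cmd(self, cmd: PublishCommand) -> dict:
        return {
            SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
            SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
        }

    def get_publish_destination_name(self, cmd: PublishCommand) -> str:
        return cmd.destination
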
diff --git a/faststream/params/__init__.py b/faststream/params/__init__.py
new file mode 100644
index 0000000000..204cec6df5
--- /dev/null
+++ b/faststream/params/__init__.py
@@ -0,0 +1,12 @@
+from fast_depends import Depends
+
+from .no_cast import NoCast
+from .params import Context, Header, Path
+
+__all__ = (
+ "Context",
+ "Depends",
+ "Header",
+ "NoCast",
+ "Path",
+)
diff --git a/faststream/params/no_cast.py b/faststream/params/no_cast.py
new file mode 100644
index 0000000000..282cd267e8
--- /dev/null
+++ b/faststream/params/no_cast.py
@@ -0,0 +1,27 @@
+from typing import Annotated, Any, TypeVar
+
+from fast_depends.library import CustomField
+
+from faststream._internal.basic_types import AnyDict
+
+
+class NoCastField(CustomField):
+ """A class that represents a custom field without casting.
+
+ You can use it to annotate fields that should not be cast.
+
+ Usage:
+
+ `data: Annotated[..., NoCastField()]`
+ """
+
+ def __init__(self) -> None:
+ super().__init__(cast=False)
+
+ def use(self, **kwargs: Any) -> AnyDict:
+ return kwargs
+
+
+_NoCastType = TypeVar("_NoCastType")
+
+NoCast = Annotated[_NoCastType, NoCastField()]
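
With the generic alias above, a parameter annotated as `NoCast[T]` keeps its decoded value untouched by pydantic casting. A hedged usage sketch (broker and subject names are illustrative):

from faststream.nats import NatsBroker
from faststream.params import NoCast

broker = NatsBroker()


@broker.subscriber("raw-data")
async def handler(body: NoCast[dict]) -> None:
    # `body` is passed through exactly as decoded, skipping validation.
    print(body)
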
diff --git a/faststream/params/params.py b/faststream/params/params.py
new file mode 100644
index 0000000000..fd5dfd14ea
--- /dev/null
+++ b/faststream/params/params.py
@@ -0,0 +1,47 @@
+from typing import Any, Callable, Optional
+
+from faststream._internal.constants import EMPTY
+from faststream._internal.context import Context as Context_
+
+
+def Context( # noqa: N802
+ real_name: str = "",
+ *,
+ cast: bool = False,
+ default: Any = EMPTY,
+ initial: Optional[Callable[..., Any]] = None,
+) -> Any:
+ return Context_(
+ real_name=real_name,
+ cast=cast,
+ default=default,
+ initial=initial,
+ )
+
+
+def Header( # noqa: N802
+ real_name: str = "",
+ *,
+ cast: bool = True,
+ default: Any = EMPTY,
+) -> Any:
+ return Context_(
+ real_name=real_name,
+ cast=cast,
+ default=default,
+ prefix="message.headers.",
+ )
+
+
+def Path( # noqa: N802
+ real_name: str = "",
+ *,
+ cast: bool = True,
+ default: Any = EMPTY,
+) -> Any:
+ return Context_(
+ real_name=real_name,
+ cast=cast,
+ default=default,
+ prefix="message.path.",
+ )
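
These thin public wrappers preserve the old callable API while delegating to the internal `Context` field; `Header` and `Path` simply pin the context prefix. A short usage sketch (subject and header names are illustrative):

from faststream.nats import NatsBroker
from faststream.params import Context, Header, Path

broker = NatsBroker()


@broker.subscriber("logs.{level}")
async def handler(
    body: str,
    level: str = Path(),                   # resolved from "message.path.level"
    trace_id: str = Header("x-trace-id"),  # resolved from "message.headers.x-trace-id"
    logger=Context(),                      # resolved from context by parameter name
) -> None:
    logger.info("%s: %s (trace=%s)", level, body, trace_id)
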
diff --git a/faststream/prometheus/__init__.py b/faststream/prometheus/__init__.py
index e604b8cef7..a06f158ff3 100644
--- a/faststream/prometheus/__init__.py
+++ b/faststream/prometheus/__init__.py
@@ -1,9 +1,9 @@
-from faststream.prometheus.middleware import BasePrometheusMiddleware
+from faststream.prometheus.middleware import PrometheusMiddleware
from faststream.prometheus.provider import MetricsSettingsProvider
from faststream.prometheus.types import ConsumeAttrs
__all__ = (
- "BasePrometheusMiddleware",
"ConsumeAttrs",
"MetricsSettingsProvider",
+ "PrometheusMiddleware",
)
diff --git a/faststream/prometheus/consts.py b/faststream/prometheus/consts.py
index 3c4648d333..8e592d14ae 100644
--- a/faststream/prometheus/consts.py
+++ b/faststream/prometheus/consts.py
@@ -1,5 +1,5 @@
-from faststream.broker.message import AckStatus
from faststream.exceptions import AckMessage, NackMessage, RejectMessage, SkipMessage
+from faststream.message.message import AckStatus
from faststream.prometheus.types import ProcessingStatus
PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP = {
@@ -11,7 +11,7 @@
PROCESSING_STATUS_BY_ACK_STATUS = {
- AckStatus.acked: ProcessingStatus.acked,
- AckStatus.nacked: ProcessingStatus.nacked,
- AckStatus.rejected: ProcessingStatus.rejected,
+ AckStatus.ACKED: ProcessingStatus.acked,
+ AckStatus.NACKED: ProcessingStatus.nacked,
+ AckStatus.REJECTED: ProcessingStatus.rejected,
}
diff --git a/faststream/prometheus/container.py b/faststream/prometheus/container.py
index 08174f5172..6ef46499b1 100644
--- a/faststream/prometheus/container.py
+++ b/faststream/prometheus/container.py
@@ -1,4 +1,5 @@
-from typing import TYPE_CHECKING, Optional, Sequence, Union, cast
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional, cast
from prometheus_client import Counter, Gauge, Histogram
@@ -43,114 +44,127 @@ def __init__(
*,
metrics_prefix: str = "faststream",
received_messages_size_buckets: Optional[Sequence[float]] = None,
- ):
+ ) -> None:
self._registry = registry
self._metrics_prefix = metrics_prefix
+ received_messages_total_name = f"{metrics_prefix}_received_messages_total"
self.received_messages_total = cast(
- Counter,
- self._get_registered_metric(f"{metrics_prefix}_received_messages_total"),
+ "Counter",
+ self._get_registered_metric(received_messages_total_name),
) or Counter(
- name=f"{metrics_prefix}_received_messages_total",
+ name=received_messages_total_name,
documentation="Count of received messages by broker and handler",
labelnames=["app_name", "broker", "handler"],
registry=registry,
)
+ received_messages_size_bytes_name = (
+ f"{metrics_prefix}_received_messages_size_bytes"
+ )
self.received_messages_size_bytes = cast(
- Histogram,
- self._get_registered_metric(
- f"{metrics_prefix}_received_messages_size_bytes"
- ),
+ "Histogram",
+ self._get_registered_metric(received_messages_size_bytes_name),
) or Histogram(
- name=f"{metrics_prefix}_received_messages_size_bytes",
+ name=received_messages_size_bytes_name,
documentation="Histogram of received messages size in bytes by broker and handler",
labelnames=["app_name", "broker", "handler"],
registry=registry,
buckets=received_messages_size_buckets or self.DEFAULT_SIZE_BUCKETS,
)
+ received_messages_in_process_name = (
+ f"{metrics_prefix}_received_messages_in_process"
+ )
self.received_messages_in_process = cast(
- Gauge,
- self._get_registered_metric(
- f"{metrics_prefix}_received_messages_in_process"
- ),
+ "Gauge",
+ self._get_registered_metric(received_messages_in_process_name),
) or Gauge(
- name=f"{metrics_prefix}_received_messages_in_process",
+ name=received_messages_in_process_name,
documentation="Gauge of received messages in process by broker and handler",
labelnames=["app_name", "broker", "handler"],
registry=registry,
)
+ received_processed_messages_total_name = (
+ f"{metrics_prefix}_received_processed_messages_total"
+ )
self.received_processed_messages_total = cast(
- Counter,
- self._get_registered_metric(
- f"{metrics_prefix}_received_processed_messages_total"
- ),
+ "Counter",
+ self._get_registered_metric(received_processed_messages_total_name),
) or Counter(
- name=f"{metrics_prefix}_received_processed_messages_total",
+ name=received_processed_messages_total_name,
documentation="Count of received processed messages by broker, handler and status",
labelnames=["app_name", "broker", "handler", "status"],
registry=registry,
)
+ received_processed_messages_duration_seconds_name = (
+ f"{metrics_prefix}_received_processed_messages_duration_seconds"
+ )
self.received_processed_messages_duration_seconds = cast(
- Histogram,
+ "Histogram",
self._get_registered_metric(
- f"{metrics_prefix}_received_processed_messages_duration_seconds"
+ received_processed_messages_duration_seconds_name
),
) or Histogram(
- name=f"{metrics_prefix}_received_processed_messages_duration_seconds",
+ name=received_processed_messages_duration_seconds_name,
documentation="Histogram of received processed messages duration in seconds by broker and handler",
labelnames=["app_name", "broker", "handler"],
registry=registry,
)
+ received_processed_messages_exceptions_total_name = (
+ f"{metrics_prefix}_received_processed_messages_exceptions_total"
+ )
self.received_processed_messages_exceptions_total = cast(
- Counter,
+ "Counter",
self._get_registered_metric(
- f"{metrics_prefix}_received_processed_messages_exceptions_total"
+ received_processed_messages_exceptions_total_name
),
) or Counter(
- name=f"{metrics_prefix}_received_processed_messages_exceptions_total",
+ name=received_processed_messages_exceptions_total_name,
documentation="Count of received processed messages exceptions by broker, handler and exception_type",
labelnames=["app_name", "broker", "handler", "exception_type"],
registry=registry,
)
+ published_messages_total_name = f"{metrics_prefix}_published_messages_total"
self.published_messages_total = cast(
- Counter,
- self._get_registered_metric(f"{metrics_prefix}_published_messages_total"),
+ "Counter",
+ self._get_registered_metric(published_messages_total_name),
) or Counter(
- name=f"{metrics_prefix}_published_messages_total",
+ name=published_messages_total_name,
documentation="Count of published messages by destination and status",
labelnames=["app_name", "broker", "destination", "status"],
registry=registry,
)
+ published_messages_duration_seconds_name = (
+ f"{metrics_prefix}_published_messages_duration_seconds"
+ )
self.published_messages_duration_seconds = cast(
- Histogram,
- self._get_registered_metric(
- f"{metrics_prefix}_published_messages_duration_seconds"
- ),
+ "Histogram",
+ self._get_registered_metric(published_messages_duration_seconds_name),
) or Histogram(
- name=f"{metrics_prefix}_published_messages_duration_seconds",
+ name=published_messages_duration_seconds_name,
documentation="Histogram of published messages duration in seconds by broker and destination",
labelnames=["app_name", "broker", "destination"],
registry=registry,
)
+ published_messages_exceptions_total_name = (
+ f"{metrics_prefix}_published_messages_exceptions_total"
+ )
self.published_messages_exceptions_total = cast(
- Counter,
- self._get_registered_metric(
- f"{metrics_prefix}_published_messages_exceptions_total"
- ),
+ "Counter",
+ self._get_registered_metric(published_messages_exceptions_total_name),
) or Counter(
- name=f"{metrics_prefix}_published_messages_exceptions_total",
+ name=published_messages_exceptions_total_name,
documentation="Count of published messages exceptions by broker, destination and exception_type",
labelnames=["app_name", "broker", "destination", "exception_type"],
registry=registry,
)
- def _get_registered_metric(self, metric_name: str) -> Union["Collector", None]:
+ def _get_registered_metric(self, metric_name: str) -> Optional["Collector"]:
return self._registry._names_to_collectors.get(metric_name)
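
Each block above follows the same get-or-create pattern: look the collector up in the registry first, so two containers sharing a `CollectorRegistry` reuse metrics instead of raising a duplicate-registration error. The pattern in isolation (a sketch; `_names_to_collectors` is the same private `prometheus_client` mapping the container relies on):

from typing import cast

from prometheus_client import CollectorRegistry, Counter


def get_or_create_counter(registry: CollectorRegistry, name: str) -> Counter:
    existing = registry._names_to_collectors.get(name)  # private API, as in the diff
    return cast("Counter", existing) or Counter(
        name=name,
        documentation="example counter",
        registry=registry,
    )


registry = CollectorRegistry()
first = get_or_create_counter(registry, "demo_total")
second = get_or_create_counter(registry, "demo_total")
assert first is second  # the second call reuses the registered collector
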
diff --git a/faststream/prometheus/manager.py b/faststream/prometheus/manager.py
index 10c4211153..844634475e 100644
--- a/faststream/prometheus/manager.py
+++ b/faststream/prometheus/manager.py
@@ -5,7 +5,9 @@
class MetricsManager:
__slots__ = ("_app_name", "_container")
- def __init__(self, container: MetricsContainer, *, app_name: str = "faststream"):
+ def __init__(
+ self, container: MetricsContainer, *, app_name: str = "faststream"
+ ) -> None:
self._container = container
self._app_name = app_name
diff --git a/faststream/prometheus/middleware.py b/faststream/prometheus/middleware.py
index aabe70285f..05858d9e47 100644
--- a/faststream/prometheus/middleware.py
+++ b/faststream/prometheus/middleware.py
@@ -1,8 +1,12 @@
import time
-from typing import TYPE_CHECKING, Any, Callable, Optional, Sequence
+from collections.abc import Awaitable, Sequence
+from typing import TYPE_CHECKING, Any, Callable, Generic, Optional
-from faststream import BaseMiddleware
+from faststream._internal.constants import EMPTY
+from faststream._internal.middlewares import BaseMiddleware
+from faststream._internal.types import AnyMsg, PublishCommandType
from faststream.exceptions import IgnoredException
+from faststream.message import SourceType
from faststream.prometheus.consts import (
PROCESSING_STATUS_BY_ACK_STATUS,
PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP,
@@ -11,35 +15,86 @@
from faststream.prometheus.manager import MetricsManager
from faststream.prometheus.provider import MetricsSettingsProvider
from faststream.prometheus.types import ProcessingStatus, PublishingStatus
-from faststream.types import EMPTY
+from faststream.response import PublishType
if TYPE_CHECKING:
from prometheus_client import CollectorRegistry
- from faststream.broker.message import StreamMessage
- from faststream.types import AsyncFunc, AsyncFuncAny
+ from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream.message.message import StreamMessage
-class PrometheusMiddleware(BaseMiddleware):
+class PrometheusMiddleware(Generic[PublishCommandType, AnyMsg]):
+ __slots__ = ("_metrics_container", "_metrics_manager", "_settings_provider_factory")
+
+ def __init__(
+ self,
+ *,
+ settings_provider_factory: Callable[
+ [Optional[AnyMsg]],
+ Optional[MetricsSettingsProvider[AnyMsg, PublishCommandType]],
+ ],
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ if app_name is EMPTY:
+ app_name = metrics_prefix
+
+ self._settings_provider_factory = settings_provider_factory
+ self._metrics_container = MetricsContainer(
+ registry,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
+ self._metrics_manager = MetricsManager(
+ self._metrics_container,
+ app_name=app_name,
+ )
+
+ def __call__(
+ self,
+ msg: Optional[AnyMsg],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "BasePrometheusMiddleware[PublishCommandType]":
+ return BasePrometheusMiddleware[PublishCommandType](
+ msg,
+ metrics_manager=self._metrics_manager,
+ settings_provider_factory=self._settings_provider_factory,
+ context=context,
+ )
+
+
+class BasePrometheusMiddleware(
+ BaseMiddleware[PublishCommandType, AnyMsg],
+ Generic[PublishCommandType, AnyMsg],
+):
def __init__(
self,
- msg: Optional[Any] = None,
+ msg: Optional[AnyMsg],
+ /,
*,
settings_provider_factory: Callable[
- [Any], Optional[MetricsSettingsProvider[Any]]
+ [Optional[AnyMsg]],
+ Optional[MetricsSettingsProvider[AnyMsg, PublishCommandType]],
],
metrics_manager: MetricsManager,
+ context: "ContextRepo",
) -> None:
self._metrics_manager = metrics_manager
self._settings_provider = settings_provider_factory(msg)
- super().__init__(msg)
+ super().__init__(msg, context=context)
async def consume_scope(
self,
call_next: "AsyncFuncAny",
- msg: "StreamMessage[Any]",
+ msg: "StreamMessage[AnyMsg]",
) -> Any:
- if self._settings_provider is None:
+ if self._settings_provider is None or msg._source_type is SourceType.RESPONSE:
return await call_next(msg)
messaging_system = self._settings_provider.messaging_system
@@ -115,16 +170,14 @@ async def consume_scope(
async def publish_scope(
self,
- call_next: "AsyncFunc",
- msg: Any,
- *args: Any,
- **kwargs: Any,
+ call_next: Callable[[PublishCommandType], Awaitable[Any]],
+ cmd: PublishCommandType,
) -> Any:
- if self._settings_provider is None:
- return await call_next(msg, *args, **kwargs)
+ if self._settings_provider is None or cmd.publish_type is PublishType.REPLY:
+ return await call_next(cmd)
destination_name = (
- self._settings_provider.get_publish_destination_name_from_kwargs(kwargs)
+ self._settings_provider.get_publish_destination_name_from_cmd(cmd)
)
messaging_system = self._settings_provider.messaging_system
@@ -132,11 +185,7 @@ async def publish_scope(
start_time = time.perf_counter()
try:
- result = await call_next(
- await self.on_publish(msg, *args, **kwargs),
- *args,
- **kwargs,
- )
+ result = await call_next(cmd)
except Exception as e:
err = e
@@ -157,49 +206,12 @@ async def publish_scope(
)
status = PublishingStatus.error if err else PublishingStatus.success
- messages_count = len((msg, *args))
self._metrics_manager.add_published_message(
- amount=messages_count,
+ amount=len(cmd.batch_bodies),
status=status,
broker=messaging_system,
destination=destination_name,
)
return result
-
-
-class BasePrometheusMiddleware:
- __slots__ = ("_metrics_container", "_metrics_manager", "_settings_provider_factory")
-
- def __init__(
- self,
- *,
- settings_provider_factory: Callable[
- [Any], Optional[MetricsSettingsProvider[Any]]
- ],
- registry: "CollectorRegistry",
- app_name: str = EMPTY,
- metrics_prefix: str = "faststream",
- received_messages_size_buckets: Optional[Sequence[float]] = None,
- ):
- if app_name is EMPTY:
- app_name = metrics_prefix
-
- self._settings_provider_factory = settings_provider_factory
- self._metrics_container = MetricsContainer(
- registry,
- metrics_prefix=metrics_prefix,
- received_messages_size_buckets=received_messages_size_buckets,
- )
- self._metrics_manager = MetricsManager(
- self._metrics_container,
- app_name=app_name,
- )
-
- def __call__(self, msg: Optional[Any]) -> BaseMiddleware:
- return PrometheusMiddleware(
- msg=msg,
- metrics_manager=self._metrics_manager,
- settings_provider_factory=self._settings_provider_factory,
- )
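
Mirroring the telemetry change, the public `PrometheusMiddleware` now owns the metrics container and manager and builds a `BasePrometheusMiddleware` per message. A hedged wiring sketch (a factory returning `None` disables metrics for that message; a real provider is sketched after the next file diff):

from prometheus_client import CollectorRegistry

from faststream.prometheus import PrometheusMiddleware

registry = CollectorRegistry()

middleware = PrometheusMiddleware(
    settings_provider_factory=lambda msg: None,
    registry=registry,
    app_name="orders-service",  # falls back to metrics_prefix when omitted
)
# Passed to a broker as usual: SomeBroker(middlewares=(middleware,)).
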
diff --git a/faststream/prometheus/provider.py b/faststream/prometheus/provider.py
index 1a543f5b55..25013e5306 100644
--- a/faststream/prometheus/provider.py
+++ b/faststream/prometheus/provider.py
@@ -1,22 +1,32 @@
from typing import TYPE_CHECKING, Protocol
-from faststream.broker.message import MsgType
+from typing_extensions import TypeVar as TypeVar313
+
+from faststream._internal.types import AnyMsg
+from faststream.response.response import PublishCommand
if TYPE_CHECKING:
- from faststream.broker.message import StreamMessage
+ from faststream.message.message import StreamMessage
from faststream.prometheus import ConsumeAttrs
- from faststream.types import AnyDict
-class MetricsSettingsProvider(Protocol[MsgType]):
+PublishCommandType_contra = TypeVar313(
+ "PublishCommandType_contra",
+ bound=PublishCommand,
+ default=PublishCommand,
+ contravariant=True,
+)
+
+
+class MetricsSettingsProvider(Protocol[AnyMsg, PublishCommandType_contra]):
messaging_system: str
def get_consume_attrs_from_message(
self,
- msg: "StreamMessage[MsgType]",
+ msg: "StreamMessage[AnyMsg]",
) -> "ConsumeAttrs": ...
- def get_publish_destination_name_from_kwargs(
+ def get_publish_destination_name_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: PublishCommandType_contra,
) -> str: ...
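
A metrics provider now receives the publish command object as well. A minimal structural sketch for a hypothetical broker; the `ConsumeAttrs` keys shown are an assumption based on the `faststream.prometheus.types` TypedDict:

from typing import Any

from faststream.prometheus import ConsumeAttrs
from faststream.response.response import PublishCommand


class MyMetricsProvider:  # structurally satisfies MetricsSettingsProvider
    messaging_system = "my-broker"  # illustrative value

    def get_consume_attrs_from_message(self, msg: Any) -> ConsumeAttrs:
        return {
            "destination_name": "my-subject",  # derived from the broker message
            "message_size": len(msg.body),
            "messages_count": 1,
        }

    def get_publish_destination_name_from_cmd(self, cmd: PublishCommand) -> str:
        return cmd.destination
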
diff --git a/faststream/rabbit/__init__.py b/faststream/rabbit/__init__.py
index 98f364656d..7463fe0e05 100644
--- a/faststream/rabbit/__init__.py
+++ b/faststream/rabbit/__init__.py
@@ -1,3 +1,4 @@
+from faststream._internal.testing.app import TestApp
from faststream.rabbit.annotations import RabbitMessage
from faststream.rabbit.broker import RabbitBroker
from faststream.rabbit.response import RabbitResponse
@@ -7,10 +8,8 @@
QueueType,
RabbitExchange,
RabbitQueue,
- ReplyConfig,
)
from faststream.rabbit.testing import TestRabbitBroker
-from faststream.testing.app import TestApp
__all__ = (
"ExchangeType",
@@ -24,7 +23,6 @@
"RabbitResponse",
"RabbitRoute",
"RabbitRouter",
- "ReplyConfig",
"TestApp",
"TestRabbitBroker",
)
diff --git a/faststream/rabbit/annotations.py b/faststream/rabbit/annotations.py
index aaa7b3eec2..4a135ecae9 100644
--- a/faststream/rabbit/annotations.py
+++ b/faststream/rabbit/annotations.py
@@ -1,11 +1,13 @@
+from typing import Annotated
+
from aio_pika import RobustChannel, RobustConnection
-from typing_extensions import Annotated
-from faststream.annotations import ContextRepo, Logger, NoCast
+from faststream._internal.context import Context
+from faststream.annotations import ContextRepo, Logger
+from faststream.params import NoCast
from faststream.rabbit.broker import RabbitBroker as RB
from faststream.rabbit.message import RabbitMessage as RM
from faststream.rabbit.publisher.producer import AioPikaFastProducer
-from faststream.utils.context import Context
__all__ = (
"Channel",
@@ -24,10 +26,3 @@
Channel = Annotated[RobustChannel, Context("broker._channel")]
Connection = Annotated[RobustConnection, Context("broker._connection")]
-
-# NOTE: transaction is not for the public usage yet
-# async def _get_transaction(connection: Connection) -> RabbitTransaction:
-# async with connection.channel(publisher_confirms=False) as channel:
-# yield channel.transaction()
-
-# Transaction = Annotated[RabbitTransaction, Depends(_get_transaction)]
diff --git a/faststream/rabbit/broker/broker.py b/faststream/rabbit/broker/broker.py
index 2e650372d2..95d54fce37 100644
--- a/faststream/rabbit/broker/broker.py
+++ b/faststream/rabbit/broker/broker.py
@@ -1,75 +1,78 @@
import logging
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Iterable,
Optional,
- Sequence,
- Type,
Union,
cast,
)
from urllib.parse import urlparse
import anyio
-from aio_pika import connect_robust
-from typing_extensions import Annotated, Doc, deprecated, override
+from aio_pika import IncomingMessage, RobustConnection, connect_robust
+from typing_extensions import Doc, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.message import gen_cor_id
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.rabbit.broker.logging import RabbitLoggingBroker
-from faststream.rabbit.broker.registrator import RabbitRegistrator
+from faststream._internal.broker.broker import ABCBroker, BrokerUsecase
+from faststream._internal.constants import EMPTY
+from faststream._internal.publisher.proto import PublisherProto
+from faststream.message import gen_cor_id
from faststream.rabbit.helpers.declarer import RabbitDeclarer
from faststream.rabbit.publisher.producer import AioPikaFastProducer
+from faststream.rabbit.response import RabbitPublishCommand
from faststream.rabbit.schemas import (
RABBIT_REPLY,
RabbitExchange,
RabbitQueue,
)
from faststream.rabbit.security import parse_security
-from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber
from faststream.rabbit.utils import build_url
-from faststream.types import EMPTY
+from faststream.response.publish_type import PublishType
+
+from .logging import make_rabbit_logger_state
+from .registrator import RabbitRegistrator
if TYPE_CHECKING:
from ssl import SSLContext
from types import TracebackType
+ import aiormq
from aio_pika import (
- IncomingMessage,
RobustChannel,
- RobustConnection,
RobustExchange,
RobustQueue,
)
from aio_pika.abc import DateType, HeadersType, SSLOptions, TimeoutType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from pamqp.common import FieldTable
from yarl import URL
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict, Decorator, LoggerProto
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
)
from faststream.rabbit.message import RabbitMessage
from faststream.rabbit.types import AioPikaSendableMessage
from faststream.security import BaseSecurity
- from faststream.types import AnyDict, Decorator, LoggerProto
+ from faststream.specification.schema.extra import Tag, TagDict
class RabbitBroker(
RabbitRegistrator,
- RabbitLoggingBroker,
+ BrokerUsecase[IncomingMessage, RobustConnection],
):
"""A class to represent a RabbitMQ broker."""
url: str
- _producer: Optional["AioPikaFastProducer"]
- declarer: Optional[RabbitDeclarer]
+ _producer: "AioPikaFastProducer"
+ declarer: RabbitDeclarer
+
_channel: Optional["RobustChannel"]
def __init__(
@@ -107,7 +110,7 @@ def __init__(
fail_fast: Annotated[
bool,
Doc(
- "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable."
+ "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable.",
),
] = True,
reconnect_interval: Annotated[
@@ -124,14 +127,14 @@ def __init__(
Doc(
"if `True` the `publish` method will "
"return `bool` type after publish is complete."
- "Otherwise it will returns `None`."
+ "Otherwise it will returns `None`.",
),
] = True,
on_return_raises: Annotated[
bool,
Doc(
"raise an :class:`aio_pika.exceptions.DeliveryError`"
- "when mandatory message will be returned"
+ "when mandatory message will be returned",
),
] = False,
# broker args
@@ -139,7 +142,7 @@ def __init__(
Optional[int],
Doc(
"RabbitMQ channel `qos` option. "
- "It limits max messages processing in the same time count."
+ "It limits max messages processing in the same time count.",
),
] = None,
app_id: Annotated[
@@ -150,7 +153,7 @@ def __init__(
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = None,
decoder: Annotated[
@@ -162,21 +165,25 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
Sequence["BrokerMiddleware[IncomingMessage]"],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[IncomingMessage]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
# AsyncAPI args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Optional[str],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -193,9 +200,9 @@ def __init__(
Doc("AsyncAPI server description."),
] = None,
tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -214,10 +221,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -241,11 +245,11 @@ def __init__(
ssl=security_args.get("ssl"),
)
- if asyncapi_url is None:
- asyncapi_url = str(amqp_url)
+ if specification_url is None:
+ specification_url = str(amqp_url)
- # respect ascynapi_url argument scheme
+ # respect the specification_url argument scheme
- built_asyncapi_url = urlparse(asyncapi_url)
+ built_asyncapi_url = urlparse(specification_url)
self.virtual_host = built_asyncapi_url.path
if protocol is None:
protocol = built_asyncapi_url.scheme
@@ -266,20 +270,23 @@ def __init__(
decoder=decoder,
parser=parser,
middlewares=middlewares,
+ routers=routers,
# AsyncAPI args
description=description,
- asyncapi_url=asyncapi_url,
+ specification_url=specification_url,
protocol=protocol or built_asyncapi_url.scheme,
protocol_version=protocol_version,
security=security,
tags=tags,
# Logging args
- logger=logger,
- log_level=log_level,
- log_fmt=log_fmt,
+ logger_state=make_rabbit_logger_state(
+ logger=logger,
+ log_level=log_level,
+ log_fmt=log_fmt,
+ ),
# FastDepends args
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
)
@@ -289,7 +296,15 @@ def __init__(
self.app_id = app_id
self._channel = None
- self.declarer = None
+
+ declarer = self.declarer = RabbitDeclarer()
+ self._state.patch_value(
+ producer=AioPikaFastProducer(
+ declarer=declarer,
+ decoder=self._decoder,
+ parser=self._parser,
+ )
+ )
@property
def _subscriber_setup_extra(self) -> "AnyDict":
@@ -300,13 +315,21 @@ def _subscriber_setup_extra(self) -> "AnyDict":
"declarer": self.declarer,
}
- @property
- def _publisher_setup_extra(self) -> "AnyDict":
- return {
- **super()._publisher_setup_extra,
- "app_id": self.app_id,
- "virtual_host": self.virtual_host,
- }
+ def setup_publisher(
+ self,
+ publisher: PublisherProto[IncomingMessage],
+ **kwargs: Any,
+ ) -> None:
+ return super().setup_publisher(
+ publisher,
+ **(
+ {
+ "app_id": self.app_id,
+ "virtual_host": self.virtual_host,
+ }
+ | kwargs
+ ),
+ )
@override
async def connect( # type: ignore[override]
@@ -339,7 +362,7 @@ async def connect( # type: ignore[override]
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
timeout: Annotated[
@@ -349,7 +372,7 @@ async def connect( # type: ignore[override]
fail_fast: Annotated[
bool,
Doc(
- "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable."
+ "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable.",
),
] = EMPTY,
reconnect_interval: Annotated[
@@ -366,14 +389,14 @@ async def connect( # type: ignore[override]
Doc(
"if `True` the `publish` method will "
"return `bool` type after publish is complete."
- "Otherwise it will returns `None`."
+ "Otherwise it will returns `None`.",
),
] = EMPTY,
on_return_raises: Annotated[
bool,
Doc(
"raise an :class:`aio_pika.exceptions.DeliveryError`"
- "when mandatory message will be returned"
+ "when mandatory message will be returned",
),
] = EMPTY,
) -> "RobustConnection":
@@ -403,9 +426,15 @@ async def connect( # type: ignore[override]
url = None if url is EMPTY else url
- if url or any(
- (host, port, virtualhost, ssl_options, client_properties, security)
- ):
+ if any((
+ url,
+ host,
+ port,
+ virtualhost,
+ ssl_options,
+ client_properties,
+ security,
+ )):
security_args = parse_security(security)
kwargs["url"] = build_url(
@@ -423,9 +452,7 @@ async def connect( # type: ignore[override]
if ssl_context := security_args.get("ssl_context"):
kwargs["ssl_context"] = ssl_context
- connection = await super().connect(**kwargs)
-
- return connection
+ return await super().connect(**kwargs)
@override
async def _connect( # type: ignore[override]
@@ -453,7 +480,6 @@ async def _connect( # type: ignore[override]
)
if self._channel is None: # pragma: no branch
- max_consumers = self._max_consumers
channel = self._channel = cast(
"RobustChannel",
await connection.channel(
@@ -463,229 +489,153 @@ async def _connect( # type: ignore[override]
),
)
- declarer = self.declarer = RabbitDeclarer(channel)
- await declarer.declare_queue(RABBIT_REPLY)
+ if self._max_consumers:
+ await channel.set_qos(prefetch_count=int(self._max_consumers))
- self._producer = AioPikaFastProducer(
- declarer=declarer,
- decoder=self._decoder,
- parser=self._parser,
- )
+ self.declarer.connect(connection=connection, channel=channel)
+ await self.declarer.declare_queue(RABBIT_REPLY)
- if max_consumers:
- c = AsyncAPISubscriber.build_log_context(
- None,
- RabbitQueue(""),
- RabbitExchange(""),
- )
- self._log(f"Set max consumers to {max_consumers}", extra=c)
- await channel.set_qos(prefetch_count=int(max_consumers))
+ self._producer.connect()
return connection
- async def _close(
+ async def close(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> None:
+ await super().close(exc_type, exc_val, exc_tb)
+
if self._channel is not None:
if not self._channel.is_closed:
await self._channel.close()
self._channel = None
- self.declarer = None
- self._producer = None
-
if self._connection is not None:
await self._connection.close()
+ self._connection = None
- await super()._close(exc_type, exc_val, exc_tb)
+ self.declarer.disconnect()
+ self._producer.disconnect()
async def start(self) -> None:
"""Connect broker to RabbitMQ and startup all subscribers."""
- await super().start()
-
- assert self.declarer, NOT_CONNECTED_YET # nosec B101
+ await self.connect()
+ self._setup()
- for publisher in self._publishers.values():
+ for publisher in self._publishers:
if publisher.exchange is not None:
await self.declare_exchange(publisher.exchange)
- for subscriber in self._subscribers.values():
- self._log(
- f"`{subscriber.call_name}` waiting for messages",
- extra=subscriber.get_log_context(None),
- )
- await subscriber.start()
+ await super().start()
+
+ logger_state = self._state.get().logger_state
+ if self._max_consumers:
+ logger_state.log(f"Set max consumers to {self._max_consumers}")
@override
- async def publish( # type: ignore[override]
+ async def publish(
self,
- message: Annotated[
- "AioPikaSendableMessage",
- Doc("Message body to send."),
- ] = None,
- queue: Annotated[
- Union["RabbitQueue", str],
- Doc("Message routing key to publish with."),
- ] = "",
- exchange: Annotated[
- Union["RabbitExchange", str, None],
- Doc("Target exchange to publish message to."),
- ] = None,
+ message: "AioPikaSendableMessage" = None,
+ queue: Union["RabbitQueue", str] = "",
+ exchange: Union["RabbitExchange", str, None] = None,
*,
- routing_key: Annotated[
- str,
- Doc(
- "Message routing key to publish with. "
- "Overrides `queue` option if presented."
- ),
- ] = "",
- mandatory: Annotated[
- bool,
- Doc(
- "Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
- ),
- ] = True,
- immediate: Annotated[
- bool,
- Doc(
- "Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
- ),
- ] = False,
- timeout: Annotated[
- "TimeoutType",
- Doc("Send confirmation time from RabbitMQ."),
- ] = None,
- persist: Annotated[
- bool,
- Doc("Restore the message on RabbitMQ reboot."),
- ] = False,
- reply_to: Annotated[
- Optional[str],
- Doc(
- "Reply message routing key to send with (always sending to default exchange)."
- ),
- ] = None,
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- # message args
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- headers: Annotated[
- Optional["HeadersType"],
- Doc("Message headers to store metainformation."),
- ] = None,
- content_type: Annotated[
- Optional[str],
- Doc(
- "Message **content-type** header. "
- "Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified."
- ),
- ] = None,
- content_encoding: Annotated[
- Optional[str],
- Doc("Message body content encoding, e.g. **gzip**."),
- ] = None,
- expiration: Annotated[
- Optional["DateType"],
- Doc("Message expiration (lifetime) in seconds (or datetime or timedelta)."),
- ] = None,
- message_id: Annotated[
- Optional[str],
- Doc("Arbitrary message id. Generated automatically if not presented."),
- ] = None,
- timestamp: Annotated[
- Optional["DateType"],
- Doc("Message publish timestamp. Generated automatically if not presented."),
- ] = None,
- message_type: Annotated[
- Optional[str],
- Doc("Application-specific message type, e.g. **orders.created**."),
- ] = None,
- user_id: Annotated[
- Optional[str],
- Doc("Publisher connection User ID, validated if set."),
- ] = None,
- priority: Annotated[
- Optional[int],
- Doc("The message priority (0 by default)."),
- ] = None,
- ) -> Optional[Any]:
+ routing_key: str = "",
+ # publish options
+ mandatory: bool = True,
+ immediate: bool = False,
+ timeout: "TimeoutType" = None,
+ persist: bool = False,
+ reply_to: Optional[str] = None,
+ correlation_id: Optional[str] = None,
+ # message options
+ headers: Optional["HeadersType"] = None,
+ content_type: Optional[str] = None,
+ content_encoding: Optional[str] = None,
+ expiration: Optional["DateType"] = None,
+ message_id: Optional[str] = None,
+ timestamp: Optional["DateType"] = None,
+ message_type: Optional[str] = None,
+ user_id: Optional[str] = None,
+ priority: Optional[int] = None,
+ ) -> Optional["aiormq.abc.ConfirmationFrameType"]:
"""Publish message directly.
This method allows you to publish a message in a way that is not documented by AsyncAPI. You can use it from other
framework applications or to publish occasional messages.
Please use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` for regular publishing instead.
- """
- routing = routing_key or RabbitQueue.validate(queue).routing
- correlation_id = correlation_id or gen_cor_id()
- return await super().publish(
+ Args:
+ message:
+ Message body to send.
+ queue:
+ Message routing key to publish with.
+ exchange:
+ Target exchange to publish message to.
+ routing_key:
+ Message routing key to publish with. Overrides the `queue` option if provided.
+ mandatory:
+ Client waits for confirmation that the message is placed to some queue. RabbitMQ returns the message to the client if there is no suitable queue.
+ immediate:
+ Client expects that there is a consumer ready to take the message to work. RabbitMQ returns the message to the client if there is no suitable consumer.
+ timeout:
+ Time to wait for a publish confirmation from RabbitMQ.
+ persist:
+ Restore the message on RabbitMQ reboot.
+ reply_to:
+ Reply message routing key to send with (always sending to default exchange).
+ correlation_id:
+ Manual message **correlation_id** setter. **correlation_id** is a useful option to trace messages.
+ headers:
+ Message headers to store metainformation.
+ content_type:
+ Message **content-type** header. Used by application, not core RabbitMQ. Will be set automatically if not specified.
+ content_encoding:
+ Message body content encoding, e.g. **gzip**.
+ expiration:
+ Message expiration (lifetime) in seconds (or datetime or timedelta).
+ message_id:
+ Arbitrary message id. Generated automatically if not provided.
+ timestamp:
+ Message publish timestamp. Generated automatically if not provided.
+ message_type:
+ Application-specific message type, e.g. **orders.created**.
+ user_id:
+ Publisher connection User ID, validated if set.
+ priority:
+ The message priority (0 by default).
+
+ Returns:
+ An optional `aiormq.abc.ConfirmationFrameType` representing the confirmation frame if RabbitMQ is configured to send confirmations.
+ """
+ cmd = RabbitPublishCommand(
message,
- producer=self._producer,
- routing_key=routing,
+ routing_key=routing_key or RabbitQueue.validate(queue).routing,
+ exchange=RabbitExchange.validate(exchange),
+ correlation_id=correlation_id or gen_cor_id(),
app_id=self.app_id,
- exchange=exchange,
mandatory=mandatory,
immediate=immediate,
persist=persist,
reply_to=reply_to,
headers=headers,
- correlation_id=correlation_id,
content_type=content_type,
content_encoding=content_encoding,
expiration=expiration,
message_id=message_id,
- timestamp=timestamp,
message_type=message_type,
+ timestamp=timestamp,
user_id=user_id,
timeout=timeout,
priority=priority,
- rpc=rpc,
- rpc_timeout=rpc_timeout,
- raise_timeout=raise_timeout,
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
+
@override
async def request( # type: ignore[override]
self,
@@ -706,21 +656,21 @@ async def request( # type: ignore[override]
str,
Doc(
"Message routing key to publish with. "
- "Overrides `queue` option if presented."
+ "Overrides `queue` option if presented.",
),
] = "",
mandatory: Annotated[
bool,
Doc(
"Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
+ "RabbitMQ returns message to client if there is no suitable queue.",
),
] = True,
immediate: Annotated[
bool,
Doc(
"Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
+ "RabbitMQ returns message to client if there is no suitable consumer.",
),
] = False,
timeout: Annotated[
@@ -736,7 +686,7 @@ async def request( # type: ignore[override]
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
headers: Annotated[
@@ -748,7 +698,7 @@ async def request( # type: ignore[override]
Doc(
"Message **content-type** header. "
"Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified."
+ "Will be set automatically if not specified.",
),
] = None,
content_encoding: Annotated[
@@ -780,16 +730,12 @@ async def request( # type: ignore[override]
Doc("The message priority (0 by default)."),
] = None,
) -> "RabbitMessage":
- routing = routing_key or RabbitQueue.validate(queue).routing
- correlation_id = correlation_id or gen_cor_id()
-
- msg: RabbitMessage = await super().request(
+ cmd = RabbitPublishCommand(
message,
- producer=self._producer,
- correlation_id=correlation_id,
- routing_key=routing,
+ routing_key=routing_key or RabbitQueue.validate(queue).routing,
+ exchange=RabbitExchange.validate(exchange),
+ correlation_id=correlation_id or gen_cor_id(),
app_id=self.app_id,
- exchange=exchange,
mandatory=mandatory,
immediate=immediate,
persist=persist,
@@ -798,12 +744,15 @@ async def request( # type: ignore[override]
content_encoding=content_encoding,
expiration=expiration,
message_id=message_id,
- timestamp=timestamp,
message_type=message_type,
+ timestamp=timestamp,
user_id=user_id,
timeout=timeout,
priority=priority,
+ _publish_type=PublishType.REQUEST,
)
+
+ msg: RabbitMessage = await super()._basic_request(cmd, producer=self._producer)
return msg
async def declare_queue(
@@ -814,7 +763,6 @@ async def declare_queue(
],
) -> "RobustQueue":
"""Declares queue object in **RabbitMQ**."""
- assert self.declarer, NOT_CONNECTED_YET # nosec B101
return await self.declarer.declare_queue(queue)
async def declare_exchange(
@@ -825,7 +773,6 @@ async def declare_exchange(
],
) -> "RobustExchange":
"""Declares exchange object in **RabbitMQ**."""
- assert self.declarer, NOT_CONNECTED_YET # nosec B101
return await self.declarer.declare_exchange(exchange)
@override
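For reference, a minimal usage sketch of the `publish` signature documented above. The broker URL, queue name, and payload are illustrative; only the keyword arguments come from this diff:

```python
# Minimal sketch of the documented `publish` call, assuming a local
# RabbitMQ at the default URL; queue name and payload are illustrative.
import asyncio

from faststream.rabbit import RabbitBroker


async def main() -> None:
    async with RabbitBroker("amqp://guest:guest@localhost:5672/") as broker:
        # Returns the confirmation frame (or None), as described in the
        # new docstring above.
        confirmation = await broker.publish(
            {"order_id": 1},
            queue="orders",
            persist=True,
            message_type="orders.created",
        )
        print(confirmation)


asyncio.run(main())
```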
diff --git a/faststream/rabbit/broker/logging.py b/faststream/rabbit/broker/logging.py
index 738254b36e..21b0172004 100644
--- a/faststream/rabbit/broker/logging.py
+++ b/faststream/rabbit/broker/logging.py
@@ -1,66 +1,68 @@
import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Optional
+from functools import partial
+from typing import TYPE_CHECKING, Optional
-from aio_pika import IncomingMessage, RobustConnection
-
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.log.logging import get_broker_logger
-from faststream.types import EMPTY
+from faststream._internal.log.logging import get_broker_logger
+from faststream._internal.state.logger import (
+ DefaultLoggerStorage,
+ make_logger_state,
+)
if TYPE_CHECKING:
- from faststream.types import LoggerProto
-
-
-class RabbitLoggingBroker(BrokerUsecase[IncomingMessage, RobustConnection]):
- """A class that extends the LoggingMixin class and adds additional functionality for logging RabbitMQ related information."""
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
- _max_queue_len: int
- _max_exchange_len: int
- __max_msg_id_ln: ClassVar[int] = 10
+class RabbitParamsStorage(DefaultLoggerStorage):
def __init__(
self,
- *args: Any,
- logger: Optional["LoggerProto"] = EMPTY,
- log_level: int = logging.INFO,
- log_fmt: Optional[str] = None,
- **kwargs: Any,
+ log_fmt: Optional[str],
) -> None:
- super().__init__(
- *args,
- logger=logger,
- # TODO: generate unique logger names to not share between brokers
- default_logger=get_broker_logger(
- name="rabbit",
- default_context={
- "queue": "",
- "exchange": "",
- },
- message_id_ln=self.__max_msg_id_ln,
- ),
- log_level=log_level,
- log_fmt=log_fmt,
- **kwargs,
- )
+ super().__init__(log_fmt)
- self._max_queue_len = 4
self._max_exchange_len = 4
+ self._max_queue_len = 4
+
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
- def get_fmt(self) -> str:
- return (
- "%(asctime)s %(levelname)-8s - "
- f"%(exchange)-{self._max_exchange_len}s | "
- f"%(queue)-{self._max_queue_len}s | "
- f"%(message_id)-{self.__max_msg_id_ln}s "
- "- %(message)s"
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ self._max_exchange_len = max(
+ self._max_exchange_len,
+ len(params.get("exchange", "")),
+ )
+ self._max_queue_len = max(
+ self._max_queue_len,
+ len(params.get("queue", "")),
)
- def _setup_log_context(
- self,
- *,
- queue: Optional[str] = None,
- exchange: Optional[str] = None,
- ) -> None:
- """Set up log context."""
- self._max_exchange_len = max(self._max_exchange_len, len(exchange or ""))
- self._max_queue_len = max(self._max_queue_len, len(queue or ""))
+ def get_logger(self, *, context: "ContextRepo") -> "LoggerProto":
+ message_id_ln = 10
+
+ # TODO: generate unique logger names to not share between brokers
+ return get_broker_logger(
+ name="rabbit",
+ default_context={
+ "queue": "",
+ "exchange": "",
+ },
+ message_id_ln=message_id_ln,
+ fmt=self._log_fmt
+ or (
+ "%(asctime)s %(levelname)-8s - "
+ f"%(exchange)-{self._max_exchange_len}s | "
+ f"%(queue)-{self._max_queue_len}s | "
+ f"%(message_id)-{message_id_ln}s "
+ "- %(message)s"
+ ),
+ context=context,
+ log_level=self.logger_log_level,
+ )
+
+
+make_rabbit_logger_state = partial(
+ make_logger_state,
+ default_storage_cls=RabbitParamsStorage,
+)
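A standalone sketch of how the default format above widens its columns as the storage records longer queue/exchange names. This re-implements the width logic outside faststream purely for illustration; it is not the library API:

```python
# Illustration only: reproduces the column-width logic of
# RabbitParamsStorage above, outside of faststream.
def build_fmt(max_exchange_len: int, max_queue_len: int, message_id_ln: int = 10) -> str:
    return (
        "%(asctime)s %(levelname)-8s - "
        f"%(exchange)-{max_exchange_len}s | "
        f"%(queue)-{max_queue_len}s | "
        f"%(message_id)-{message_id_ln}s "
        "- %(message)s"
    )


# Widths start at 4 and only ever grow, mirroring setup_log_contest():
params = {"queue": "orders-processing", "exchange": "events"}
max_exchange = max(4, len(params.get("exchange", "")))
max_queue = max(4, len(params.get("queue", "")))
print(build_fmt(max_exchange, max_queue))
```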
diff --git a/faststream/rabbit/broker/registrator.py b/faststream/rabbit/broker/registrator.py
index 6c6ff357d6..c95c77f388 100644
--- a/faststream/rabbit/broker/registrator.py
+++ b/faststream/rabbit/broker/registrator.py
@@ -1,39 +1,40 @@
-from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Sequence, Union, cast
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, cast
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.utils import default_filter
-from faststream.rabbit.publisher.asyncapi import AsyncAPIPublisher
+from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
+from faststream.rabbit.publisher.factory import create_publisher
from faststream.rabbit.publisher.usecase import PublishKwargs
from faststream.rabbit.schemas import (
RabbitExchange,
RabbitQueue,
)
-from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber
from faststream.rabbit.subscriber.factory import create_subscriber
+from faststream.rabbit.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
from aio_pika import IncomingMessage # noqa: F401
from aio_pika.abc import DateType, HeadersType, TimeoutType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import (
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.rabbit.message import RabbitMessage
- from faststream.rabbit.schemas.reply import ReplyConfig
- from faststream.types import AnyDict
+ from faststream.rabbit.publisher.specified import SpecificationPublisher
class RabbitRegistrator(ABCBroker["IncomingMessage"]):
"""Includable to RabbitBroker router."""
- _subscribers: Dict[int, "AsyncAPISubscriber"]
- _publishers: Dict[int, "AsyncAPIPublisher"]
+ _subscribers: list["SpecificationSubscriber"]
+ _publishers: list["SpecificationPublisher"]
@override
def subscriber( # type: ignore[override]
@@ -42,7 +43,7 @@ def subscriber( # type: ignore[override]
Union[str, "RabbitQueue"],
Doc(
"RabbitMQ queue to listen. "
- "**FastStream** declares and binds queue object to `exchange` automatically if it is not passive (by default)."
+ "**FastStream** declares and binds queue object to `exchange` automatically if it is not passive (by default).",
),
],
exchange: Annotated[
@@ -50,7 +51,7 @@ def subscriber( # type: ignore[override]
Doc(
"RabbitMQ exchange to bind queue to. "
"Uses default exchange if not presented. "
- "**FastStream** declares exchange object automatically if it is not passive (by default)."
+ "**FastStream** declares exchange object automatically if it is not passive (by default).",
),
] = None,
*,
@@ -58,19 +59,19 @@ def subscriber( # type: ignore[override]
Optional["AnyDict"],
Doc("Extra consumer arguments to use in `queue.consume(...)` method."),
] = None,
- reply_config: Annotated[
- Optional["ReplyConfig"],
- Doc("Extra options to use at replies publishing."),
+ no_ack: Annotated[
+ bool,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
deprecated(
- "Deprecated in **FastStream 0.5.16**. "
- "Please, use `RabbitResponse` object as a handler return instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
),
- ] = None,
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -82,31 +83,16 @@ def subscriber( # type: ignore[override]
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[RabbitMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[RabbitMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- Union[bool, int],
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -118,38 +104,36 @@ def subscriber( # type: ignore[override]
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> AsyncAPISubscriber:
+ ) -> SpecificationSubscriber:
subscriber = cast(
- AsyncAPISubscriber,
+ "SpecificationSubscriber",
super().subscriber(
create_subscriber(
queue=RabbitQueue.validate(queue),
exchange=RabbitExchange.validate(exchange),
consume_args=consume_args,
- reply_config=reply_config,
# subscriber args
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# AsyncAPI
title_=title,
description_=description,
include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
+ ),
),
)
return subscriber.add_call(
- filter_=filter,
parser_=parser or self._parser,
decoder_=decoder or self._decoder,
dependencies_=dependencies,
@@ -172,21 +156,21 @@ def publisher( # type: ignore[override]
str,
Doc(
"Default message routing key to publish with. "
- "Overrides `queue` option if presented."
+ "Overrides `queue` option if presented.",
),
] = "",
mandatory: Annotated[
bool,
Doc(
"Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
+ "RabbitMQ returns message to client if there is no suitable queue.",
),
] = True,
immediate: Annotated[
bool,
Doc(
"Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
+ "RabbitMQ returns message to client if there is no suitable consumer.",
),
] = False,
timeout: Annotated[
@@ -200,7 +184,7 @@ def publisher( # type: ignore[override]
reply_to: Annotated[
Optional[str],
Doc(
- "Reply message routing key to send with (always sending to default exchange)."
+ "Reply message routing key to send with (always sending to default exchange).",
),
] = None,
priority: Annotated[
@@ -210,6 +194,10 @@ def publisher( # type: ignore[override]
# specific
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -225,7 +213,7 @@ def publisher( # type: ignore[override]
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
@@ -237,7 +225,7 @@ def publisher( # type: ignore[override]
Optional["HeadersType"],
Doc(
"Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
content_type: Annotated[
@@ -245,7 +233,7 @@ def publisher( # type: ignore[override]
Doc(
"Message **content-type** header. "
"Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified."
+ "Will be set automatically if not specified.",
),
] = None,
content_encoding: Annotated[
@@ -264,7 +252,7 @@ def publisher( # type: ignore[override]
Optional[str],
Doc("Publisher connection User ID, validated if set."),
] = None,
- ) -> AsyncAPIPublisher:
+ ) -> "SpecificationPublisher":
"""Creates long-living and AsyncAPI-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
@@ -287,24 +275,22 @@ def publisher( # type: ignore[override]
expiration=expiration,
)
- publisher = cast(
- AsyncAPIPublisher,
+ return cast(
+ "SpecificationPublisher",
super().publisher(
- AsyncAPIPublisher.create(
+ create_publisher(
routing_key=routing_key,
queue=RabbitQueue.validate(queue),
exchange=RabbitExchange.validate(exchange),
message_kwargs=message_kwargs,
# Specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# AsyncAPI
title_=title,
description_=description,
schema_=schema,
include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
+ ),
),
)
-
- return publisher
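The subscriber signature above drops the removed `filter`/`retry` options and deprecates `no_ack` in favor of `ack_policy`. A migration sketch (queue name and handler body are illustrative; `AckPolicy.DO_NOTHING` is taken from the deprecation text):

```python
from faststream.middlewares import AckPolicy
from faststream.rabbit import RabbitBroker

broker = RabbitBroker()


# 0.5.x: @broker.subscriber("logs", no_ack=True)
# 0.6.x: disable auto-acknowledgement via ack_policy instead.
@broker.subscriber("logs", ack_policy=AckPolicy.DO_NOTHING)
async def handle(body: str) -> None:
    print(body)
```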
diff --git a/faststream/rabbit/fastapi/__init__.py b/faststream/rabbit/fastapi/__init__.py
index a46505cc82..cb7c7c26d4 100644
--- a/faststream/rabbit/fastapi/__init__.py
+++ b/faststream/rabbit/fastapi/__init__.py
@@ -1,11 +1,12 @@
-from typing_extensions import Annotated
+from typing import Annotated
-from faststream.broker.fastapi.context import Context, ContextRepo, Logger
+from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.rabbit.broker import RabbitBroker as RB
-from faststream.rabbit.fastapi.router import RabbitRouter
from faststream.rabbit.message import RabbitMessage as RM
from faststream.rabbit.publisher.producer import AioPikaFastProducer
+from .fastapi import RabbitRouter
+
RabbitMessage = Annotated[RM, Context("message")]
RabbitBroker = Annotated[RB, Context("broker")]
RabbitProducer = Annotated[AioPikaFastProducer, Context("broker._producer")]
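A sketch of how the `Annotated` aliases above are meant to be consumed inside a FastStream-FastAPI handler (queue name and handler body are illustrative):

```python
from faststream.rabbit.fastapi import RabbitBroker, RabbitMessage, RabbitRouter

router = RabbitRouter("amqp://guest:guest@localhost:5672/")


@router.subscriber("test-queue")
async def handler(body: str, msg: RabbitMessage, broker: RabbitBroker) -> None:
    # `msg` and `broker` are resolved from the FastStream context,
    # not from the message payload.
    print(msg.correlation_id, body)
```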
diff --git a/faststream/rabbit/fastapi/fastapi.py b/faststream/rabbit/fastapi/fastapi.py
new file mode 100644
index 0000000000..e19e95fdf9
--- /dev/null
+++ b/faststream/rabbit/fastapi/fastapi.py
@@ -0,0 +1,835 @@
+import logging
+from collections.abc import Iterable, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Any,
+ Callable,
+ Optional,
+ Union,
+ cast,
+)
+
+from fastapi.datastructures import Default
+from fastapi.routing import APIRoute
+from fastapi.utils import generate_unique_id
+from starlette.responses import JSONResponse
+from starlette.routing import BaseRoute
+from typing_extensions import Doc, deprecated, override
+
+from faststream.__about__ import SERVICE_NAME
+from faststream._internal.constants import EMPTY
+from faststream._internal.fastapi.router import StreamRouter
+from faststream.middlewares import AckPolicy
+from faststream.rabbit.broker.broker import RabbitBroker as RB
+from faststream.rabbit.schemas import (
+ RabbitExchange,
+ RabbitQueue,
+)
+from faststream.rabbit.subscriber.specified import SpecificationSubscriber
+
+if TYPE_CHECKING:
+ from enum import Enum
+
+ from aio_pika import IncomingMessage
+ from aio_pika.abc import DateType, HeadersType, SSLOptions, TimeoutType
+ from fastapi import params
+ from fastapi.types import IncEx
+ from pamqp.common import FieldTable
+ from starlette.responses import Response
+ from starlette.types import ASGIApp, Lifespan
+ from yarl import URL
+
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ PublisherMiddleware,
+ SubscriberMiddleware,
+ )
+ from faststream.rabbit.message import RabbitMessage
+ from faststream.rabbit.publisher.specified import SpecificationPublisher
+ from faststream.security import BaseSecurity
+ from faststream.specification.schema.extra import Tag, TagDict
+
+
+class RabbitRouter(StreamRouter["IncomingMessage"]):
+ """A class to represent a RabbitMQ router for incoming messages."""
+
+ broker_class = RB
+ broker: RB
+
+ def __init__(
+ self,
+ url: Annotated[
+ Union[str, "URL", None],
+ Doc("RabbitMQ destination location to connect."),
+ ] = "amqp://guest:guest@localhost:5672/", # pragma: allowlist secret
+ *,
+ # connection args
+ host: Annotated[
+ Optional[str],
+ Doc("Destination host. This option overrides `url` option host."),
+ ] = None,
+ port: Annotated[
+ Optional[int],
+ Doc("Destination port. This option overrides `url` option port."),
+ ] = None,
+ virtualhost: Annotated[
+ Optional[str],
+ Doc("RabbitMQ virtual host to use in the current broker connection."),
+ ] = None,
+ ssl_options: Annotated[
+ Optional["SSLOptions"],
+ Doc("Extra ssl options to establish connection."),
+ ] = None,
+ client_properties: Annotated[
+ Optional["FieldTable"],
+ Doc("Add custom client capability."),
+ ] = None,
+ timeout: Annotated[
+ "TimeoutType",
+ Doc("Connection establishement timeout."),
+ ] = None,
+ fail_fast: Annotated[
+ bool,
+ Doc(
+ "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable.",
+ ),
+ ] = True,
+ reconnect_interval: Annotated[
+ "TimeoutType",
+ Doc("Time to sleep between reconnection attempts."),
+ ] = 5.0,
+ # channel args
+ channel_number: Annotated[
+ Optional[int],
+ Doc("Specify the channel number explicit."),
+ ] = None,
+ publisher_confirms: Annotated[
+ bool,
+ Doc(
+ "if `True` the `publish` method will "
+ "return `bool` type after publish is complete."
+ "Otherwise it will returns `None`.",
+ ),
+ ] = True,
+ on_return_raises: Annotated[
+ bool,
+ Doc(
+ "raise an :class:`aio_pika.exceptions.DeliveryError`"
+ "when mandatory message will be returned",
+ ),
+ ] = False,
+ # broker args
+ max_consumers: Annotated[
+ Optional[int],
+ Doc(
+ "RabbitMQ channel `qos` option. "
+ "It limits max messages processing in the same time count.",
+ ),
+ ] = None,
+ app_id: Annotated[
+ Optional[str],
+ Doc("Application name to mark outgoing messages by."),
+ ] = SERVICE_NAME,
+ # broker base args
+ graceful_timeout: Annotated[
+ Optional[float],
+ Doc(
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
+ ),
+ ] = 15.0,
+ decoder: Annotated[
+ Optional["CustomCallable"],
+ Doc("Custom decoder object."),
+ ] = None,
+ parser: Annotated[
+ Optional["CustomCallable"],
+ Doc("Custom parser object."),
+ ] = None,
+ middlewares: Annotated[
+ Sequence["BrokerMiddleware[IncomingMessage]"],
+ Doc("Middlewares to apply to all broker publishers/subscribers."),
+ ] = (),
+ # AsyncAPI args
+ security: Annotated[
+ Optional["BaseSecurity"],
+ Doc(
+ "Security options to connect broker and generate AsyncAPI server security information.",
+ ),
+ ] = None,
+ specification_url: Annotated[
+ Optional[str],
+ Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
+ ] = None,
+ protocol: Annotated[
+ Optional[str],
+ Doc("AsyncAPI server protocol."),
+ ] = None,
+ protocol_version: Annotated[
+ Optional[str],
+ Doc("AsyncAPI server protocol version."),
+ ] = "0.9.1",
+ description: Annotated[
+ Optional[str],
+ Doc("AsyncAPI server description."),
+ ] = None,
+ specification_tags: Annotated[
+ Iterable[Union["Tag", "TagDict"]],
+ Doc("AsyncAPI server tags."),
+ ] = (),
+ # logging args
+ logger: Annotated[
+ Optional["LoggerProto"],
+ Doc("User specified logger to pass into Context and log service messages."),
+ ] = EMPTY,
+ log_level: Annotated[
+ int,
+ Doc("Service messages log level."),
+ ] = logging.INFO,
+ log_fmt: Annotated[
+ Optional[str],
+ Doc("Default logger log format."),
+ ] = None,
+ # StreamRouter options
+ setup_state: Annotated[
+ bool,
+ Doc(
+ "Whether to add broker to app scope in lifespan. "
+ "You should disable this option at old ASGI servers.",
+ ),
+ ] = True,
+ schema_url: Annotated[
+ Optional[str],
+ Doc(
+ "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all.",
+ ),
+ ] = "/asyncapi",
+ # FastAPI args
+ prefix: Annotated[
+ str,
+ Doc("An optional path prefix for the router."),
+ ] = "",
+ tags: Annotated[
+ Optional[list[Union[str, "Enum"]]],
+ Doc(
+ """
+ A list of tags to be applied to all the *path operations* in this
+ router.
+
+ It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+ Read more about it in the
+ [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
+ """,
+ ),
+ ] = None,
+ dependencies: Annotated[
+ Optional[Sequence["params.Depends"]],
+ Doc(
+ """
+ A list of dependencies (using `Depends()`) to be applied to all the
+ *path and stream operations* in this router.
+
+ Read more about it in the
+ [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
+ """,
+ ),
+ ] = None,
+ default_response_class: Annotated[
+ type["Response"],
+ Doc(
+ """
+ The default response class to be used.
+
+ Read more in the
+ [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
+ """,
+ ),
+ ] = Default(JSONResponse),
+ responses: Annotated[
+ Optional[dict[Union[int, str], "AnyDict"]],
+ Doc(
+ """
+ Additional responses to be shown in OpenAPI.
+
+ It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+ Read more about it in the
+ [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/).
+
+ And in the
+ [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
+ """,
+ ),
+ ] = None,
+ callbacks: Annotated[
+ Optional[list[BaseRoute]],
+ Doc(
+ """
+ OpenAPI callbacks that should apply to all *path operations* in this
+ router.
+
+ It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+ Read more about it in the
+ [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
+ """,
+ ),
+ ] = None,
+ routes: Annotated[
+ Optional[list[BaseRoute]],
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Starlette and supported for compatibility.
+
+ ---
+
+ A list of routes to serve incoming HTTP and WebSocket requests.
+ """,
+ ),
+ deprecated(
+ """
+ You normally wouldn't use this parameter with FastAPI, it is inherited
+ from Starlette and supported for compatibility.
+
+ In FastAPI, you normally would use the *path operation methods*,
+ like `router.get()`, `router.post()`, etc.
+ """,
+ ),
+ ] = None,
+ redirect_slashes: Annotated[
+ bool,
+ Doc(
+ """
+ Whether to detect and redirect slashes in URLs when the client doesn't
+ use the same format.
+ """,
+ ),
+ ] = True,
+ default: Annotated[
+ Optional["ASGIApp"],
+ Doc(
+ """
+ Default function handler for this router. Used to handle
+ 404 Not Found errors.
+ """,
+ ),
+ ] = None,
+ dependency_overrides_provider: Annotated[
+ Optional[Any],
+ Doc(
+ """
+ Only used internally by FastAPI to handle dependency overrides.
+
+ You shouldn't need to use it. It normally points to the `FastAPI` app
+ object.
+ """,
+ ),
+ ] = None,
+ route_class: Annotated[
+ type["APIRoute"],
+ Doc(
+ """
+ Custom route (*path operation*) class to be used by this router.
+
+ Read more about it in the
+ [FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router).
+ """,
+ ),
+ ] = APIRoute,
+ on_startup: Annotated[
+ Optional[Sequence[Callable[[], Any]]],
+ Doc(
+ """
+ A list of startup event handler functions.
+
+ You should instead use the `lifespan` handlers.
+
+ Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
+ """,
+ ),
+ ] = None,
+ on_shutdown: Annotated[
+ Optional[Sequence[Callable[[], Any]]],
+ Doc(
+ """
+ A list of shutdown event handler functions.
+
+ You should instead use the `lifespan` handlers.
+
+ Read more in the
+ [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
+ """,
+ ),
+ ] = None,
+ lifespan: Annotated[
+ Optional["Lifespan[Any]"],
+ Doc(
+ """
+ A `Lifespan` context manager handler. This replaces `startup` and
+ `shutdown` functions with a single context manager.
+
+ Read more in the
+ [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
+ """,
+ ),
+ ] = None,
+ deprecated: Annotated[
+ Optional[bool],
+ Doc(
+ """
+ Mark all *path operations* in this router as deprecated.
+
+ It will be added to the generated OpenAPI (e.g. visible at `/docs`).
+
+ Read more about it in the
+ [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
+ """,
+ ),
+ ] = None,
+ include_in_schema: Annotated[
+ bool,
+ Doc(
+ """
+ To include (or not) all the *path operations* in this router in the
+ generated OpenAPI.
+
+ This affects the generated OpenAPI (e.g. visible at `/docs`).
+
+ Read more about it in the
+ [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi).
+ """,
+ ),
+ ] = True,
+ generate_unique_id_function: Annotated[
+ Callable[["APIRoute"], str],
+ Doc(
+ """
+ Customize the function used to generate unique IDs for the *path
+ operations* shown in the generated OpenAPI.
+
+ This is particularly useful when automatically generating clients or
+ SDKs for your API.
+
+ Read more about it in the
+ [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
+ """,
+ ),
+ ] = Default(generate_unique_id),
+ ) -> None:
+ super().__init__(
+ url,
+ host=host,
+ port=port,
+ virtualhost=virtualhost,
+ ssl_options=ssl_options,
+ client_properties=client_properties,
+ timeout=timeout,
+ fail_fast=fail_fast,
+ reconnect_interval=reconnect_interval,
+ max_consumers=max_consumers,
+ app_id=app_id,
+ graceful_timeout=graceful_timeout,
+ decoder=decoder,
+ parser=parser,
+ channel_number=channel_number,
+ publisher_confirms=publisher_confirms,
+ on_return_raises=on_return_raises,
+ middlewares=middlewares,
+ security=security,
+ specification_url=specification_url,
+ protocol=protocol,
+ protocol_version=protocol_version,
+ description=description,
+ logger=logger,
+ log_level=log_level,
+ log_fmt=log_fmt,
+ specification_tags=specification_tags,
+ schema_url=schema_url,
+ setup_state=setup_state,
+ # FastAPI kwargs
+ prefix=prefix,
+ tags=tags,
+ dependencies=dependencies,
+ default_response_class=default_response_class,
+ responses=responses,
+ callbacks=callbacks,
+ routes=routes,
+ redirect_slashes=redirect_slashes,
+ default=default,
+ dependency_overrides_provider=dependency_overrides_provider,
+ route_class=route_class,
+ on_startup=on_startup,
+ on_shutdown=on_shutdown,
+ deprecated=deprecated,
+ include_in_schema=include_in_schema,
+ lifespan=lifespan,
+ generate_unique_id_function=generate_unique_id_function,
+ )
+
+ @override
+ def subscriber( # type: ignore[override]
+ self,
+ queue: Annotated[
+ Union[str, RabbitQueue],
+ Doc(
+ "RabbitMQ queue to listen. "
+ "**FastStream** declares and binds queue object to `exchange` automatically if it is not passive (by default).",
+ ),
+ ],
+ exchange: Annotated[
+ Union[str, RabbitExchange, None],
+ Doc(
+ "RabbitMQ exchange to bind queue to. "
+ "Uses default exchange if not presented. "
+ "**FastStream** declares exchange object automatically if it is not passive (by default).",
+ ),
+ ] = None,
+ *,
+ consume_args: Annotated[
+ Optional["AnyDict"],
+ Doc("Extra consumer arguments to use in `queue.consume(...)` method."),
+ ] = None,
+ # broker arguments
+ dependencies: Annotated[
+ Iterable["params.Depends"],
+ Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ ] = (),
+ parser: Annotated[
+ Optional["CustomCallable"],
+ Doc(
+ "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one.",
+ ),
+ ] = None,
+ decoder: Annotated[
+ Optional["CustomCallable"],
+ Doc("Function to decode FastStream msg bytes body to python objects."),
+ ] = None,
+ middlewares: Annotated[
+ Sequence["SubscriberMiddleware[RabbitMessage]"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
+ no_ack: Annotated[
+ bool,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
+ no_reply: Annotated[
+ bool,
+ Doc(
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
+ ),
+ ] = False,
+ # AsyncAPI information
+ title: Annotated[
+ Optional[str],
+ Doc("AsyncAPI subscriber object title."),
+ ] = None,
+ description: Annotated[
+ Optional[str],
+ Doc(
+ "AsyncAPI subscriber object description. "
+ "Uses decorated docstring as default.",
+ ),
+ ] = None,
+ include_in_schema: Annotated[
+ bool,
+ Doc("Whetever to include operation in AsyncAPI schema or not."),
+ ] = True,
+ # FastAPI args
+ response_model: Annotated[
+ Any,
+ Doc(
+ """
+ The type to use for the response.
+
+ It could be any valid Pydantic *field* type. So, it doesn't have to
+ be a Pydantic model, it could be other things, like a `list`, `dict`,
+ etc.
+
+ It will be used for:
+
+ * Documentation: the generated OpenAPI (and the UI at `/docs`) will
+ show it as the response (JSON Schema).
+ * Serialization: you could return an arbitrary object and the
+ `response_model` would be used to serialize that object into the
+ corresponding JSON.
+ * Filtering: the JSON sent to the client will only contain the data
+ (fields) defined in the `response_model`. If you returned an object
+ that contains an attribute `password` but the `response_model` does
+ not include that field, the JSON sent to the client would not have
+ that `password`.
+ * Validation: whatever you return will be serialized with the
+ `response_model`, converting any data as necessary to generate the
+ corresponding JSON. But if the data in the object returned is not
+ valid, that would mean a violation of the contract with the client,
+ so it's an error from the API developer. So, FastAPI will raise an
+ error and return a 500 error code (Internal Server Error).
+
+ Read more about it in the
+ [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
+ """,
+ ),
+ ] = Default(None),
+ response_model_include: Annotated[
+ Optional["IncEx"],
+ Doc(
+ """
+ Configuration passed to Pydantic to include only certain fields in the
+ response data.
+
+ Read more about it in the
+ [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
+ """,
+ ),
+ ] = None,
+ response_model_exclude: Annotated[
+ Optional["IncEx"],
+ Doc(
+ """
+ Configuration passed to Pydantic to exclude certain fields in the
+ response data.
+
+ Read more about it in the
+ [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
+ """,
+ ),
+ ] = None,
+ response_model_by_alias: Annotated[
+ bool,
+ Doc(
+ """
+ Configuration passed to Pydantic to define if the response model
+ should be serialized by alias when an alias is used.
+
+ Read more about it in the
+ [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
+ """,
+ ),
+ ] = True,
+ response_model_exclude_unset: Annotated[
+ bool,
+ Doc(
+ """
+ Configuration passed to Pydantic to define if the response data
+ should have all the fields, including the ones that were not set and
+ have their default values. This is different from
+ `response_model_exclude_defaults` in that if the fields are set,
+ they will be included in the response, even if the value is the same
+ as the default.
+
+ When `True`, default values are omitted from the response.
+
+ Read more about it in the
+ [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
+ """,
+ ),
+ ] = False,
+ response_model_exclude_defaults: Annotated[
+ bool,
+ Doc(
+ """
+ Configuration passed to Pydantic to define if the response data
+ should have all the fields, including the ones that have the same value
+ as the default. This is different from `response_model_exclude_unset`
+ in that if the fields are set but contain the same default values,
+ they will be excluded from the response.
+
+ When `True`, default values are omitted from the response.
+
+ Read more about it in the
+ [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
+ """,
+ ),
+ ] = False,
+ response_model_exclude_none: Annotated[
+ bool,
+ Doc(
+ """
+ Configuration passed to Pydantic to define if the response data should
+ exclude fields set to `None`.
+
+ This is much simpler (less smart) than `response_model_exclude_unset`
+ and `response_model_exclude_defaults`. You probably want to use one of
+ those two instead of this one, as those allow returning `None` values
+ when it makes sense.
+
+ Read more about it in the
+ [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
+ """,
+ ),
+ ] = False,
+ ) -> SpecificationSubscriber:
+ return cast(
+ "SpecificationSubscriber",
+ super().subscriber(
+ queue=queue,
+ exchange=exchange,
+ consume_args=consume_args,
+ dependencies=dependencies,
+ parser=parser,
+ decoder=decoder,
+ middlewares=middlewares,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
+ no_reply=no_reply,
+ title=title,
+ description=description,
+ include_in_schema=include_in_schema,
+ # FastAPI args
+ response_model=response_model,
+ response_model_include=response_model_include,
+ response_model_exclude=response_model_exclude,
+ response_model_by_alias=response_model_by_alias,
+ response_model_exclude_unset=response_model_exclude_unset,
+ response_model_exclude_defaults=response_model_exclude_defaults,
+ response_model_exclude_none=response_model_exclude_none,
+ ),
+ )
+
+ @override
+ def publisher(
+ self,
+ queue: Annotated[
+ Union[RabbitQueue, str],
+ Doc("Default message routing key to publish with."),
+ ] = "",
+ exchange: Annotated[
+ Union[RabbitExchange, str, None],
+ Doc("Target exchange to publish message to."),
+ ] = None,
+ *,
+ routing_key: Annotated[
+ str,
+ Doc(
+ "Default message routing key to publish with. "
+ "Overrides `queue` option if presented.",
+ ),
+ ] = "",
+ mandatory: Annotated[
+ bool,
+ Doc(
+ "Client waits for confirmation that the message is placed to some queue. "
+ "RabbitMQ returns message to client if there is no suitable queue.",
+ ),
+ ] = True,
+ immediate: Annotated[
+ bool,
+ Doc(
+ "Client expects that there is consumer ready to take the message to work. "
+ "RabbitMQ returns message to client if there is no suitable consumer.",
+ ),
+ ] = False,
+ timeout: Annotated[
+ "TimeoutType",
+ Doc("Send confirmation time from RabbitMQ."),
+ ] = None,
+ persist: Annotated[
+ bool,
+ Doc("Restore the message on RabbitMQ reboot."),
+ ] = False,
+ reply_to: Annotated[
+ Optional[str],
+ Doc(
+ "Reply message routing key to send with (always sending to default exchange).",
+ ),
+ ] = None,
+ priority: Annotated[
+ Optional[int],
+ Doc("The message priority (0 by default)."),
+ ] = None,
+ # specific
+ middlewares: Annotated[
+ Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
+ Doc("Publisher middlewares to wrap outgoing messages."),
+ ] = (),
+ # AsyncAPI information
+ title: Annotated[
+ Optional[str],
+ Doc("AsyncAPI publisher object title."),
+ ] = None,
+ description: Annotated[
+ Optional[str],
+ Doc("AsyncAPI publisher object description."),
+ ] = None,
+ schema: Annotated[
+ Optional[Any],
+ Doc(
+ "AsyncAPI publishing message type. "
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
+ ),
+ ] = None,
+ include_in_schema: Annotated[
+ bool,
+ Doc("Whetever to include operation in AsyncAPI schema or not."),
+ ] = True,
+ # message args
+ headers: Annotated[
+ Optional["HeadersType"],
+ Doc(
+ "Message headers to store metainformation. "
+ "Can be overridden by `publish.headers` if specified.",
+ ),
+ ] = None,
+ content_type: Annotated[
+ Optional[str],
+ Doc(
+ "Message **content-type** header. "
+ "Used by application, not core RabbitMQ. "
+ "Will be set automatically if not specified.",
+ ),
+ ] = None,
+ content_encoding: Annotated[
+ Optional[str],
+ Doc("Message body content encoding, e.g. **gzip**."),
+ ] = None,
+ expiration: Annotated[
+ Optional["DateType"],
+ Doc("Message expiration (lifetime) in seconds (or datetime or timedelta)."),
+ ] = None,
+ message_type: Annotated[
+ Optional[str],
+ Doc("Application-specific message type, e.g. **orders.created**."),
+ ] = None,
+ user_id: Annotated[
+ Optional[str],
+ Doc("Publisher connection User ID, validated if set."),
+ ] = None,
+ ) -> "SpecificationPublisher":
+ return self.broker.publisher(
+ queue=queue,
+ exchange=exchange,
+ routing_key=routing_key,
+ mandatory=mandatory,
+ immediate=immediate,
+ timeout=timeout,
+ persist=persist,
+ reply_to=reply_to,
+ priority=priority,
+ middlewares=middlewares,
+ title=title,
+ description=description,
+ schema=schema,
+ include_in_schema=include_in_schema,
+ headers=headers,
+ content_type=content_type,
+ content_encoding=content_encoding,
+ expiration=expiration,
+ message_type=message_type,
+ user_id=user_id,
+ )
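For completeness, a sketch of mounting the relocated `RabbitRouter` in a FastAPI application. The app wiring is illustrative; `lifespan_context` is the integration point inherited from `StreamRouter`:

```python
from fastapi import FastAPI

from faststream.rabbit.fastapi import RabbitRouter

router = RabbitRouter("amqp://guest:guest@localhost:5672/")


@router.subscriber("greetings")
async def consume(name: str) -> None:
    print(f"Hello, {name}!")


# The router's lifespan starts and stops the broker with the app.
app = FastAPI(lifespan=router.lifespan_context)
app.include_router(router)
```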
diff --git a/faststream/rabbit/fastapi/router.py b/faststream/rabbit/fastapi/router.py
deleted file mode 100644
index 030b460907..0000000000
--- a/faststream/rabbit/fastapi/router.py
+++ /dev/null
@@ -1,853 +0,0 @@
-import logging
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Iterable,
- List,
- Optional,
- Sequence,
- Type,
- Union,
- cast,
-)
-
-from fastapi.datastructures import Default
-from fastapi.routing import APIRoute
-from fastapi.utils import generate_unique_id
-from starlette.responses import JSONResponse
-from starlette.routing import BaseRoute
-from typing_extensions import Annotated, Doc, deprecated, override
-
-from faststream.__about__ import SERVICE_NAME
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.utils import default_filter
-from faststream.rabbit.broker.broker import RabbitBroker as RB
-from faststream.rabbit.publisher.asyncapi import AsyncAPIPublisher
-from faststream.rabbit.schemas import (
- RabbitExchange,
- RabbitQueue,
-)
-from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber
-from faststream.types import EMPTY
-
-if TYPE_CHECKING:
- from enum import Enum
-
- from aio_pika import IncomingMessage
- from aio_pika.abc import DateType, HeadersType, SSLOptions, TimeoutType
- from fastapi import params
- from fastapi.types import IncEx
- from pamqp.common import FieldTable
- from starlette.responses import Response
- from starlette.types import ASGIApp, Lifespan
- from yarl import URL
-
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- Filter,
- PublisherMiddleware,
- SubscriberMiddleware,
- )
- from faststream.rabbit.message import RabbitMessage
- from faststream.rabbit.schemas.reply import ReplyConfig
- from faststream.security import BaseSecurity
- from faststream.types import AnyDict, LoggerProto
-
-
-class RabbitRouter(StreamRouter["IncomingMessage"]):
- """A class to represent a RabbitMQ router for incoming messages."""
-
- broker_class = RB
- broker: RB
-
- def __init__(
- self,
- url: Annotated[
- Union[str, "URL", None],
- Doc("RabbitMQ destination location to connect."),
- ] = "amqp://guest:guest@localhost:5672/", # pragma: allowlist secret
- *,
- # connection args
- host: Annotated[
- Optional[str],
- Doc("Destination host. This option overrides `url` option host."),
- ] = None,
- port: Annotated[
- Optional[int],
- Doc("Destination port. This option overrides `url` option port."),
- ] = None,
- virtualhost: Annotated[
- Optional[str],
- Doc("RabbitMQ virtual host to use in the current broker connection."),
- ] = None,
- ssl_options: Annotated[
- Optional["SSLOptions"],
- Doc("Extra ssl options to establish connection."),
- ] = None,
- client_properties: Annotated[
- Optional["FieldTable"],
- Doc("Add custom client capability."),
- ] = None,
- timeout: Annotated[
- "TimeoutType",
- Doc("Connection establishement timeout."),
- ] = None,
- fail_fast: Annotated[
- bool,
- Doc(
- "Broker startup raises `AMQPConnectionError` if RabbitMQ is unreachable."
- ),
- ] = True,
- reconnect_interval: Annotated[
- "TimeoutType",
- Doc("Time to sleep between reconnection attempts."),
- ] = 5.0,
- # channel args
- channel_number: Annotated[
- Optional[int],
- Doc("Specify the channel number explicit."),
- ] = None,
- publisher_confirms: Annotated[
- bool,
- Doc(
- "if `True` the `publish` method will "
- "return `bool` type after publish is complete."
- "Otherwise it will returns `None`."
- ),
- ] = True,
- on_return_raises: Annotated[
- bool,
- Doc(
- "raise an :class:`aio_pika.exceptions.DeliveryError`"
- "when mandatory message will be returned"
- ),
- ] = False,
- # broker args
- max_consumers: Annotated[
- Optional[int],
- Doc(
- "RabbitMQ channel `qos` option. "
- "It limits max messages processing in the same time count."
- ),
- ] = None,
- app_id: Annotated[
- Optional[str],
- Doc("Application name to mark outgoing messages by."),
- ] = SERVICE_NAME,
- # broker base args
- graceful_timeout: Annotated[
- Optional[float],
- Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
- ),
- ] = 15.0,
- decoder: Annotated[
- Optional["CustomCallable"],
- Doc("Custom decoder object."),
- ] = None,
- parser: Annotated[
- Optional["CustomCallable"],
- Doc("Custom parser object."),
- ] = None,
- middlewares: Annotated[
- Sequence["BrokerMiddleware[IncomingMessage]"],
- Doc("Middlewares to apply to all broker publishers/subscribers."),
- ] = (),
- # AsyncAPI args
- security: Annotated[
- Optional["BaseSecurity"],
- Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
- ),
- ] = None,
- asyncapi_url: Annotated[
- Optional[str],
- Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
- ] = None,
- protocol: Annotated[
- Optional[str],
- Doc("AsyncAPI server protocol."),
- ] = None,
- protocol_version: Annotated[
- Optional[str],
- Doc("AsyncAPI server protocol version."),
- ] = "0.9.1",
- description: Annotated[
- Optional[str],
- Doc("AsyncAPI server description."),
- ] = None,
- asyncapi_tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
- Doc("AsyncAPI server tags."),
- ] = None,
- # logging args
- logger: Annotated[
- Optional["LoggerProto"],
- Doc("User specified logger to pass into Context and log service messages."),
- ] = EMPTY,
- log_level: Annotated[
- int,
- Doc("Service messages log level."),
- ] = logging.INFO,
- log_fmt: Annotated[
- Optional[str],
- Doc("Default logger log format."),
- ] = None,
- # StreamRouter options
- setup_state: Annotated[
- bool,
- Doc(
- "Whether to add broker to app scope in lifespan. "
- "You should disable this option at old ASGI servers."
- ),
- ] = True,
- schema_url: Annotated[
- Optional[str],
- Doc(
- "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all."
- ),
- ] = "/asyncapi",
- # FastAPI args
- prefix: Annotated[
- str,
- Doc("An optional path prefix for the router."),
- ] = "",
- tags: Annotated[
- Optional[List[Union[str, "Enum"]]],
- Doc(
- """
- A list of tags to be applied to all the *path operations* in this
- router.
-
- It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
- Read more about it in the
- [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
- ),
- ] = None,
- dependencies: Annotated[
- Optional[Sequence["params.Depends"]],
- Doc(
- """
- A list of dependencies (using `Depends()`) to be applied to all the
- *path and stream operations* in this router.
-
- Read more about it in the
- [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
- ),
- ] = None,
- default_response_class: Annotated[
- Type["Response"],
- Doc(
- """
- The default response class to be used.
-
- Read more in the
- [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
- """
- ),
- ] = Default(JSONResponse),
- responses: Annotated[
- Optional[Dict[Union[int, str], "AnyDict"]],
- Doc(
- """
- Additional responses to be shown in OpenAPI.
-
- It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
- Read more about it in the
- [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/).
-
- And in the
- [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
- ),
- ] = None,
- callbacks: Annotated[
- Optional[List[BaseRoute]],
- Doc(
- """
- OpenAPI callbacks that should apply to all *path operations* in this
- router.
-
- It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
- Read more about it in the
- [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
- """
- ),
- ] = None,
- routes: Annotated[
- Optional[List[BaseRoute]],
- Doc(
- """
- **Note**: you probably shouldn't use this parameter, it is inherited
- from Starlette and supported for compatibility.
-
- ---
-
- A list of routes to serve incoming HTTP and WebSocket requests.
- """
- ),
- deprecated(
- """
- You normally wouldn't use this parameter with FastAPI, it is inherited
- from Starlette and supported for compatibility.
-
- In FastAPI, you normally would use the *path operation methods*,
- like `router.get()`, `router.post()`, etc.
- """
- ),
- ] = None,
- redirect_slashes: Annotated[
- bool,
- Doc(
- """
- Whether to detect and redirect slashes in URLs when the client doesn't
- use the same format.
- """
- ),
- ] = True,
- default: Annotated[
- Optional["ASGIApp"],
- Doc(
- """
- Default function handler for this router. Used to handle
- 404 Not Found errors.
- """
- ),
- ] = None,
- dependency_overrides_provider: Annotated[
- Optional[Any],
- Doc(
- """
- Only used internally by FastAPI to handle dependency overrides.
-
- You shouldn't need to use it. It normally points to the `FastAPI` app
- object.
- """
- ),
- ] = None,
- route_class: Annotated[
- Type["APIRoute"],
- Doc(
- """
- Custom route (*path operation*) class to be used by this router.
-
- Read more about it in the
- [FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router).
- """
- ),
- ] = APIRoute,
- on_startup: Annotated[
- Optional[Sequence[Callable[[], Any]]],
- Doc(
- """
- A list of startup event handler functions.
-
- You should instead use the `lifespan` handlers.
-
- Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
- ),
- ] = None,
- on_shutdown: Annotated[
- Optional[Sequence[Callable[[], Any]]],
- Doc(
- """
- A list of shutdown event handler functions.
-
- You should instead use the `lifespan` handlers.
-
- Read more in the
- [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
- ),
- ] = None,
- lifespan: Annotated[
- Optional["Lifespan[Any]"],
- Doc(
- """
- A `Lifespan` context manager handler. This replaces `startup` and
- `shutdown` functions with a single context manager.
-
- Read more in the
- [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
- ),
- ] = None,
- deprecated: Annotated[
- Optional[bool],
- Doc(
- """
- Mark all *path operations* in this router as deprecated.
-
- It will be added to the generated OpenAPI (e.g. visible at `/docs`).
-
- Read more about it in the
- [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
- ),
- ] = None,
- include_in_schema: Annotated[
- bool,
- Doc(
- """
- To include (or not) all the *path operations* in this router in the
- generated OpenAPI.
-
- This affects the generated OpenAPI (e.g. visible at `/docs`).
-
- Read more about it in the
- [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi).
- """
- ),
- ] = True,
- generate_unique_id_function: Annotated[
- Callable[["APIRoute"], str],
- Doc(
- """
- Customize the function used to generate unique IDs for the *path
- operations* shown in the generated OpenAPI.
-
- This is particularly useful when automatically generating clients or
- SDKs for your API.
-
- Read more about it in the
- [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
- """
- ),
- ] = Default(generate_unique_id),
- ) -> None:
- super().__init__(
- url,
- host=host,
- port=port,
- virtualhost=virtualhost,
- ssl_options=ssl_options,
- client_properties=client_properties,
- timeout=timeout,
- fail_fast=fail_fast,
- reconnect_interval=reconnect_interval,
- max_consumers=max_consumers,
- app_id=app_id,
- graceful_timeout=graceful_timeout,
- decoder=decoder,
- parser=parser,
- channel_number=channel_number,
- publisher_confirms=publisher_confirms,
- on_return_raises=on_return_raises,
- middlewares=middlewares,
- security=security,
- asyncapi_url=asyncapi_url,
- protocol=protocol,
- protocol_version=protocol_version,
- description=description,
- logger=logger,
- log_level=log_level,
- log_fmt=log_fmt,
- asyncapi_tags=asyncapi_tags,
- schema_url=schema_url,
- setup_state=setup_state,
- # FastAPI kwargs
- prefix=prefix,
- tags=tags,
- dependencies=dependencies,
- default_response_class=default_response_class,
- responses=responses,
- callbacks=callbacks,
- routes=routes,
- redirect_slashes=redirect_slashes,
- default=default,
- dependency_overrides_provider=dependency_overrides_provider,
- route_class=route_class,
- on_startup=on_startup,
- on_shutdown=on_shutdown,
- deprecated=deprecated,
- include_in_schema=include_in_schema,
- lifespan=lifespan,
- generate_unique_id_function=generate_unique_id_function,
- )
-
- @override
- def subscriber( # type: ignore[override]
- self,
- queue: Annotated[
- Union[str, RabbitQueue],
- Doc(
- "RabbitMQ queue to listen. "
- "**FastStream** declares and binds queue object to `exchange` automatically if it is not passive (by default)."
- ),
- ],
- exchange: Annotated[
- Union[str, RabbitExchange, None],
- Doc(
- "RabbitMQ exchange to bind queue to. "
- "Uses default exchange if not presented. "
- "**FastStream** declares exchange object automatically if it is not passive (by default)."
- ),
- ] = None,
- *,
- consume_args: Annotated[
- Optional["AnyDict"],
- Doc("Extra consumer arguments to use in `queue.consume(...)` method."),
- ] = None,
- reply_config: Annotated[
- Optional["ReplyConfig"],
- Doc("Extra options to use at replies publishing."),
- deprecated(
- "Deprecated in **FastStream 0.5.16**. "
- "Please, use `RabbitResponse` object as a handler return instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = None,
- # broker arguments
- dependencies: Annotated[
- Iterable["params.Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
- ] = (),
- parser: Annotated[
- Optional["CustomCallable"],
- Doc(
- "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one."
- ),
- ] = None,
- decoder: Annotated[
- Optional["CustomCallable"],
- Doc("Function to decode FastStream msg bytes body to python objects."),
- ] = None,
- middlewares: Annotated[
- Iterable["SubscriberMiddleware[RabbitMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[RabbitMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = default_filter,
- retry: Annotated[
- Union[bool, int],
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
- no_reply: Annotated[
- bool,
- Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
- ),
- ] = False,
- # AsyncAPI information
- title: Annotated[
- Optional[str],
- Doc("AsyncAPI subscriber object title."),
- ] = None,
- description: Annotated[
- Optional[str],
- Doc(
- "AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
- ),
- ] = None,
- include_in_schema: Annotated[
- bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
- ] = True,
- # FastAPI args
- response_model: Annotated[
- Any,
- Doc(
- """
- The type to use for the response.
-
- It could be any valid Pydantic *field* type. So, it doesn't have to
- be a Pydantic model, it could be other things, like a `list`, `dict`,
- etc.
-
- It will be used for:
-
- * Documentation: the generated OpenAPI (and the UI at `/docs`) will
- show it as the response (JSON Schema).
- * Serialization: you could return an arbitrary object and the
- `response_model` would be used to serialize that object into the
- corresponding JSON.
- * Filtering: the JSON sent to the client will only contain the data
- (fields) defined in the `response_model`. If you returned an object
- that contains an attribute `password` but the `response_model` does
- not include that field, the JSON sent to the client would not have
- that `password`.
- * Validation: whatever you return will be serialized with the
- `response_model`, converting any data as necessary to generate the
- corresponding JSON. But if the data in the object returned is not
- valid, that would mean a violation of the contract with the client,
- so it's an error from the API developer. So, FastAPI will raise an
- error and return a 500 error code (Internal Server Error).
-
- Read more about it in the
- [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
- ),
- ] = Default(None),
- response_model_include: Annotated[
- Optional["IncEx"],
- Doc(
- """
- Configuration passed to Pydantic to include only certain fields in the
- response data.
-
- Read more about it in the
- [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
- ),
- ] = None,
- response_model_exclude: Annotated[
- Optional["IncEx"],
- Doc(
- """
- Configuration passed to Pydantic to exclude certain fields in the
- response data.
-
- Read more about it in the
- [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
- ),
- ] = None,
- response_model_by_alias: Annotated[
- bool,
- Doc(
- """
- Configuration passed to Pydantic to define if the response model
- should be serialized by alias when an alias is used.
-
- Read more about it in the
- [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
- ),
- ] = True,
- response_model_exclude_unset: Annotated[
- bool,
- Doc(
- """
- Configuration passed to Pydantic to define if the response data
- should have all the fields, including the ones that were not set and
- have their default values. This is different from
- `response_model_exclude_defaults` in that if the fields are set,
- they will be included in the response, even if the value is the same
- as the default.
-
- When `True`, default values are omitted from the response.
-
- Read more about it in the
- [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
- ),
- ] = False,
- response_model_exclude_defaults: Annotated[
- bool,
- Doc(
- """
- Configuration passed to Pydantic to define if the response data
- should have all the fields, including the ones that have the same value
- as the default. This is different from `response_model_exclude_unset`
- in that if the fields are set but contain the same default values,
- they will be excluded from the response.
-
- When `True`, default values are omitted from the response.
-
- Read more about it in the
- [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
- ),
- ] = False,
- response_model_exclude_none: Annotated[
- bool,
- Doc(
- """
- Configuration passed to Pydantic to define if the response data should
- exclude fields set to `None`.
-
- This is much simpler (less smart) than `response_model_exclude_unset`
- and `response_model_exclude_defaults`. You probably want to use one of
- those two instead of this one, as those allow returning `None` values
- when it makes sense.
-
- Read more about it in the
- [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
- ),
- ] = False,
- ) -> AsyncAPISubscriber:
- return cast(
- AsyncAPISubscriber,
- super().subscriber(
- queue=queue,
- exchange=exchange,
- consume_args=consume_args,
- reply_config=reply_config,
- dependencies=dependencies,
- parser=parser,
- decoder=decoder,
- middlewares=middlewares,
- filter=filter,
- retry=retry,
- no_ack=no_ack,
- no_reply=no_reply,
- title=title,
- description=description,
- include_in_schema=include_in_schema,
- # FastAPI args
- response_model=response_model,
- response_model_include=response_model_include,
- response_model_exclude=response_model_exclude,
- response_model_by_alias=response_model_by_alias,
- response_model_exclude_unset=response_model_exclude_unset,
- response_model_exclude_defaults=response_model_exclude_defaults,
- response_model_exclude_none=response_model_exclude_none,
- ),
- )
-
- @override
- def publisher(
- self,
- queue: Annotated[
- Union[RabbitQueue, str],
- Doc("Default message routing key to publish with."),
- ] = "",
- exchange: Annotated[
- Union[RabbitExchange, str, None],
- Doc("Target exchange to publish message to."),
- ] = None,
- *,
- routing_key: Annotated[
- str,
- Doc(
- "Default message routing key to publish with. "
- "Overrides `queue` option if presented."
- ),
- ] = "",
- mandatory: Annotated[
- bool,
- Doc(
- "Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
- ),
- ] = True,
- immediate: Annotated[
- bool,
- Doc(
- "Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
- ),
- ] = False,
- timeout: Annotated[
- "TimeoutType",
- Doc("Send confirmation time from RabbitMQ."),
- ] = None,
- persist: Annotated[
- bool,
- Doc("Restore the message on RabbitMQ reboot."),
- ] = False,
- reply_to: Annotated[
- Optional[str],
- Doc(
- "Reply message routing key to send with (always sending to default exchange)."
- ),
- ] = None,
- priority: Annotated[
- Optional[int],
- Doc("The message priority (0 by default)."),
- ] = None,
- # specific
- middlewares: Annotated[
- Sequence["PublisherMiddleware"],
- Doc("Publisher middlewares to wrap outgoing messages."),
- ] = (),
- # AsyncAPI information
- title: Annotated[
- Optional[str],
- Doc("AsyncAPI publisher object title."),
- ] = None,
- description: Annotated[
- Optional[str],
- Doc("AsyncAPI publisher object description."),
- ] = None,
- schema: Annotated[
- Optional[Any],
- Doc(
- "AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
- ),
- ] = None,
- include_in_schema: Annotated[
- bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
- ] = True,
- # message args
- headers: Annotated[
- Optional["HeadersType"],
- Doc(
- "Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
- ),
- ] = None,
- content_type: Annotated[
- Optional[str],
- Doc(
- "Message **content-type** header. "
- "Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified."
- ),
- ] = None,
- content_encoding: Annotated[
- Optional[str],
- Doc("Message body content encoding, e.g. **gzip**."),
- ] = None,
- expiration: Annotated[
- Optional["DateType"],
- Doc("Message expiration (lifetime) in seconds (or datetime or timedelta)."),
- ] = None,
- message_type: Annotated[
- Optional[str],
- Doc("Application-specific message type, e.g. **orders.created**."),
- ] = None,
- user_id: Annotated[
- Optional[str],
- Doc("Publisher connection User ID, validated if set."),
- ] = None,
- ) -> AsyncAPIPublisher:
- return self.broker.publisher(
- queue=queue,
- exchange=exchange,
- routing_key=routing_key,
- mandatory=mandatory,
- immediate=immediate,
- timeout=timeout,
- persist=persist,
- reply_to=reply_to,
- priority=priority,
- middlewares=middlewares,
- title=title,
- description=description,
- schema=schema,
- include_in_schema=include_in_schema,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- expiration=expiration,
- message_type=message_type,
- user_id=user_id,
- )
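Note: the overloads deleted above back the user-facing FastAPI plugin API, whose usage is unchanged. A minimal sketch of that public surface (queue names are illustrative):

from faststream.rabbit.fastapi import RabbitRouter

router = RabbitRouter("amqp://guest:guest@localhost:5672/")


@router.subscriber("in-queue")
@router.publisher("out-queue")
async def handle(msg: str) -> str:
    # consumed from "in-queue"; the return value is re-published to "out-queue"
    return msg.upper()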
diff --git a/faststream/rabbit/helpers/declarer.py b/faststream/rabbit/helpers/declarer.py
index 57c21a3a78..dc890617d0 100644
--- a/faststream/rabbit/helpers/declarer.py
+++ b/faststream/rabbit/helpers/declarer.py
@@ -1,4 +1,6 @@
-from typing import TYPE_CHECKING, Dict, cast
+from typing import TYPE_CHECKING, cast
+
+from .state import ConnectedState, ConnectionState, EmptyConnectionState
if TYPE_CHECKING:
import aio_pika
@@ -9,12 +11,22 @@
class RabbitDeclarer:
"""An utility class to declare RabbitMQ queues and exchanges."""
- __channel: "aio_pika.RobustChannel"
- __queues: Dict["RabbitQueue", "aio_pika.RobustQueue"]
- __exchanges: Dict["RabbitExchange", "aio_pika.RobustExchange"]
+ def __init__(self) -> None:
+ self.__queues: dict[RabbitQueue, aio_pika.RobustQueue] = {}
+ self.__exchanges: dict[RabbitExchange, aio_pika.RobustExchange] = {}
+
+ self.__connection: ConnectionState = EmptyConnectionState()
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(<{self.__connection.__class__.__name__}>, queues={list(self.__queues.keys())}, exchanges={list(self.__exchanges.keys())})"
+
+ def connect(
+ self, connection: "aio_pika.RobustConnection", channel: "aio_pika.RobustChannel"
+ ) -> None:
+ self.__connection = ConnectedState(connection=connection, channel=channel)
- def __init__(self, channel: "aio_pika.RobustChannel") -> None:
- self.__channel = channel
+ def disconnect(self) -> None:
+ self.__connection = EmptyConnectionState()
self.__queues = {}
self.__exchanges = {}
@@ -27,7 +39,7 @@ async def declare_queue(
if (q := self.__queues.get(queue)) is None:
self.__queues[queue] = q = cast(
"aio_pika.RobustQueue",
- await self.__channel.declare_queue(
+ await self.__connection.channel.declare_queue(
name=queue.name,
durable=queue.durable,
exclusive=queue.exclusive,
@@ -48,12 +60,12 @@ async def declare_exchange(
) -> "aio_pika.RobustExchange":
"""Declare an exchange, parent exchanges and bind them each other."""
if not exchange.name:
- return self.__channel.default_exchange
+ return self.__connection.channel.default_exchange
if (exch := self.__exchanges.get(exchange)) is None:
self.__exchanges[exchange] = exch = cast(
"aio_pika.RobustExchange",
- await self.__channel.declare_exchange(
+ await self.__connection.channel.declare_exchange(
name=exchange.name,
type=exchange.type.value,
durable=exchange.durable,
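Note: a minimal sketch of the new declarer lifecycle, assuming a reachable RabbitMQ broker and aio-pika's robust-connection API — the declarer is now constructed empty, wired to a connection explicitly, and caches declarations per schema object:

import asyncio

import aio_pika

from faststream.rabbit.helpers.declarer import RabbitDeclarer
from faststream.rabbit.schemas import RabbitQueue


async def main() -> None:
    connection = await aio_pika.connect_robust("amqp://guest:guest@localhost/")
    channel = await connection.channel()

    declarer = RabbitDeclarer()
    declarer.connect(connection=connection, channel=channel)

    queue = RabbitQueue("demo")
    q1 = await declarer.declare_queue(queue)
    q2 = await declarer.declare_queue(queue)
    assert q1 is q2  # second call hits the cache, no broker round-trip

    declarer.disconnect()  # resets to EmptyConnectionState and clears caches


asyncio.run(main())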
diff --git a/faststream/rabbit/helpers/state.py b/faststream/rabbit/helpers/state.py
new file mode 100644
index 0000000000..182b588557
--- /dev/null
+++ b/faststream/rabbit/helpers/state.py
@@ -0,0 +1,35 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from aio_pika import RobustChannel, RobustConnection
+
+
+class ConnectionState(Protocol):
+ connection: "RobustConnection"
+ channel: "RobustChannel"
+
+
+class EmptyConnectionState(ConnectionState):
+ __slots__ = ()
+
+ error_msg = "You should connect broker first."
+
+ @property
+ def connection(self) -> "RobustConnection":
+ raise IncorrectState(self.error_msg)
+
+ @property
+ def channel(self) -> "RobustChannel":
+ raise IncorrectState(self.error_msg)
+
+
+class ConnectedState(ConnectionState):
+ __slots__ = ("channel", "connection")
+
+ def __init__(
+ self, connection: "RobustConnection", channel: "RobustChannel"
+ ) -> None:
+ self.connection = connection
+ self.channel = channel
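Note: the point of the protocol above is to fail loudly before connection instead of with an opaque AttributeError. A tiny sketch (error text as defined above):

from faststream.exceptions import IncorrectState
from faststream.rabbit.helpers.state import EmptyConnectionState

state = EmptyConnectionState()
try:
    state.channel  # attribute access alone triggers the guard
except IncorrectState as e:
    print(e)  # You should connect the broker first.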
diff --git a/faststream/rabbit/message.py b/faststream/rabbit/message.py
index 4287cf2fd7..7b91fdd72f 100644
--- a/faststream/rabbit/message.py
+++ b/faststream/rabbit/message.py
@@ -1,6 +1,6 @@
from aio_pika import IncomingMessage
-from faststream.broker.message import StreamMessage
+from faststream.message import StreamMessage
class RabbitMessage(StreamMessage[IncomingMessage]):
diff --git a/faststream/rabbit/opentelemetry/middleware.py b/faststream/rabbit/opentelemetry/middleware.py
index 29a553a7f0..2973fc60dc 100644
--- a/faststream/rabbit/opentelemetry/middleware.py
+++ b/faststream/rabbit/opentelemetry/middleware.py
@@ -5,9 +5,10 @@
from faststream.opentelemetry.middleware import TelemetryMiddleware
from faststream.rabbit.opentelemetry.provider import RabbitTelemetrySettingsProvider
+from faststream.rabbit.response import RabbitPublishCommand
-class RabbitTelemetryMiddleware(TelemetryMiddleware):
+class RabbitTelemetryMiddleware(TelemetryMiddleware[RabbitPublishCommand]):
def __init__(
self,
*,
diff --git a/faststream/rabbit/opentelemetry/provider.py b/faststream/rabbit/opentelemetry/provider.py
index 6971810ff2..374285d932 100644
--- a/faststream/rabbit/opentelemetry/provider.py
+++ b/faststream/rabbit/opentelemetry/provider.py
@@ -1,19 +1,21 @@
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING
from opentelemetry.semconv.trace import SpanAttributes
from faststream.opentelemetry import TelemetrySettingsProvider
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
+from faststream.rabbit.response import RabbitPublishCommand
if TYPE_CHECKING:
from aio_pika import IncomingMessage
- from faststream.broker.message import StreamMessage
- from faststream.rabbit.schemas.exchange import RabbitExchange
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message import StreamMessage
-class RabbitTelemetrySettingsProvider(TelemetrySettingsProvider["IncomingMessage"]):
+class RabbitTelemetrySettingsProvider(
+ TelemetrySettingsProvider["IncomingMessage", RabbitPublishCommand],
+):
__slots__ = ("messaging_system",)
def __init__(self) -> None:
@@ -41,26 +43,19 @@ def get_consume_destination_name(
routing_key = msg.raw_message.routing_key
return f"{exchange}.{routing_key}"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "RabbitPublishCommand",
) -> "AnyDict":
- exchange: Union[None, str, RabbitExchange] = kwargs.get("exchange")
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: getattr(
- exchange, "name", exchange or ""
- ),
- SpanAttributes.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY: kwargs[
- "routing_key"
- ],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.exchange.name,
+ SpanAttributes.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "RabbitPublishCommand",
) -> str:
- exchange: str = kwargs.get("exchange") or "default"
- routing_key: str = kwargs["routing_key"]
- return f"{exchange}.{routing_key}"
+ return f"{cmd.exchange.name or 'default'}.{cmd.destination}"
diff --git a/faststream/rabbit/parser.py b/faststream/rabbit/parser.py
index 8fe02dc4b3..de43697e17 100644
--- a/faststream/rabbit/parser.py
+++ b/faststream/rabbit/parser.py
@@ -3,7 +3,7 @@
from aio_pika import Message
from aio_pika.abc import DeliveryMode
-from faststream.broker.message import (
+from faststream.message import (
StreamMessage,
decode_message,
encode_message,
@@ -17,8 +17,8 @@
from aio_pika import IncomingMessage
from aio_pika.abc import DateType, HeadersType
+ from faststream._internal.basic_types import DecodedMessage
from faststream.rabbit.types import AioPikaSendableMessage
- from faststream.types import DecodedMessage
class AioPikaParser:
@@ -61,44 +61,43 @@ async def decode_message(
def encode_message(
message: "AioPikaSendableMessage",
*,
- persist: bool,
- reply_to: Optional[str],
- headers: Optional["HeadersType"],
- content_type: Optional[str],
- content_encoding: Optional[str],
- priority: Optional[int],
- correlation_id: Optional[str],
- expiration: Optional["DateType"],
- message_id: Optional[str],
- timestamp: Optional["DateType"],
- message_type: Optional[str],
- user_id: Optional[str],
- app_id: Optional[str],
+ persist: bool = False,
+ reply_to: Optional[str] = None,
+ headers: Optional["HeadersType"] = None,
+ content_type: Optional[str] = None,
+ content_encoding: Optional[str] = None,
+ priority: Optional[int] = None,
+ correlation_id: Optional[str] = None,
+ expiration: "DateType" = None,
+ message_id: Optional[str] = None,
+ timestamp: "DateType" = None,
+ message_type: Optional[str] = None,
+ user_id: Optional[str] = None,
+ app_id: Optional[str] = None,
) -> Message:
"""Encodes a message for sending using AioPika."""
if isinstance(message, Message):
return message
- else:
- message_body, generated_content_type = encode_message(message)
+ message_body, generated_content_type = encode_message(message)
- delivery_mode = (
- DeliveryMode.PERSISTENT if persist else DeliveryMode.NOT_PERSISTENT
- )
+ delivery_mode = (
+ DeliveryMode.PERSISTENT if persist else DeliveryMode.NOT_PERSISTENT
+ )
- return Message(
- message_body,
- content_type=content_type or generated_content_type,
- delivery_mode=delivery_mode,
- reply_to=reply_to,
- correlation_id=correlation_id or gen_cor_id(),
- headers=headers,
- content_encoding=content_encoding,
- priority=priority,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- type=message_type,
- user_id=user_id,
- app_id=app_id,
- )
+ return Message(
+ message_body,
+ content_type=content_type or generated_content_type,
+ delivery_mode=delivery_mode,
+ reply_to=reply_to,
+ correlation_id=correlation_id or gen_cor_id(),
+ headers=headers,
+ content_encoding=content_encoding,
+ priority=priority,
+ expiration=expiration,
+ message_id=message_id,
+ timestamp=timestamp,
+ type=message_type,
+ user_id=user_id,
+ app_id=app_id,
+ )
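Note: since every option is now defaulted, the encoder can be called with just a payload. A small sketch (content type inferred from the str body by encode_message):

from faststream.rabbit.parser import AioPikaParser

msg = AioPikaParser.encode_message("hello")
print(msg.content_type)   # text/plain, generated from the str payload
print(msg.delivery_mode)  # DeliveryMode.NOT_PERSISTENT, since persist=False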
diff --git a/faststream/rabbit/prometheus/middleware.py b/faststream/rabbit/prometheus/middleware.py
index b2f96e45ca..9b85febaeb 100644
--- a/faststream/rabbit/prometheus/middleware.py
+++ b/faststream/rabbit/prometheus/middleware.py
@@ -1,14 +1,20 @@
-from typing import TYPE_CHECKING, Optional, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
-from faststream.prometheus.middleware import BasePrometheusMiddleware
+from aio_pika import IncomingMessage
+
+from faststream._internal.constants import EMPTY
+from faststream.prometheus.middleware import PrometheusMiddleware
from faststream.rabbit.prometheus.provider import RabbitMetricsSettingsProvider
-from faststream.types import EMPTY
+from faststream.rabbit.response import RabbitPublishCommand
if TYPE_CHECKING:
from prometheus_client import CollectorRegistry
-class RabbitPrometheusMiddleware(BasePrometheusMiddleware):
+class RabbitPrometheusMiddleware(
+ PrometheusMiddleware[RabbitPublishCommand, IncomingMessage],
+):
def __init__(
self,
*,
diff --git a/faststream/rabbit/prometheus/provider.py b/faststream/rabbit/prometheus/provider.py
index 48c1bb2541..14427f977d 100644
--- a/faststream/rabbit/prometheus/provider.py
+++ b/faststream/rabbit/prometheus/provider.py
@@ -1,19 +1,20 @@
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING
from faststream.prometheus import (
ConsumeAttrs,
MetricsSettingsProvider,
)
+from faststream.rabbit.response import RabbitPublishCommand
if TYPE_CHECKING:
from aio_pika import IncomingMessage
- from faststream.broker.message import StreamMessage
- from faststream.rabbit.schemas.exchange import RabbitExchange
- from faststream.types import AnyDict
+ from faststream.message.message import StreamMessage
-class RabbitMetricsSettingsProvider(MetricsSettingsProvider["IncomingMessage"]):
+class RabbitMetricsSettingsProvider(
+ MetricsSettingsProvider["IncomingMessage", RabbitPublishCommand],
+):
__slots__ = ("messaging_system",)
def __init__(self) -> None:
@@ -32,13 +33,8 @@ def get_consume_attrs_from_message(
"messages_count": 1,
}
- def get_publish_destination_name_from_kwargs(
+ def get_publish_destination_name_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: RabbitPublishCommand,
) -> str:
- exchange: Union[None, str, RabbitExchange] = kwargs.get("exchange")
- exchange_prefix = getattr(exchange, "name", exchange or "default")
-
- routing_key: str = kwargs["routing_key"]
-
- return f"{exchange_prefix}.{routing_key}"
+ return f"{cmd.exchange.name or 'default'}.{cmd.destination}"
diff --git a/faststream/rabbit/publisher/asyncapi.py b/faststream/rabbit/publisher/asyncapi.py
deleted file mode 100644
index d8328ab05c..0000000000
--- a/faststream/rabbit/publisher/asyncapi.py
+++ /dev/null
@@ -1,137 +0,0 @@
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence
-
-from typing_extensions import override
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
- OperationBinding,
-)
-from faststream.asyncapi.schema.bindings import amqp
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.rabbit.publisher.usecase import LogicPublisher, PublishKwargs
-from faststream.rabbit.utils import is_routing_exchange
-
-if TYPE_CHECKING:
- from aio_pika import IncomingMessage
-
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
- from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
-
-
-class AsyncAPIPublisher(LogicPublisher):
- """AsyncAPI-compatible Rabbit Publisher class.
-
- Creting by
-
- ```python
- publisher: AsyncAPIPublisher = broker.publisher(...)
- # or
- publisher: AsyncAPIPublisher = router.publisher(...)
- ```
- """
-
- def get_name(self) -> str:
- routing = (
- self.routing_key
- or (self.queue.routing if is_routing_exchange(self.exchange) else None)
- or "_"
- )
-
- return f"{routing}:{getattr(self.exchange, 'name', None) or '_'}:Publisher"
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- publish=Operation(
- bindings=OperationBinding(
- amqp=amqp.OperationBinding(
- cc=self.routing or None,
- deliveryMode=2 if self.message_kwargs.get("persist") else 1,
- mandatory=self.message_kwargs.get("mandatory"),
- replyTo=self.message_kwargs.get("reply_to"),
- priority=self.message_kwargs.get("priority"),
- ),
- )
- if is_routing_exchange(self.exchange)
- else None,
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(
- payloads,
- "Publisher",
- served_words=2 if self.title_ is None else 1,
- ),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- amqp=amqp.ChannelBinding(
- **{
- "is": "routingKey",
- "queue": amqp.Queue(
- name=self.queue.name,
- durable=self.queue.durable,
- exclusive=self.queue.exclusive,
- autoDelete=self.queue.auto_delete,
- vhost=self.virtual_host,
- )
- if is_routing_exchange(self.exchange) and self.queue.name
- else None,
- "exchange": (
- amqp.Exchange(type="default", vhost=self.virtual_host)
- if not self.exchange.name
- else amqp.Exchange(
- type=self.exchange.type.value,
- name=self.exchange.name,
- durable=self.exchange.durable,
- autoDelete=self.exchange.auto_delete,
- vhost=self.virtual_host,
- )
- ),
- }
- )
- ),
- )
- }
-
- @override
- @classmethod
- def create( # type: ignore[override]
- cls,
- *,
- routing_key: str,
- queue: "RabbitQueue",
- exchange: "RabbitExchange",
- message_kwargs: "PublishKwargs",
- # Publisher args
- broker_middlewares: Sequence["BrokerMiddleware[IncomingMessage]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "AsyncAPIPublisher":
- return cls(
- routing_key=routing_key,
- queue=queue,
- exchange=exchange,
- message_kwargs=message_kwargs,
- # Publisher args
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
diff --git a/faststream/rabbit/publisher/factory.py b/faststream/rabbit/publisher/factory.py
new file mode 100644
index 0000000000..e380080c5e
--- /dev/null
+++ b/faststream/rabbit/publisher/factory.py
@@ -0,0 +1,43 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional
+
+from .specified import SpecificationPublisher
+
+if TYPE_CHECKING:
+ from aio_pika import IncomingMessage
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
+
+ from .usecase import PublishKwargs
+
+
+def create_publisher(
+ *,
+ routing_key: str,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ message_kwargs: "PublishKwargs",
+ # Publisher args
+ broker_middlewares: Sequence["BrokerMiddleware[IncomingMessage]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # AsyncAPI args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> SpecificationPublisher:
+ return SpecificationPublisher(
+ routing_key=routing_key,
+ queue=queue,
+ exchange=exchange,
+ message_kwargs=message_kwargs,
+ # Publisher args
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
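Note: a sketch of calling the factory directly, in place of the removed AsyncAPIPublisher.create(); normally broker.publisher(...) does this for you, and all values below are illustrative:

from faststream.rabbit.publisher.factory import create_publisher
from faststream.rabbit.schemas import RabbitExchange, RabbitQueue

publisher = create_publisher(
    routing_key="logs",
    queue=RabbitQueue("logs"),
    exchange=RabbitExchange("events"),
    message_kwargs={},
    broker_middlewares=(),
    middlewares=(),
    schema_=None,
    title_=None,
    description_=None,
    include_in_schema=True,
)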
diff --git a/faststream/rabbit/publisher/fake.py b/faststream/rabbit/publisher/fake.py
new file mode 100644
index 0000000000..30ff04c425
--- /dev/null
+++ b/faststream/rabbit/publisher/fake.py
@@ -0,0 +1,32 @@
+from typing import TYPE_CHECKING, Optional, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.rabbit.response import RabbitPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class RabbitFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ routing_key: str,
+ app_id: Optional[str],
+ ) -> None:
+ super().__init__(producer=producer)
+ self.routing_key = routing_key
+ self.app_id = app_id
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "RabbitPublishCommand"]
+ ) -> "RabbitPublishCommand":
+ cmd = super().patch_command(cmd)
+ real_cmd = RabbitPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.routing_key
+ if self.app_id:
+ real_cmd.message_options["app_id"] = self.app_id
+ return real_cmd
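Note: a sketch of the rewrite patch_command performs, with the producer stubbed out (PublishCommand keyword arguments as used by RabbitPublishCommand.from_cmd below):

from faststream.rabbit.publisher.fake import RabbitFakePublisher
from faststream.response import PublishCommand
from faststream.response.publish_type import PublishType


class _StubProducer:
    """Stands in for a real ProducerProto; nothing is actually published here."""


fake = RabbitFakePublisher(
    producer=_StubProducer(),  # type: ignore[arg-type]
    routing_key="amq.rabbitmq.reply-to",
    app_id="svc",
)

cmd = fake.patch_command(
    PublishCommand(body="pong", destination="ignored", _publish_type=PublishType.REPLY)
)
print(cmd.destination)                # amq.rabbitmq.reply-to
print(cmd.message_options["app_id"])  # svc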
diff --git a/faststream/rabbit/publisher/options.py b/faststream/rabbit/publisher/options.py
new file mode 100644
index 0000000000..81343b7cf9
--- /dev/null
+++ b/faststream/rabbit/publisher/options.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Optional
+
+from typing_extensions import TypedDict
+
+if TYPE_CHECKING:
+ from aio_pika.abc import DateType, HeadersType, TimeoutType
+
+
+class PublishOptions(TypedDict, total=False):
+ mandatory: bool
+ immediate: bool
+ timeout: "TimeoutType"
+
+
+class MessageOptions(TypedDict, total=False):
+ persist: bool
+ reply_to: Optional[str]
+ headers: Optional["HeadersType"]
+ content_type: Optional[str]
+ content_encoding: Optional[str]
+ priority: Optional[int]
+ expiration: "DateType"
+ message_id: Optional[str]
+ timestamp: "DateType"
+ message_type: Optional[str]
+ user_id: Optional[str]
+ app_id: Optional[str]
+ correlation_id: Optional[str]
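Note: these TypedDicts exist so option sets can flow through **kwargs with static checking via Unpack. A sketch with a hypothetical helper (build_props is not part of the codebase):

from typing import Any

from typing_extensions import Unpack

from faststream.rabbit.publisher.options import MessageOptions


def build_props(**options: Unpack[MessageOptions]) -> dict[str, Any]:
    # hypothetical: pick out just the AMQP properties this caller cares about
    return {
        "app_id": options.get("app_id"),
        "reply_to": options.get("reply_to"),
        "priority": options.get("priority"),
    }


print(build_props(app_id="svc", priority=5))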
diff --git a/faststream/rabbit/publisher/producer.py b/faststream/rabbit/publisher/producer.py
index ea83ba0672..55fe050c19 100644
--- a/faststream/rabbit/publisher/producer.py
+++ b/faststream/rabbit/publisher/producer.py
@@ -1,38 +1,54 @@
from typing import (
TYPE_CHECKING,
- Any,
- AsyncContextManager,
Optional,
- Type,
- Union,
+ Protocol,
cast,
)
import anyio
-from typing_extensions import override
+from typing_extensions import Unpack, override
-from faststream.broker.publisher.proto import ProducerProto
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import WRONG_PUBLISH_ARGS
+from faststream._internal.publisher.proto import ProducerProto
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream.exceptions import FeatureNotSupportedException, IncorrectState
from faststream.rabbit.parser import AioPikaParser
from faststream.rabbit.schemas import RABBIT_REPLY, RabbitExchange
-from faststream.utils.functions import fake_context, timeout_scope
if TYPE_CHECKING:
from types import TracebackType
import aiormq
from aio_pika import IncomingMessage, RobustQueue
- from aio_pika.abc import AbstractIncomingMessage, DateType, HeadersType, TimeoutType
+ from aio_pika.abc import AbstractIncomingMessage, TimeoutType
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
- from faststream.broker.types import (
+ from faststream._internal.types import (
AsyncCallable,
CustomCallable,
)
from faststream.rabbit.helpers.declarer import RabbitDeclarer
+ from faststream.rabbit.response import MessageOptions, RabbitPublishCommand
from faststream.rabbit.types import AioPikaSendableMessage
- from faststream.types import SendableMessage
+
+
+class LockState(Protocol):
+ lock: "anyio.Lock"
+
+
+class LockUnset(LockState):
+ __slots__ = ()
+
+ @property
+ def lock(self) -> "anyio.Lock":
+ msg = "You should call `producer.connect()` method at first."
+ raise IncorrectState(msg)
+
+
+class RealLock(LockState):
+ __slots__ = ("lock",)
+
+ def __init__(self) -> None:
+ self.lock = anyio.Lock()
class AioPikaFastProducer(ProducerProto):
@@ -50,139 +66,59 @@ def __init__(
) -> None:
self.declarer = declarer
- self._rpc_lock = anyio.Lock()
+ self.__lock: LockState = LockUnset()
default_parser = AioPikaParser()
self._parser = resolve_custom_func(parser, default_parser.parse_message)
self._decoder = resolve_custom_func(decoder, default_parser.decode_message)
+ def connect(self) -> None:
+ """Lock initialization.
+
+ Should be called in async context due `anyio.Lock` object can't be created outside event loop.
+ """
+ self.__lock = RealLock()
+
+ def disconnect(self) -> None:
+ self.__lock = LockUnset()
+
@override
async def publish( # type: ignore[override]
self,
- message: "AioPikaSendableMessage",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: "TimeoutType" = None,
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- persist: bool = False,
- reply_to: Optional[str] = None,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
- ) -> Optional[Any]:
+ cmd: "RabbitPublishCommand",
+ ) -> Optional["aiormq.abc.ConfirmationFrameType"]:
"""Publish a message to a RabbitMQ queue."""
- context: AsyncContextManager[
- Optional[MemoryObjectReceiveStream[IncomingMessage]]
- ]
- if rpc:
- if reply_to is not None:
- raise WRONG_PUBLISH_ARGS
-
- context = _RPCCallback(
- self._rpc_lock,
- await self.declarer.declare_queue(RABBIT_REPLY),
- )
- else:
- context = fake_context()
-
- async with context as response_queue:
- r = await self._publish(
- message=message,
- exchange=exchange,
- routing_key=routing_key,
- mandatory=mandatory,
- immediate=immediate,
- timeout=timeout,
- persist=persist,
- reply_to=reply_to if response_queue is None else RABBIT_REPLY.name,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- priority=priority,
- correlation_id=correlation_id,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- message_type=message_type,
- user_id=user_id,
- app_id=app_id,
- )
-
- if response_queue is None:
- return r
-
- else:
- msg: Optional[IncomingMessage] = None
- with timeout_scope(rpc_timeout, raise_timeout):
- msg = await response_queue.receive()
-
- if msg: # pragma: no branch
- return await self._decoder(await self._parser(msg))
-
- return None
+ return await self._publish(
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
+ reply_to=cmd.reply_to,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ **cmd.publish_options,
+ **cmd.message_options,
+ )
@override
async def request( # type: ignore[override]
self,
- message: "AioPikaSendableMessage",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: Optional[float] = None,
- persist: bool = False,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
+ cmd: "RabbitPublishCommand",
) -> "IncomingMessage":
"""Publish a message to a RabbitMQ queue."""
async with _RPCCallback(
- self._rpc_lock,
+ self.__lock.lock,
await self.declarer.declare_queue(RABBIT_REPLY),
) as response_queue:
- with anyio.fail_after(timeout):
+ with anyio.fail_after(cmd.timeout):
await self._publish(
- message=message,
- exchange=exchange,
- routing_key=routing_key,
- mandatory=mandatory,
- immediate=immediate,
- timeout=timeout,
- persist=persist,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
reply_to=RABBIT_REPLY.name,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- priority=priority,
- correlation_id=correlation_id,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- message_type=message_type,
- user_id=user_id,
- app_id=app_id,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ **cmd.publish_options,
+ **cmd.message_options,
)
return await response_queue.receive()
@@ -190,45 +126,18 @@ async def _publish(
self,
message: "AioPikaSendableMessage",
*,
- correlation_id: str,
- exchange: Union["RabbitExchange", str, None],
+ exchange: "RabbitExchange",
routing_key: str,
- mandatory: bool,
- immediate: bool,
- timeout: "TimeoutType",
- persist: bool,
- reply_to: Optional[str],
- headers: Optional["HeadersType"],
- content_type: Optional[str],
- content_encoding: Optional[str],
- priority: Optional[int],
- expiration: Optional["DateType"],
- message_id: Optional[str],
- timestamp: Optional["DateType"],
- message_type: Optional[str],
- user_id: Optional[str],
- app_id: Optional[str],
- ) -> Union["aiormq.abc.ConfirmationFrameType", "SendableMessage"]:
+ mandatory: bool = True,
+ immediate: bool = False,
+ timeout: "TimeoutType" = None,
+ **message_options: Unpack["MessageOptions"],
+ ) -> Optional["aiormq.abc.ConfirmationFrameType"]:
"""Publish a message to a RabbitMQ exchange."""
- message = AioPikaParser.encode_message(
- message=message,
- persist=persist,
- reply_to=reply_to,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- priority=priority,
- correlation_id=correlation_id,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- message_type=message_type,
- user_id=user_id,
- app_id=app_id,
- )
+ message = AioPikaParser.encode_message(message=message, **message_options)
exchange_obj = await self.declarer.declare_exchange(
- exchange=RabbitExchange.validate(exchange),
+ exchange=exchange,
passive=True,
)
@@ -240,6 +149,14 @@ async def _publish(
timeout=timeout,
)
+ @override
+ async def publish_batch(
+ self,
+ cmd: "RabbitPublishCommand",
+ ) -> None:
+ msg = "RabbitMQ doesn't support publishing in batches."
+ raise FeatureNotSupportedException(msg)
+
class _RPCCallback:
"""A class provides an RPC lock."""
@@ -270,7 +187,7 @@ async def __aenter__(self) -> "MemoryObjectReceiveStream[IncomingMessage]":
async def __aexit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> None:
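Note: the lock-state indirection mirrors the declarer's connection state — the anyio.Lock is created only once an event loop is running. A sketch:

import anyio

from faststream.exceptions import IncorrectState
from faststream.rabbit.publisher.producer import LockUnset, RealLock


async def main() -> None:
    state = LockUnset()
    try:
        state.lock
    except IncorrectState as e:
        print(e)  # You should call the `producer.connect()` method first.

    state = RealLock()  # fine here: we are inside a running event loop
    async with state.lock:
        ...


anyio.run(main)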
diff --git a/faststream/rabbit/publisher/specified.py b/faststream/rabbit/publisher/specified.py
new file mode 100644
index 0000000000..e8da19b3fd
--- /dev/null
+++ b/faststream/rabbit/publisher/specified.py
@@ -0,0 +1,122 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+from faststream._internal.publisher.specified import (
+ SpecificationPublisher as SpecificationPublisherMixin,
+)
+from faststream.rabbit.schemas.proto import BaseRMQInformation as RMQSpecificationMixin
+from faststream.rabbit.utils import is_routing_exchange
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, PublisherSpec
+from faststream.specification.schema.bindings import (
+ ChannelBinding,
+ OperationBinding,
+ amqp,
+)
+
+from .usecase import LogicPublisher, PublishKwargs
+
+if TYPE_CHECKING:
+ from aio_pika import IncomingMessage
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
+
+
+class SpecificationPublisher(
+ SpecificationPublisherMixin,
+ RMQSpecificationMixin,
+ LogicPublisher,
+):
+ """AsyncAPI-compatible Rabbit Publisher class."""
+
+ def __init__(
+ self,
+ *,
+ routing_key: str,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ # PublishCommand options
+ message_kwargs: "PublishKwargs",
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # AsyncAPI args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ schema_=schema_,
+ # propagate to RMQSpecificationMixin
+ queue=queue,
+ exchange=exchange,
+ )
+
+ LogicPublisher.__init__(
+ self,
+ queue=queue,
+ exchange=exchange,
+ routing_key=routing_key,
+ message_kwargs=message_kwargs,
+ middlewares=middlewares,
+ broker_middlewares=broker_middlewares,
+ )
+
+ def get_default_name(self) -> str:
+ routing = (
+ self.routing_key
+ or (self.queue.routing if is_routing_exchange(self.exchange) else None)
+ or "_"
+ )
+
+ return f"{routing}:{getattr(self.exchange, 'name', None) or '_'}:Publisher"
+
+ def get_schema(self) -> dict[str, PublisherSpec]:
+ payloads = self.get_payloads()
+
+ exchange_binding = amqp.Exchange.from_exchange(self.exchange)
+ queue_binding = amqp.Queue.from_queue(self.queue)
+
+ return {
+ self.name: PublisherSpec(
+ description=self.description,
+ operation=Operation(
+ bindings=OperationBinding(
+ amqp=amqp.OperationBinding(
+ routing_key=self.routing or None,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ack=True,
+ persist=self.message_options.get("persist"),
+ priority=self.message_options.get("priority"),
+ reply_to=self.message_options.get("reply_to"),
+ mandatory=self.publish_options.get("mandatory"),
+ ),
+ ),
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(
+ payloads,
+ "Publisher",
+ served_words=2 if self.title_ is None else 1,
+ ),
+ ),
+ ),
+ bindings=ChannelBinding(
+ amqp=amqp.ChannelBinding(
+ virtual_host=self.virtual_host,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ),
+ ),
+ ),
+ }
diff --git a/faststream/rabbit/publisher/usecase.py b/faststream/rabbit/publisher/usecase.py
index 303b71503d..a898e968d0 100644
--- a/faststream/rabbit/publisher/usecase.py
+++ b/faststream/rabbit/publisher/usecase.py
@@ -1,118 +1,47 @@
-from contextlib import AsyncExitStack
+from collections.abc import Iterable, Sequence
from copy import deepcopy
-from functools import partial
-from itertools import chain
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Iterable,
- Optional,
- Sequence,
- Union,
-)
+from typing import TYPE_CHECKING, Annotated, Optional, Union
from aio_pika import IncomingMessage
-from typing_extensions import Annotated, Doc, TypedDict, Unpack, deprecated, override
+from typing_extensions import Doc, Unpack, override
-from faststream.broker.message import SourceType, gen_cor_id
-from faststream.broker.publisher.usecase import PublisherUsecase
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.rabbit.schemas import BaseRMQInformation, RabbitQueue
-from faststream.rabbit.subscriber.usecase import LogicSubscriber
-from faststream.utils.functions import return_input
+from faststream._internal.publisher.usecase import PublisherUsecase
+from faststream._internal.utils.data import filter_by_dict
+from faststream.message import gen_cor_id
+from faststream.rabbit.response import RabbitPublishCommand
+from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
+from faststream.response.publish_type import PublishType
+
+from .options import MessageOptions, PublishOptions
if TYPE_CHECKING:
- from aio_pika.abc import DateType, HeadersType, TimeoutType
+ import aiormq
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
+ from faststream._internal.state import BrokerState
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.rabbit.message import RabbitMessage
from faststream.rabbit.publisher.producer import AioPikaFastProducer
- from faststream.rabbit.schemas.exchange import RabbitExchange
from faststream.rabbit.types import AioPikaSendableMessage
- from faststream.types import AnyDict, AsyncFunc
+ from faststream.response.response import PublishCommand
# should be public to use in imports
-class RequestPublishKwargs(TypedDict, total=False):
+class RequestPublishKwargs(MessageOptions, PublishOptions, total=False):
"""Typed dict to annotate RabbitMQ requesters."""
- headers: Annotated[
- Optional["HeadersType"],
- Doc(
- "Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
- ),
- ]
- mandatory: Annotated[
- Optional[bool],
- Doc(
- "Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
- ),
- ]
- immediate: Annotated[
- Optional[bool],
- Doc(
- "Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
- ),
- ]
- timeout: Annotated[
- "TimeoutType",
- Doc("Send confirmation time from RabbitMQ."),
- ]
- persist: Annotated[
- Optional[bool],
- Doc("Restore the message on RabbitMQ reboot."),
- ]
- priority: Annotated[
- Optional[int],
- Doc("The message priority (0 by default)."),
- ]
- message_type: Annotated[
- Optional[str],
- Doc("Application-specific message type, e.g. **orders.created**."),
- ]
- content_type: Annotated[
- Optional[str],
- Doc(
- "Message **content-type** header. "
- "Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified."
- ),
- ]
- user_id: Annotated[
- Optional[str],
- Doc("Publisher connection User ID, validated if set."),
- ]
- expiration: Annotated[
- Optional["DateType"],
- Doc("Message expiration (lifetime) in seconds (or datetime or timedelta)."),
- ]
- content_encoding: Annotated[
- Optional[str],
- Doc("Message body content encoding, e.g. **gzip**."),
- ]
-
-
-class PublishKwargs(RequestPublishKwargs, total=False):
+class PublishKwargs(MessageOptions, PublishOptions, total=False):
"""Typed dict to annotate RabbitMQ publishers."""
reply_to: Annotated[
Optional[str],
Doc(
- "Reply message routing key to send with (always sending to default exchange)."
+ "Reply message routing key to send with (always sending to default exchange).",
),
]
-class LogicPublisher(
- PublisherUsecase[IncomingMessage],
- BaseRMQInformation,
-):
+class LogicPublisher(PublisherUsecase[IncomingMessage]):
"""A class to represent a RabbitMQ publisher."""
app_id: Optional[str]
@@ -125,62 +54,49 @@ def __init__(
routing_key: str,
queue: "RabbitQueue",
exchange: "RabbitExchange",
+ # PublishCommand options
message_kwargs: "PublishKwargs",
# Publisher args
broker_middlewares: Sequence["BrokerMiddleware[IncomingMessage]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
+ self.queue = queue
+ self.routing_key = routing_key
+
+ self.exchange = exchange
+
super().__init__(
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
- self.routing_key = routing_key
+ self.headers = message_kwargs.pop("headers", None) or {}
+ self.reply_to: str = message_kwargs.pop("reply_to", None) or ""
+ self.timeout = message_kwargs.pop("timeout", None)
- request_kwargs = dict(message_kwargs)
- self.reply_to = request_kwargs.pop("reply_to", None)
- self.message_kwargs = request_kwargs
+ message_options, _ = filter_by_dict(MessageOptions, dict(message_kwargs))
+ self.message_options = message_options
- # BaseRMQInformation
- self.queue = queue
- self.exchange = exchange
+ publish_options, _ = filter_by_dict(PublishOptions, dict(message_kwargs))
+ self.publish_options = publish_options
- # Setup it later
self.app_id = None
- self.virtual_host = ""
@override
- def setup( # type: ignore[override]
+ def _setup( # type: ignore[override]
self,
*,
- producer: Optional["AioPikaFastProducer"],
- app_id: Optional[str],
- virtual_host: str,
+ state: "BrokerState",
) -> None:
- self.app_id = app_id
- self.virtual_host = virtual_host
- super().setup(producer=producer)
+ # AppId was set in `faststream.rabbit.schemas.proto.BaseRMQInformation`
+ self.message_options["app_id"] = self.app_id
+ super()._setup(state=state)
@property
def routing(self) -> str:
"""Return real routing_key of Publisher."""
return self.routing_key or self.queue.routing
- def __hash__(self) -> int:
- return LogicSubscriber.get_routing_hash(self.queue, self.exchange) + hash(
- self.routing_key
- )
-
@override
async def publish(
self,
@@ -198,7 +114,7 @@ async def publish(
str,
Doc(
"Message routing key to publish with. "
- "Overrides `queue` option if presented."
+ "Overrides `queue` option if presented.",
),
] = "",
# message args
@@ -206,87 +122,55 @@ async def publish(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
- message_id: Annotated[
- Optional[str],
- Doc("Arbitrary message id. Generated automatically if not presented."),
- ] = None,
- timestamp: Annotated[
- Optional["DateType"],
- Doc("Message publish timestamp. Generated automatically if not presented."),
- ] = None,
- # rpc args
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
# publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
**publish_kwargs: "Unpack[PublishKwargs]",
- ) -> Optional[Any]:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "routing_key": routing_key
- or self.routing_key
- or RabbitQueue.validate(queue or self.queue).routing,
- "exchange": exchange or self.exchange.name,
- "app_id": self.app_id,
- "correlation_id": correlation_id or gen_cor_id(),
- "message_id": message_id,
- "timestamp": timestamp,
- # specific args
- "rpc": rpc,
- "rpc_timeout": rpc_timeout,
- "raise_timeout": raise_timeout,
- "reply_to": self.reply_to,
- **self.message_kwargs,
- **publish_kwargs,
- }
-
- call: AsyncFunc = self._producer.publish
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
+ ) -> Optional["aiormq.abc.ConfirmationFrameType"]:
+ if not routing_key:
+ if q := RabbitQueue.validate(queue):
+ routing_key = q.routing
+ else:
+ routing_key = self.routing
+
+ headers = self.headers | (publish_kwargs.pop("headers", None) or {})
+ cmd = RabbitPublishCommand(
+ message,
+ routing_key=routing_key,
+ exchange=RabbitExchange.validate(exchange or self.exchange),
+ correlation_id=correlation_id or gen_cor_id(),
+ headers=headers,
+ _publish_type=PublishType.PUBLISH,
+ **(self.publish_options | self.message_options | publish_kwargs),
+ )
- return await call(message, **kwargs)
+ frame: Optional[aiormq.abc.ConfirmationFrameType] = await self._basic_publish(
+ cmd,
+ _extra_middlewares=(),
+ )
+ return frame
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["RabbitPublishCommand", "PublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RabbitPublishCommand.from_cmd(cmd)
+
+ cmd.destination = self.routing
+ cmd.reply_to = cmd.reply_to or self.reply_to
+ cmd.add_headers(self.headers, override=False)
+
+ cmd.timeout = cmd.timeout or self.timeout
+
+ cmd.message_options = {**self.message_options, **cmd.message_options}
+ cmd.publish_options = {**self.publish_options, **cmd.publish_options}
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -305,7 +189,7 @@ async def request(
str,
Doc(
"Message routing key to publish with. "
- "Overrides `queue` option if presented."
+ "Overrides `queue` option if presented.",
),
] = "",
# message args
@@ -313,71 +197,31 @@ async def request(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
- message_id: Annotated[
- Optional[str],
- Doc("Arbitrary message id. Generated automatically if not presented."),
- ] = None,
- timestamp: Annotated[
- Optional["DateType"],
- Doc("Message publish timestamp. Generated automatically if not presented."),
- ] = None,
# publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
**publish_kwargs: "Unpack[RequestPublishKwargs]",
) -> "RabbitMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "routing_key": routing_key
- or self.routing_key
- or RabbitQueue.validate(queue or self.queue).routing,
- "exchange": exchange or self.exchange.name,
- "app_id": self.app_id,
- "correlation_id": correlation_id or gen_cor_id(),
- "message_id": message_id,
- "timestamp": timestamp,
- # specific args
- **self.message_kwargs,
- **publish_kwargs,
- }
-
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ if not routing_key:
+ if q := RabbitQueue.validate(queue):
+ routing_key = q.routing
+ else:
+ routing_key = self.routing
+
+ headers = self.headers | (publish_kwargs.pop("headers", None) or {})
+ cmd = RabbitPublishCommand(
message,
- **kwargs,
+ routing_key=routing_key,
+ exchange=RabbitExchange.validate(exchange or self.exchange),
+ correlation_id=correlation_id or gen_cor_id(),
+ headers=headers,
+ _publish_type=PublishType.REQUEST,
+ **(self.publish_options | self.message_options | publish_kwargs),
)
- async with AsyncExitStack() as stack:
- return_msg: Callable[[RabbitMessage], Awaitable[RabbitMessage]] = (
- return_input
- )
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
-
- raise AssertionError("unreachable")
+ msg: RabbitMessage = await self._basic_request(cmd)
+ return msg
def add_prefix(self, prefix: str) -> None:
"""Include Publisher in router."""
diff --git a/faststream/rabbit/response.py b/faststream/rabbit/response.py
index c145f295dd..7b261f47f9 100644
--- a/faststream/rabbit/response.py
+++ b/faststream/rabbit/response.py
@@ -1,14 +1,23 @@
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Union
-from typing_extensions import override
+from typing_extensions import Unpack, override
-from faststream.broker.response import Response
+from faststream.rabbit.schemas.exchange import RabbitExchange
+from faststream.response import PublishCommand, Response
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from aio_pika.abc import DateType, TimeoutType
+ from typing import TypedDict
+ from aio_pika.abc import TimeoutType
+
+ from faststream.rabbit.publisher.options import MessageOptions
from faststream.rabbit.types import AioPikaSendableMessage
- from faststream.types import AnyDict
+
+ class _PublishOptions(TypedDict):
+ timeout: TimeoutType
+ mandatory: bool
+ immediate: bool
class RabbitResponse(Response):
@@ -16,49 +25,91 @@ def __init__(
self,
body: "AioPikaSendableMessage",
*,
- headers: Optional["AnyDict"] = None,
- correlation_id: Optional[str] = None,
- message_id: Optional[str] = None,
+ timeout: "TimeoutType" = None,
mandatory: bool = True,
immediate: bool = False,
- timeout: "TimeoutType" = None,
- persist: Optional[bool] = None,
- priority: Optional[int] = None,
- message_type: Optional[str] = None,
- content_type: Optional[str] = None,
- expiration: Optional["DateType"] = None,
- content_encoding: Optional[str] = None,
+ **message_options: Unpack["MessageOptions"],
) -> None:
+ headers = message_options.pop("headers", {})
+ correlation_id = message_options.pop("correlation_id", None)
+
super().__init__(
body=body,
headers=headers,
correlation_id=correlation_id,
)
- self.message_id = message_id
- self.mandatory = mandatory
- self.immediate = immediate
- self.timeout = timeout
- self.persist = persist
- self.priority = priority
- self.message_type = message_type
- self.content_type = content_type
- self.expiration = expiration
- self.content_encoding = content_encoding
+ self.message_options = message_options
+ self.publish_options: _PublishOptions = {
+ "mandatory": mandatory,
+ "immediate": immediate,
+ "timeout": timeout,
+ }
@override
- def as_publish_kwargs(self) -> "AnyDict":
- publish_options = {
- **super().as_publish_kwargs(),
- "message_id": self.message_id,
- "mandatory": self.mandatory,
- "immediate": self.immediate,
- "timeout": self.timeout,
- "persist": self.persist,
- "priority": self.priority,
- "message_type": self.message_type,
- "content_type": self.content_type,
- "expiration": self.expiration,
- "content_encoding": self.content_encoding,
+ def as_publish_command(self) -> "RabbitPublishCommand":
+ return RabbitPublishCommand( # type: ignore[misc]
+ message=self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ # RMQ specific
+ routing_key="",
+ **self.publish_options,
+ **self.message_options,
+ )
+
+
+class RabbitPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ message: "AioPikaSendableMessage",
+ *,
+ _publish_type: PublishType,
+ routing_key: str = "",
+ exchange: Optional[RabbitExchange] = None,
+ # publish kwargs
+ mandatory: bool = True,
+ immediate: bool = False,
+ timeout: "TimeoutType" = None,
+ **message_options: Unpack["MessageOptions"],
+ ) -> None:
+ headers = message_options.pop("headers", {})
+ reply_to = message_options.pop("reply_to", None) or ""
+ correlation_id = message_options.pop("correlation_id", None)
+
+ super().__init__(
+ body=message,
+ destination=routing_key,
+ correlation_id=correlation_id,
+ headers=headers,
+ reply_to=reply_to,
+ _publish_type=_publish_type,
+ )
+ self.exchange = exchange or RabbitExchange()
+
+ self.timeout = timeout
+
+ self.message_options = message_options
+ self.publish_options = {
+ "mandatory": mandatory,
+ "immediate": immediate,
}
- return publish_options
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "RabbitPublishCommand"],
+ ) -> "RabbitPublishCommand":
+ if isinstance(cmd, RabbitPublishCommand):
+        # NOTE: this should probably return a copy.
+ return cmd
+
+ return cls(
+ message=cmd.body,
+ routing_key=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
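
A minimal sketch of the new command normalization, assuming `PublishCommand` accepts the keywords passed to `super().__init__()` above and that a bare `RabbitExchange()` carries an empty name:

```python
from faststream.rabbit.response import RabbitPublishCommand
from faststream.response import PublishCommand
from faststream.response.publish_type import PublishType

# A broker-agnostic command (constructor keywords inferred from the
# super().__init__() call above)...
generic = PublishCommand(
    body="hi",
    destination="logs",
    _publish_type=PublishType.PUBLISH,
)

# ...is normalized into the RabbitMQ-specific command, while commands that
# are already RabbitPublishCommand pass through unchanged (see the NOTE
# above about returning a copy).
cmd = RabbitPublishCommand.from_cmd(generic)
assert cmd.destination == "logs"
assert cmd.exchange.name == ""  # assumed: default RabbitExchange() has an empty name
```
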
diff --git a/faststream/rabbit/router.py b/faststream/rabbit/router.py
index 8a9fdd87e6..cb6baee71f 100644
--- a/faststream/rabbit/router.py
+++ b/faststream/rabbit/router.py
@@ -1,30 +1,28 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Iterable,
- Optional,
- Sequence,
- Union,
-)
+from collections.abc import Awaitable, Iterable, Sequence
+from typing import TYPE_CHECKING, Annotated, Any, Callable, Optional, Union
-from typing_extensions import Annotated, Doc, deprecated
+from typing_extensions import Doc, deprecated
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.rabbit.broker.registrator import RabbitRegistrator
if TYPE_CHECKING:
from aio_pika.abc import DateType, HeadersType, TimeoutType
from aio_pika.message import IncomingMessage
- from broker.types import PublisherMiddleware
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
+ PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.rabbit.message import RabbitMessage
@@ -32,9 +30,7 @@
RabbitExchange,
RabbitQueue,
)
- from faststream.rabbit.schemas.reply import ReplyConfig
from faststream.rabbit.types import AioPikaSendableMessage
- from faststream.types import AnyDict
class RabbitPublisher(ArgsContainer):
@@ -58,21 +54,21 @@ def __init__(
str,
Doc(
"Default message routing key to publish with. "
- "Overrides `queue` option if presented."
+ "Overrides `queue` option if presented.",
),
] = "",
mandatory: Annotated[
bool,
Doc(
"Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
+ "RabbitMQ returns message to client if there is no suitable queue.",
),
] = True,
immediate: Annotated[
bool,
Doc(
"Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
+ "RabbitMQ returns message to client if there is no suitable consumer.",
),
] = False,
timeout: Annotated[
@@ -86,7 +82,7 @@ def __init__(
reply_to: Annotated[
Optional[str],
Doc(
- "Reply message routing key to send with (always sending to default exchange)."
+ "Reply message routing key to send with (always sending to default exchange).",
),
] = None,
priority: Annotated[
@@ -96,6 +92,10 @@ def __init__(
# basic args
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI args
@@ -111,7 +111,7 @@ def __init__(
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
@@ -123,7 +123,7 @@ def __init__(
Optional["HeadersType"],
Doc(
"Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
content_type: Annotated[
@@ -131,7 +131,7 @@ def __init__(
Doc(
"Message **content-type** header. "
"Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified."
+ "Will be set automatically if not specified.",
),
] = None,
content_encoding: Annotated[
@@ -192,14 +192,14 @@ def __init__(
],
Doc(
"Message handler function "
- "to wrap the same with `@broker.subscriber(...)` way."
+ "to wrap the same with `@broker.subscriber(...)` way.",
),
],
queue: Annotated[
Union[str, "RabbitQueue"],
Doc(
"RabbitMQ queue to listen. "
- "**FastStream** declares and binds queue object to `exchange` automatically if it is not passive (by default)."
+ "**FastStream** declares and binds queue object to `exchange` automatically if it is not passive (by default).",
),
],
exchange: Annotated[
@@ -207,7 +207,7 @@ def __init__(
Doc(
"RabbitMQ exchange to bind queue to. "
"Uses default exchange if not presented. "
- "**FastStream** declares exchange object automatically if it is not passive (by default)."
+ "**FastStream** declares exchange object automatically if it is not passive (by default).",
),
] = None,
*,
@@ -219,19 +219,10 @@ def __init__(
Optional["AnyDict"],
Doc("Extra consumer arguments to use in `queue.consume(...)` method."),
] = None,
- reply_config: Annotated[
- Optional["ReplyConfig"],
- Doc("Extra options to use at replies publishing."),
- deprecated(
- "Deprecated in **FastStream 0.5.16**. "
- "Please, use `RabbitResponse` object as a handler return instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -243,31 +234,25 @@ def __init__(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[RabbitMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[RabbitMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- Union[bool, int],
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -279,7 +264,7 @@ def __init__(
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
@@ -293,13 +278,11 @@ def __init__(
queue=queue,
exchange=exchange,
consume_args=consume_args,
- reply_config=reply_config,
dependencies=dependencies,
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
title=title,
@@ -308,10 +291,7 @@ def __init__(
)
-class RabbitRouter(
- RabbitRegistrator,
- BrokerRouter["IncomingMessage"],
-):
+class RabbitRouter(RabbitRegistrator, BrokerRouter["IncomingMessage"]):
"""Includable to RabbitBroker router."""
def __init__(
@@ -326,15 +306,19 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers."
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
Sequence["BrokerMiddleware[IncomingMessage]"],
Doc("Router middlewares to apply to all routers' publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[IncomingMessage]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc("Parser to map original **IncomingMessage** Msg to FastStream one."),
@@ -354,6 +338,7 @@ def __init__(
prefix=prefix,
dependencies=dependencies,
middlewares=middlewares,
+ routers=routers,
parser=parser,
decoder=decoder,
include_in_schema=include_in_schema,
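
The migration implied by these router changes is mechanical: `no_ack=True` maps onto `ack_policy=AckPolicy.DO_NOTHING`, and nested routers go through the new `routers` argument. A sketch, assuming `RabbitRoute` keeps the `(call, queue)` positional signature shown above and `RabbitRouter` its usual `handlers` parameter:

```python
from faststream.middlewares import AckPolicy
from faststream.rabbit import RabbitRoute, RabbitRouter

async def on_event(body: str) -> None:
    print(body)

core = RabbitRouter(
    prefix="core.",
    handlers=(
        # 0.5.x: RabbitRoute(on_event, "events", no_ack=True)
        # 0.6.0: the same behavior is expressed via AckPolicy
        RabbitRoute(on_event, "events", ack_policy=AckPolicy.DO_NOTHING),
    ),
)

# Nested routers can now be passed at construction time
# instead of a later include_router() call.
root = RabbitRouter(routers=(core,))
```
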
diff --git a/faststream/rabbit/schemas/__init__.py b/faststream/rabbit/schemas/__init__.py
index 08e72e7700..b7b03b3887 100644
--- a/faststream/rabbit/schemas/__init__.py
+++ b/faststream/rabbit/schemas/__init__.py
@@ -2,7 +2,6 @@
from faststream.rabbit.schemas.exchange import RabbitExchange
from faststream.rabbit.schemas.proto import BaseRMQInformation
from faststream.rabbit.schemas.queue import QueueType, RabbitQueue
-from faststream.rabbit.schemas.reply import ReplyConfig
__all__ = (
"RABBIT_REPLY",
@@ -11,7 +10,6 @@
"QueueType",
"RabbitExchange",
"RabbitQueue",
- "ReplyConfig",
)
RABBIT_REPLY = RabbitQueue("amq.rabbitmq.reply-to", passive=True)
diff --git a/faststream/rabbit/schemas/exchange.py b/faststream/rabbit/schemas/exchange.py
index 6ff816bb10..af146f78b0 100644
--- a/faststream/rabbit/schemas/exchange.py
+++ b/faststream/rabbit/schemas/exchange.py
@@ -1,11 +1,11 @@
import warnings
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union
-from typing_extensions import Annotated, Doc, override
+from typing_extensions import Doc, override
-from faststream.broker.schemas import NameRequired
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.proto import NameRequired
from faststream.rabbit.schemas.constants import ExchangeType
-from faststream.types import AnyDict
if TYPE_CHECKING:
from aio_pika.abc import TimeoutType
@@ -28,7 +28,16 @@ class RabbitExchange(NameRequired):
"type",
)
+ def __repr__(self) -> str:
+ if self.passive:
+ body = ""
+ else:
+ body = f", robust={self.robust}, durable={self.durable}, auto_delete={self.auto_delete})"
+
+ return f"{self.__class__.__name__}({self.name}, type={self.type}, routing_key='{self.routing}'{body})"
+
def __hash__(self) -> int:
+ """Supports hash to store real objects in declarer."""
return sum(
(
hash(self.name),
@@ -36,7 +45,7 @@ def __hash__(self) -> int:
hash(self.routing_key),
int(self.durable),
int(self.auto_delete),
- )
+ ),
)
@property
@@ -58,7 +67,7 @@ def __init__(
"https://www.rabbitmq.com/tutorials/amqp-concepts#exchanges"
"\n"
"Or in the FastStream one: "
- "https://faststream.airt.ai/latest/rabbit/examples/"
+ "https://faststream.airt.ai/latest/rabbit/examples/",
),
] = ExchangeType.DIRECT,
durable: Annotated[
@@ -78,7 +87,7 @@ def __init__(
Doc(
"Exchange declarationg arguments. "
"You can find usage example in the official RabbitMQ documentation: "
- "https://www.rabbitmq.com/docs/ae"
+ "https://www.rabbitmq.com/docs/ae",
),
] = None,
timeout: Annotated[
@@ -94,7 +103,7 @@ def __init__(
Doc(
"Another `RabbitExchange` object to bind the current one to. "
"You can find more information in the official RabbitMQ blog post: "
- "https://www.rabbitmq.com/blog/2010/10/19/exchange-to-exchange-bindings"
+ "https://www.rabbitmq.com/blog/2010/10/19/exchange-to-exchange-bindings",
),
] = None,
bind_arguments: Annotated[
diff --git a/faststream/rabbit/schemas/proto.py b/faststream/rabbit/schemas/proto.py
index 226840925e..2929ce7c4f 100644
--- a/faststream/rabbit/schemas/proto.py
+++ b/faststream/rabbit/schemas/proto.py
@@ -1,13 +1,40 @@
-from typing import Optional, Protocol
+from typing import TYPE_CHECKING, Any, Optional
-from faststream.rabbit.schemas.exchange import RabbitExchange
-from faststream.rabbit.schemas.queue import RabbitQueue
+if TYPE_CHECKING:
+ from faststream.rabbit.schemas.exchange import RabbitExchange
+ from faststream.rabbit.schemas.queue import RabbitQueue
-class BaseRMQInformation(Protocol):
- """Base class to store AsyncAPI RMQ bindings."""
+class BaseRMQInformation:
+ """Base class to store Specification RMQ bindings."""
virtual_host: str
- queue: RabbitQueue
- exchange: Optional[RabbitExchange]
+ queue: "RabbitQueue"
+ exchange: "RabbitExchange"
app_id: Optional[str]
+
+ def __init__(
+ self,
+ *,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ ) -> None:
+ self.queue = queue
+ self.exchange = exchange
+
+        # Set up later via _setup()
+ self.app_id = None
+ self.virtual_host = ""
+
+ def _setup(
+ self,
+ *,
+ app_id: Optional[str],
+ virtual_host: str,
+ **kwargs: Any,
+ ) -> None:
+ self.app_id = app_id
+ self.virtual_host = virtual_host
+
+        # Continue setup along the MRO.
+        super()._setup(**kwargs)  # type: ignore[misc]
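
The `super()._setup(**kwargs)` forwarding lets each mixin consume its own setup keywords and pass the remainder along the MRO, which is how `BaseRMQInformation` can precede any subscriber or publisher base class. A self-contained sketch of the pattern, with illustrative names only:

```python
from typing import Any, Optional

class RMQInfo:
    def _setup(self, *, app_id: Optional[str], virtual_host: str, **kwargs: Any) -> None:
        self.app_id = app_id
        self.virtual_host = virtual_host
        # Forward whatever is left to the next class in the MRO.
        super()._setup(**kwargs)

class Usecase:
    def _setup(self, *, declarer: Any) -> None:
        # End of the chain: the base class consumes its own arguments.
        self.declarer = declarer

class Subscriber(RMQInfo, Usecase):
    pass

sub = Subscriber()
sub._setup(app_id="svc", virtual_host="/", declarer=object())
assert sub.virtual_host == "/" and sub.declarer is not None
```
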
diff --git a/faststream/rabbit/schemas/queue.py b/faststream/rabbit/schemas/queue.py
index 9ae3c322d3..0fff0a83ca 100644
--- a/faststream/rabbit/schemas/queue.py
+++ b/faststream/rabbit/schemas/queue.py
@@ -2,15 +2,15 @@
from enum import Enum
from typing import TYPE_CHECKING, Literal, Optional, TypedDict, Union, overload
-from faststream.broker.schemas import NameRequired
+from faststream._internal.constants import EMPTY
+from faststream._internal.proto import NameRequired
+from faststream._internal.utils.path import compile_path
from faststream.exceptions import SetupError
-from faststream.types import EMPTY
-from faststream.utils.path import compile_path
if TYPE_CHECKING:
from aio_pika.abc import TimeoutType
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
class QueueType(str, Enum):
@@ -46,14 +46,23 @@ class RabbitQueue(NameRequired):
"timeout",
)
+ def __repr__(self) -> str:
+ if self.passive:
+ body = ""
+ else:
+ body = f", robust={self.robust}, durable={self.durable}, exclusive={self.exclusive}, auto_delete={self.auto_delete})"
+
+ return f"{self.__class__.__name__}({self.name}, routing_key='{self.routing}'{body})"
+
def __hash__(self) -> int:
+ """Supports hash to store real objects in declarer."""
return sum(
(
hash(self.name),
int(self.durable),
int(self.exclusive),
int(self.auto_delete),
- )
+ ),
)
@property
@@ -154,7 +163,8 @@ def __init__(
if durable is EMPTY:
durable = True
elif not durable:
- raise SetupError("Quorum and Stream queues must be durable")
+ error_msg = "Quorum and Stream queues must be durable"
+ raise SetupError(error_msg)
elif durable is EMPTY:
durable = False
@@ -174,10 +184,10 @@ def __init__(
def add_prefix(self, prefix: str) -> "RabbitQueue":
new_q: RabbitQueue = deepcopy(self)
- new_q.name = "".join((prefix, new_q.name))
+ new_q.name = f"{prefix}{new_q.name}"
if new_q.routing_key:
- new_q.routing_key = "".join((prefix, new_q.routing_key))
+ new_q.routing_key = f"{prefix}{new_q.routing_key}"
return new_q
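
Two behaviors from this hunk in one sketch: quorum and stream queues are forced durable (an explicit `durable=False` raises `SetupError` with the extracted message), and `add_prefix` returns a prefixed deep copy. The `queue_type` keyword is assumed from the `QueueType` usage in this module:

```python
from faststream.exceptions import SetupError
from faststream.rabbit.schemas import QueueType, RabbitQueue

# Leaving `durable` unset resolves to True for quorum/stream queues;
# an explicit False now raises instead of declaring an invalid queue.
try:
    RabbitQueue("jobs", queue_type=QueueType.QUORUM, durable=False)
except SetupError as e:
    print(e)  # Quorum and Stream queues must be durable

# add_prefix() returns a deep copy with name and routing_key both prefixed.
q = RabbitQueue("logs", routing_key="app.#").add_prefix("dev.")
print(q.name, q.routing_key)  # dev.logs dev.app.#
```
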
diff --git a/faststream/rabbit/schemas/reply.py b/faststream/rabbit/schemas/reply.py
deleted file mode 100644
index 06acb377a9..0000000000
--- a/faststream/rabbit/schemas/reply.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from typing import Dict
-
-from typing_extensions import Annotated, Doc
-
-
-class ReplyConfig:
- """Class to store a config for subscribers' replies."""
-
- __slots__ = (
- "immediate",
- "mandatory",
- "persist",
- )
-
- def __init__(
- self,
- mandatory: Annotated[
- bool,
- Doc(
- "Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue."
- ),
- ] = True,
- immediate: Annotated[
- bool,
- Doc(
- "Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer."
- ),
- ] = False,
- persist: Annotated[
- bool,
- Doc("Restore the message on RabbitMQ reboot."),
- ] = False,
- ) -> None:
- self.mandatory = mandatory
- self.immediate = immediate
- self.persist = persist
-
- def to_dict(self) -> Dict[str, bool]:
- """Convert object to options dict."""
- return {
- "mandatory": self.mandatory,
- "immediate": self.immediate,
- "persist": self.persist,
- }
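
With `reply.py` removed, per-subscriber reply options move onto the returned response object. A migration sketch, assuming `persist` remains a valid message option:

```python
from faststream.rabbit import RabbitBroker
from faststream.rabbit.response import RabbitResponse

broker = RabbitBroker()

# 0.5.x:
#   @broker.subscriber("in", reply_config=ReplyConfig(persist=True))
#   async def handler(body: str) -> str: ...

# 0.6.0: reply publishing options travel with the response itself.
@broker.subscriber("in")
async def handler(body: str) -> RabbitResponse:
    return RabbitResponse("done", mandatory=True, persist=True)
```
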
diff --git a/faststream/rabbit/security.py b/faststream/rabbit/security.py
index eb887076d6..591cc665d7 100644
--- a/faststream/rabbit/security.py
+++ b/faststream/rabbit/security.py
@@ -6,19 +6,19 @@
)
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
def parse_security(security: Optional[BaseSecurity]) -> "AnyDict":
"""Convert security object to connection arguments."""
if security is None:
return {}
- elif isinstance(security, SASLPlaintext):
+ if isinstance(security, SASLPlaintext):
return _parse_sasl_plaintext(security)
- elif isinstance(security, BaseSecurity):
+ if isinstance(security, BaseSecurity):
return _parse_base_security(security)
- else:
- raise NotImplementedError(f"RabbitBroker does not support {type(security)}")
+ msg = f"RabbitBroker does not support {type(security)}"
+ raise NotImplementedError(msg)
def _parse_base_security(security: BaseSecurity) -> "AnyDict":
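
The dispatch order matters here: `SASLPlaintext` is a `BaseSecurity` subclass, so it must be checked first. A usage sketch; the exact keys of the returned dict come from `_parse_sasl_plaintext`, which this hunk does not show:

```python
from faststream.rabbit.security import parse_security
from faststream.security import SASLPlaintext

assert parse_security(None) == {}

# Dispatched to _parse_sasl_plaintext (login/password connection kwargs).
opts = parse_security(SASLPlaintext(username="guest", password="guest"))

try:
    parse_security(object())  # type: ignore[arg-type]
except NotImplementedError as e:
    print(e)  # RabbitBroker does not support <class 'object'>
```
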
diff --git a/faststream/rabbit/subscriber/asyncapi.py b/faststream/rabbit/subscriber/asyncapi.py
deleted file mode 100644
index 2ba7cabaa5..0000000000
--- a/faststream/rabbit/subscriber/asyncapi.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from typing import Dict
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
- OperationBinding,
-)
-from faststream.asyncapi.schema.bindings import amqp
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.rabbit.subscriber.usecase import LogicSubscriber
-from faststream.rabbit.utils import is_routing_exchange
-
-
-class AsyncAPISubscriber(LogicSubscriber):
- """AsyncAPI-compatible Rabbit Subscriber class."""
-
- def get_name(self) -> str:
- return f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}"
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- subscribe=Operation(
- bindings=OperationBinding(
- amqp=amqp.OperationBinding(
- cc=self.queue.routing,
- ),
- )
- if is_routing_exchange(self.exchange)
- else None,
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- amqp=amqp.ChannelBinding(
- **{
- "is": "routingKey",
- "queue": amqp.Queue(
- name=self.queue.name,
- durable=self.queue.durable,
- exclusive=self.queue.exclusive,
- autoDelete=self.queue.auto_delete,
- vhost=self.virtual_host,
- )
- if is_routing_exchange(self.exchange) and self.queue.name
- else None,
- "exchange": (
- amqp.Exchange(type="default", vhost=self.virtual_host)
- if not self.exchange.name
- else amqp.Exchange(
- type=self.exchange.type.value,
- name=self.exchange.name,
- durable=self.exchange.durable,
- autoDelete=self.exchange.auto_delete,
- vhost=self.virtual_host,
- )
- ),
- }
- )
- ),
- )
- }
diff --git a/faststream/rabbit/subscriber/factory.py b/faststream/rabbit/subscriber/factory.py
index abf1cd8351..e10e04dd95 100644
--- a/faststream/rabbit/subscriber/factory.py
+++ b/faststream/rabbit/subscriber/factory.py
@@ -1,15 +1,19 @@
import warnings
-from typing import TYPE_CHECKING, Iterable, Optional, Sequence, Union
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Optional
-from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber
+from faststream._internal.constants import EMPTY
+from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
+from faststream.rabbit.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
from aio_pika import IncomingMessage
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import BrokerMiddleware
- from faststream.rabbit.schemas import RabbitExchange, RabbitQueue, ReplyConfig
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
def create_subscriber(
@@ -17,40 +21,48 @@ def create_subscriber(
queue: "RabbitQueue",
exchange: "RabbitExchange",
consume_args: Optional["AnyDict"],
- reply_config: Optional["ReplyConfig"],
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[IncomingMessage]"],
+ ack_policy: "AckPolicy",
+ no_ack: bool,
# AsyncAPI args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
-) -> AsyncAPISubscriber:
- if reply_config: # pragma: no cover
- warnings.warn(
- (
- "\n`reply_config` was deprecated in **FastStream 0.5.16**."
- "\nPlease, use `RabbitResponse` object as a handler return instead."
- "\nArgument will be removed in **FastStream 0.6.0**."
- ),
- DeprecationWarning,
- stacklevel=2,
- )
+) -> SpecificationSubscriber:
+ _validate_input_for_misconfigure(ack_policy=ack_policy, no_ack=no_ack)
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.DO_NOTHING if no_ack else AckPolicy.REJECT_ON_ERROR
- return AsyncAPISubscriber(
+ return SpecificationSubscriber(
queue=queue,
exchange=exchange,
consume_args=consume_args,
- reply_config=reply_config,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
+
+
+def _validate_input_for_misconfigure(
+ *,
+ ack_policy: "AckPolicy",
+ no_ack: bool,
+) -> None:
+ if no_ack is not EMPTY:
+ warnings.warn(
+ "`no_ack` option was deprecated in prior to `ack_policy=AckPolicy.DO_NOTHING`. Scheduled to remove in 0.7.0",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+        if ack_policy is not EMPTY:
+            msg = "You can't use the deprecated `no_ack` and `ack_policy` simultaneously. Please use `ack_policy` only."
+            raise SetupError(msg)
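
Restated as a self-contained sketch: an explicit `ack_policy` always wins, and the deprecated `no_ack` flag is consulted only as a fallback. The `if no_ack` branch above relies on `EMPTY` being falsy, which the stand-in below assumes:

```python
from enum import Enum, auto

class AckPolicy(Enum):  # stand-in for faststream.middlewares.AckPolicy
    REJECT_ON_ERROR = auto()
    DO_NOTHING = auto()

class _Empty:
    def __bool__(self) -> bool:
        return False  # "not passed" must read as falsy in `if no_ack`

EMPTY = _Empty()

def resolve_ack_policy(ack_policy=EMPTY, no_ack=EMPTY):
    # Explicit ack_policy always wins; the deprecated flag is a fallback only.
    if ack_policy is EMPTY:
        return AckPolicy.DO_NOTHING if no_ack else AckPolicy.REJECT_ON_ERROR
    return ack_policy

assert resolve_ack_policy() is AckPolicy.REJECT_ON_ERROR
assert resolve_ack_policy(no_ack=True) is AckPolicy.DO_NOTHING
assert resolve_ack_policy(ack_policy=AckPolicy.DO_NOTHING) is AckPolicy.DO_NOTHING
```
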
diff --git a/faststream/rabbit/subscriber/specified.py b/faststream/rabbit/subscriber/specified.py
new file mode 100644
index 0000000000..b071ea2828
--- /dev/null
+++ b/faststream/rabbit/subscriber/specified.py
@@ -0,0 +1,108 @@
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.subscriber.specified import (
+ SpecificationSubscriber as SpecificationSubscriberMixin,
+)
+from faststream.rabbit.schemas.proto import BaseRMQInformation as RMQSpecificationMixin
+from faststream.rabbit.subscriber.usecase import LogicSubscriber
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, SubscriberSpec
+from faststream.specification.schema.bindings import (
+ ChannelBinding,
+ OperationBinding,
+ amqp,
+)
+
+if TYPE_CHECKING:
+ from aio_pika import IncomingMessage
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.middlewares import AckPolicy
+ from faststream.rabbit.schemas.exchange import RabbitExchange
+ from faststream.rabbit.schemas.queue import RabbitQueue
+
+
+class SpecificationSubscriber(
+ SpecificationSubscriberMixin,
+ RMQSpecificationMixin,
+ LogicSubscriber,
+):
+ """AsyncAPI-compatible Rabbit Subscriber class."""
+
+ def __init__(
+ self,
+ *,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ consume_args: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ # propagate to RMQSpecificationMixin
+ queue=queue,
+ exchange=exchange,
+ )
+
+ LogicSubscriber.__init__(
+ self,
+ queue=queue,
+ consume_args=consume_args,
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ )
+
+ def get_default_name(self) -> str:
+ return f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}"
+
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ payloads = self.get_payloads()
+
+ exchange_binding = amqp.Exchange.from_exchange(self.exchange)
+ queue_binding = amqp.Queue.from_queue(self.queue)
+
+ return {
+ self.name: SubscriberSpec(
+ description=self.description,
+ operation=Operation(
+ bindings=OperationBinding(
+ amqp=amqp.OperationBinding(
+ routing_key=self.queue.routing,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ack=True,
+ reply_to=None,
+ persist=None,
+ mandatory=None,
+ priority=None,
+ ),
+ ),
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads),
+ ),
+ ),
+ bindings=ChannelBinding(
+ amqp=amqp.ChannelBinding(
+ virtual_host=self.virtual_host,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ),
+ ),
+ ),
+ }
diff --git a/faststream/rabbit/subscriber/usecase.py b/faststream/rabbit/subscriber/usecase.py
index 0401879237..5f08df4571 100644
--- a/faststream/rabbit/subscriber/usecase.py
+++ b/faststream/rabbit/subscriber/usecase.py
@@ -1,46 +1,41 @@
+import asyncio
+import contextlib
+from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
Any,
- Callable,
- Dict,
- Iterable,
Optional,
- Sequence,
- Union,
)
import anyio
from typing_extensions import override
-from faststream.broker.publisher.fake import FakePublisher
-from faststream.broker.subscriber.usecase import SubscriberUsecase
-from faststream.broker.utils import process_msg
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream._internal.subscriber.utils import process_msg
from faststream.exceptions import SetupError
-from faststream.rabbit.helpers.declarer import RabbitDeclarer
from faststream.rabbit.parser import AioPikaParser
-from faststream.rabbit.schemas import BaseRMQInformation
+from faststream.rabbit.publisher.fake import RabbitFakePublisher
if TYPE_CHECKING:
from aio_pika import IncomingMessage, RobustQueue
- from fast_depends.dependencies import Depends
-
- from faststream.broker.message import StreamMessage
- from faststream.broker.types import BrokerMiddleware, CustomCallable
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
+ from faststream._internal.types import BrokerMiddleware, CustomCallable
+ from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
from faststream.rabbit.helpers.declarer import RabbitDeclarer
from faststream.rabbit.message import RabbitMessage
from faststream.rabbit.publisher.producer import AioPikaFastProducer
from faststream.rabbit.schemas import (
RabbitExchange,
RabbitQueue,
- ReplyConfig,
)
- from faststream.types import AnyDict, Decorator, LoggerProto
-class LogicSubscriber(
- SubscriberUsecase["IncomingMessage"],
- BaseRMQInformation,
-):
+class LogicSubscriber(SubscriberUsecase["IncomingMessage"]):
"""A class to handle logic for RabbitMQ message consumption."""
app_id: Optional[str]
@@ -54,94 +49,63 @@ def __init__(
self,
*,
queue: "RabbitQueue",
- exchange: "RabbitExchange",
consume_args: Optional["AnyDict"],
- reply_config: Optional["ReplyConfig"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Sequence["BrokerMiddleware[IncomingMessage]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
+ self.queue = queue
+
parser = AioPikaParser(pattern=queue.path_regex)
super().__init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.consume_args = consume_args or {}
- self.reply_config = reply_config.to_dict() if reply_config else {}
self._consumer_tag = None
self._queue_obj = None
- # BaseRMQInformation
- self.queue = queue
- self.exchange = exchange
# Setup it later
- self.app_id = None
- self.virtual_host = ""
self.declarer = None
@override
- def setup( # type: ignore[override]
+ def _setup( # type: ignore[override]
self,
*,
- app_id: Optional[str],
- virtual_host: str,
declarer: "RabbitDeclarer",
# basic args
- logger: Optional["LoggerProto"],
- producer: Optional["AioPikaFastProducer"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
+ state: "BrokerState",
) -> None:
- self.app_id = app_id
- self.virtual_host = virtual_host
self.declarer = declarer
- super().setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
+ super()._setup(
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
- apply_types=apply_types,
- is_validate=is_validate,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
+ state=state,
)
@override
async def start(self) -> None:
"""Starts the consumer for the RabbitMQ queue."""
if self.declarer is None:
- raise SetupError("You should setup subscriber at first.")
+ msg = "You should setup subscriber at first."
+ raise SetupError(msg)
self._queue_obj = queue = await self.declarer.declare_queue(self.queue)
@@ -195,7 +159,10 @@ async def get_one(
sleep_interval = timeout / 10
raw_message: Optional[IncomingMessage] = None
- with anyio.move_on_after(timeout):
+ with (
+ contextlib.suppress(asyncio.exceptions.CancelledError),
+ anyio.move_on_after(timeout),
+ ):
while ( # noqa: ASYNC110
raw_message := await self._queue_obj.get(
fail=False,
@@ -205,9 +172,13 @@ async def get_one(
) is None:
await anyio.sleep(sleep_interval)
+ context = self._state.get().di_state.context
+
msg: Optional[RabbitMessage] = await process_msg( # type: ignore[assignment]
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -216,38 +187,21 @@ async def get_one(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence["FakePublisher"]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- **self.reply_config,
- "routing_key": message.reply_to,
- "app_id": self.app_id,
- },
+ RabbitFakePublisher(
+ self._state.get().producer,
+ routing_key=message.reply_to,
+ app_id=self.app_id,
),
)
- def __hash__(self) -> int:
- return self.get_routing_hash(self.queue, self.exchange)
-
- @staticmethod
- def get_routing_hash(
- queue: "RabbitQueue",
- exchange: Optional["RabbitExchange"] = None,
- ) -> int:
- """Calculate the routing hash for a RabbitMQ queue and exchange."""
- return hash(queue) + hash(exchange or "")
-
@staticmethod
def build_log_context(
message: Optional["StreamMessage[Any]"],
queue: "RabbitQueue",
exchange: Optional["RabbitExchange"] = None,
- ) -> Dict[str, str]:
+ ) -> dict[str, str]:
return {
"queue": queue.name,
"exchange": getattr(exchange, "name", ""),
@@ -257,7 +211,7 @@ def build_log_context(
def get_log_context(
self,
message: Optional["StreamMessage[Any]"],
- ) -> Dict[str, str]:
+ ) -> dict[str, str]:
return self.build_log_context(
message=message,
queue=self.queue,
diff --git a/faststream/rabbit/testing.py b/faststream/rabbit/testing.py
index 6863008cc1..919cc2aec7 100644
--- a/faststream/rabbit/testing.py
+++ b/faststream/rabbit/testing.py
@@ -1,5 +1,6 @@
+from collections.abc import Generator, Iterator, Mapping
from contextlib import contextmanager
-from typing import TYPE_CHECKING, Any, Generator, Mapping, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
from unittest import mock
from unittest.mock import AsyncMock
@@ -10,24 +11,24 @@
from pamqp.header import ContentHeader
from typing_extensions import override
-from faststream.broker.message import gen_cor_id
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import WRONG_PUBLISH_ARGS, SubscriberNotFound
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream._internal.testing.broker import TestBroker
+from faststream.exceptions import SubscriberNotFound
+from faststream.message import gen_cor_id
from faststream.rabbit.broker.broker import RabbitBroker
from faststream.rabbit.parser import AioPikaParser
-from faststream.rabbit.publisher.asyncapi import AsyncAPIPublisher
from faststream.rabbit.publisher.producer import AioPikaFastProducer
from faststream.rabbit.schemas import (
ExchangeType,
RabbitExchange,
RabbitQueue,
)
-from faststream.testing.broker import TestBroker
-from faststream.utils.functions import timeout_scope
if TYPE_CHECKING:
- from aio_pika.abc import DateType, HeadersType, TimeoutType
+ from aio_pika.abc import DateType, HeadersType
+ from faststream.rabbit.publisher.specified import SpecificationPublisher
+ from faststream.rabbit.response import RabbitPublishCommand
from faststream.rabbit.subscriber.usecase import LogicSubscriber
from faststream.rabbit.types import AioPikaSendableMessage
@@ -39,30 +40,41 @@ class TestRabbitBroker(TestBroker[RabbitBroker]):
"""A class to test RabbitMQ brokers."""
@contextmanager
- def _patch_broker(self, broker: RabbitBroker) -> Generator[None, None, None]:
- with mock.patch.object(
- broker,
- "_channel",
- new_callable=AsyncMock,
- ), mock.patch.object(
- broker,
- "declarer",
- new_callable=AsyncMock,
- ), super()._patch_broker(broker):
+ def _patch_broker(self, broker: "RabbitBroker") -> Generator[None, None, None]:
+ with (
+ mock.patch.object(
+ broker,
+ "_channel",
+ new_callable=AsyncMock,
+ ),
+ mock.patch.object(
+ broker,
+ "declarer",
+ new_callable=AsyncMock,
+ ),
+ super()._patch_broker(broker),
+ ):
yield
+ @contextmanager
+ def _patch_producer(self, broker: RabbitBroker) -> Iterator[None]:
+ old_producer = broker._state.get().producer
+ broker._state.patch_value(producer=FakeProducer(broker))
+        try:
+            yield
+        finally:
+            # Restore the real producer even if the test body raises.
+            broker._state.patch_value(producer=old_producer)
+
@staticmethod
- async def _fake_connect(broker: RabbitBroker, *args: Any, **kwargs: Any) -> None:
- broker._producer = FakeProducer(broker)
+ async def _fake_connect(broker: "RabbitBroker", *args: Any, **kwargs: Any) -> None:
+ pass
@staticmethod
def create_publisher_fake_subscriber(
- broker: RabbitBroker,
- publisher: AsyncAPIPublisher,
- ) -> Tuple["LogicSubscriber", bool]:
+ broker: "RabbitBroker",
+ publisher: "SpecificationPublisher",
+ ) -> tuple["LogicSubscriber", bool]:
sub: Optional[LogicSubscriber] = None
- for handler in broker._subscribers.values():
- if _is_handler_suitable(
+ for handler in broker._subscribers:
+ if _is_handler_matches(
handler,
publisher.routing,
{},
@@ -93,15 +105,12 @@ class PatchedMessage(IncomingMessage):
async def ack(self, multiple: bool = False) -> None:
"""Asynchronously acknowledge a message."""
- pass
async def nack(self, multiple: bool = False, requeue: bool = True) -> None:
"""Nack the message."""
- pass
async def reject(self, requeue: bool = False) -> None:
"""Rejects a task."""
- pass
def build_message(
@@ -130,6 +139,7 @@ def build_message(
routing = routing_key or que.routing
+ correlation_id = correlation_id or gen_cor_id()
msg = AioPikaParser.encode_message(
message=message,
persist=persist,
@@ -140,7 +150,7 @@ def build_message(
priority=priority,
correlation_id=correlation_id,
expiration=expiration,
- message_id=message_id or gen_cor_id(),
+ message_id=message_id or correlation_id,
timestamp=timestamp,
message_type=message_type,
user_id=user_id,
@@ -166,7 +176,7 @@ def build_message(
message_type=message_type,
user_id=msg.user_id,
app_id=msg.app_id,
- )
+ ),
),
body=msg.body,
channel=AsyncMock(),
@@ -186,127 +196,66 @@ def __init__(self, broker: RabbitBroker) -> None:
default_parser = AioPikaParser()
self._parser = resolve_custom_func(broker._parser, default_parser.parse_message)
self._decoder = resolve_custom_func(
- broker._decoder, default_parser.decode_message
+ broker._decoder,
+ default_parser.decode_message,
)
@override
async def publish( # type: ignore[override]
self,
- message: "AioPikaSendableMessage",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: "TimeoutType" = None,
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- persist: bool = False,
- reply_to: Optional[str] = None,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
- ) -> Optional[Any]:
+ cmd: "RabbitPublishCommand",
+ ) -> None:
"""Publish a message to a RabbitMQ queue or exchange."""
- exch = RabbitExchange.validate(exchange)
-
- if rpc and reply_to:
- raise WRONG_PUBLISH_ARGS
-
incoming = build_message(
- message=message,
- exchange=exch,
- routing_key=routing_key,
- reply_to=reply_to,
- app_id=app_id,
- user_id=user_id,
- message_type=message_type,
- headers=headers,
- persist=persist,
- message_id=message_id,
- priority=priority,
- content_encoding=content_encoding,
- content_type=content_type,
- correlation_id=correlation_id,
- expiration=expiration,
- timestamp=timestamp,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ **cmd.message_options,
)
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_suitable(
- handler, incoming.routing_key, incoming.headers, exch
+ for handler in self.broker._subscribers: # pragma: no branch
+ if _is_handler_matches(
+ handler,
+ incoming.routing_key,
+ incoming.headers,
+ cmd.exchange,
):
- with timeout_scope(rpc_timeout, raise_timeout):
- response = await self._execute_handler(incoming, handler)
- if rpc:
- return await self._decoder(await self._parser(response))
-
- return None
+ await self._execute_handler(incoming, handler)
@override
async def request( # type: ignore[override]
self,
- message: "AioPikaSendableMessage" = "",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: Optional[float] = None,
- persist: bool = False,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
+ cmd: "RabbitPublishCommand",
) -> "PatchedMessage":
"""Publish a message to a RabbitMQ queue or exchange."""
- exch = RabbitExchange.validate(exchange)
-
incoming = build_message(
- message=message,
- exchange=exch,
- routing_key=routing_key,
- app_id=app_id,
- user_id=user_id,
- message_type=message_type,
- headers=headers,
- persist=persist,
- message_id=message_id,
- priority=priority,
- content_encoding=content_encoding,
- content_type=content_type,
- correlation_id=correlation_id,
- expiration=expiration,
- timestamp=timestamp,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ **cmd.message_options,
)
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if _is_handler_suitable(
- handler, incoming.routing_key, incoming.headers, exch
+ for handler in self.broker._subscribers: # pragma: no branch
+ if _is_handler_matches(
+ handler,
+ incoming.routing_key,
+ incoming.headers,
+ cmd.exchange,
):
- with anyio.fail_after(timeout):
+ with anyio.fail_after(cmd.timeout):
return await self._execute_handler(incoming, handler)
raise SubscriberNotFound
async def _execute_handler(
- self, msg: PatchedMessage, handler: "LogicSubscriber"
+ self,
+ msg: PatchedMessage,
+ handler: "LogicSubscriber",
) -> "PatchedMessage":
result = await handler.process_message(msg)
@@ -318,45 +267,47 @@ async def _execute_handler(
)
-def _is_handler_suitable(
+def _is_handler_matches(
handler: "LogicSubscriber",
routing_key: str,
- headers: "Mapping[Any, Any]",
- exchange: "RabbitExchange",
+ headers: Optional["Mapping[Any, Any]"] = None,
+ exchange: Optional["RabbitExchange"] = None,
) -> bool:
+ headers = headers or {}
+ exchange = RabbitExchange.validate(exchange)
+
if handler.exchange != exchange:
return False
if handler.exchange is None or handler.exchange.type == ExchangeType.DIRECT:
return handler.queue.name == routing_key
- elif handler.exchange.type == ExchangeType.FANOUT:
+ if handler.exchange.type == ExchangeType.FANOUT:
return True
- elif handler.exchange.type == ExchangeType.TOPIC:
+ if handler.exchange.type == ExchangeType.TOPIC:
return apply_pattern(handler.queue.routing, routing_key)
- elif handler.exchange.type == ExchangeType.HEADERS:
+ if handler.exchange.type == ExchangeType.HEADERS:
queue_headers = (handler.queue.bind_arguments or {}).copy()
if not queue_headers:
return True
- else:
- match_rule = queue_headers.pop("x-match", "all")
+ match_rule = queue_headers.pop("x-match", "all")
- full_match = True
- is_headers_empty = True
- for k, v in queue_headers.items():
- if headers.get(k) != v:
- full_match = False
- else:
- is_headers_empty = False
+ full_match = True
+ is_headers_empty = True
+ for k, v in queue_headers.items():
+ if headers.get(k) != v:
+ full_match = False
+ else:
+ is_headers_empty = False
- if is_headers_empty:
- return False
+ if is_headers_empty:
+ return False
- return full_match or (match_rule == "any")
+ return full_match or (match_rule == "any")
raise AssertionError
@@ -371,7 +322,7 @@ def apply_pattern(pattern: str, current: str) -> bool:
if (next_symb := next(current_queue, None)) is None:
return False
- elif pattern_symb == "#":
+ if pattern_symb == "#":
next_pattern = next(pattern_queue, None)
if next_pattern is None:
@@ -391,7 +342,7 @@ def apply_pattern(pattern: str, current: str) -> bool:
pattern_symb = next(pattern_queue, None)
- elif pattern_symb == "*" or pattern_symb == next_symb:
+ elif pattern_symb in {"*", next_symb}:
pattern_symb = next(pattern_queue, None)
else:
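
`_is_handler_matches` delegates topic-exchange routing to `apply_pattern`, where `*` consumes exactly one dot-separated word and a trailing `#` consumes the remaining words. A few cases that hold under the loop shown above:

```python
from faststream.rabbit.testing import apply_pattern

assert apply_pattern("logs.*", "logs.error")          # '*' = exactly one word
assert not apply_pattern("logs.*", "logs.error.db")   # one word, not two
assert not apply_pattern("logs.*", "metrics.error")   # literal words must match
assert apply_pattern("logs.#", "logs.error.db")       # trailing '#' takes the rest
```
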
diff --git a/faststream/rabbit/types.py b/faststream/rabbit/types.py
index fb5bca3e3c..127100d59a 100644
--- a/faststream/rabbit/types.py
+++ b/faststream/rabbit/types.py
@@ -3,6 +3,6 @@
import aio_pika
from typing_extensions import TypeAlias
-from faststream.types import SendableMessage
+from faststream._internal.basic_types import SendableMessage
AioPikaSendableMessage: TypeAlias = Union[aio_pika.Message, SendableMessage]
diff --git a/faststream/rabbit/utils.py b/faststream/rabbit/utils.py
index a5d48a0580..cbf87d0ce6 100644
--- a/faststream/rabbit/utils.py
+++ b/faststream/rabbit/utils.py
@@ -17,10 +17,9 @@ def build_virtual_host(
) -> str:
if (not url and not virtualhost) or virtualhost == "/":
return ""
- elif virtualhost:
+ if virtualhost:
return virtualhost.replace("/", "", 1)
- else:
- return path.replace("/", "", 1)
+ return path.replace("/", "", 1)
def build_url(
@@ -60,7 +59,7 @@ def build_url(
def is_routing_exchange(exchange: Optional["RabbitExchange"]) -> bool:
"""Check if an exchange requires routing_key to deliver message."""
- return not exchange or exchange.type in (
+ return not exchange or exchange.type in {
ExchangeType.DIRECT.value,
ExchangeType.TOPIC.value,
- )
+ }
diff --git a/faststream/redis/__init__.py b/faststream/redis/__init__.py
index c83abee4d3..6dc43db274 100644
--- a/faststream/redis/__init__.py
+++ b/faststream/redis/__init__.py
@@ -1,10 +1,10 @@
+from faststream._internal.testing.app import TestApp
from faststream.redis.annotations import Redis, RedisMessage
from faststream.redis.broker.broker import RedisBroker
from faststream.redis.response import RedisResponse
from faststream.redis.router import RedisPublisher, RedisRoute, RedisRouter
from faststream.redis.schemas import ListSub, PubSub, StreamSub
from faststream.redis.testing import TestRedisBroker
-from faststream.testing.app import TestApp
__all__ = (
"ListSub",
diff --git a/faststream/redis/annotations.py b/faststream/redis/annotations.py
index eb989438a7..4bbbb9b324 100644
--- a/faststream/redis/annotations.py
+++ b/faststream/redis/annotations.py
@@ -1,10 +1,12 @@
+from typing import Annotated
+
from redis.asyncio.client import Redis as RedisClient
-from typing_extensions import Annotated
-from faststream.annotations import ContextRepo, Logger, NoCast
+from faststream._internal.context import Context
+from faststream.annotations import ContextRepo, Logger
+from faststream.params import NoCast
from faststream.redis.broker.broker import RedisBroker as RB
from faststream.redis.message import UnifyRedisMessage
-from faststream.utils.context import Context
__all__ = (
"ContextRepo",
diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py
index cd6661c282..f3faffad89 100644
--- a/faststream/redis/broker/broker.py
+++ b/faststream/redis/broker/broker.py
@@ -1,14 +1,11 @@
import logging
-from functools import partial
+from collections.abc import Iterable, Mapping, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Iterable,
- Mapping,
Optional,
- Sequence,
- Type,
Union,
)
from urllib.parse import urlparse
@@ -24,39 +21,43 @@
parse_url,
)
from redis.exceptions import ConnectionError
-from typing_extensions import Annotated, Doc, TypeAlias, deprecated, override
+from typing_extensions import Doc, TypeAlias, overload, override
from faststream.__about__ import __version__
-from faststream.broker.message import gen_cor_id
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.redis.broker.logging import RedisLoggingBroker
-from faststream.redis.broker.registrator import RedisRegistrator
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.constants import EMPTY
+from faststream.message import gen_cor_id
+from faststream.redis.message import UnifyRedisDict
from faststream.redis.publisher.producer import RedisFastProducer
+from faststream.redis.response import RedisPublishCommand
from faststream.redis.security import parse_security
-from faststream.types import EMPTY
+from faststream.response.publish_type import PublishType
+
+from .logging import make_redis_logger_state
+from .registrator import RedisRegistrator
if TYPE_CHECKING:
from types import TracebackType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from redis.asyncio.connection import BaseParser
from typing_extensions import TypedDict, Unpack
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.redis.message import BaseMessage, RedisMessage
- from faststream.security import BaseSecurity
- from faststream.types import (
+ from faststream._internal.basic_types import (
AnyDict,
- AsyncFunc,
- DecodedMessage,
Decorator,
LoggerProto,
SendableMessage,
)
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.redis.message import BaseMessage, RedisMessage
+ from faststream.security import BaseSecurity
+ from faststream.specification.schema.extra import Tag, TagDict
class RedisInitKwargs(TypedDict, total=False):
host: Optional[str]
@@ -75,9 +76,9 @@ class RedisInitKwargs(TypedDict, total=False):
encoding: Optional[str]
encoding_errors: Optional[str]
decode_responses: Optional[bool]
- parser_class: Optional[Type["BaseParser"]]
- connection_class: Optional[Type["Connection"]]
- encoder_class: Optional[Type["Encoder"]]
+ parser_class: Optional[type["BaseParser"]]
+ connection_class: Optional[type["Connection"]]
+ encoder_class: Optional[type["Encoder"]]
Channel: TypeAlias = str
@@ -85,12 +86,12 @@ class RedisInitKwargs(TypedDict, total=False):
class RedisBroker(
RedisRegistrator,
- RedisLoggingBroker,
+ BrokerUsecase[UnifyRedisDict, "Redis[bytes]"],
):
"""Redis broker."""
url: str
- _producer: Optional[RedisFastProducer]
+ _producer: "RedisFastProducer"
def __init__(
self,
@@ -99,7 +100,7 @@ def __init__(
host: str = EMPTY,
port: Union[str, int] = EMPTY,
db: Union[str, int] = EMPTY,
- connection_class: Type["Connection"] = EMPTY,
+ connection_class: type["Connection"] = EMPTY,
client_name: Optional[str] = None,
health_check_interval: float = 0,
max_connections: Optional[int] = None,
@@ -113,13 +114,13 @@ def __init__(
encoding: str = "utf-8",
encoding_errors: str = "strict",
decode_responses: bool = False,
- parser_class: Type["BaseParser"] = DefaultParser,
- encoder_class: Type["Encoder"] = Encoder,
+ parser_class: type["BaseParser"] = DefaultParser,
+ encoder_class: type["Encoder"] = Encoder,
# broker args
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -131,21 +132,25 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
Sequence["BrokerMiddleware[BaseMessage]"],
Doc("Middlewares to apply to all broker publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[BaseMessage]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
# AsyncAPI args
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Optional[str],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -162,9 +167,9 @@ def __init__(
Doc("AsyncAPI server description."),
] = None,
tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
Optional["LoggerProto"],
@@ -183,10 +188,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -196,13 +198,11 @@ def __init__(
Doc("Any custom decorator to apply to wrapped functions."),
] = (),
) -> None:
- self._producer = None
-
- if asyncapi_url is None:
- asyncapi_url = url
+ if specification_url is None:
+ specification_url = url
if protocol is None:
- url_kwargs = urlparse(asyncapi_url)
+ url_kwargs = urlparse(specification_url)
protocol = url_kwargs.scheme
super().__init__(
@@ -233,24 +233,34 @@ def __init__(
decoder=decoder,
parser=parser,
middlewares=middlewares,
+ routers=routers,
# AsyncAPI
description=description,
- asyncapi_url=asyncapi_url,
+ specification_url=specification_url,
protocol=protocol,
protocol_version=protocol_version,
security=security,
tags=tags,
# logging
- logger=logger,
- log_level=log_level,
- log_fmt=log_fmt,
+ logger_state=make_redis_logger_state(
+ logger=logger,
+ log_level=log_level,
+ log_fmt=log_fmt,
+ ),
# FastDepends args
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
)
+ self._state.patch_value(
+ producer=RedisFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
+
@override
async def connect( # type: ignore[override]
self,
@@ -276,7 +286,7 @@ async def _connect( # type: ignore[override]
host: str,
port: Union[str, int],
db: Union[str, int],
- connection_class: Type["Connection"],
+ connection_class: type["Connection"],
client_name: Optional[str],
health_check_interval: float,
max_connections: Optional[int],
@@ -290,8 +300,8 @@ async def _connect( # type: ignore[override]
encoding: str,
encoding_errors: str,
decode_responses: bool,
- parser_class: Type["BaseParser"],
- encoder_class: Type["Encoder"],
+ parser_class: type["BaseParser"],
+ encoder_class: type["Encoder"],
) -> "Redis[bytes]":
url_options: AnyDict = {
**dict(parse_url(url)),
@@ -329,34 +339,28 @@ async def _connect( # type: ignore[override]
)
client: Redis[bytes] = Redis.from_pool(pool) # type: ignore[attr-defined]
- self._producer = RedisFastProducer(
- connection=client,
- parser=self._parser,
- decoder=self._decoder,
- )
+ self._producer.connect(client)
return client
- async def _close(
+ async def close(
self,
- exc_type: Optional[Type[BaseException]] = None,
+ exc_type: Optional[type[BaseException]] = None,
exc_val: Optional[BaseException] = None,
exc_tb: Optional["TracebackType"] = None,
) -> None:
+ await super().close(exc_type, exc_val, exc_tb)
+
+ self._producer.disconnect()
+
if self._connection is not None:
await self._connection.aclose() # type: ignore[attr-defined]
-
- await super()._close(exc_type, exc_val, exc_tb)
+ self._connection = None
async def start(self) -> None:
+ await self.connect()
+ self._setup()
await super().start()
- for handler in self._subscribers.values():
- self._log(
- f"`{handler.call_name}` waiting for messages",
- extra=handler.get_log_context(None),
- )
- await handler.start()
-
@property
def _subscriber_setup_extra(self) -> "AnyDict":
return {
@@ -364,89 +368,75 @@ def _subscriber_setup_extra(self) -> "AnyDict":
"connection": self._connection,
}
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage" = None,
+ channel: Optional[str] = None,
+ *,
+ reply_to: str = "",
+ headers: Optional["AnyDict"] = None,
+ correlation_id: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: None = None,
+ maxlen: Optional[int] = None,
+ ) -> int: ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage" = None,
+ channel: Optional[str] = None,
+ *,
+ reply_to: str = "",
+ headers: Optional["AnyDict"] = None,
+ correlation_id: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: str,
+ maxlen: Optional[int] = None,
+ ) -> bytes: ...
+
@override
- async def publish( # type: ignore[override]
+ async def publish(
self,
- message: Annotated[
- "SendableMessage",
- Doc("Message body to send."),
- ] = None,
- channel: Annotated[
- Optional[str],
- Doc("Redis PubSub object name to send message."),
- ] = None,
+ message: "SendableMessage" = None,
+ channel: Optional[str] = None,
*,
- reply_to: Annotated[
- str,
- Doc("Reply message destination PubSub object name."),
- ] = "",
- headers: Annotated[
- Optional["AnyDict"],
- Doc("Message headers to store metainformation."),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
- ),
- ] = None,
- list: Annotated[
- Optional[str],
- Doc("Redis List object name to send message."),
- ] = None,
- stream: Annotated[
- Optional[str],
- Doc("Redis Stream object name to send message."),
- ] = None,
- maxlen: Annotated[
- Optional[int],
- Doc(
- "Redis Stream maxlen publish option. "
- "Remove eldest message if maxlen exceeded."
- ),
- ] = None,
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- ) -> Optional["DecodedMessage"]:
+ reply_to: str = "",
+ headers: Optional["AnyDict"] = None,
+ correlation_id: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: Optional[str] = None,
+ maxlen: Optional[int] = None,
+ ) -> Union[int, bytes]:
"""Publish message directly.
- This method allows you to publish message in not AsyncAPI-documented way. You can use it in another frameworks
- applications or to publish messages from time to time.
-
- Please, use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead in a regular way.
+ This method allows you to publish a message in a non-AsyncAPI-documented way.
+ It can be used in other frameworks or to publish messages at specific intervals.
+
+ Args:
+ message:
+ Message body to send.
+ channel:
+ Redis PubSub object name to send message.
+ reply_to:
+ Reply message destination PubSub object name.
+ headers:
+ Message headers to store metainformation.
+ correlation_id:
+ Manual message correlation_id setter. correlation_id is a useful option to trace messages.
+ list:
+ Redis List object name to send message.
+ stream:
+ Redis Stream object name to send message.
+ maxlen:
+                Redis Stream maxlen publish option. Removes the oldest messages once maxlen is exceeded.
+
+ Returns:
+            int | bytes: The number of Pub/Sub subscribers that received the message,
+                or the created entry id (bytes) when publishing to a stream.
"""
- return await super().publish(
+ cmd = RedisPublishCommand(
message,
- producer=self._producer,
correlation_id=correlation_id or gen_cor_id(),
channel=channel,
list=list,
@@ -454,10 +444,9 @@ async def publish( # type: ignore[override]
maxlen=maxlen,
reply_to=reply_to,
headers=headers,
- rpc=rpc,
- rpc_timeout=rpc_timeout,
- raise_timeout=raise_timeout,
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
@override
async def request( # type: ignore[override]
@@ -472,9 +461,8 @@ async def request( # type: ignore[override]
headers: Optional["AnyDict"] = None,
timeout: Optional[float] = 30.0,
) -> "RedisMessage":
- msg: RedisMessage = await super().request(
+ cmd = RedisPublishCommand(
message,
- producer=self._producer,
correlation_id=correlation_id or gen_cor_id(),
channel=channel,
list=list,
@@ -482,12 +470,14 @@ async def request( # type: ignore[override]
maxlen=maxlen,
headers=headers,
timeout=timeout,
+ _publish_type=PublishType.REQUEST,
)
+ msg: RedisMessage = await super()._basic_request(cmd, producer=self._producer)
return msg
async def publish_batch(
self,
- *msgs: Annotated[
+ *messages: Annotated[
"SendableMessage",
Doc("Messages bodies to send."),
],
@@ -499,26 +489,30 @@ async def publish_batch(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
- ) -> None:
+ reply_to: Annotated[
+ str,
+ Doc("Reply message destination PubSub object name."),
+ ] = "",
+ headers: Annotated[
+ Optional["AnyDict"],
+ Doc("Message headers to store metainformation."),
+ ] = None,
+ ) -> int:
"""Publish multiple messages to Redis List by one request."""
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
-
- for m in self._middlewares[::-1]:
- call = partial(m(None).publish_scope, call)
-
- await call(
- *msgs,
+ cmd = RedisPublishCommand(
+ *messages,
list=list,
- correlation_id=correlation_id,
+ reply_to=reply_to,
+ headers=headers,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish_batch(cmd, producer=self._producer)
+
@override
async def ping(self, timeout: Optional[float]) -> bool:
sleep_time = (timeout or 10) / 10
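
Taken together, the overloads above make the return type destination-dependent: Pub/Sub publishes resolve to the subscriber count, stream publishes to the new entry id. A minimal usage sketch of the reworked API (illustrative, assuming a reachable Redis at redis://localhost:6379; not part of this diff):

import asyncio

from faststream.redis import RedisBroker


async def main() -> None:
    broker = RedisBroker("redis://localhost:6379")
    await broker.connect()

    # Pub/Sub destination -> int: how many subscribers received the message.
    receivers = await broker.publish({"user": 1}, channel="events")

    # Stream destination -> bytes: the entry id assigned by XADD.
    entry_id = await broker.publish({"user": 1}, stream="events-stream", maxlen=1000)

    print(receivers, entry_id)
    await broker.close()


asyncio.run(main())
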
diff --git a/faststream/redis/broker/logging.py b/faststream/redis/broker/logging.py
index b4d94af615..6fe8f718ae 100644
--- a/faststream/redis/broker/logging.py
+++ b/faststream/redis/broker/logging.py
@@ -1,59 +1,63 @@
import logging
-from typing import TYPE_CHECKING, Any, ClassVar, Optional
+from functools import partial
+from typing import TYPE_CHECKING, Optional
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.log.logging import get_broker_logger
-from faststream.redis.message import UnifyRedisDict
-from faststream.types import EMPTY
+from faststream._internal.log.logging import get_broker_logger
+from faststream._internal.state.logger import (
+ DefaultLoggerStorage,
+ make_logger_state,
+)
if TYPE_CHECKING:
- from redis.asyncio.client import Redis # noqa: F401
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
- from faststream.types import LoggerProto
-
-
-class RedisLoggingBroker(BrokerUsecase[UnifyRedisDict, "Redis[bytes]"]):
- """A class that extends the LoggingMixin class and adds additional functionality for logging Redis related information."""
-
- _max_channel_name: int
- __max_msg_id_ln: ClassVar[int] = 10
+class RedisParamsStorage(DefaultLoggerStorage):
def __init__(
self,
- *args: Any,
- logger: Optional["LoggerProto"] = EMPTY,
- log_level: int = logging.INFO,
- log_fmt: Optional[str] = None,
- **kwargs: Any,
+ log_fmt: Optional[str],
) -> None:
- super().__init__(
- *args,
- logger=logger,
- # TODO: generate unique logger names to not share between brokers
- default_logger=get_broker_logger(
- name="redis",
- default_context={
- "channel": "",
- },
- message_id_ln=self.__max_msg_id_ln,
+ super().__init__(log_fmt)
+
+ self._max_channel_name = 4
+
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ self._max_channel_name = max(
+ (
+ self._max_channel_name,
+ len(params.get("channel", "")),
),
- log_level=log_level,
- log_fmt=log_fmt,
- **kwargs,
)
- self._max_channel_name = 4
- def get_fmt(self) -> str:
- return (
- "%(asctime)s %(levelname)-8s - "
- f"%(channel)-{self._max_channel_name}s | "
- f"%(message_id)-{self.__max_msg_id_ln}s "
- "- %(message)s"
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
+ message_id_ln = 10
+
+ # TODO: generate unique logger names to not share between brokers
+ return get_broker_logger(
+ name="redis",
+ default_context={
+ "channel": "",
+ },
+ message_id_ln=message_id_ln,
+ fmt=self._log_fmt
+ or (
+ "%(asctime)s %(levelname)-8s - "
+ f"%(channel)-{self._max_channel_name}s | "
+ f"%(message_id)-{message_id_ln}s "
+ "- %(message)s"
+ ),
+ context=context,
+ log_level=self.logger_log_level,
)
- def _setup_log_context(
- self,
- *,
- channel: Optional[str] = None,
- ) -> None:
- self._max_channel_name = max((self._max_channel_name, len(channel or "")))
+
+make_redis_logger_state = partial(
+ make_logger_state,
+ default_storage_cls=RedisParamsStorage,
+)
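
Extracted from the class above, the dynamic-width format logic is easy to verify in isolation; this standalone sketch mirrors what setup_log_contest() and get_logger() do, without the broker machinery:

message_id_ln = 10
max_channel_name = 4  # widened every time a longer channel name is registered


def register_channel(name: str) -> None:
    global max_channel_name
    max_channel_name = max(max_channel_name, len(name))


def build_fmt() -> str:
    # Same default format get_logger() assembles above.
    return (
        "%(asctime)s %(levelname)-8s - "
        f"%(channel)-{max_channel_name}s | "
        f"%(message_id)-{message_id_ln}s "
        "- %(message)s"
    )


register_channel("notifications")
print(build_fmt())  # the %(channel)s column is now padded to 13 characters
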
diff --git a/faststream/redis/broker/registrator.py b/faststream/redis/broker/registrator.py
index ef4161ea40..962f582a4a 100644
--- a/faststream/redis/broker/registrator.py
+++ b/faststream/redis/broker/registrator.py
@@ -1,34 +1,41 @@
-from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Sequence, Union, cast
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union, cast
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
-from faststream.broker.core.abc import ABCBroker
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.redis.message import UnifyRedisDict
-from faststream.redis.publisher.asyncapi import AsyncAPIPublisher
-from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber
+from faststream.redis.publisher.factory import create_publisher
from faststream.redis.subscriber.factory import SubsciberType, create_subscriber
+from faststream.redis.subscriber.specified import (
+ SpecificationConcurrentSubscriber,
+ SpecificationSubscriber,
+)
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import (
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.redis.message import UnifyRedisMessage
- from faststream.redis.publisher.asyncapi import PublisherType
+ from faststream.redis.publisher.specified import (
+ PublisherType,
+ SpecificationPublisher,
+ )
from faststream.redis.schemas import ListSub, PubSub, StreamSub
- from faststream.types import AnyDict
class RedisRegistrator(ABCBroker[UnifyRedisDict]):
"""Includable to RedisBroker router."""
- _subscribers: Dict[int, "SubsciberType"]
- _publishers: Dict[int, "PublisherType"]
+ _subscribers: list["SubsciberType"]
+ _publishers: list["PublisherType"]
@override
def subscriber( # type: ignore[override]
@@ -48,13 +55,13 @@ def subscriber( # type: ignore[override]
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc(
- "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one."
+ "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one.",
),
] = None,
decoder: Annotated[
@@ -63,31 +70,25 @@ def subscriber( # type: ignore[override]
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[UnifyRedisMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[UnifyRedisMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -99,37 +100,43 @@ def subscriber( # type: ignore[override]
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> AsyncAPISubscriber:
- subscriber = cast(
- AsyncAPISubscriber,
- super().subscriber(
- create_subscriber(
- channel=channel,
- list=list,
- stream=stream,
- # subscriber args
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
- broker_dependencies=self._dependencies,
- # AsyncAPI
- title_=title,
- description_=description,
- include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
- ),
+ max_workers: Annotated[
+ int,
+ Doc("Number of workers to process messages concurrently."),
+ ] = 1,
+ ) -> Union[SpecificationSubscriber, SpecificationConcurrentSubscriber]:
+ subscriber = create_subscriber(
+ channel=channel,
+ list=list,
+ stream=stream,
+ # subscriber args
+ max_workers=max_workers,
+ no_ack=no_ack,
+ no_reply=no_reply,
+ ack_policy=ack_policy,
+ broker_middlewares=self.middlewares,
+ broker_dependencies=self._dependencies,
+ # AsyncAPI
+ title_=title,
+ description_=description,
+ include_in_schema=self._solve_include_in_schema(include_in_schema),
)
+ if max_workers > 1:
+ subscriber = cast("SpecificationConcurrentSubscriber", subscriber)
+ else:
+ subscriber = cast("SpecificationSubscriber", subscriber)
+
+ subscriber = super().subscriber(subscriber) # type: ignore[assignment]
+
return subscriber.add_call(
- filter_=filter,
parser_=parser or self._parser,
decoder_=decoder or self._decoder,
dependencies_=dependencies,
@@ -156,7 +163,7 @@ def publisher( # type: ignore[override]
Optional["AnyDict"],
Doc(
"Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -165,6 +172,10 @@ def publisher( # type: ignore[override]
] = "",
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -180,14 +191,14 @@ def publisher( # type: ignore[override]
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> AsyncAPIPublisher:
+ ) -> "SpecificationPublisher":
"""Creates long-living and AsyncAPI-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
@@ -196,22 +207,22 @@ def publisher( # type: ignore[override]
     Or you can create a publisher object to call it later - `broker.publisher(...).publish(...)`.
"""
return cast(
- AsyncAPIPublisher,
+ "SpecificationPublisher",
super().publisher(
- AsyncAPIPublisher.create(
+ create_publisher(
channel=channel,
list=list,
stream=stream,
headers=headers,
reply_to=reply_to,
# Specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# AsyncAPI
title_=title,
description_=description,
schema_=schema,
include_in_schema=self._solve_include_in_schema(include_in_schema),
- )
+ ),
),
)
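
On the caller side the registrator keeps its decorator API; a short sketch of the options introduced above (max_workers, plus ack_policy as the no_ack replacement), assuming a local Redis and that the options combine as shown:

from faststream.middlewares import AckPolicy
from faststream.redis import RedisBroker

broker = RedisBroker("redis://localhost:6379")


# Four handler instances consume the stream concurrently; auto-ack is disabled,
# mirroring the deprecated no_ack=True.
@broker.subscriber(stream="orders", max_workers=4, ack_policy=AckPolicy.DO_NOTHING)
@broker.publisher(list="processed")
async def handle(msg: str) -> str:
    return msg.upper()
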
diff --git a/faststream/redis/fastapi/__init__.py b/faststream/redis/fastapi/__init__.py
index db8b797dda..117c03aae2 100644
--- a/faststream/redis/fastapi/__init__.py
+++ b/faststream/redis/fastapi/__init__.py
@@ -1,11 +1,13 @@
+from typing import Annotated
+
from redis.asyncio.client import Redis as RedisClient
-from typing_extensions import Annotated
-from faststream.broker.fastapi.context import Context, ContextRepo, Logger
+from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.redis.broker.broker import RedisBroker as RB
-from faststream.redis.fastapi.fastapi import RedisRouter
from faststream.redis.message import BaseMessage as RM # noqa: N814
+from .fastapi import RedisRouter
+
__all__ = (
"Context",
"ContextRepo",
diff --git a/faststream/redis/fastapi/fastapi.py b/faststream/redis/fastapi/fastapi.py
index cef4084784..06d6a48fc3 100644
--- a/faststream/redis/fastapi/fastapi.py
+++ b/faststream/redis/fastapi/fastapi.py
@@ -1,15 +1,11 @@
import logging
+from collections.abc import Iterable, Mapping, Sequence
from typing import (
TYPE_CHECKING,
+ Annotated,
Any,
Callable,
- Dict,
- Iterable,
- List,
- Mapping,
Optional,
- Sequence,
- Type,
Union,
cast,
)
@@ -24,17 +20,19 @@
)
from starlette.responses import JSONResponse
from starlette.routing import BaseRoute
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, deprecated, override
from faststream.__about__ import SERVICE_NAME
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.utils import default_filter
+from faststream._internal.constants import EMPTY
+from faststream._internal.fastapi.router import StreamRouter
+from faststream.middlewares import AckPolicy
from faststream.redis.broker.broker import RedisBroker as RB
from faststream.redis.message import UnifyRedisDict
-from faststream.redis.publisher.asyncapi import AsyncAPIPublisher
from faststream.redis.schemas import ListSub, PubSub, StreamSub
-from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber
-from faststream.types import EMPTY
+from faststream.redis.subscriber.specified import (
+ SpecificationConcurrentSubscriber,
+ SpecificationSubscriber,
+)
if TYPE_CHECKING:
from enum import Enum
@@ -45,17 +43,17 @@
from starlette.responses import Response
from starlette.types import ASGIApp, Lifespan
- from faststream.asyncapi import schema as asyncapi
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.redis.message import UnifyRedisMessage
+ from faststream.redis.publisher.specified import SpecificationPublisher
from faststream.security import BaseSecurity
- from faststream.types import AnyDict, LoggerProto
+ from faststream.specification.schema.extra import Tag, TagDict
class RedisRouter(StreamRouter[UnifyRedisDict]):
@@ -71,7 +69,7 @@ def __init__(
host: str = EMPTY,
port: Union[str, int] = EMPTY,
db: Union[str, int] = EMPTY,
- connection_class: Type["Connection"] = EMPTY,
+ connection_class: type["Connection"] = EMPTY,
client_name: Optional[str] = SERVICE_NAME,
health_check_interval: float = 0,
max_connections: Optional[int] = None,
@@ -85,13 +83,13 @@ def __init__(
encoding: str = "utf-8",
encoding_errors: str = "strict",
decode_responses: bool = False,
- parser_class: Type["BaseParser"] = DefaultParser,
- encoder_class: Type["Encoder"] = Encoder,
+ parser_class: type["BaseParser"] = DefaultParser,
+ encoder_class: type["Encoder"] = Encoder,
# broker base args
graceful_timeout: Annotated[
Optional[float],
Doc(
- "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down."
+ "Graceful shutdown timeout. Broker waits for all running subscribers completion before shut down.",
),
] = 15.0,
decoder: Annotated[
@@ -110,10 +108,10 @@ def __init__(
security: Annotated[
Optional["BaseSecurity"],
Doc(
- "Security options to connect broker and generate AsyncAPI server security information."
+ "Security options to connect broker and generate AsyncAPI server security information.",
),
] = None,
- asyncapi_url: Annotated[
+ specification_url: Annotated[
Optional[str],
Doc("AsyncAPI hardcoded server addresses. Use `servers` if not specified."),
] = None,
@@ -129,13 +127,13 @@ def __init__(
Optional[str],
Doc("AsyncAPI server description."),
] = None,
- asyncapi_tags: Annotated[
- Optional[Iterable[Union["asyncapi.Tag", "asyncapi.TagDict"]]],
+ specification_tags: Annotated[
+ Iterable[Union["Tag", "TagDict"]],
Doc("AsyncAPI server tags."),
- ] = None,
+ ] = (),
# logging args
logger: Annotated[
- Union["LoggerProto", None, object],
+ Optional["LoggerProto"],
Doc("User specified logger to pass into Context and log service messages."),
] = EMPTY,
log_level: Annotated[
@@ -151,13 +149,13 @@ def __init__(
bool,
Doc(
"Whether to add broker to app scope in lifespan. "
- "You should disable this option at old ASGI servers."
+ "You should disable this option at old ASGI servers.",
),
] = True,
schema_url: Annotated[
Optional[str],
Doc(
- "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all."
+ "AsyncAPI schema url. You should set this option to `None` to disable AsyncAPI routes at all.",
),
] = "/asyncapi",
# FastAPI args
@@ -166,7 +164,7 @@ def __init__(
Doc("An optional path prefix for the router."),
] = "",
tags: Annotated[
- Optional[List[Union[str, "Enum"]]],
+ Optional[list[Union[str, "Enum"]]],
Doc(
"""
A list of tags to be applied to all the *path operations* in this
@@ -176,7 +174,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
dependencies: Annotated[
@@ -188,22 +186,22 @@ def __init__(
Read more about it in the
[FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
default_response_class: Annotated[
- Type["Response"],
+ type["Response"],
Doc(
"""
The default response class to be used.
Read more in the
[FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class).
- """
+ """,
),
] = Default(JSONResponse),
responses: Annotated[
- Optional[Dict[Union[int, str], "AnyDict"]],
+ Optional[dict[Union[int, str], "AnyDict"]],
Doc(
"""
Additional responses to be shown in OpenAPI.
@@ -215,11 +213,11 @@ def __init__(
And in the
[FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
- """
+ """,
),
] = None,
callbacks: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
OpenAPI callbacks that should apply to all *path operations* in this
@@ -229,11 +227,11 @@ def __init__(
Read more about it in the
[FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/).
- """
+ """,
),
] = None,
routes: Annotated[
- Optional[List[BaseRoute]],
+ Optional[list[BaseRoute]],
Doc(
"""
**Note**: you probably shouldn't use this parameter, it is inherited
@@ -242,7 +240,7 @@ def __init__(
---
A list of routes to serve incoming HTTP and WebSocket requests.
- """
+ """,
),
deprecated(
"""
@@ -251,7 +249,7 @@ def __init__(
In FastAPI, you normally would use the *path operation methods*,
like `router.get()`, `router.post()`, etc.
- """
+ """,
),
] = None,
redirect_slashes: Annotated[
@@ -260,7 +258,7 @@ def __init__(
"""
Whether to detect and redirect slashes in URLs when the client doesn't
use the same format.
- """
+ """,
),
] = True,
default: Annotated[
@@ -269,7 +267,7 @@ def __init__(
"""
Default function handler for this router. Used to handle
404 Not Found errors.
- """
+ """,
),
] = None,
dependency_overrides_provider: Annotated[
@@ -280,18 +278,18 @@ def __init__(
You shouldn't need to use it. It normally points to the `FastAPI` app
object.
- """
+ """,
),
] = None,
route_class: Annotated[
- Type["APIRoute"],
+ type["APIRoute"],
Doc(
"""
Custom route (*path operation*) class to be used by this router.
Read more about it in the
[FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router).
- """
+ """,
),
] = APIRoute,
on_startup: Annotated[
@@ -303,7 +301,7 @@ def __init__(
You should instead use the `lifespan` handlers.
Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
on_shutdown: Annotated[
@@ -316,7 +314,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
lifespan: Annotated[
@@ -328,7 +326,7 @@ def __init__(
Read more in the
[FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/).
- """
+ """,
),
] = None,
deprecated: Annotated[
@@ -341,7 +339,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/).
- """
+ """,
),
] = None,
include_in_schema: Annotated[
@@ -355,7 +353,7 @@ def __init__(
Read more about it in the
[FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi).
- """
+ """,
),
] = True,
generate_unique_id_function: Annotated[
@@ -370,7 +368,7 @@ def __init__(
Read more about it in the
[FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
- """
+ """,
),
] = Default(generate_unique_id),
) -> None:
@@ -410,8 +408,8 @@ def __init__(
protocol=protocol,
description=description,
protocol_version=protocol_version,
- asyncapi_tags=asyncapi_tags,
- asyncapi_url=asyncapi_url,
+ specification_tags=specification_tags,
+ specification_url=specification_url,
# FastAPI kwargs
prefix=prefix,
tags=tags,
@@ -456,7 +454,7 @@ def subscriber( # type: ignore[override]
parser: Annotated[
Optional["CustomCallable"],
Doc(
- "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one."
+ "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one.",
),
] = None,
decoder: Annotated[
@@ -465,31 +463,25 @@ def subscriber( # type: ignore[override]
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[UnifyRedisMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[UnifyRedisMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -501,7 +493,7 @@ def subscriber( # type: ignore[override]
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
@@ -540,7 +532,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/).
- """
+ """,
),
] = Default(None),
response_model_include: Annotated[
@@ -552,7 +544,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_exclude: Annotated[
@@ -564,7 +556,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = None,
response_model_by_alias: Annotated[
@@ -576,7 +568,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude).
- """
+ """,
),
] = True,
response_model_exclude_unset: Annotated[
@@ -594,7 +586,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_defaults: Annotated[
@@ -611,7 +603,7 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter).
- """
+ """,
),
] = False,
response_model_exclude_none: Annotated[
@@ -628,38 +620,43 @@ def subscriber( # type: ignore[override]
Read more about it in the
[FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none).
- """
+ """,
),
] = False,
- ) -> AsyncAPISubscriber:
- return cast(
- AsyncAPISubscriber,
- super().subscriber(
- channel=channel,
- list=list,
- stream=stream,
- dependencies=dependencies,
- parser=parser,
- decoder=decoder,
- middlewares=middlewares,
- filter=filter,
- retry=retry,
- no_ack=no_ack,
- no_reply=no_reply,
- title=title,
- description=description,
- include_in_schema=include_in_schema,
- # FastAPI args
- response_model=response_model,
- response_model_include=response_model_include,
- response_model_exclude=response_model_exclude,
- response_model_by_alias=response_model_by_alias,
- response_model_exclude_unset=response_model_exclude_unset,
- response_model_exclude_defaults=response_model_exclude_defaults,
- response_model_exclude_none=response_model_exclude_none,
- ),
+ max_workers: Annotated[
+ int,
+ Doc("Number of workers to process messages concurrently."),
+ ] = 1,
+ ) -> Union[SpecificationSubscriber, SpecificationConcurrentSubscriber]:
+ subscriber = super().subscriber(
+ channel=channel,
+ max_workers=max_workers,
+ list=list,
+ stream=stream,
+ dependencies=dependencies,
+ parser=parser,
+ decoder=decoder,
+ middlewares=middlewares,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
+ no_reply=no_reply,
+ title=title,
+ description=description,
+ include_in_schema=include_in_schema,
+ # FastAPI args
+ response_model=response_model,
+ response_model_include=response_model_include,
+ response_model_exclude=response_model_exclude,
+ response_model_by_alias=response_model_by_alias,
+ response_model_exclude_unset=response_model_exclude_unset,
+ response_model_exclude_defaults=response_model_exclude_defaults,
+ response_model_exclude_none=response_model_exclude_none,
)
+ if max_workers > 1:
+ return cast("SpecificationConcurrentSubscriber", subscriber)
+ return cast("SpecificationSubscriber", subscriber)
+
@override
def publisher(
self,
@@ -679,7 +676,7 @@ def publisher(
Optional["AnyDict"],
Doc(
"Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -688,6 +685,10 @@ def publisher(
] = "",
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -703,14 +704,14 @@ def publisher(
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> AsyncAPIPublisher:
+ ) -> "SpecificationPublisher":
return self.broker.publisher(
channel,
list=list,
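
The FastAPI plugin wiring itself is unchanged by the renames; a minimal sketch (the specification_url/specification_tags values flow straight into the underlying RedisBroker), assuming a local Redis:

from fastapi import FastAPI

from faststream.redis.fastapi import RedisRouter

router = RedisRouter("redis://localhost:6379")


@router.subscriber(channel="in", max_workers=2)
async def handle(msg: str) -> None:
    print(msg)


app = FastAPI(lifespan=router.lifespan_context)
app.include_router(router)
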
diff --git a/faststream/cli/supervisors/__init__.py b/faststream/redis/helpers/__init__.py
similarity index 100%
rename from faststream/cli/supervisors/__init__.py
rename to faststream/redis/helpers/__init__.py
diff --git a/faststream/redis/helpers/state.py b/faststream/redis/helpers/state.py
new file mode 100644
index 0000000000..1d3d2a6bad
--- /dev/null
+++ b/faststream/redis/helpers/state.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from redis.asyncio.client import Redis
+
+
+class ConnectionState(Protocol):
+ client: "Redis[bytes]"
+
+
+class EmptyConnectionState(ConnectionState):
+ __slots__ = ()
+
+ error_msg = "You should connect broker first."
+
+ @property
+ def client(self) -> "Redis[bytes]":
+ raise IncorrectState(self.error_msg)
+
+
+class ConnectedState(ConnectionState):
+ __slots__ = ("client",)
+
+ def __init__(self, client: "Redis[bytes]") -> None:
+ self.client = client
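
These state objects replace the previous `assert self._producer, NOT_CONNECTED_YET` pattern with a typed failure; a behavior sketch:

from faststream.exceptions import IncorrectState
from faststream.redis.helpers.state import ConnectedState, EmptyConnectionState

state = EmptyConnectionState()
try:
    _ = state.client  # touching the client before connect() now raises
except IncorrectState as e:
    print(e)  # -> "You should connect the broker first."

# RedisFastProducer.connect() swaps in the live client the same way:
# self._connection = ConnectedState(redis_client)
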
diff --git a/faststream/redis/message.py b/faststream/redis/message.py
index 86cc9b3d96..b4b0d443d4 100644
--- a/faststream/redis/message.py
+++ b/faststream/redis/message.py
@@ -1,7 +1,5 @@
from typing import (
TYPE_CHECKING,
- Dict,
- List,
Literal,
Optional,
TypeVar,
@@ -10,12 +8,12 @@
from typing_extensions import NotRequired, TypeAlias, TypedDict, override
-from faststream.broker.message import StreamMessage as BrokerStreamMessage
+from faststream.message import StreamMessage as BrokerStreamMessage
if TYPE_CHECKING:
from redis.asyncio import Redis
- from faststream.types import DecodedMessage
+ from faststream._internal.basic_types import DecodedMessage
BaseMessage: TypeAlias = Union[
@@ -39,9 +37,9 @@ class UnifyRedisDict(TypedDict):
channel: str
data: Union[
bytes,
- List[bytes],
- Dict[bytes, bytes],
- List[Dict[bytes, bytes]],
+ list[bytes],
+ dict[bytes, bytes],
+ list[dict[bytes, bytes]],
]
pattern: NotRequired[Optional[bytes]]
@@ -63,58 +61,56 @@ class RedisMessage(BrokerStreamMessage[PubSubMessage]):
pass
-class ListMessage(TypedDict):
+class _ListMessage(TypedDict):
"""A class to represent an Abstract List message."""
channel: str
-class DefaultListMessage(ListMessage):
+class DefaultListMessage(_ListMessage):
"""A class to represent a single List message."""
type: Literal["list"]
data: bytes
-class BatchListMessage(ListMessage):
+class BatchListMessage(_ListMessage):
"""A class to represent a List messages batch."""
type: Literal["blist"]
- data: List[bytes]
+ data: list[bytes]
class RedisListMessage(BrokerStreamMessage[DefaultListMessage]):
"""StreamMessage for single List message."""
- pass
-
class RedisBatchListMessage(BrokerStreamMessage[BatchListMessage]):
"""StreamMessage for single List message."""
- decoded_body: List["DecodedMessage"]
+ decoded_body: list["DecodedMessage"]
DATA_KEY = "__data__"
bDATA_KEY = DATA_KEY.encode() # noqa: N816
-class StreamMessage(TypedDict):
+class _StreamMessage(TypedDict):
channel: str
- message_ids: List[bytes]
+ message_ids: list[bytes]
-class DefaultStreamMessage(StreamMessage):
+class DefaultStreamMessage(_StreamMessage):
type: Literal["stream"]
- data: Dict[bytes, bytes]
+ data: dict[bytes, bytes]
-class BatchStreamMessage(StreamMessage):
+class BatchStreamMessage(_StreamMessage):
type: Literal["bstream"]
- data: List[Dict[bytes, bytes]]
+ data: list[dict[bytes, bytes]]
-_StreamMsgType = TypeVar("_StreamMsgType", bound=StreamMessage)
+_StreamMsgType = TypeVar("_StreamMsgType", bound=_StreamMessage)
class _RedisStreamMessageMixin(BrokerStreamMessage[_StreamMsgType]):
@@ -152,4 +148,4 @@ class RedisStreamMessage(_RedisStreamMessageMixin[DefaultStreamMessage]):
class RedisBatchStreamMessage(_RedisStreamMessageMixin[BatchStreamMessage]):
- decoded_body: List["DecodedMessage"]
+ decoded_body: list["DecodedMessage"]
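
After the builtin-generics swap, the TypedDicts describe plain payload shapes; for instance, a single stream message now type-checks like this (values are illustrative):

from faststream.redis.message import DefaultStreamMessage

raw: DefaultStreamMessage = {
    "type": "stream",
    "channel": "orders",
    "message_ids": [b"1700000000000-0"],
    "data": {b"__data__": b'{"data": "hi", "headers": {}}'},
}
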
diff --git a/faststream/redis/opentelemetry/middleware.py b/faststream/redis/opentelemetry/middleware.py
index 54c0024143..64f3b57b71 100644
--- a/faststream/redis/opentelemetry/middleware.py
+++ b/faststream/redis/opentelemetry/middleware.py
@@ -5,9 +5,10 @@
from faststream.opentelemetry.middleware import TelemetryMiddleware
from faststream.redis.opentelemetry.provider import RedisTelemetrySettingsProvider
+from faststream.redis.response import RedisPublishCommand
-class RedisTelemetryMiddleware(TelemetryMiddleware):
+class RedisTelemetryMiddleware(TelemetryMiddleware[RedisPublishCommand]):
def __init__(
self,
*,
diff --git a/faststream/redis/opentelemetry/provider.py b/faststream/redis/opentelemetry/provider.py
index a809db603d..334cc2f564 100644
--- a/faststream/redis/opentelemetry/provider.py
+++ b/faststream/redis/opentelemetry/provider.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Sized, cast
+from typing import TYPE_CHECKING, cast
from opentelemetry.semconv.trace import SpanAttributes
@@ -6,8 +6,9 @@
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
if TYPE_CHECKING:
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message import StreamMessage
+ from faststream.response import PublishCommand
class RedisTelemetrySettingsProvider(TelemetrySettingsProvider["AnyDict"]):
@@ -28,9 +29,9 @@ def get_consume_attrs_from_message(
MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message["channel"],
}
- if cast(str, msg.raw_message.get("type", "")).startswith("b"):
+ if cast("str", msg.raw_message.get("type", "")).startswith("b"):
attrs[SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT] = len(
- cast(Sized, msg._decoded_body)
+ msg.raw_message["data"]
)
return attrs
@@ -41,21 +42,21 @@ def get_consume_destination_name(
) -> str:
return self._get_destination(msg.raw_message)
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> "AnyDict":
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: self._get_destination(kwargs),
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str:
- return self._get_destination(kwargs)
+ return cmd.destination
@staticmethod
def _get_destination(kwargs: "AnyDict") -> str:
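
Because the provider now reads prepared command fields rather than raw kwargs, it can be exercised with any object carrying destination and correlation_id; a sketch with a hypothetical stand-in (assuming the provider keeps a no-argument constructor):

from faststream.redis.opentelemetry.provider import RedisTelemetrySettingsProvider


class FakeCmd:  # hypothetical stand-in for RedisPublishCommand
    destination = "events"
    correlation_id = "corr-1"


provider = RedisTelemetrySettingsProvider()
print(provider.get_publish_destination_name(FakeCmd()))  # events
print(provider.get_publish_attrs_from_cmd(FakeCmd()))
# expected: the messaging.system, messaging.destination.name, and
# messaging.message.conversation_id attributes shown above
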
diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py
index d42297af77..382d7be15b 100644
--- a/faststream/redis/parser.py
+++ b/faststream/redis/parser.py
@@ -1,23 +1,20 @@
+from collections.abc import Mapping, Sequence
from typing import (
TYPE_CHECKING,
Any,
- List,
- Mapping,
Optional,
- Sequence,
- Tuple,
- Type,
TypeVar,
Union,
)
-from faststream._compat import dump_json, json_loads
-from faststream.broker.message import (
+from faststream._internal._compat import dump_json, json_loads
+from faststream._internal.basic_types import AnyDict, DecodedMessage, SendableMessage
+from faststream._internal.constants import ContentTypes
+from faststream.message import (
decode_message,
encode_message,
gen_cor_id,
)
-from faststream.constants import ContentTypes
from faststream.redis.message import (
RedisBatchListMessage,
RedisBatchStreamMessage,
@@ -26,12 +23,11 @@
RedisStreamMessage,
bDATA_KEY,
)
-from faststream.types import AnyDict, DecodedMessage, SendableMessage
if TYPE_CHECKING:
from re import Pattern
- from faststream.broker.message import StreamMessage
+ from faststream.message import StreamMessage
MsgType = TypeVar("MsgType", bound=Mapping[str, Any])
@@ -102,29 +98,29 @@ def encode(
{
"data": msg.data,
"headers": msg.headers,
- }
+ },
)
@staticmethod
- def parse(data: bytes) -> Tuple[bytes, "AnyDict"]:
+ def parse(data: bytes) -> tuple[bytes, "AnyDict"]:
headers: AnyDict
try:
# FastStream message format
parsed_data = json_loads(data)
- data = parsed_data["data"].encode()
+ final_data = parsed_data["data"].encode()
headers = parsed_data["headers"]
except Exception:
# Raw Redis message format
- data = data
+ final_data = data
headers = {}
- return data, headers
+ return final_data, headers
class SimpleParser:
- msg_class: Type["StreamMessage[Any]"]
+ msg_class: type["StreamMessage[Any]"]
def __init__(
self,
@@ -155,7 +151,7 @@ async def parse_message(
def _parse_data(
self,
message: Mapping[str, Any],
- ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]:
+ ) -> tuple[bytes, "AnyDict", list["AnyDict"]]:
return (*RawMessage.parse(message["data"]), [])
def get_path(self, message: Mapping[str, Any]) -> "AnyDict":
@@ -166,8 +162,7 @@ def get_path(self, message: Mapping[str, Any]) -> "AnyDict":
):
return match.groupdict()
- else:
- return {}
+ return {}
async def decode_message(
self,
@@ -190,9 +185,9 @@ class RedisBatchListParser(SimpleParser):
def _parse_data(
self,
message: Mapping[str, Any],
- ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]:
- body: List[Any] = []
- batch_headers: List[AnyDict] = []
+ ) -> tuple[bytes, "AnyDict", list["AnyDict"]]:
+ body: list[Any] = []
+ batch_headers: list[AnyDict] = []
for x in message["data"]:
msg_data, msg_headers = _decode_batch_body_item(x)
@@ -205,7 +200,7 @@ def _parse_data(
dump_json(body),
{
**first_msg_headers,
- "content-type": ContentTypes.json.value,
+ "content-type": ContentTypes.JSON.value,
},
batch_headers,
)
@@ -216,8 +211,9 @@ class RedisStreamParser(SimpleParser):
@classmethod
def _parse_data(
- cls, message: Mapping[str, Any]
- ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]:
+ cls,
+ message: Mapping[str, Any],
+ ) -> tuple[bytes, "AnyDict", list["AnyDict"]]:
data = message["data"]
return (*RawMessage.parse(data.get(bDATA_KEY) or dump_json(data)), [])
@@ -228,9 +224,9 @@ class RedisBatchStreamParser(SimpleParser):
def _parse_data(
self,
message: Mapping[str, Any],
- ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]:
- body: List[Any] = []
- batch_headers: List[AnyDict] = []
+ ) -> tuple[bytes, "AnyDict", list["AnyDict"]]:
+ body: list[Any] = []
+ batch_headers: list[AnyDict] = []
for x in message["data"]:
msg_data, msg_headers = _decode_batch_body_item(x.get(bDATA_KEY, x))
@@ -243,13 +239,13 @@ def _parse_data(
dump_json(body),
{
**first_msg_headers,
- "content-type": ContentTypes.json.value,
+ "content-type": ContentTypes.JSON.value,
},
batch_headers,
)
-def _decode_batch_body_item(msg_content: bytes) -> Tuple[Any, "AnyDict"]:
+def _decode_batch_body_item(msg_content: bytes) -> tuple[Any, "AnyDict"]:
msg_body, headers = RawMessage.parse(msg_content)
try:
return json_loads(msg_body), headers
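
The final_data rename removes the confusing `data = data` self-assignment; both parse branches behave like this round-trip sketch:

from faststream.redis.parser import RawMessage

# FastStream envelope: payload and headers are recovered from the JSON wrapper.
body, headers = RawMessage.parse(b'{"data": "hi", "headers": {"correlation_id": "1"}}')
assert body == b"hi"
assert headers == {"correlation_id": "1"}

# Anything else (e.g. a value LPUSHed via redis-cli) falls back to raw bytes.
body, headers = RawMessage.parse(b"plain payload")
assert body == b"plain payload"
assert headers == {}
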
diff --git a/faststream/redis/prometheus/middleware.py b/faststream/redis/prometheus/middleware.py
index 1b157cb5a9..0ec09d9246 100644
--- a/faststream/redis/prometheus/middleware.py
+++ b/faststream/redis/prometheus/middleware.py
@@ -1,14 +1,17 @@
-from typing import TYPE_CHECKING, Optional, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
-from faststream.prometheus.middleware import BasePrometheusMiddleware
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.constants import EMPTY
+from faststream.prometheus.middleware import PrometheusMiddleware
from faststream.redis.prometheus.provider import settings_provider_factory
-from faststream.types import EMPTY
+from faststream.redis.response import RedisPublishCommand
if TYPE_CHECKING:
from prometheus_client import CollectorRegistry
-class RedisPrometheusMiddleware(BasePrometheusMiddleware):
+class RedisPrometheusMiddleware(PrometheusMiddleware[RedisPublishCommand, AnyDict]):
def __init__(
self,
*,
diff --git a/faststream/redis/prometheus/provider.py b/faststream/redis/prometheus/provider.py
index 51eb831669..952dc8f97d 100644
--- a/faststream/redis/prometheus/provider.py
+++ b/faststream/redis/prometheus/provider.py
@@ -1,30 +1,29 @@
-from typing import TYPE_CHECKING, Optional, Sized, Union, cast
+from typing import TYPE_CHECKING, Optional, Union
from faststream.prometheus import (
ConsumeAttrs,
MetricsSettingsProvider,
)
+from faststream.redis.response import RedisPublishCommand
if TYPE_CHECKING:
- from faststream.broker.message import StreamMessage
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message.message import StreamMessage
-class BaseRedisMetricsSettingsProvider(MetricsSettingsProvider["AnyDict"]):
+class BaseRedisMetricsSettingsProvider(
+ MetricsSettingsProvider["AnyDict", RedisPublishCommand]
+):
__slots__ = ("messaging_system",)
def __init__(self) -> None:
self.messaging_system = "redis"
- def get_publish_destination_name_from_kwargs(
+ def get_publish_destination_name_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: RedisPublishCommand,
) -> str:
- return self._get_destination(kwargs)
-
- @staticmethod
- def _get_destination(kwargs: "AnyDict") -> str:
- return kwargs.get("channel") or kwargs.get("list") or kwargs.get("stream") or ""
+ return cmd.destination
class RedisMetricsSettingsProvider(BaseRedisMetricsSettingsProvider):
@@ -33,7 +32,7 @@ def get_consume_attrs_from_message(
msg: "StreamMessage[AnyDict]",
) -> ConsumeAttrs:
return {
- "destination_name": self._get_destination(msg.raw_message),
+ "destination_name": _get_destination(msg.raw_message),
"message_size": len(msg.body),
"messages_count": 1,
}
@@ -45,9 +44,9 @@ def get_consume_attrs_from_message(
msg: "StreamMessage[AnyDict]",
) -> ConsumeAttrs:
return {
- "destination_name": self._get_destination(msg.raw_message),
+ "destination_name": _get_destination(msg.raw_message),
"message_size": len(msg.body),
- "messages_count": len(cast(Sized, msg._decoded_body)),
+ "messages_count": len(msg.raw_message["data"]),
}
@@ -59,5 +58,8 @@ def settings_provider_factory(
]:
if msg is not None and msg.get("type", "").startswith("b"):
return BatchRedisMetricsSettingsProvider()
- else:
- return RedisMetricsSettingsProvider()
+ return RedisMetricsSettingsProvider()
+
+
+def _get_destination(kwargs: "AnyDict") -> str:
+ return kwargs.get("channel") or kwargs.get("list") or kwargs.get("stream") or ""
diff --git a/faststream/redis/publisher/asyncapi.py b/faststream/redis/publisher/asyncapi.py
deleted file mode 100644
index fe1d4d7a90..0000000000
--- a/faststream/redis/publisher/asyncapi.py
+++ /dev/null
@@ -1,188 +0,0 @@
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Union
-
-from typing_extensions import TypeAlias, override
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import redis
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.exceptions import SetupError
-from faststream.redis.publisher.usecase import (
- ChannelPublisher,
- ListBatchPublisher,
- ListPublisher,
- LogicPublisher,
- StreamPublisher,
-)
-from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub
-from faststream.redis.schemas.proto import RedisAsyncAPIProtocol, validate_options
-
-if TYPE_CHECKING:
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
- from faststream.redis.message import UnifyRedisDict
- from faststream.types import AnyDict
-
-PublisherType: TypeAlias = Union[
- "AsyncAPIChannelPublisher",
- "AsyncAPIStreamPublisher",
- "AsyncAPIListPublisher",
- "AsyncAPIListBatchPublisher",
-]
-
-
-class AsyncAPIPublisher(LogicPublisher, RedisAsyncAPIProtocol):
- """A class to represent a Redis publisher."""
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- publish=Operation(
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads, "Publisher"),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- redis=self.channel_binding,
- ),
- )
- }
-
- @override
- @staticmethod
- def create( # type: ignore[override]
- *,
- channel: Union["PubSub", str, None],
- list: Union["ListSub", str, None],
- stream: Union["StreamSub", str, None],
- headers: Optional["AnyDict"],
- reply_to: str,
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- schema_: Optional[Any],
- include_in_schema: bool,
- ) -> PublisherType:
- validate_options(channel=channel, list=list, stream=stream)
-
- if (channel := PubSub.validate(channel)) is not None:
- return AsyncAPIChannelPublisher(
- channel=channel,
- # basic args
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- schema_=schema_,
- include_in_schema=include_in_schema,
- )
-
- elif (stream := StreamSub.validate(stream)) is not None:
- return AsyncAPIStreamPublisher(
- stream=stream,
- # basic args
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- schema_=schema_,
- include_in_schema=include_in_schema,
- )
-
- elif (list := ListSub.validate(list)) is not None:
- if list.batch:
- return AsyncAPIListBatchPublisher(
- list=list,
- # basic args
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- schema_=schema_,
- include_in_schema=include_in_schema,
- )
- else:
- return AsyncAPIListPublisher(
- list=list,
- # basic args
- headers=headers,
- reply_to=reply_to,
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- title_=title_,
- description_=description_,
- schema_=schema_,
- include_in_schema=include_in_schema,
- )
-
- else:
- raise SetupError(INCORRECT_SETUP_MSG)
-
-
-class AsyncAPIChannelPublisher(ChannelPublisher, AsyncAPIPublisher):
- def get_name(self) -> str:
- return f"{self.channel.name}:Publisher"
-
- @property
- def channel_binding(self) -> "redis.ChannelBinding":
- return redis.ChannelBinding(
- channel=self.channel.name,
- method="publish",
- )
-
-
-class _ListPublisherMixin(AsyncAPIPublisher):
- list: "ListSub"
-
- def get_name(self) -> str:
- return f"{self.list.name}:Publisher"
-
- @property
- def channel_binding(self) -> "redis.ChannelBinding":
- return redis.ChannelBinding(
- channel=self.list.name,
- method="rpush",
- )
-
-
-class AsyncAPIListPublisher(ListPublisher, _ListPublisherMixin):
- pass
-
-
-class AsyncAPIListBatchPublisher(ListBatchPublisher, _ListPublisherMixin):
- pass
-
-
-class AsyncAPIStreamPublisher(StreamPublisher, AsyncAPIPublisher):
- def get_name(self) -> str:
- return f"{self.stream.name}:Publisher"
-
- @property
- def channel_binding(self) -> "redis.ChannelBinding":
- return redis.ChannelBinding(
- channel=self.stream.name,
- method="xadd",
- )
diff --git a/faststream/redis/publisher/factory.py b/faststream/redis/publisher/factory.py
new file mode 100644
index 0000000000..1f886f5885
--- /dev/null
+++ b/faststream/redis/publisher/factory.py
@@ -0,0 +1,107 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from typing_extensions import TypeAlias
+
+from faststream.exceptions import SetupError
+from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub
+from faststream.redis.schemas.proto import validate_options
+
+from .specified import (
+ SpecificationChannelPublisher,
+ SpecificationListBatchPublisher,
+ SpecificationListPublisher,
+ SpecificationStreamPublisher,
+)
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.redis.message import UnifyRedisDict
+
+
+PublisherType: TypeAlias = Union[
+ SpecificationChannelPublisher,
+ SpecificationStreamPublisher,
+ SpecificationListPublisher,
+ SpecificationListBatchPublisher,
+]
+
+
+def create_publisher(
+ *,
+ channel: Union["PubSub", str, None],
+ list: Union["ListSub", str, None],
+ stream: Union["StreamSub", str, None],
+ headers: Optional["AnyDict"],
+ reply_to: str,
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ middlewares: Sequence["PublisherMiddleware"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ schema_: Optional[Any],
+ include_in_schema: bool,
+) -> PublisherType:
+ validate_options(channel=channel, list=list, stream=stream)
+
+ if (channel := PubSub.validate(channel)) is not None:
+ return SpecificationChannelPublisher(
+ channel=channel,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+
+ if (stream := StreamSub.validate(stream)) is not None:
+ return SpecificationStreamPublisher(
+ stream=stream,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+
+ if (list := ListSub.validate(list)) is not None:
+ if list.batch:
+ return SpecificationListBatchPublisher(
+ list=list,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+ return SpecificationListPublisher(
+ list=list,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+
+ raise SetupError(INCORRECT_SETUP_MSG)
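
A usage sketch for the new factory with the keyword set the registrator passes above; exactly one destination may be set, otherwise validate_options (or the trailing raise) fails with SetupError:

from faststream.redis.publisher.factory import create_publisher

publisher = create_publisher(
    channel="events",  # exactly one of channel / list / stream
    list=None,
    stream=None,
    headers=None,
    reply_to="",
    broker_middlewares=(),
    middlewares=(),
    title_=None,
    description_=None,
    schema_=None,
    include_in_schema=True,
)
print(type(publisher).__name__)  # SpecificationChannelPublisher
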
diff --git a/faststream/redis/publisher/fake.py b/faststream/redis/publisher/fake.py
new file mode 100644
index 0000000000..b7efa2b0d5
--- /dev/null
+++ b/faststream/redis/publisher/fake.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.redis.response import RedisPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class RedisFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ channel: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.channel = channel
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "RedisPublishCommand"]
+ ) -> "RedisPublishCommand":
+ cmd = super().patch_command(cmd)
+ real_cmd = RedisPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.channel
+ return real_cmd
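
For context, the fake publisher's whole job is destination rewriting: whatever command the middleware chain produced, the reply goes to the stored channel. A standalone sketch with a simplified dataclass command (not the real `PublishCommand`):

```python
# Standalone sketch of RedisFakePublisher.patch_command: any command produced
# by middleware gets its destination rewritten to the stored reply channel.
from dataclasses import dataclass, field
from typing import Any


@dataclass
class FakeCommand:
    body: Any
    destination: str = ""
    headers: dict = field(default_factory=dict)


class FakeReplyPublisher:
    def __init__(self, channel: str) -> None:
        self.channel = channel

    def patch_command(self, cmd: FakeCommand) -> FakeCommand:
        cmd.destination = self.channel  # reply always goes to the stored channel
        return cmd


cmd = FakeReplyPublisher("reply-channel").patch_command(FakeCommand(body=b"ok"))
assert cmd.destination == "reply-channel"
```
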
diff --git a/faststream/redis/publisher/producer.py b/faststream/redis/publisher/producer.py
index 3dc44271e0..0de145f094 100644
--- a/faststream/redis/publisher/producer.py
+++ b/faststream/redis/publisher/producer.py
@@ -1,41 +1,41 @@
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any, Optional, Union, cast
import anyio
from typing_extensions import override
-from faststream.broker.publisher.proto import ProducerProto
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import WRONG_PUBLISH_ARGS, SetupError
+from faststream._internal.publisher.proto import ProducerProto
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream._internal.utils.nuid import NUID
+from faststream.redis.helpers.state import (
+ ConnectedState,
+ ConnectionState,
+ EmptyConnectionState,
+)
from faststream.redis.message import DATA_KEY
from faststream.redis.parser import RawMessage, RedisPubSubParser
-from faststream.redis.schemas import INCORRECT_SETUP_MSG
-from faststream.utils.functions import timeout_scope
-from faststream.utils.nuid import NUID
+from faststream.redis.response import DestinationType, RedisPublishCommand
if TYPE_CHECKING:
- from redis.asyncio.client import PubSub, Redis
+ from redis.asyncio.client import Redis
- from faststream.broker.types import (
+ from faststream._internal.types import (
AsyncCallable,
CustomCallable,
)
- from faststream.types import AnyDict, SendableMessage
class RedisFastProducer(ProducerProto):
"""A class to represent a Redis producer."""
- _connection: "Redis[bytes]"
_decoder: "AsyncCallable"
_parser: "AsyncCallable"
def __init__(
self,
- connection: "Redis[bytes]",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._connection = connection
+ self._connection: ConnectionState = EmptyConnectionState()
default = RedisPubSubParser()
self._parser = resolve_custom_func(
@@ -47,136 +47,56 @@ def __init__(
default.decode_message,
)
+ def connect(self, client: "Redis[bytes]") -> None:
+ self._connection = ConnectedState(client)
+
+ def disconnect(self) -> None:
+ self._connection = EmptyConnectionState()
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- *,
- correlation_id: str,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- reply_to: str = "",
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- ) -> Optional[Any]:
- if not any((channel, list, stream)):
- raise SetupError(INCORRECT_SETUP_MSG)
-
- psub: Optional[PubSub] = None
- if rpc:
- if reply_to:
- raise WRONG_PUBLISH_ARGS
- nuid = NUID()
- rpc_nuid = str(nuid.next(), "utf-8")
- reply_to = rpc_nuid
- psub = self._connection.pubsub()
- await psub.subscribe(reply_to)
-
+ cmd: "RedisPublishCommand",
+ ) -> Union[int, bytes]:
msg = RawMessage.encode(
- message=message,
- reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
+ message=cmd.body,
+ reply_to=cmd.reply_to,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id or "",
)
- if channel is not None:
- await self._connection.publish(channel, msg)
- elif list is not None:
- await self._connection.rpush(list, msg)
- elif stream is not None:
- await self._connection.xadd(
- name=stream,
- fields={DATA_KEY: msg},
- maxlen=maxlen,
- )
- else:
- raise AssertionError("unreachable")
-
- if psub is None:
- return None
-
- else:
- m = None
- with timeout_scope(rpc_timeout, raise_timeout):
- # skip subscribe message
- await psub.get_message(
- ignore_subscribe_messages=True,
- timeout=rpc_timeout or 0.0,
- )
-
- # get real response
- m = await psub.get_message(
- ignore_subscribe_messages=True,
- timeout=rpc_timeout or 0.0,
- )
-
- await psub.unsubscribe()
- await psub.aclose() # type: ignore[attr-defined]
-
- if m is None:
- if raise_timeout:
- raise TimeoutError()
- else:
- return None
- else:
- return await self._decoder(await self._parser(m))
+ return await self.__publish(msg, cmd)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- *,
- correlation_id: str,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- timeout: Optional[float] = 30.0,
+ cmd: "RedisPublishCommand",
) -> "Any":
- if not any((channel, list, stream)):
- raise SetupError(INCORRECT_SETUP_MSG)
-
nuid = NUID()
reply_to = str(nuid.next(), "utf-8")
- psub = self._connection.pubsub()
+ psub = self._connection.client.pubsub()
await psub.subscribe(reply_to)
msg = RawMessage.encode(
- message=message,
+ message=cmd.body,
reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id or "",
)
- if channel is not None:
- await self._connection.publish(channel, msg)
- elif list is not None:
- await self._connection.rpush(list, msg)
- elif stream is not None:
- await self._connection.xadd(
- name=stream,
- fields={DATA_KEY: msg},
- maxlen=maxlen,
- )
- else:
- raise AssertionError("unreachable")
+ await self.__publish(msg, cmd)
- with anyio.fail_after(timeout) as scope:
+ with anyio.fail_after(cmd.timeout) as scope:
# skip subscribe message
await psub.get_message(
ignore_subscribe_messages=True,
- timeout=timeout or 0.0,
+ timeout=cmd.timeout or 0.0,
)
# get real response
response_msg = await psub.get_message(
ignore_subscribe_messages=True,
- timeout=timeout or 0.0,
+ timeout=cmd.timeout or 0.0,
)
await psub.unsubscribe()
@@ -187,20 +107,40 @@ async def request( # type: ignore[override]
return response_msg
+ @override
async def publish_batch(
self,
- *msgs: "SendableMessage",
- list: str,
- correlation_id: str,
- headers: Optional["AnyDict"] = None,
- ) -> None:
- batch = (
+ cmd: "RedisPublishCommand",
+ ) -> int:
+ batch = [
RawMessage.encode(
message=msg,
- correlation_id=correlation_id,
- reply_to=None,
- headers=headers,
+ correlation_id=cmd.correlation_id or "",
+ reply_to=cmd.reply_to,
+ headers=cmd.headers,
)
- for msg in msgs
- )
- await self._connection.rpush(list, *batch)
+ for msg in cmd.batch_bodies
+ ]
+ return await self._connection.client.rpush(cmd.destination, *batch)
+
+ async def __publish(
+ self, msg: bytes, cmd: "RedisPublishCommand"
+ ) -> Union[int, bytes]:
+ if cmd.destination_type is DestinationType.Channel:
+ return await self._connection.client.publish(cmd.destination, msg)
+
+ if cmd.destination_type is DestinationType.List:
+ return await self._connection.client.rpush(cmd.destination, msg)
+
+ if cmd.destination_type is DestinationType.Stream:
+ return cast(
+ "bytes",
+ await self._connection.client.xadd(
+ name=cmd.destination,
+ fields={DATA_KEY: msg},
+ maxlen=cmd.maxlen,
+ ),
+ )
+
+ error_msg = "unreachable"
+ raise AssertionError(error_msg)
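
The producer now routes on `cmd.destination_type` instead of three optional keyword arguments. A hedged sketch of that dispatch written directly against redis-py's asyncio client; `Command` is a simplified stand-in for `RedisPublishCommand`, and the `"__data__"` field key stands in for FastStream's `DATA_KEY`:

```python
# Sketch of the RedisFastProducer.__publish dispatch above, under the
# assumptions named in the lead-in.
from dataclasses import dataclass
from enum import Enum
from typing import Optional, Union

from redis.asyncio import Redis


class DestinationType(str, Enum):
    Channel = "channel"
    List = "list"
    Stream = "stream"


@dataclass
class Command:
    destination: str
    destination_type: DestinationType
    maxlen: Optional[int] = None


async def publish(client: "Redis[bytes]", msg: bytes, cmd: Command) -> Union[int, bytes]:
    if cmd.destination_type is DestinationType.Channel:
        return await client.publish(cmd.destination, msg)  # subscriber count
    if cmd.destination_type is DestinationType.List:
        return await client.rpush(cmd.destination, msg)  # new list length
    if cmd.destination_type is DestinationType.Stream:
        # XADD returns the generated stream entry id
        return await client.xadd(cmd.destination, {"__data__": msg}, maxlen=cmd.maxlen)
    raise AssertionError("unreachable")
```
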
diff --git a/faststream/redis/publisher/specified.py b/faststream/redis/publisher/specified.py
new file mode 100644
index 0000000000..3ccd57c931
--- /dev/null
+++ b/faststream/redis/publisher/specified.py
@@ -0,0 +1,90 @@
+from typing import TYPE_CHECKING
+
+from faststream._internal.publisher.specified import (
+ SpecificationPublisher as SpecificationPublisherMixin,
+)
+from faststream.redis.publisher.usecase import (
+ ChannelPublisher,
+ ListBatchPublisher,
+ ListPublisher,
+ StreamPublisher,
+)
+from faststream.redis.schemas.proto import RedisSpecificationProtocol
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, PublisherSpec
+from faststream.specification.schema.bindings import ChannelBinding, redis
+
+if TYPE_CHECKING:
+ from faststream.redis.schemas import ListSub
+
+
+class SpecificationPublisher(
+ SpecificationPublisherMixin,
+ RedisSpecificationProtocol[PublisherSpec],
+):
+ """A class to represent a Redis publisher."""
+
+ def get_schema(self) -> dict[str, PublisherSpec]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: PublisherSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ redis=self.channel_binding,
+ ),
+ ),
+ }
+
+
+class SpecificationChannelPublisher(SpecificationPublisher, ChannelPublisher):
+ def get_default_name(self) -> str:
+ return f"{self.channel.name}:Publisher"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.channel.name,
+ method="publish",
+ )
+
+
+class _ListPublisherMixin(SpecificationPublisher):
+ list: "ListSub"
+
+ def get_default_name(self) -> str:
+ return f"{self.list.name}:Publisher"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.list.name,
+ method="rpush",
+ )
+
+
+class SpecificationListPublisher(_ListPublisherMixin, ListPublisher):
+ pass
+
+
+class SpecificationListBatchPublisher(_ListPublisherMixin, ListBatchPublisher):
+ pass
+
+
+class SpecificationStreamPublisher(SpecificationPublisher, StreamPublisher):
+ def get_default_name(self) -> str:
+ return f"{self.stream.name}:Publisher"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.stream.name,
+ method="xadd",
+ )
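
In short, each publisher flavor advertises the Redis command it uses as its AsyncAPI binding method; a quick summary of the bindings declared above:

```python
# AsyncAPI binding methods declared by the publisher classes above.
PUBLISHER_BINDING_METHOD = {
    "SpecificationChannelPublisher": "publish",  # Pub/Sub channel
    "SpecificationListPublisher": "rpush",       # list push
    "SpecificationListBatchPublisher": "rpush",  # list push (batch)
    "SpecificationStreamPublisher": "xadd",      # stream append
}
```
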
diff --git a/faststream/redis/publisher/usecase.py b/faststream/redis/publisher/usecase.py
index 130f5b4f6b..82fcfd0b2b 100644
--- a/faststream/redis/publisher/usecase.py
+++ b/faststream/redis/publisher/usecase.py
@@ -1,30 +1,29 @@
from abc import abstractmethod
-from contextlib import AsyncExitStack
+from collections.abc import Iterable, Sequence
from copy import deepcopy
-from functools import partial
-from itertools import chain
-from typing import TYPE_CHECKING, Any, Awaitable, Callable, Iterable, Optional, Sequence
+from typing import TYPE_CHECKING, Annotated, Optional, Union
-from typing_extensions import Annotated, Doc, deprecated, override
+from typing_extensions import Doc, override
-from faststream.broker.message import SourceType, gen_cor_id
-from faststream.broker.publisher.usecase import PublisherUsecase
-from faststream.exceptions import NOT_CONNECTED_YET
+from faststream._internal.publisher.usecase import PublisherUsecase
+from faststream.message import gen_cor_id
from faststream.redis.message import UnifyRedisDict
-from faststream.redis.schemas import ListSub, PubSub, StreamSub
-from faststream.utils.functions import return_input
+from faststream.redis.response import RedisPublishCommand
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream.broker.types import BrokerMiddleware, PublisherMiddleware
+ from faststream._internal.basic_types import AnyDict, SendableMessage
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.redis.message import RedisMessage
from faststream.redis.publisher.producer import RedisFastProducer
- from faststream.types import AnyDict, AsyncFunc, SendableMessage
+ from faststream.redis.schemas import ListSub, PubSub, StreamSub
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[UnifyRedisDict]):
"""A class to represent a Redis publisher."""
- _producer: Optional["RedisFastProducer"]
+ _producer: "RedisFastProducer"
def __init__(
self,
@@ -34,30 +33,18 @@ def __init__(
# Publisher args
broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.reply_to = reply_to
- self.headers = headers
-
- self._producer = None
+ self.headers = headers or {}
@abstractmethod
def subscriber_property(self, *, name_only: bool) -> "AnyDict":
- raise NotImplementedError()
+ raise NotImplementedError
class ChannelPublisher(LogicPublisher):
@@ -70,28 +57,16 @@ def __init__(
# Regular publisher options
broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI options
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
reply_to=reply_to,
headers=headers,
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.channel = channel
- def __hash__(self) -> int:
- return hash(f"publisher:pubsub:{self.channel.name}")
-
@override
def subscriber_property(self, *, name_only: bool) -> "AnyDict":
return {
@@ -102,7 +77,7 @@ def subscriber_property(self, *, name_only: bool) -> "AnyDict":
def add_prefix(self, prefix: str) -> None:
channel = deepcopy(self.channel)
- channel.name = "".join((prefix, channel.name))
+ channel.name = f"{prefix}{channel.name}"
self.channel = channel
@override
@@ -128,78 +103,36 @@ async def publish(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
- *,
- # rpc args
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- **kwargs: Any, # option to suppress maxlen
- ) -> Optional[Any]:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- channel_sub = PubSub.validate(channel or self.channel)
- reply_to = reply_to or self.reply_to
- headers = headers or self.headers
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
-
- return await call(
+ ) -> int:
+ cmd = RedisPublishCommand(
message,
- channel=channel_sub.name,
- # basic args
- reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
- # RPC args
- rpc=rpc,
- rpc_timeout=rpc_timeout,
- raise_timeout=raise_timeout,
+ channel=channel or self.channel.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd)
+
+ cmd.set_destination(channel=self.channel.name)
+
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -217,7 +150,7 @@ async def request(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
headers: Annotated[
@@ -228,50 +161,18 @@ async def request(
Optional[float],
Doc("RPC reply waiting time."),
] = 30.0,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "RedisMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs = {
- "channel": PubSub.validate(channel or self.channel).name,
- # basic args
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- "timeout": timeout,
- }
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = RedisPublishCommand(
message,
- **kwargs,
+ channel=channel or self.channel.name,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
+ timeout=timeout,
)
- async with AsyncExitStack() as stack:
- return_msg: Callable[[RedisMessage], Awaitable[RedisMessage]] = return_input
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
-
- raise AssertionError("unreachable")
+ msg: RedisMessage = await self._basic_request(cmd)
+ return msg
class ListPublisher(LogicPublisher):
@@ -284,28 +185,16 @@ def __init__(
# Regular publisher options
broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI options
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
reply_to=reply_to,
headers=headers,
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.list = list
- def __hash__(self) -> int:
- return hash(f"publisher:list:{self.list.name}")
-
@override
def subscriber_property(self, *, name_only: bool) -> "AnyDict":
return {
@@ -316,7 +205,7 @@ def subscriber_property(self, *, name_only: bool) -> "AnyDict":
def add_prefix(self, prefix: str) -> None:
list_sub = deepcopy(self.list)
- list_sub.name = "".join((prefix, list_sub.name))
+ list_sub.name = f"{prefix}{list_sub.name}"
self.list = list_sub
@override
@@ -342,78 +231,38 @@ async def publish(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
- *,
- # rpc args
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- **kwargs: Any, # option to suppress maxlen
- ) -> Any:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- list_sub = ListSub.validate(list or self.list)
- reply_to = reply_to or self.reply_to
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
-
- return await call(
+ ) -> int:
+ cmd = RedisPublishCommand(
message,
- list=list_sub.name,
- # basic args
- reply_to=reply_to,
- headers=headers or self.headers,
- correlation_id=correlation_id,
- # RPC args
- rpc=rpc,
- rpc_timeout=rpc_timeout,
- raise_timeout=raise_timeout,
+ list=list or self.list.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd)
+
+ cmd.set_destination(list=self.list.name)
+
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
+
@override
async def request(
self,
@@ -430,7 +279,7 @@ async def request(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
headers: Annotated[
@@ -441,103 +290,75 @@ async def request(
Optional[float],
Doc("RPC reply waiting time."),
] = 30.0,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "RedisMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs = {
- "list": ListSub.validate(list or self.list).name,
- # basic args
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- "timeout": timeout,
- }
-
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = RedisPublishCommand(
message,
- **kwargs,
+ list=list or self.list.name,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
+ timeout=timeout,
)
- async with AsyncExitStack() as stack:
- return_msg: Callable[[RedisMessage], Awaitable[RedisMessage]] = return_input
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
-
- raise AssertionError("unreachable")
+ msg: RedisMessage = await self._basic_request(cmd)
+ return msg
class ListBatchPublisher(ListPublisher):
@override
async def publish( # type: ignore[override]
self,
- message: Annotated[
- Iterable["SendableMessage"],
- Doc("Message body to send."),
- ] = (),
+ *messages: Annotated[
+ "SendableMessage",
+ Doc("Messages bodies to send."),
+ ],
list: Annotated[
- Optional[str],
- Doc("Redis List object name to send message."),
- ] = None,
- *,
+ str,
+ Doc("Redis List object name to send messages."),
+ ],
correlation_id: Annotated[
Optional[str],
- Doc("Has no real effect. Option to be compatible with original protocol."),
+ Doc(
+ "Manual message **correlation_id** setter. "
+ "**correlation_id** is a useful option to trace messages.",
+ ),
] = None,
+ reply_to: Annotated[
+ str,
+ Doc("Reply message destination PubSub object name."),
+ ] = "",
headers: Annotated[
Optional["AnyDict"],
Doc("Message headers to store metainformation."),
] = None,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- **kwargs: Any, # option to suppress maxlen
+ ) -> int:
+ cmd = RedisPublishCommand(
+ *messages,
+ list=list or self.list.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
+ )
+
+ return await self._basic_publish_batch(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish( # type: ignore[override]
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd, batch=True)
- list_sub = ListSub.validate(list or self.list)
- correlation_id = correlation_id or gen_cor_id()
+ cmd.set_destination(list=self.list.name)
- call: AsyncFunc = self._producer.publish_batch
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
-
- await call(
- *message,
- list=list_sub.name,
- correlation_id=correlation_id,
- headers=headers or self.headers,
- )
+ await self._basic_publish_batch(cmd, _extra_middlewares=_extra_middlewares)
class StreamPublisher(LogicPublisher):
@@ -550,28 +371,16 @@ def __init__(
# Regular publisher options
broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
middlewares: Sequence["PublisherMiddleware"],
- # AsyncAPI options
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
super().__init__(
reply_to=reply_to,
headers=headers,
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.stream = stream
- def __hash__(self) -> int:
- return hash(f"publisher:stream:{self.stream.name}")
-
@override
def subscriber_property(self, *, name_only: bool) -> "AnyDict":
return {
@@ -582,7 +391,7 @@ def subscriber_property(self, *, name_only: bool) -> "AnyDict":
def add_prefix(self, prefix: str) -> None:
stream_sub = deepcopy(self.stream)
- stream_sub.name = "".join((prefix, stream_sub.name))
+ stream_sub.name = f"{prefix}{stream_sub.name}"
self.stream = stream_sub
@override
@@ -608,7 +417,7 @@ async def publish(
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
*,
@@ -616,79 +425,40 @@ async def publish(
Optional[int],
Doc(
"Redis Stream maxlen publish option. "
- "Remove eldest message if maxlen exceeded."
+ "Remove eldest message if maxlen exceeded.",
),
] = None,
- # rpc args
- rpc: Annotated[
- bool,
- Doc("Whether to wait for reply in blocking mode."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- rpc_timeout: Annotated[
- Optional[float],
- Doc("RPC reply waiting time."),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "Please, use `request` method with `timeout` instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = 30.0,
- raise_timeout: Annotated[
- bool,
- Doc(
- "Whetever to raise `TimeoutError` or return `None` at **rpc_timeout**. "
- "RPC request returns `None` at timeout by default."
- ),
- deprecated(
- "Deprecated in **FastStream 0.5.17**. "
- "`request` always raises TimeoutError instead. "
- "Argument will be removed in **FastStream 0.6.0**."
- ),
- ] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- ) -> Optional[Any]:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- stream_sub = StreamSub.validate(stream or self.stream)
- maxlen = maxlen or stream_sub.maxlen
- reply_to = reply_to or self.reply_to
- headers = headers or self.headers
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish
-
- for m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- call = partial(m, call)
-
- return await call(
+ ) -> bytes:
+ cmd = RedisPublishCommand(
message,
- stream=stream_sub.name,
- maxlen=maxlen,
- # basic args
- reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
- # RPC args
- rpc=rpc,
- rpc_timeout=rpc_timeout,
- raise_timeout=raise_timeout,
+ stream=stream or self.stream.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ maxlen=maxlen or self.stream.maxlen,
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd)
+
+ cmd.set_destination(stream=self.stream.name)
+
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+ cmd.maxlen = self.stream.maxlen
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
+
@override
async def request(
self,
@@ -705,14 +475,14 @@ async def request(
Optional[int],
Doc(
"Redis Stream maxlen publish option. "
- "Remove eldest message if maxlen exceeded."
+ "Remove eldest message if maxlen exceeded.",
),
] = None,
correlation_id: Annotated[
Optional[str],
Doc(
"Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages."
+ "**correlation_id** is a useful option to trace messages.",
),
] = None,
headers: Annotated[
@@ -723,48 +493,16 @@ async def request(
Optional[float],
Doc("RPC reply waiting time."),
] = 30.0,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "RedisMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs = {
- "stream": StreamSub.validate(stream or self.stream).name,
- # basic args
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- "timeout": timeout,
- }
-
- request: AsyncFunc = self._producer.request
-
- for pub_m in chain(
- self._middlewares[::-1],
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares[::-1])
- ),
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = RedisPublishCommand(
message,
- **kwargs,
+ stream=stream or self.stream.name,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
+ maxlen=maxlen or self.stream.maxlen,
+ timeout=timeout,
)
- async with AsyncExitStack() as stack:
- return_msg: Callable[[RedisMessage], Awaitable[RedisMessage]] = return_input
- for m in self._broker_middlewares[::-1]:
- mid = m(published_msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
-
- parsed_msg = await self._producer._parser(published_msg)
- parsed_msg._decoded_body = await self._producer._decoder(parsed_msg)
- parsed_msg._source_type = SourceType.Response
- return await return_msg(parsed_msg)
-
- raise AssertionError("unreachable")
+ msg: RedisMessage = await self._basic_request(cmd)
+ return msg
diff --git a/faststream/redis/response.py b/faststream/redis/response.py
index 9656fbc7b3..7f173776c5 100644
--- a/faststream/redis/response.py
+++ b/faststream/redis/response.py
@@ -1,11 +1,21 @@
-from typing import TYPE_CHECKING, Optional
+from enum import Enum
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import override
-from faststream.broker.response import Response
+from faststream.exceptions import SetupError
+from faststream.redis.schemas import INCORRECT_SETUP_MSG
+from faststream.response.publish_type import PublishType
+from faststream.response.response import BatchPublishCommand, PublishCommand, Response
if TYPE_CHECKING:
- from faststream.types import AnyDict, SendableMessage
+ from faststream._internal.basic_types import AnyDict, SendableMessage
+
+
+class DestinationType(str, Enum):
+ Channel = "channel"
+ List = "list"
+ Stream = "stream"
class RedisResponse(Response):
@@ -25,9 +35,96 @@ def __init__(
self.maxlen = maxlen
@override
- def as_publish_kwargs(self) -> "AnyDict":
- publish_options = {
- **super().as_publish_kwargs(),
- "maxlen": self.maxlen,
- }
- return publish_options
+ def as_publish_command(self) -> "RedisPublishCommand":
+ return RedisPublishCommand(
+ self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.PUBLISH,
+ # Redis specific
+ channel="fake-channel", # will be replaced by the reply sender
+ maxlen=self.maxlen,
+ )
+
+
+class RedisPublishCommand(BatchPublishCommand):
+ destination_type: DestinationType
+
+ def __init__(
+ self,
+ message: "SendableMessage",
+ /,
+ *messages: "SendableMessage",
+ _publish_type: "PublishType",
+ correlation_id: Optional[str] = None,
+ channel: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: Optional[str] = None,
+ maxlen: Optional[int] = None,
+ headers: Optional["AnyDict"] = None,
+ reply_to: str = "",
+ timeout: Optional[float] = 30.0,
+ ) -> None:
+ super().__init__(
+ message,
+ *messages,
+ _publish_type=_publish_type,
+ correlation_id=correlation_id,
+ reply_to=reply_to,
+ destination="",
+ headers=headers,
+ )
+
+ self.set_destination(
+ channel=channel,
+ list=list,
+ stream=stream,
+ )
+
+ # Stream option
+ self.maxlen = maxlen
+
+ # Request option
+ self.timeout = timeout
+
+ def set_destination(
+ self,
+ *,
+ channel: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: Optional[str] = None,
+ ) -> None:
+ if channel is not None:
+ self.destination_type = DestinationType.Channel
+ self.destination = channel
+ elif list is not None:
+ self.destination_type = DestinationType.List
+ self.destination = list
+ elif stream is not None:
+ self.destination_type = DestinationType.Stream
+ self.destination = stream
+ else:
+ raise SetupError(INCORRECT_SETUP_MSG)
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ batch: bool = False,
+ ) -> "RedisPublishCommand":
+ if isinstance(cmd, RedisPublishCommand):
+ # NOTE: should probably return a copy.
+ return cmd
+
+ body, extra_bodies = cls._parse_bodies(cmd.body, batch=batch)
+
+ return cls(
+ body,
+ *extra_bodies,
+ channel=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
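
The destination setter above follows a first-match rule: the first non-`None` argument wins, in channel → list → stream order, and passing none of them is a setup error. A minimal standalone sketch (simplified class, `ValueError` standing in for `SetupError`):

```python
# Minimal sketch of RedisPublishCommand.set_destination semantics above.
from enum import Enum
from typing import Optional


class DestinationType(str, Enum):
    Channel = "channel"
    List = "list"
    Stream = "stream"


class Cmd:
    def set_destination(
        self,
        *,
        channel: Optional[str] = None,
        list: Optional[str] = None,
        stream: Optional[str] = None,
    ) -> None:
        if channel is not None:
            self.destination_type, self.destination = DestinationType.Channel, channel
        elif list is not None:
            self.destination_type, self.destination = DestinationType.List, list
        elif stream is not None:
            self.destination_type, self.destination = DestinationType.Stream, stream
        else:
            raise ValueError("no destination provided")


c = Cmd()
c.set_destination(stream="events")
assert c.destination_type is DestinationType.Stream
```
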
diff --git a/faststream/redis/router.py b/faststream/redis/router.py
index ab625f8711..17fbb8854f 100644
--- a/faststream/redis/router.py
+++ b/faststream/redis/router.py
@@ -1,34 +1,31 @@
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Iterable,
- Optional,
- Sequence,
- Union,
-)
+from collections.abc import Awaitable, Iterable, Sequence
+from typing import TYPE_CHECKING, Annotated, Any, Callable, Optional, Union
-from typing_extensions import Annotated, Doc, deprecated
+from typing_extensions import Doc, deprecated
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-from faststream.broker.utils import default_filter
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.redis.broker.registrator import RedisRegistrator
from faststream.redis.message import BaseMessage
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import (
+ from faststream._internal.basic_types import AnyDict, SendableMessage
+ from faststream._internal.broker.abc_broker import ABCBroker
+ from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
- Filter,
PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.redis.message import UnifyRedisMessage
from faststream.redis.schemas import ListSub, PubSub, StreamSub
- from faststream.types import AnyDict, SendableMessage
class RedisPublisher(ArgsContainer):
@@ -56,7 +53,7 @@ def __init__(
Optional["AnyDict"],
Doc(
"Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified."
+ "Can be overridden by `publish.headers` if specified.",
),
] = None,
reply_to: Annotated[
@@ -65,6 +62,10 @@ def __init__(
] = "",
middlewares: Annotated[
Sequence["PublisherMiddleware"],
+ deprecated(
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
+ ),
Doc("Publisher middlewares to wrap outgoing messages."),
] = (),
# AsyncAPI information
@@ -80,7 +81,7 @@ def __init__(
Optional[Any],
Doc(
"AsyncAPI publishing message type. "
- "Should be any python-native object annotation or `pydantic.BaseModel`."
+ "Should be any python-native object annotation or `pydantic.BaseModel`.",
),
] = None,
include_in_schema: Annotated[
@@ -114,7 +115,7 @@ def __init__(
],
Doc(
"Message handler function "
- "to wrap the same with `@broker.subscriber(...)` way."
+ "to wrap the same with `@broker.subscriber(...)` way.",
),
],
channel: Annotated[
@@ -136,13 +137,13 @@ def __init__(
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc(
- "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one."
+ "Parser to map original **aio_pika.IncomingMessage** Msg to FastStream one.",
),
] = None,
decoder: Annotated[
@@ -151,31 +152,25 @@ def __init__(
] = None,
middlewares: Annotated[
Sequence["SubscriberMiddleware[UnifyRedisMessage]"],
- Doc("Subscriber middlewares to wrap incoming message processing."),
- ] = (),
- filter: Annotated[
- "Filter[UnifyRedisMessage]",
- Doc(
- "Overload subscriber to consume various messages from the same source."
- ),
deprecated(
- "Deprecated in **FastStream 0.5.0**. "
- "Please, create `subscriber` object and use it explicitly instead. "
- "Argument will be removed in **FastStream 0.6.0**."
+ "This option was deprecated in 0.6.0. Use router-level middlewares instead."
+ "Scheduled to remove in 0.7.0"
),
- ] = default_filter,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
+ Doc("Subscriber middlewares to wrap incoming message processing."),
+ ] = (),
no_ack: Annotated[
bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ deprecated(
+ "This option was deprecated in 0.6.0 to prior to **ack_policy=AckPolicy.DO_NOTHING**. "
+ "Scheduled to remove in 0.7.0"
+ ),
+ ] = EMPTY,
+ ack_policy: AckPolicy = EMPTY,
no_reply: Annotated[
bool,
Doc(
- "Whether to disable **FastStream** RPC and Reply To auto responses or not."
+ "Whether to disable **FastStream** RPC and Reply To auto responses or not.",
),
] = False,
# AsyncAPI information
@@ -187,13 +182,17 @@ def __init__(
Optional[str],
Doc(
"AsyncAPI subscriber object description. "
- "Uses decorated docstring as default."
+ "Uses decorated docstring as default.",
),
] = None,
include_in_schema: Annotated[
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
+ max_workers: Annotated[
+ int,
+ Doc("Number of workers to process messages concurrently."),
+ ] = 1,
) -> None:
super().__init__(
call,
@@ -202,11 +201,11 @@ def __init__(
list=list,
stream=stream,
dependencies=dependencies,
+ max_workers=max_workers,
parser=parser,
decoder=decoder,
middlewares=middlewares,
- filter=filter,
- retry=retry,
+ ack_policy=ack_policy,
no_ack=no_ack,
no_reply=no_reply,
title=title,
@@ -215,10 +214,7 @@ def __init__(
)
-class RedisRouter(
- RedisRegistrator,
- BrokerRouter[BaseMessage],
-):
+class RedisRouter(RedisRegistrator, BrokerRouter[BaseMessage]):
"""Includable to RedisBroker router."""
def __init__(
@@ -233,15 +229,19 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers."
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
Sequence["BrokerMiddleware[BaseMessage]"],
Doc("Router middlewares to apply to all routers' publishers/subscribers."),
] = (),
+ routers: Annotated[
+ Sequence["ABCBroker[BaseMessage]"],
+ Doc("Routers to apply to broker."),
+ ] = (),
parser: Annotated[
Optional["CustomCallable"],
Doc("Parser to map original **IncomingMessage** Msg to FastStream one."),
@@ -261,6 +261,7 @@ def __init__(
prefix=prefix,
dependencies=dependencies,
middlewares=middlewares,
+ routers=routers,
parser=parser,
decoder=decoder,
include_in_schema=include_in_schema,
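
A hedged usage sketch of the options this hunk introduces, assuming the registrator forwards `max_workers` the same way `RedisRoute` above does:

```python
# Usage sketch for the new `routers` and `max_workers` options.
from faststream.redis import RedisBroker, RedisRouter

inner = RedisRouter(prefix="inner-")

router = RedisRouter(
    prefix="app-",
    routers=[inner],  # nest routers at construction time (new in 0.6)
)


@router.subscriber(channel="events", max_workers=4)  # concurrent subscriber
async def handle(msg: str) -> None:
    print(msg)


broker = RedisBroker()
broker.include_router(router)
```
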
diff --git a/faststream/redis/schemas/list_sub.py b/faststream/redis/schemas/list_sub.py
index f518f5cfb8..b842cc29e3 100644
--- a/faststream/redis/schemas/list_sub.py
+++ b/faststream/redis/schemas/list_sub.py
@@ -1,7 +1,7 @@
from functools import cached_property
from typing import Optional
-from faststream.broker.schemas import NameRequired
+from faststream._internal.proto import NameRequired
class ListSub(NameRequired):
@@ -30,6 +30,3 @@ def __init__(
@cached_property
def records(self) -> Optional[int]:
return self.max_records if self.batch else None
-
- def __hash__(self) -> int:
- return hash(f"list:{self.name}")
diff --git a/faststream/redis/schemas/proto.py b/faststream/redis/schemas/proto.py
index 2521a1a0a3..8b2a3154ad 100644
--- a/faststream/redis/schemas/proto.py
+++ b/faststream/redis/schemas/proto.py
@@ -1,15 +1,15 @@
from abc import abstractmethod
from typing import TYPE_CHECKING, Any, Union
-from faststream.asyncapi.abc import AsyncAPIOperation
from faststream.exceptions import SetupError
+from faststream.specification.proto.endpoint import EndpointSpecification, T
if TYPE_CHECKING:
- from faststream.asyncapi.schema.bindings import redis
from faststream.redis.schemas import ListSub, PubSub, StreamSub
+ from faststream.specification.schema.bindings import redis
-class RedisAsyncAPIProtocol(AsyncAPIOperation):
+class RedisSpecificationProtocol(EndpointSpecification[Any, T]):
@property
@abstractmethod
def channel_binding(self) -> "redis.ChannelBinding": ...
@@ -25,8 +25,11 @@ def validate_options(
stream: Union["StreamSub", str, None],
) -> None:
if all((channel, list)):
- raise SetupError("You can't use `PubSub` and `ListSub` both")
- elif all((channel, stream)):
- raise SetupError("You can't use `PubSub` and `StreamSub` both")
- elif all((list, stream)):
- raise SetupError("You can't use `ListSub` and `StreamSub` both")
+ msg = "You can't use `PubSub` and `ListSub` both"
+ raise SetupError(msg)
+ if all((channel, stream)):
+ msg = "You can't use `PubSub` and `StreamSub` both"
+ raise SetupError(msg)
+ if all((list, stream)):
+ msg = "You can't use `ListSub` and `StreamSub` both"
+ raise SetupError(msg)
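
The pairwise checks above reject any two destinations supplied together; a standalone illustration (`ValueError` standing in for `SetupError`):

```python
# Illustration of the pairwise checks in validate_options above.
def validate_options(channel=None, list=None, stream=None) -> None:
    if channel and list:
        raise ValueError("You can't use `PubSub` and `ListSub` both")
    if channel and stream:
        raise ValueError("You can't use `PubSub` and `StreamSub` both")
    if list and stream:
        raise ValueError("You can't use `ListSub` and `StreamSub` both")


validate_options(channel="c")  # ok: a single destination
try:
    validate_options(list="l", stream="s")
except ValueError as e:
    print(e)  # You can't use `ListSub` and `StreamSub` both
```
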
diff --git a/faststream/redis/schemas/pub_sub.py b/faststream/redis/schemas/pub_sub.py
index 3026d6d2dc..88d41ac8eb 100644
--- a/faststream/redis/schemas/pub_sub.py
+++ b/faststream/redis/schemas/pub_sub.py
@@ -1,5 +1,5 @@
-from faststream.broker.schemas import NameRequired
-from faststream.utils.path import compile_path
+from faststream._internal.proto import NameRequired
+from faststream._internal.utils.path import compile_path
class PubSub(NameRequired):
@@ -32,6 +32,3 @@ def __init__(
self.path_regex = reg
self.pattern = channel if pattern else None
self.polling_interval = polling_interval
-
- def __hash__(self) -> int:
- return hash(f"pubsub:{self.name}")
diff --git a/faststream/redis/schemas/stream_sub.py b/faststream/redis/schemas/stream_sub.py
index 70e2d9b3c5..50a0b6d606 100644
--- a/faststream/redis/schemas/stream_sub.py
+++ b/faststream/redis/schemas/stream_sub.py
@@ -1,7 +1,7 @@
import warnings
from typing import Optional
-from faststream.broker.schemas import NameRequired
+from faststream._internal.proto import NameRequired
from faststream.exceptions import SetupError
@@ -33,7 +33,8 @@ def __init__(
max_records: Optional[int] = None,
) -> None:
if (group and not consumer) or (not group and consumer):
- raise SetupError("You should specify `group` and `consumer` both")
+ msg = "You should specify `group` and `consumer` both"
+ raise SetupError(msg)
if group and consumer and no_ack:
warnings.warn(
@@ -55,10 +56,3 @@ def __init__(
self.last_id = last_id
self.maxlen = maxlen
self.max_records = max_records
-
- def __hash__(self) -> int:
- if self.group is not None:
- return hash(
- f"stream:{self.name} group:{self.group} consumer:{self.consumer}"
- )
- return hash(f"stream:{self.name}")
diff --git a/faststream/redis/security.py b/faststream/redis/security.py
index 08db65778d..153c369c79 100644
--- a/faststream/redis/security.py
+++ b/faststream/redis/security.py
@@ -5,18 +5,18 @@
from faststream.security import BaseSecurity, SASLPlaintext
if TYPE_CHECKING:
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
def parse_security(security: Optional[BaseSecurity]) -> "AnyDict":
if security is None:
return {}
- elif isinstance(security, SASLPlaintext):
+ if isinstance(security, SASLPlaintext):
return _parse_sasl_plaintext(security)
- elif isinstance(security, BaseSecurity):
+ if isinstance(security, BaseSecurity):
return _parse_base_security(security)
- else:
- raise NotImplementedError(f"RedisBroker does not support {type(security)}")
+ msg = f"RedisBroker does not support {type(security)}"
+ raise NotImplementedError(msg)
def _parse_base_security(security: BaseSecurity) -> "AnyDict":
@@ -38,8 +38,7 @@ def _connection_arguments(self) -> Any:
}
return {"connection_class": SSLConnection}
- else:
- return {}
+ return {}
def _parse_sasl_plaintext(security: SASLPlaintext) -> "AnyDict":
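
One detail worth flagging in the dispatch above: because `SASLPlaintext` subclasses `BaseSecurity`, the more specific `isinstance` check must run first. A standalone sketch with stand-in classes (not the real `faststream.security` types):

```python
# Sketch of the parse_security dispatch order above.
class BaseSecurity:
    use_ssl = True


class SASLPlaintext(BaseSecurity):
    username = "user"
    password = "pass"


def parse_security(security):
    if security is None:
        return {}
    if isinstance(security, SASLPlaintext):  # most specific subclass first
        return {"username": security.username, "password": security.password}
    if isinstance(security, BaseSecurity):
        return {"ssl": security.use_ssl}
    msg = f"RedisBroker does not support {type(security)}"
    raise NotImplementedError(msg)


assert "username" in parse_security(SASLPlaintext())
```
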
diff --git a/faststream/redis/subscriber/asyncapi.py b/faststream/redis/subscriber/asyncapi.py
deleted file mode 100644
index 36171b247b..0000000000
--- a/faststream/redis/subscriber/asyncapi.py
+++ /dev/null
@@ -1,104 +0,0 @@
-from typing import Dict
-
-from faststream.asyncapi.schema import (
- Channel,
- ChannelBinding,
- CorrelationId,
- Message,
- Operation,
-)
-from faststream.asyncapi.schema.bindings import redis
-from faststream.asyncapi.utils import resolve_payloads
-from faststream.redis.schemas import ListSub, StreamSub
-from faststream.redis.schemas.proto import RedisAsyncAPIProtocol
-from faststream.redis.subscriber.usecase import (
- BatchListSubscriber,
- BatchStreamSubscriber,
- ChannelSubscriber,
- ListSubscriber,
- LogicSubscriber,
- StreamSubscriber,
-)
-
-
-class AsyncAPISubscriber(LogicSubscriber, RedisAsyncAPIProtocol):
- """A class to represent a Redis handler."""
-
- def get_schema(self) -> Dict[str, Channel]:
- payloads = self.get_payloads()
-
- return {
- self.name: Channel(
- description=self.description,
- subscribe=Operation(
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads),
- correlationId=CorrelationId(
- location="$message.header#/correlation_id"
- ),
- ),
- ),
- bindings=ChannelBinding(
- redis=self.channel_binding,
- ),
- )
- }
-
-
-class AsyncAPIChannelSubscriber(ChannelSubscriber, AsyncAPISubscriber):
- def get_name(self) -> str:
- return f"{self.channel.name}:{self.call_name}"
-
- @property
- def channel_binding(self) -> "redis.ChannelBinding":
- return redis.ChannelBinding(
- channel=self.channel.name,
- method="psubscribe" if self.channel.pattern else "subscribe",
- )
-
-
-class _StreamSubscriberMixin(AsyncAPISubscriber):
- stream_sub: StreamSub
-
- def get_name(self) -> str:
- return f"{self.stream_sub.name}:{self.call_name}"
-
- @property
- def channel_binding(self) -> "redis.ChannelBinding":
- return redis.ChannelBinding(
- channel=self.stream_sub.name,
- group_name=self.stream_sub.group,
- consumer_name=self.stream_sub.consumer,
- method="xreadgroup" if self.stream_sub.group else "xread",
- )
-
-
-class AsyncAPIStreamSubscriber(StreamSubscriber, _StreamSubscriberMixin):
- pass
-
-
-class AsyncAPIStreamBatchSubscriber(BatchStreamSubscriber, _StreamSubscriberMixin):
- pass
-
-
-class _ListSubscriberMixin(AsyncAPISubscriber):
- list_sub: ListSub
-
- def get_name(self) -> str:
- return f"{self.list_sub.name}:{self.call_name}"
-
- @property
- def channel_binding(self) -> "redis.ChannelBinding":
- return redis.ChannelBinding(
- channel=self.list_sub.name,
- method="lpop",
- )
-
-
-class AsyncAPIListSubscriber(ListSubscriber, _ListSubscriberMixin):
- pass
-
-
-class AsyncAPIListBatchSubscriber(BatchListSubscriber, _ListSubscriberMixin):
- pass
diff --git a/faststream/redis/subscriber/factory.py b/faststream/redis/subscriber/factory.py
index 9a43d054e8..0aa663d1ab 100644
--- a/faststream/redis/subscriber/factory.py
+++ b/faststream/redis/subscriber/factory.py
@@ -1,30 +1,40 @@
-from typing import TYPE_CHECKING, Iterable, Optional, Sequence, Union
+import warnings
+from collections.abc import Iterable, Sequence
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import TypeAlias
+from faststream._internal.constants import EMPTY
from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub
from faststream.redis.schemas.proto import validate_options
-from faststream.redis.subscriber.asyncapi import (
- AsyncAPIChannelSubscriber,
- AsyncAPIListBatchSubscriber,
- AsyncAPIListSubscriber,
- AsyncAPIStreamBatchSubscriber,
- AsyncAPIStreamSubscriber,
+from faststream.redis.subscriber.specified import (
+ SpecificationChannelConcurrentSubscriber,
+ SpecificationChannelSubscriber,
+ SpecificationListBatchSubscriber,
+ SpecificationListConcurrentSubscriber,
+ SpecificationListSubscriber,
+ SpecificationStreamBatchSubscriber,
+ SpecificationStreamConcurrentSubscriber,
+ SpecificationStreamSubscriber,
)
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream.broker.types import BrokerMiddleware
+ from faststream._internal.types import BrokerMiddleware
from faststream.redis.message import UnifyRedisDict
SubsciberType: TypeAlias = Union[
- "AsyncAPIChannelSubscriber",
- "AsyncAPIStreamBatchSubscriber",
- "AsyncAPIStreamSubscriber",
- "AsyncAPIListBatchSubscriber",
- "AsyncAPIListSubscriber",
+ SpecificationChannelSubscriber,
+ SpecificationStreamBatchSubscriber,
+ SpecificationStreamSubscriber,
+ SpecificationListBatchSubscriber,
+ SpecificationListSubscriber,
+ SpecificationChannelConcurrentSubscriber,
+ SpecificationListConcurrentSubscriber,
+ SpecificationStreamConcurrentSubscriber,
]
@@ -34,25 +44,47 @@ def create_subscriber(
list: Union["ListSub", str, None],
stream: Union["StreamSub", str, None],
# Subscriber args
- no_ack: bool = False,
+ ack_policy: "AckPolicy",
+ no_ack: bool,
no_reply: bool = False,
- retry: bool = False,
- broker_dependencies: Iterable["Depends"] = (),
+ broker_dependencies: Iterable["Dependant"] = (),
broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"] = (),
# AsyncAPI args
title_: Optional[str] = None,
description_: Optional[str] = None,
include_in_schema: bool = True,
+ max_workers: int = 1,
) -> SubsciberType:
- validate_options(channel=channel, list=list, stream=stream)
+ _validate_input_for_misconfigure(
+ channel=channel,
+ list=list,
+ stream=stream,
+ ack_policy=ack_policy,
+ no_ack=no_ack,
+ max_workers=max_workers,
+ )
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.DO_NOTHING if no_ack else AckPolicy.REJECT_ON_ERROR
if (channel_sub := PubSub.validate(channel)) is not None:
- return AsyncAPIChannelSubscriber(
+ if max_workers > 1:
+ return SpecificationChannelConcurrentSubscriber(
+ channel=channel_sub,
+ # basic args
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ max_workers=max_workers,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+ return SpecificationChannelSubscriber(
channel=channel_sub,
# basic args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -61,14 +93,13 @@ def create_subscriber(
include_in_schema=include_in_schema,
)
- elif (stream_sub := StreamSub.validate(stream)) is not None:
+ if (stream_sub := StreamSub.validate(stream)) is not None:
if stream_sub.batch:
- return AsyncAPIStreamBatchSubscriber(
+ return SpecificationStreamBatchSubscriber(
stream=stream_sub,
# basic args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -76,29 +107,39 @@ def create_subscriber(
description_=description_,
include_in_schema=include_in_schema,
)
- else:
- return AsyncAPIStreamSubscriber(
+ if max_workers > 1:
+ return SpecificationStreamConcurrentSubscriber(
stream=stream_sub,
# basic args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
+ max_workers=max_workers,
# AsyncAPI args
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
+ return SpecificationStreamSubscriber(
+ stream=stream_sub,
+ # basic args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
- elif (list_sub := ListSub.validate(list)) is not None:
+ if (list_sub := ListSub.validate(list)) is not None:
if list_sub.batch:
- return AsyncAPIListBatchSubscriber(
+ return SpecificationListBatchSubscriber(
list=list_sub,
# basic args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -106,20 +147,70 @@ def create_subscriber(
description_=description_,
include_in_schema=include_in_schema,
)
- else:
- return AsyncAPIListSubscriber(
+ if max_workers > 1:
+ return SpecificationListConcurrentSubscriber(
list=list_sub,
# basic args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
+ max_workers=max_workers,
# AsyncAPI args
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
+ return SpecificationListSubscriber(
+ list=list_sub,
+ # basic args
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ raise SetupError(INCORRECT_SETUP_MSG)
+
- else:
- raise SetupError(INCORRECT_SETUP_MSG)
+def _validate_input_for_misconfigure(
+ *,
+ channel: Union["PubSub", str, None],
+ list: Union["ListSub", str, None],
+ stream: Union["StreamSub", str, None],
+ ack_policy: AckPolicy,
+ no_ack: bool,
+ max_workers: int,
+) -> None:
+ validate_options(channel=channel, list=list, stream=stream)
+
+ if no_ack is not EMPTY:
+ warnings.warn(
+ "The `no_ack` option is deprecated in favor of `ack_policy=AckPolicy.DO_NOTHING`. It is scheduled for removal in 0.7.0.",
+ category=DeprecationWarning,
+ stacklevel=4,
+ )
+
+ if ack_policy is not EMPTY:
+ msg = "You can't use the deprecated `no_ack` option and `ack_policy` simultaneously. Please use `ack_policy` only."
+ raise SetupError(msg)
+ if stream and no_ack and max_workers > 1:
+ msg = "The `max_workers` option doesn't work with manual `no_ack` mode for stream subscribers."
+ raise SetupError(msg)
+
+ if ack_policy is not EMPTY:
+ if channel:
+ warnings.warn(
+ "You can't use an acknowledgement policy with the PubSub subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if list:
+ warnings.warn(
+ "You can't use an acknowledgement policy with the List subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
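
For orientation, the net effect of the validation above plus the `ack_policy` defaulting at the top of `create_subscriber` can be reduced to a few lines. The sketch below is self-contained and illustrative only: the `EMPTY` sentinel and `AckPolicy` enum are stand-ins for FastStream's own definitions.

from enum import Enum


class AckPolicy(Enum):
    DO_NOTHING = "do_nothing"
    REJECT_ON_ERROR = "reject_on_error"


EMPTY = object()  # sentinel meaning "the user passed nothing"


def resolve_ack_policy(ack_policy=EMPTY, no_ack=EMPTY):
    if ack_policy is not EMPTY and no_ack is not EMPTY:
        # mirrors the SetupError raised above
        raise ValueError("use `ack_policy` only")
    if ack_policy is not EMPTY:
        return ack_policy
    # the deprecated `no_ack=True` maps onto AckPolicy.DO_NOTHING
    if no_ack is not EMPTY and no_ack:
        return AckPolicy.DO_NOTHING
    return AckPolicy.REJECT_ON_ERROR


assert resolve_ack_policy() is AckPolicy.REJECT_ON_ERROR
assert resolve_ack_policy(no_ack=True) is AckPolicy.DO_NOTHING
assert resolve_ack_policy(ack_policy=AckPolicy.DO_NOTHING) is AckPolicy.DO_NOTHING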
diff --git a/faststream/redis/subscriber/specified.py b/faststream/redis/subscriber/specified.py
new file mode 100644
index 0000000000..4598cfd987
--- /dev/null
+++ b/faststream/redis/subscriber/specified.py
@@ -0,0 +1,130 @@
+from faststream._internal.subscriber.specified import (
+ SpecificationSubscriber as SpecificationSubscriberMixin,
+)
+from faststream.redis.schemas import ListSub, StreamSub
+from faststream.redis.schemas.proto import RedisSpecificationProtocol
+from faststream.redis.subscriber.usecases.basic import ConcurrentSubscriber
+from faststream.redis.subscriber.usecases.channel_subscriber import (
+ ChannelSubscriber,
+ ConcurrentChannelSubscriber,
+)
+from faststream.redis.subscriber.usecases.list_subscriber import (
+ BatchListSubscriber,
+ ConcurrentListSubscriber,
+ ListSubscriber,
+)
+from faststream.redis.subscriber.usecases.stream_subscriber import (
+ ConcurrentStreamSubscriber,
+ StreamBatchSubscriber,
+ StreamSubscriber,
+)
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, SubscriberSpec
+from faststream.specification.schema.bindings import ChannelBinding, redis
+
+
+class SpecificationSubscriber(
+ SpecificationSubscriberMixin, RedisSpecificationProtocol[SubscriberSpec]
+):
+ """A class to represent a Redis subscriber specification."""
+
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: SubscriberSpec(
+ description=self.description,
+ operation=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads),
+ ),
+ bindings=None,
+ ),
+ bindings=ChannelBinding(
+ redis=self.channel_binding,
+ ),
+ ),
+ }
+
+
+class SpecificationChannelSubscriber(SpecificationSubscriber, ChannelSubscriber):
+ def get_default_name(self) -> str:
+ return f"{self.channel.name}:{self.call_name}"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.channel.name,
+ method="psubscribe" if self.channel.pattern else "subscribe",
+ )
+
+
+class _StreamSubscriberMixin(SpecificationSubscriber):
+ stream_sub: StreamSub
+
+ def get_default_name(self) -> str:
+ return f"{self.stream_sub.name}:{self.call_name}"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.stream_sub.name,
+ group_name=self.stream_sub.group,
+ consumer_name=self.stream_sub.consumer,
+ method="xreadgroup" if self.stream_sub.group else "xread",
+ )
+
+
+class SpecificationStreamSubscriber(_StreamSubscriberMixin, StreamSubscriber):
+ pass
+
+
+class SpecificationStreamBatchSubscriber(_StreamSubscriberMixin, StreamBatchSubscriber):
+ pass
+
+
+class _ListSubscriberMixin(SpecificationSubscriber):
+ list_sub: ListSub
+
+ def get_default_name(self) -> str:
+ return f"{self.list_sub.name}:{self.call_name}"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.list_sub.name,
+ method="lpop",
+ )
+
+
+class SpecificationListSubscriber(_ListSubscriberMixin, ListSubscriber):
+ pass
+
+
+class SpecificationListBatchSubscriber(_ListSubscriberMixin, BatchListSubscriber):
+ pass
+
+
+class SpecificationConcurrentSubscriber(
+ ConcurrentSubscriber, RedisSpecificationProtocol[SubscriberSpec]
+):
+ pass
+
+
+class SpecificationStreamConcurrentSubscriber(
+ ConcurrentStreamSubscriber, SpecificationStreamSubscriber
+):
+ pass
+
+
+class SpecificationChannelConcurrentSubscriber(
+ ConcurrentChannelSubscriber, SpecificationChannelSubscriber
+):
+ pass
+
+
+class SpecificationListConcurrentSubscriber(
+ ConcurrentListSubscriber, SpecificationListSubscriber
+):
+ pass
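
The new file follows a single pattern throughout: runtime behavior and AsyncAPI metadata live in separate bases, and each public class is just their combination. A stripped-down sketch of that layout (all names here are illustrative, not FastStream's):

class StreamSubscriber:
    """Runtime half: knows how to read messages."""

    def __init__(self, stream_name: str) -> None:
        self.stream_name = stream_name
        self.call_name = "handler"


class SpecificationMixin:
    """Documentation half: knows how to describe the endpoint."""

    stream_name: str
    call_name: str

    def get_default_name(self) -> str:
        return f"{self.stream_name}:{self.call_name}"


class SpecificationStreamSubscriber(SpecificationMixin, StreamSubscriber):
    pass


sub = SpecificationStreamSubscriber("orders")
assert sub.get_default_name() == "orders:handler"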
diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py
deleted file mode 100644
index 5215838a49..0000000000
--- a/faststream/redis/subscriber/usecase.py
+++ /dev/null
@@ -1,873 +0,0 @@
-import asyncio
-import math
-from abc import abstractmethod
-from contextlib import suppress
-from copy import deepcopy
-from typing import (
- TYPE_CHECKING,
- Any,
- Awaitable,
- Callable,
- Dict,
- Iterable,
- List,
- Optional,
- Sequence,
- Tuple,
-)
-
-import anyio
-from redis.asyncio.client import PubSub as RPubSub
-from redis.asyncio.client import Redis
-from redis.exceptions import ResponseError
-from typing_extensions import TypeAlias, override
-
-from faststream.broker.publisher.fake import FakePublisher
-from faststream.broker.subscriber.usecase import SubscriberUsecase
-from faststream.broker.utils import process_msg
-from faststream.redis.message import (
- BatchListMessage,
- BatchStreamMessage,
- DefaultListMessage,
- DefaultStreamMessage,
- PubSubMessage,
- RedisListMessage,
- RedisMessage,
- RedisStreamMessage,
- UnifyRedisDict,
-)
-from faststream.redis.parser import (
- RedisBatchListParser,
- RedisBatchStreamParser,
- RedisListParser,
- RedisPubSubParser,
- RedisStreamParser,
-)
-from faststream.redis.schemas import ListSub, PubSub, StreamSub
-
-if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
-
- from faststream.broker.message import StreamMessage as BrokerStreamMessage
- from faststream.broker.publisher.proto import ProducerProto
- from faststream.broker.types import (
- AsyncCallable,
- BrokerMiddleware,
- CustomCallable,
- )
- from faststream.types import AnyDict, Decorator, LoggerProto
-
-
-TopicName: TypeAlias = bytes
-Offset: TypeAlias = bytes
-
-
-class LogicSubscriber(SubscriberUsecase[UnifyRedisDict]):
- """A class to represent a Redis handler."""
-
- _client: Optional["Redis[bytes]"]
-
- def __init__(
- self,
- *,
- default_parser: "AsyncCallable",
- default_decoder: "AsyncCallable",
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- default_parser=default_parser,
- default_decoder=default_decoder,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- self._client = None
- self.task: Optional[asyncio.Task[None]] = None
-
- @override
- def setup( # type: ignore[override]
- self,
- *,
- connection: Optional["Redis[bytes]"],
- # basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
- extra_context: "AnyDict",
- # broker options
- broker_parser: Optional["CustomCallable"],
- broker_decoder: Optional["CustomCallable"],
- # dependant args
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
- ) -> None:
- self._client = connection
-
- super().setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
- extra_context=extra_context,
- broker_parser=broker_parser,
- broker_decoder=broker_decoder,
- apply_types=apply_types,
- is_validate=is_validate,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
- )
-
- def _make_response_publisher(
- self,
- message: "BrokerStreamMessage[UnifyRedisDict]",
- ) -> Sequence[FakePublisher]:
- if self._producer is None:
- return ()
-
- return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "channel": message.reply_to,
- },
- ),
- )
-
- @override
- async def start(
- self,
- *args: Any,
- ) -> None:
- if self.task:
- return
-
- await super().start()
-
- start_signal = anyio.Event()
-
- if self.calls:
- self.task = asyncio.create_task(
- self._consume(*args, start_signal=start_signal)
- )
-
- with anyio.fail_after(3.0):
- await start_signal.wait()
-
- else:
- start_signal.set()
-
- async def _consume(self, *args: Any, start_signal: anyio.Event) -> None:
- connected = True
-
- while self.running:
- try:
- await self._get_msgs(*args)
-
- except Exception: # noqa: PERF203
- if connected:
- connected = False
- await anyio.sleep(5)
-
- else:
- if not connected:
- connected = True
-
- finally:
- if not start_signal.is_set():
- with suppress(Exception):
- start_signal.set()
-
- @abstractmethod
- async def _get_msgs(self, *args: Any) -> None:
- raise NotImplementedError()
-
- async def close(self) -> None:
- await super().close()
-
- if self.task is not None and not self.task.done():
- self.task.cancel()
- self.task = None
-
- @staticmethod
- def build_log_context(
- message: Optional["BrokerStreamMessage[Any]"],
- channel: str = "",
- ) -> Dict[str, str]:
- return {
- "channel": channel,
- "message_id": getattr(message, "message_id", ""),
- }
-
-
-class ChannelSubscriber(LogicSubscriber):
- subscription: Optional[RPubSub]
-
- def __init__(
- self,
- *,
- channel: "PubSub",
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = RedisPubSubParser(pattern=channel.path_regex)
- super().__init__(
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- self.channel = channel
- self.subscription = None
-
- def __hash__(self) -> int:
- return hash(self.channel)
-
- def get_log_context(
- self,
- message: Optional["BrokerStreamMessage[Any]"],
- ) -> Dict[str, str]:
- return self.build_log_context(
- message=message,
- channel=self.channel.name,
- )
-
- @override
- async def start(self) -> None:
- if self.subscription:
- return
-
- assert self._client, "You should setup subscriber at first." # nosec B101
-
- self.subscription = psub = self._client.pubsub()
-
- if self.channel.pattern:
- await psub.psubscribe(self.channel.name)
- else:
- await psub.subscribe(self.channel.name)
-
- await super().start(psub)
-
- async def close(self) -> None:
- if self.subscription is not None:
- await self.subscription.unsubscribe()
- await self.subscription.aclose() # type: ignore[attr-defined]
- self.subscription = None
-
- await super().close()
-
- @override
- async def get_one( # type: ignore[override]
- self,
- *,
- timeout: float = 5.0,
- ) -> "Optional[RedisMessage]":
- assert self.subscription, "You should start subscriber at first." # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- sleep_interval = timeout / 10
-
- message: Optional[PubSubMessage] = None
-
- with anyio.move_on_after(timeout):
- while (message := await self._get_message(self.subscription)) is None: # noqa: ASYNC110
- await anyio.sleep(sleep_interval)
-
- msg: Optional[RedisMessage] = await process_msg( # type: ignore[assignment]
- msg=message,
- middlewares=self._broker_middlewares, # type: ignore[arg-type]
- parser=self._parser,
- decoder=self._decoder,
- )
- return msg
-
- async def _get_message(self, psub: RPubSub) -> Optional[PubSubMessage]:
- raw_msg = await psub.get_message(
- ignore_subscribe_messages=True,
- timeout=self.channel.polling_interval,
- )
-
- if raw_msg:
- return PubSubMessage(
- type=raw_msg["type"],
- data=raw_msg["data"],
- channel=raw_msg["channel"].decode(),
- pattern=raw_msg["pattern"],
- )
-
- return None
-
- async def _get_msgs(self, psub: RPubSub) -> None:
- if msg := await self._get_message(psub):
- await self.consume(msg) # type: ignore[arg-type]
-
- def add_prefix(self, prefix: str) -> None:
- new_ch = deepcopy(self.channel)
- new_ch.name = "".join((prefix, new_ch.name))
- self.channel = new_ch
-
-
-class _ListHandlerMixin(LogicSubscriber):
- def __init__(
- self,
- *,
- list: ListSub,
- default_parser: "AsyncCallable",
- default_decoder: "AsyncCallable",
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- default_parser=default_parser,
- default_decoder=default_decoder,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- self.list_sub = list
-
- def __hash__(self) -> int:
- return hash(self.list_sub)
-
- def get_log_context(
- self,
- message: Optional["BrokerStreamMessage[Any]"],
- ) -> Dict[str, str]:
- return self.build_log_context(
- message=message,
- channel=self.list_sub.name,
- )
-
- @override
- async def _consume( # type: ignore[override]
- self,
- client: "Redis[bytes]",
- *,
- start_signal: "anyio.Event",
- ) -> None:
- start_signal.set()
- await super()._consume(client, start_signal=start_signal)
-
- @override
- async def start(self) -> None:
- if self.task:
- return
-
- assert self._client, "You should setup subscriber at first." # nosec B101
-
- await super().start(self._client)
-
- @override
- async def get_one( # type: ignore[override]
- self,
- *,
- timeout: float = 5.0,
- ) -> "Optional[RedisListMessage]":
- assert self._client, "You should start subscriber at first." # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- sleep_interval = timeout / 10
- raw_message = None
-
- with anyio.move_on_after(timeout):
- while ( # noqa: ASYNC110
- raw_message := await self._client.lpop(name=self.list_sub.name)
- ) is None:
- await anyio.sleep(sleep_interval)
-
- if not raw_message:
- return None
-
- msg: RedisListMessage = await process_msg( # type: ignore[assignment]
- msg=DefaultListMessage(
- type="list",
- data=raw_message,
- channel=self.list_sub.name,
- ),
- middlewares=self._broker_middlewares, # type: ignore[arg-type]
- parser=self._parser,
- decoder=self._decoder,
- )
- return msg
-
- def add_prefix(self, prefix: str) -> None:
- new_list = deepcopy(self.list_sub)
- new_list.name = "".join((prefix, new_list.name))
- self.list_sub = new_list
-
-
-class ListSubscriber(_ListHandlerMixin):
- def __init__(
- self,
- *,
- list: ListSub,
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = RedisListParser()
- super().__init__(
- list=list,
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- async def _get_msgs(self, client: "Redis[bytes]") -> None:
- raw_msg = await client.lpop(name=self.list_sub.name)
-
- if raw_msg:
- msg = DefaultListMessage(
- type="list",
- data=raw_msg,
- channel=self.list_sub.name,
- )
-
- await self.consume(msg) # type: ignore[arg-type]
-
- else:
- await anyio.sleep(self.list_sub.polling_interval)
-
-
-class BatchListSubscriber(_ListHandlerMixin):
- def __init__(
- self,
- *,
- list: ListSub,
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = RedisBatchListParser()
- super().__init__(
- list=list,
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- async def _get_msgs(self, client: "Redis[bytes]") -> None:
- raw_msgs = await client.lpop(
- name=self.list_sub.name,
- count=self.list_sub.max_records,
- )
-
- if raw_msgs:
- msg = BatchListMessage(
- type="blist",
- channel=self.list_sub.name,
- data=raw_msgs,
- )
-
- await self.consume(msg) # type: ignore[arg-type]
-
- else:
- await anyio.sleep(self.list_sub.polling_interval)
-
-
-class _StreamHandlerMixin(LogicSubscriber):
- def __init__(
- self,
- *,
- stream: StreamSub,
- default_parser: "AsyncCallable",
- default_decoder: "AsyncCallable",
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- super().__init__(
- default_parser=default_parser,
- default_decoder=default_decoder,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- self.stream_sub = stream
- self.last_id = stream.last_id
-
- def __hash__(self) -> int:
- return hash(self.stream_sub)
-
- def get_log_context(
- self,
- message: Optional["BrokerStreamMessage[Any]"],
- ) -> Dict[str, str]:
- return self.build_log_context(
- message=message,
- channel=self.stream_sub.name,
- )
-
- @override
- async def start(self) -> None:
- if self.task:
- return
-
- assert self._client, "You should setup subscriber at first." # nosec B101
-
- client = self._client
-
- self.extra_watcher_options.update(
- redis=client,
- group=self.stream_sub.group,
- )
-
- stream = self.stream_sub
-
- read: Callable[
- [str],
- Awaitable[
- Tuple[
- Tuple[
- TopicName,
- Tuple[
- Tuple[
- Offset,
- Dict[bytes, bytes],
- ],
- ...,
- ],
- ],
- ...,
- ],
- ],
- ]
-
- if stream.group and stream.consumer:
- try:
- await client.xgroup_create(
- name=stream.name,
- id=self.last_id,
- groupname=stream.group,
- mkstream=True,
- )
- except ResponseError as e:
- if "already exists" not in str(e):
- raise e
-
- def read(
- _: str,
- ) -> Awaitable[
- Tuple[
- Tuple[
- TopicName,
- Tuple[
- Tuple[
- Offset,
- Dict[bytes, bytes],
- ],
- ...,
- ],
- ],
- ...,
- ],
- ]:
- return client.xreadgroup(
- groupname=stream.group,
- consumername=stream.consumer,
- streams={stream.name: ">"},
- count=stream.max_records,
- block=stream.polling_interval,
- noack=stream.no_ack,
- )
-
- else:
-
- def read(
- last_id: str,
- ) -> Awaitable[
- Tuple[
- Tuple[
- TopicName,
- Tuple[
- Tuple[
- Offset,
- Dict[bytes, bytes],
- ],
- ...,
- ],
- ],
- ...,
- ],
- ]:
- return client.xread(
- {stream.name: last_id},
- block=stream.polling_interval,
- count=stream.max_records,
- )
-
- await super().start(read)
-
- @override
- async def get_one( # type: ignore[override]
- self,
- *,
- timeout: float = 5.0,
- ) -> "Optional[RedisStreamMessage]":
- assert self._client, "You should start subscriber at first." # nosec B101
- assert ( # nosec B101
- not self.calls
- ), "You can't use `get_one` method if subscriber has registered handlers."
-
- stream_message = await self._client.xread(
- {self.stream_sub.name: self.last_id},
- block=math.ceil(timeout * 1000),
- count=1,
- )
-
- if not stream_message:
- return None
-
- ((stream_name, ((message_id, raw_message),)),) = stream_message
-
- self.last_id = message_id.decode()
-
- msg: RedisStreamMessage = await process_msg( # type: ignore[assignment]
- msg=DefaultStreamMessage(
- type="stream",
- channel=stream_name.decode(),
- message_ids=[message_id],
- data=raw_message,
- ),
- middlewares=self._broker_middlewares, # type: ignore[arg-type]
- parser=self._parser,
- decoder=self._decoder,
- )
- return msg
-
- def add_prefix(self, prefix: str) -> None:
- new_stream = deepcopy(self.stream_sub)
- new_stream.name = "".join((prefix, new_stream.name))
- self.stream_sub = new_stream
-
-
-class StreamSubscriber(_StreamHandlerMixin):
- def __init__(
- self,
- *,
- stream: StreamSub,
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = RedisStreamParser()
- super().__init__(
- stream=stream,
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- async def _get_msgs(
- self,
- read: Callable[
- [str],
- Awaitable[
- Tuple[
- Tuple[
- TopicName,
- Tuple[
- Tuple[
- Offset,
- Dict[bytes, bytes],
- ],
- ...,
- ],
- ],
- ...,
- ],
- ],
- ],
- ) -> None:
- for stream_name, msgs in await read(self.last_id):
- if msgs:
- self.last_id = msgs[-1][0].decode()
-
- for message_id, raw_msg in msgs:
- msg = DefaultStreamMessage(
- type="stream",
- channel=stream_name.decode(),
- message_ids=[message_id],
- data=raw_msg,
- )
-
- await self.consume(msg) # type: ignore[arg-type]
-
-
-class BatchStreamSubscriber(_StreamHandlerMixin):
- def __init__(
- self,
- *,
- stream: StreamSub,
- # Subscriber args
- no_ack: bool,
- no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
- broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> None:
- parser = RedisBatchStreamParser()
- super().__init__(
- stream=stream,
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
- # Propagated options
- no_ack=no_ack,
- no_reply=no_reply,
- retry=retry,
- broker_middlewares=broker_middlewares,
- broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
-
- async def _get_msgs(
- self,
- read: Callable[
- [str],
- Awaitable[
- Tuple[Tuple[bytes, Tuple[Tuple[bytes, Dict[bytes, bytes]], ...]], ...],
- ],
- ],
- ) -> None:
- for stream_name, msgs in await read(self.last_id):
- if msgs:
- self.last_id = msgs[-1][0].decode()
-
- data: List[Dict[bytes, bytes]] = []
- ids: List[bytes] = []
- for message_id, i in msgs:
- data.append(i)
- ids.append(message_id)
-
- msg = BatchStreamMessage(
- type="bstream",
- channel=stream_name.decode(),
- data=data,
- message_ids=ids,
- )
-
- await self.consume(msg) # type: ignore[arg-type]
diff --git a/faststream/redis/subscriber/usecases/__init__.py b/faststream/redis/subscriber/usecases/__init__.py
new file mode 100644
index 0000000000..32ad97f400
--- /dev/null
+++ b/faststream/redis/subscriber/usecases/__init__.py
@@ -0,0 +1,19 @@
+from .basic import LogicSubscriber
+from .channel_subscriber import ChannelSubscriber
+from .list_subscriber import BatchListSubscriber, ListSubscriber, _ListHandlerMixin
+from .stream_subscriber import (
+ StreamBatchSubscriber,
+ StreamSubscriber,
+ _StreamHandlerMixin,
+)
+
+__all__ = (
+ "BatchListSubscriber",
+ "ChannelSubscriber",
+ "ListSubscriber",
+ "LogicSubscriber",
+ "StreamBatchSubscriber",
+ "StreamSubscriber",
+ "_ListHandlerMixin",
+ "_StreamHandlerMixin",
+)
diff --git a/faststream/redis/subscriber/usecases/basic.py b/faststream/redis/subscriber/usecases/basic.py
new file mode 100644
index 0000000000..d4fd993394
--- /dev/null
+++ b/faststream/redis/subscriber/usecases/basic.py
@@ -0,0 +1,202 @@
+from abc import abstractmethod
+from collections.abc import Iterable, Sequence
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+import anyio
+from typing_extensions import TypeAlias, override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin, TasksMixin
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream.redis.message import (
+ UnifyRedisDict,
+)
+from faststream.redis.publisher.fake import RedisFakePublisher
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from redis.asyncio.client import Redis
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.types import (
+ AsyncCallable,
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.message import StreamMessage as BrokerStreamMessage
+ from faststream.middlewares import AckPolicy
+
+
+TopicName: TypeAlias = bytes
+Offset: TypeAlias = bytes
+
+
+class LogicSubscriber(TasksMixin, SubscriberUsecase[UnifyRedisDict]):
+ """A class to represent a Redis handler."""
+
+ _client: Optional["Redis[bytes]"]
+
+ def __init__(
+ self,
+ *,
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ super().__init__(
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ self._client = None
+
+ @override
+ def _setup( # type: ignore[override]
+ self,
+ *,
+ connection: Optional["Redis[bytes]"],
+ # basic args
+ extra_context: "AnyDict",
+ # broker options
+ broker_parser: Optional["CustomCallable"],
+ broker_decoder: Optional["CustomCallable"],
+ # dependant args
+ state: "Pointer[BrokerState]",
+ ) -> None:
+ self._client = connection
+
+ super()._setup(
+ extra_context=extra_context,
+ broker_parser=broker_parser,
+ broker_decoder=broker_decoder,
+ state=state,
+ )
+
+ def _make_response_publisher(
+ self,
+ message: "BrokerStreamMessage[UnifyRedisDict]",
+ ) -> Sequence["BasePublisherProto"]:
+ return (
+ RedisFakePublisher(
+ self._state.get().producer,
+ channel=message.reply_to,
+ ),
+ )
+
+ @override
+ async def start(
+ self,
+ *args: Any,
+ ) -> None:
+ if self.tasks:
+ return
+
+ await super().start()
+
+ start_signal = anyio.Event()
+
+ if self.calls:
+ self.add_task(self._consume(*args, start_signal=start_signal))
+
+ with anyio.fail_after(3.0):
+ await start_signal.wait()
+
+ else:
+ start_signal.set()
+
+ async def _consume(self, *args: Any, start_signal: anyio.Event) -> None:
+ connected = True
+
+ while self.running:
+ try:
+ await self._get_msgs(*args)
+
+ except Exception: # noqa: PERF203
+ if connected:
+ connected = False
+ await anyio.sleep(5)
+
+ else:
+ if not connected:
+ connected = True
+
+ finally:
+ if not start_signal.is_set():
+ with suppress(Exception):
+ start_signal.set()
+
+ @abstractmethod
+ async def _get_msgs(self, *args: Any) -> None:
+ raise NotImplementedError
+
+ @staticmethod
+ def build_log_context(
+ message: Optional["BrokerStreamMessage[Any]"],
+ channel: str = "",
+ ) -> dict[str, str]:
+ return {
+ "channel": channel,
+ "message_id": getattr(message, "message_id", ""),
+ }
+
+ async def consume_one(self, msg: "BrokerStreamMessage") -> None:
+ await self.consume(msg)
+
+
+class ConcurrentSubscriber(ConcurrentMixin["BrokerStreamMessage"], LogicSubscriber):
+ def __init__(
+ self,
+ *,
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ # Subscriber args
+ max_workers: int,
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ super().__init__(
+ max_workers=max_workers,
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ async def start(self) -> None:
+ await super().start()
+ self.start_consume_task()
+
+ async def consume_one(self, msg: "BrokerStreamMessage") -> None:
+ await self._put_msg(msg)
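
The `start()`/`_consume()` handshake above is easy to miss: the polling task sets `start_signal` in its `finally` block after the first poll attempt, and `start()` blocks for at most three seconds until that happens, so a subscriber only reports started once its loop is actually running. A reduced, runnable sketch of the same handshake in plain anyio (helper names are made up for the demo):

import anyio


async def consume(get_msgs, start_signal: anyio.Event, running: list) -> None:
    connected = True
    while running[0]:
        try:
            await get_msgs()
        except Exception:
            if connected:
                connected = False
            await anyio.sleep(5)  # back off while the connection is down
        else:
            connected = True
        finally:
            if not start_signal.is_set():
                start_signal.set()  # unblock start() after the first attempt


async def main() -> None:
    running = [True]
    start_signal = anyio.Event()

    async def get_msgs() -> None:
        running[0] = False  # stop after one poll, just for the demo

    async with anyio.create_task_group() as tg:
        tg.start_soon(consume, get_msgs, start_signal, running)
        with anyio.fail_after(3.0):
            await start_signal.wait()  # the start() side of the handshake


anyio.run(main)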
diff --git a/faststream/redis/subscriber/usecases/channel_subscriber.py b/faststream/redis/subscriber/usecases/channel_subscriber.py
new file mode 100644
index 0000000000..d7284e47c6
--- /dev/null
+++ b/faststream/redis/subscriber/usecases/channel_subscriber.py
@@ -0,0 +1,194 @@
+from collections.abc import Iterable, Sequence
+from copy import deepcopy
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+import anyio
+from redis.asyncio.client import (
+ PubSub as RPubSub,
+)
+from typing_extensions import TypeAlias, override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.redis.message import (
+ PubSubMessage,
+ RedisMessage,
+ UnifyRedisDict,
+)
+from faststream.redis.parser import (
+ RedisPubSubParser,
+)
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage as BrokerStreamMessage
+ from faststream.redis.schemas import PubSub
+
+
+TopicName: TypeAlias = bytes
+Offset: TypeAlias = bytes
+
+
+class ChannelSubscriber(LogicSubscriber):
+ subscription: Optional[RPubSub]
+
+ def __init__(
+ self,
+ *,
+ channel: "PubSub",
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ parser = RedisPubSubParser(pattern=channel.path_regex)
+ super().__init__(
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ # Propagated options
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ self.channel = channel
+ self.subscription = None
+
+ def get_log_context(
+ self,
+ message: Optional["BrokerStreamMessage[Any]"],
+ ) -> dict[str, str]:
+ return self.build_log_context(
+ message=message,
+ channel=self.channel.name,
+ )
+
+ @override
+ async def start(self) -> None:
+ if self.subscription:
+ return
+
+ assert self._client, "You should set up the subscriber first." # nosec B101
+
+ self.subscription = psub = self._client.pubsub()
+
+ if self.channel.pattern:
+ await psub.psubscribe(self.channel.name)
+ else:
+ await psub.subscribe(self.channel.name)
+
+ await super().start(psub)
+
+ async def close(self) -> None:
+ if self.subscription is not None:
+ await self.subscription.unsubscribe()
+ await self.subscription.aclose() # type: ignore[attr-defined]
+ self.subscription = None
+
+ await super().close()
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5.0,
+ ) -> "Optional[RedisMessage]":
+ assert self.subscription, "You should start the subscriber first." # nosec B101
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use the `get_one` method if the subscriber has registered handlers."
+
+ sleep_interval = timeout / 10
+
+ raw_message: Optional[PubSubMessage] = None
+
+ with anyio.move_on_after(timeout):
+ while (raw_message := await self._get_message(self.subscription)) is None: # noqa: ASYNC110
+ await anyio.sleep(sleep_interval)
+
+ context = self._state.get().di_state.context
+
+ msg: Optional[RedisMessage] = await process_msg( # type: ignore[assignment]
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ async def _get_message(self, psub: RPubSub) -> Optional[PubSubMessage]:
+ raw_msg = await psub.get_message(
+ ignore_subscribe_messages=True,
+ timeout=self.channel.polling_interval,
+ )
+
+ if raw_msg:
+ return PubSubMessage(
+ type=raw_msg["type"],
+ data=raw_msg["data"],
+ channel=raw_msg["channel"].decode(),
+ pattern=raw_msg["pattern"],
+ )
+
+ return None
+
+ async def _get_msgs(self, psub: RPubSub) -> None:
+ if msg := await self._get_message(psub):
+ await self.consume_one(msg)
+
+ def add_prefix(self, prefix: str) -> None:
+ new_ch = deepcopy(self.channel)
+ new_ch.name = f"{prefix}{new_ch.name}"
+ self.channel = new_ch
+
+
+class ConcurrentChannelSubscriber(
+ ConcurrentMixin["BrokerStreamMessage"], ChannelSubscriber
+):
+ def __init__(
+ self,
+ *,
+ channel: "PubSub",
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ max_workers: int,
+ ) -> None:
+ super().__init__(
+ # Propagated options
+ channel=channel,
+ no_reply=no_reply,
+ max_workers=max_workers,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ async def start(self) -> None:
+ await super().start()
+ self.start_consume_task()
+
+ async def consume_one(self, msg: "BrokerStreamMessage") -> None:
+ await self._put_msg(msg)
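
What `ChannelSubscriber` drives here is redis-py's `PubSub` object; reduced to direct calls it looks like the sketch below (assumes redis-py >= 5 and a Redis server on localhost):

import anyio
from redis.asyncio import Redis


async def main() -> None:
    client = Redis()
    psub = client.pubsub()
    await psub.psubscribe("logs.*")  # pattern mode, like PubSub(..., pattern=True)

    # drain the psubscribe confirmation so the subscription is live server-side
    while await psub.get_message(timeout=1.0) is None:
        await anyio.sleep(0.01)

    await client.publish("logs.app", b"hello")

    # poll as ChannelSubscriber._get_message() does: None until data arrives
    while (msg := await psub.get_message(ignore_subscribe_messages=True, timeout=1.0)) is None:
        await anyio.sleep(0.1)
    print(msg["channel"], msg["pattern"], msg["data"])  # b'logs.app' b'logs.*' b'hello'

    await psub.punsubscribe()
    await psub.aclose()
    await client.aclose()


anyio.run(main)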
diff --git a/faststream/redis/subscriber/usecases/list_subscriber.py b/faststream/redis/subscriber/usecases/list_subscriber.py
new file mode 100644
index 0000000000..c002920002
--- /dev/null
+++ b/faststream/redis/subscriber/usecases/list_subscriber.py
@@ -0,0 +1,258 @@
+from collections.abc import Iterable, Sequence
+from copy import deepcopy
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+import anyio
+from typing_extensions import TypeAlias, override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.redis.message import (
+ BatchListMessage,
+ DefaultListMessage,
+ RedisListMessage,
+ UnifyRedisDict,
+)
+from faststream.redis.parser import (
+ RedisBatchListParser,
+ RedisListParser,
+)
+from faststream.redis.schemas import ListSub
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from redis.asyncio.client import Redis
+
+ from faststream._internal.types import (
+ AsyncCallable,
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage as BrokerStreamMessage
+
+
+TopicName: TypeAlias = bytes
+Offset: TypeAlias = bytes
+
+
+class _ListHandlerMixin(LogicSubscriber):
+ def __init__(
+ self,
+ *,
+ list: ListSub,
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ super().__init__(
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ self.list_sub = list
+
+ def get_log_context(
+ self,
+ message: Optional["BrokerStreamMessage[Any]"],
+ ) -> dict[str, str]:
+ return self.build_log_context(
+ message=message,
+ channel=self.list_sub.name,
+ )
+
+ @override
+ async def _consume( # type: ignore[override]
+ self,
+ client: "Redis[bytes]",
+ *,
+ start_signal: "anyio.Event",
+ ) -> None:
+ start_signal.set()
+ await super()._consume(client, start_signal=start_signal)
+
+ @override
+ async def start(self) -> None:
+ if self.tasks:
+ return
+
+ assert self._client, "You should set up the subscriber first." # nosec B101
+
+ await super().start(self._client)
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5.0,
+ ) -> "Optional[RedisListMessage]":
+ assert self._client, "You should start the subscriber first." # nosec B101
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use the `get_one` method if the subscriber has registered handlers."
+
+ sleep_interval = timeout / 10
+ raw_message = None
+
+ with anyio.move_on_after(timeout):
+ while ( # noqa: ASYNC110
+ raw_message := await self._client.lpop(name=self.list_sub.name)
+ ) is None:
+ await anyio.sleep(sleep_interval)
+
+ if not raw_message:
+ return None
+
+ redis_incoming_msg = DefaultListMessage(
+ type="list",
+ data=raw_message,
+ channel=self.list_sub.name,
+ )
+
+ context = self._state.get().di_state.context
+
+ msg: RedisListMessage = await process_msg( # type: ignore[assignment]
+ msg=redis_incoming_msg,
+ middlewares=(
+ m(redis_incoming_msg, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ def add_prefix(self, prefix: str) -> None:
+ new_list = deepcopy(self.list_sub)
+ new_list.name = f"{prefix}{new_list.name}"
+ self.list_sub = new_list
+
+
+class ListSubscriber(_ListHandlerMixin):
+ def __init__(
+ self,
+ *,
+ list: ListSub,
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ parser = RedisListParser()
+ super().__init__(
+ list=list,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ # Propagated options
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ async def _get_msgs(self, client: "Redis[bytes]") -> None:
+ raw_msg = await client.blpop(
+ self.list_sub.name,
+ timeout=self.list_sub.polling_interval,
+ )
+
+ if raw_msg:
+ _, msg_data = raw_msg
+
+ msg = DefaultListMessage(
+ type="list",
+ data=msg_data,
+ channel=self.list_sub.name,
+ )
+
+ await self.consume_one(msg)
+
+
+class BatchListSubscriber(_ListHandlerMixin):
+ def __init__(
+ self,
+ *,
+ list: ListSub,
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ parser = RedisBatchListParser()
+ super().__init__(
+ list=list,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ # Propagated options
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ async def _get_msgs(self, client: "Redis[bytes]") -> None:
+ raw_msgs = await client.lpop(
+ name=self.list_sub.name,
+ count=self.list_sub.max_records,
+ )
+
+ if raw_msgs:
+ msg = BatchListMessage(
+ type="blist",
+ channel=self.list_sub.name,
+ data=raw_msgs,
+ )
+
+ await self.consume_one(msg)
+
+ else:
+ await anyio.sleep(self.list_sub.polling_interval)
+
+
+class ConcurrentListSubscriber(ConcurrentMixin["BrokerStreamMessage"], ListSubscriber):
+ def __init__(
+ self,
+ *,
+ list: ListSub,
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ max_workers: int,
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ list=list,
+ # Propagated options
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ max_workers=max_workers,
+ # AsyncAPI
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ async def start(self) -> None:
+ await super().start()
+ self.start_consume_task()
+
+ async def consume_one(self, msg: "BrokerStreamMessage") -> None:
+ await self._put_msg(msg)
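
One behavioral change against the deleted usecase.py is worth flagging: single-message list consumption now uses a blocking BLPOP with the polling interval as its timeout, while the batch variant keeps the non-blocking LPOP-with-count plus sleep. Both strategies in plain redis-py terms (a sketch assuming a local Redis):

import anyio
from redis.asyncio import Redis


async def main() -> None:
    client = Redis()
    await client.rpush("jobs", b"a", b"b", b"c")

    # ListSubscriber path: block until one element arrives, or time out
    item = await client.blpop("jobs", timeout=1)  # -> (b'jobs', b'a') or None
    print(item)

    # BatchListSubscriber path: pop up to `count` elements without blocking
    batch = await client.lpop("jobs", count=10)  # -> [b'b', b'c'] or None
    print(batch)

    await client.aclose()


anyio.run(main)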
diff --git a/faststream/redis/subscriber/usecases/stream_subscriber.py b/faststream/redis/subscriber/usecases/stream_subscriber.py
new file mode 100644
index 0000000000..4037ea7a2d
--- /dev/null
+++ b/faststream/redis/subscriber/usecases/stream_subscriber.py
@@ -0,0 +1,377 @@
+import math
+from collections.abc import Awaitable, Iterable, Sequence
+from copy import deepcopy
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Optional,
+)
+
+from redis.exceptions import ResponseError
+from typing_extensions import TypeAlias, override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.redis.message import (
+ BatchStreamMessage,
+ DefaultStreamMessage,
+ RedisStreamMessage,
+ UnifyRedisDict,
+)
+from faststream.redis.parser import (
+ RedisBatchStreamParser,
+ RedisStreamParser,
+)
+from faststream.redis.schemas import StreamSub
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.types import (
+ AsyncCallable,
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage as BrokerStreamMessage
+ from faststream.middlewares import AckPolicy
+
+
+TopicName: TypeAlias = bytes
+Offset: TypeAlias = bytes
+
+
+class _StreamHandlerMixin(LogicSubscriber):
+ def __init__(
+ self,
+ *,
+ stream: StreamSub,
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ super().__init__(
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ self.stream_sub = stream
+ self.last_id = stream.last_id
+
+ def get_log_context(
+ self,
+ message: Optional["BrokerStreamMessage[Any]"],
+ ) -> dict[str, str]:
+ return self.build_log_context(
+ message=message,
+ channel=self.stream_sub.name,
+ )
+
+ @override
+ async def start(self) -> None:
+ if self.tasks:
+ return
+
+ assert self._client, "You should set up the subscriber first." # nosec B101
+
+ client = self._client
+
+ self.extra_watcher_options.update(
+ redis=client,
+ group=self.stream_sub.group,
+ )
+
+ stream = self.stream_sub
+
+ read: Callable[
+ [str],
+ Awaitable[
+ tuple[
+ tuple[
+ TopicName,
+ tuple[
+ tuple[
+ Offset,
+ dict[bytes, bytes],
+ ],
+ ...,
+ ],
+ ],
+ ...,
+ ],
+ ],
+ ]
+
+ if stream.group and stream.consumer:
+ try:
+ await client.xgroup_create(
+ name=stream.name,
+ id=self.last_id,
+ groupname=stream.group,
+ mkstream=True,
+ )
+ except ResponseError as e:
+ if "already exists" not in str(e):
+ raise
+
+ def read(
+ _: str,
+ ) -> Awaitable[
+ tuple[
+ tuple[
+ TopicName,
+ tuple[
+ tuple[
+ Offset,
+ dict[bytes, bytes],
+ ],
+ ...,
+ ],
+ ],
+ ...,
+ ],
+ ]:
+ return client.xreadgroup(
+ groupname=stream.group,
+ consumername=stream.consumer,
+ streams={stream.name: ">"},
+ count=stream.max_records,
+ block=stream.polling_interval,
+ noack=stream.no_ack,
+ )
+
+ else:
+
+ def read(
+ last_id: str,
+ ) -> Awaitable[
+ tuple[
+ tuple[
+ TopicName,
+ tuple[
+ tuple[
+ Offset,
+ dict[bytes, bytes],
+ ],
+ ...,
+ ],
+ ],
+ ...,
+ ],
+ ]:
+ return client.xread(
+ {stream.name: last_id},
+ block=stream.polling_interval,
+ count=stream.max_records,
+ )
+
+ await super().start(read)
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5.0,
+ ) -> "Optional[RedisStreamMessage]":
+ assert self._client, "You should start the subscriber first." # nosec B101
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use the `get_one` method if the subscriber has registered handlers."
+
+ stream_message = await self._client.xread(
+ {self.stream_sub.name: self.last_id},
+ block=math.ceil(timeout * 1000),
+ count=1,
+ )
+
+ if not stream_message:
+ return None
+
+ ((stream_name, ((message_id, raw_message),)),) = stream_message
+
+ self.last_id = message_id.decode()
+
+ redis_incoming_msg = DefaultStreamMessage(
+ type="stream",
+ channel=stream_name.decode(),
+ message_ids=[message_id],
+ data=raw_message,
+ )
+
+ context = self._state.get().di_state.context
+
+ msg: RedisStreamMessage = await process_msg( # type: ignore[assignment]
+ msg=redis_incoming_msg,
+ middlewares=(
+ m(redis_incoming_msg, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ def add_prefix(self, prefix: str) -> None:
+ new_stream = deepcopy(self.stream_sub)
+ new_stream.name = f"{prefix}{new_stream.name}"
+ self.stream_sub = new_stream
+
+
+class StreamSubscriber(_StreamHandlerMixin):
+ def __init__(
+ self,
+ *,
+ stream: StreamSub,
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ parser = RedisStreamParser()
+ super().__init__(
+ stream=stream,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ async def _get_msgs(
+ self,
+ read: Callable[
+ [str],
+ Awaitable[
+ tuple[
+ tuple[
+ TopicName,
+ tuple[
+ tuple[
+ Offset,
+ dict[bytes, bytes],
+ ],
+ ...,
+ ],
+ ],
+ ...,
+ ],
+ ],
+ ],
+ ) -> None:
+ for stream_name, msgs in await read(self.last_id):
+ if msgs:
+ self.last_id = msgs[-1][0].decode()
+
+ for message_id, raw_msg in msgs:
+ msg = DefaultStreamMessage(
+ type="stream",
+ channel=stream_name.decode(),
+ message_ids=[message_id],
+ data=raw_msg,
+ )
+
+ await self.consume_one(msg)
+
+
+class StreamBatchSubscriber(_StreamHandlerMixin):
+ def __init__(
+ self,
+ *,
+ stream: StreamSub,
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ ) -> None:
+ parser = RedisBatchStreamParser()
+ super().__init__(
+ stream=stream,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ )
+
+ async def _get_msgs(
+ self,
+ read: Callable[
+ [str],
+ Awaitable[
+ tuple[tuple[bytes, tuple[tuple[bytes, dict[bytes, bytes]], ...]], ...],
+ ],
+ ],
+ ) -> None:
+ for stream_name, msgs in await read(self.last_id):
+ if msgs:
+ self.last_id = msgs[-1][0].decode()
+
+ data: list[dict[bytes, bytes]] = []
+ ids: list[bytes] = []
+ for message_id, i in msgs:
+ data.append(i)
+ ids.append(message_id)
+
+ msg = BatchStreamMessage(
+ type="bstream",
+ channel=stream_name.decode(),
+ data=data,
+ message_ids=ids,
+ )
+
+ await self.consume_one(msg)
+
+
+class ConcurrentStreamSubscriber(
+ ConcurrentMixin["BrokerStreamMessage"], StreamSubscriber
+):
+ def __init__(
+ self,
+ *,
+ stream: StreamSub,
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Sequence["BrokerMiddleware[UnifyRedisDict]"],
+ max_workers: int,
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ stream=stream,
+ # Propagated options
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ max_workers=max_workers,
+ # AsyncAPI
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ async def start(self) -> None:
+ await super().start()
+ self.start_consume_task()
+
+ async def consume_one(self, msg: "BrokerStreamMessage") -> None:
+ await self._put_msg(msg)
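
The branch in `_StreamHandlerMixin.start()` selects between Redis' two stream-reading modes. With a consumer group, delivery bookkeeping lives server-side and `xreadgroup` hands each message to exactly one consumer; without one, `xread` is used and the subscriber tracks its own position via `last_id`. A sketch of both modes in plain redis-py (assumes a local Redis):

import anyio
from redis.asyncio import Redis
from redis.exceptions import ResponseError


async def main() -> None:
    client = Redis()
    await client.xadd("events", {b"k": b"v"})

    # group mode: create the group once, tolerating "already exists"
    try:
        await client.xgroup_create(name="events", groupname="workers", id="0", mkstream=True)
    except ResponseError as e:
        if "already exists" not in str(e):
            raise

    claimed = await client.xreadgroup(
        groupname="workers",
        consumername="worker-1",
        streams={"events": ">"},  # ">" = only messages never delivered to the group
        count=10,
        block=100,
    )
    print(claimed)

    # plain mode: progress is tracked client-side via the last seen id
    last_id = "0"
    for stream_name, msgs in await client.xread({"events": last_id}, count=10, block=100):
        last_id = msgs[-1][0].decode()  # same bookkeeping as self.last_id above
        print(stream_name, msgs)

    await client.aclose()


anyio.run(main)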
diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py
index 16cf7f8abf..088115f97c 100644
--- a/faststream/redis/testing.py
+++ b/faststream/redis/testing.py
@@ -1,12 +1,11 @@
import re
+from collections.abc import Iterator, Sequence
+from contextlib import contextmanager
from typing import (
TYPE_CHECKING,
Any,
- List,
Optional,
Protocol,
- Sequence,
- Tuple,
Union,
cast,
)
@@ -15,9 +14,10 @@
import anyio
from typing_extensions import TypedDict, override
-from faststream.broker.message import gen_cor_id
-from faststream.broker.utils import resolve_custom_func
-from faststream.exceptions import WRONG_PUBLISH_ARGS, SetupError, SubscriberNotFound
+from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream._internal.testing.broker import TestBroker
+from faststream.exceptions import SetupError, SubscriberNotFound
+from faststream.message import gen_cor_id
from faststream.redis.broker.broker import RedisBroker
from faststream.redis.message import (
BatchListMessage,
@@ -29,19 +29,16 @@
)
from faststream.redis.parser import RawMessage, RedisPubSubParser
from faststream.redis.publisher.producer import RedisFastProducer
+from faststream.redis.response import DestinationType, RedisPublishCommand
from faststream.redis.schemas import INCORRECT_SETUP_MSG
-from faststream.redis.subscriber.usecase import (
- ChannelSubscriber,
- LogicSubscriber,
- _ListHandlerMixin,
- _StreamHandlerMixin,
-)
-from faststream.testing.broker import TestBroker
-from faststream.utils.functions import timeout_scope
+from faststream.redis.subscriber.usecases.channel_subscriber import ChannelSubscriber
+from faststream.redis.subscriber.usecases.list_subscriber import _ListHandlerMixin
+from faststream.redis.subscriber.usecases.stream_subscriber import _StreamHandlerMixin
if TYPE_CHECKING:
- from faststream.redis.publisher.asyncapi import AsyncAPIPublisher
- from faststream.types import AnyDict, SendableMessage
+ from faststream._internal.basic_types import AnyDict, SendableMessage
+ from faststream.redis.publisher.specified import SpecificationPublisher
+ from faststream.redis.subscriber.usecases.basic import LogicSubscriber
__all__ = ("TestRedisBroker",)
@@ -52,14 +49,14 @@ class TestRedisBroker(TestBroker[RedisBroker]):
@staticmethod
def create_publisher_fake_subscriber(
broker: RedisBroker,
- publisher: "AsyncAPIPublisher",
- ) -> Tuple["LogicSubscriber", bool]:
+ publisher: "SpecificationPublisher",
+ ) -> tuple["LogicSubscriber", bool]:
sub: Optional[LogicSubscriber] = None
named_property = publisher.subscriber_property(name_only=True)
visitors = (ChannelVisitor(), ListVisitor(), StreamVisitor())
- for handler in broker._subscribers.values(): # pragma: no branch
+ for handler in broker._subscribers: # pragma: no branch
for visitor in visitors:
if visitor.visit(**named_property, sub=handler):
sub = handler
@@ -74,20 +71,25 @@ def create_publisher_fake_subscriber(
return sub, is_real
+ @contextmanager
+ def _patch_producer(self, broker: RedisBroker) -> Iterator[None]:
+ old_producer = broker._state.get().producer
+ broker._state.patch_value(producer=FakeProducer(broker))
+ try:
+ yield
+ finally:
+ # restore the real producer even if the test body raises
+ broker._state.patch_value(producer=old_producer)
+
@staticmethod
async def _fake_connect( # type: ignore[override]
broker: RedisBroker,
*args: Any,
**kwargs: Any,
) -> AsyncMock:
- broker._producer = FakeProducer(broker)
connection = MagicMock()
pub_sub = AsyncMock()
async def get_msg(*args: Any, timeout: float, **kwargs: Any) -> None:
await anyio.sleep(timeout)
- return None
pub_sub.get_message = get_msg
@@ -112,35 +114,19 @@ def __init__(self, broker: RedisBroker) -> None:
@override
async def publish(
self,
- message: "SendableMessage",
- *,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- reply_to: str = "",
- correlation_id: Optional[str] = None,
- rpc: bool = False,
- rpc_timeout: Optional[float] = 30.0,
- raise_timeout: bool = False,
- ) -> Optional[Any]:
- if rpc and reply_to:
- raise WRONG_PUBLISH_ARGS
-
- correlation_id = correlation_id or gen_cor_id()
-
+ cmd: "RedisPublishCommand",
+ ) -> None:
body = build_message(
- message=message,
- reply_to=reply_to,
- correlation_id=correlation_id,
- headers=headers,
+ message=cmd.body,
+ reply_to=cmd.reply_to,
+ correlation_id=cmd.correlation_id or gen_cor_id(),
+ headers=cmd.headers,
)
- destination = _make_destionation_kwargs(channel, list, stream)
+ destination = _make_destination_kwargs(cmd)
visitors = (ChannelVisitor(), ListVisitor(), StreamVisitor())
- for handler in self.broker._subscribers.values(): # pragma: no branch
+ for handler in self.broker._subscribers: # pragma: no branch
for visitor in visitors:
if visited_ch := visitor.visit(**destination, sub=handler):
msg = visitor.get_message(
@@ -149,38 +135,23 @@ async def publish(
handler, # type: ignore[arg-type]
)
- with timeout_scope(rpc_timeout, raise_timeout):
- response_msg = await self._execute_handler(msg, handler)
- if rpc:
- return await self._decoder(await self._parser(response_msg))
-
- return None
+ await self._execute_handler(msg, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- *,
- correlation_id: str,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- timeout: Optional[float] = 30.0,
+ cmd: "RedisPublishCommand",
) -> "PubSubMessage":
- correlation_id = correlation_id or gen_cor_id()
-
body = build_message(
- message=message,
- correlation_id=correlation_id,
- headers=headers,
+ message=cmd.body,
+ correlation_id=cmd.correlation_id or gen_cor_id(),
+ headers=cmd.headers,
)
- destination = _make_destionation_kwargs(channel, list, stream)
+ destination = _make_destination_kwargs(cmd)
visitors = (ChannelVisitor(), ListVisitor(), StreamVisitor())
- for handler in self.broker._subscribers.values(): # pragma: no branch
+ for handler in self.broker._subscribers: # pragma: no branch
for visitor in visitors:
if visited_ch := visitor.visit(**destination, sub=handler):
msg = visitor.get_message(
@@ -189,41 +160,40 @@ async def request( # type: ignore[override]
handler, # type: ignore[arg-type]
)
- with anyio.fail_after(timeout):
+ with anyio.fail_after(cmd.timeout):
return await self._execute_handler(msg, handler)
raise SubscriberNotFound
async def publish_batch(
self,
- *msgs: "SendableMessage",
- list: str,
- headers: Optional["AnyDict"] = None,
- correlation_id: Optional[str] = None,
+ cmd: "RedisPublishCommand",
) -> None:
data_to_send = [
build_message(
m,
- correlation_id=correlation_id or gen_cor_id(),
- headers=headers,
+ correlation_id=cmd.correlation_id or gen_cor_id(),
+ headers=cmd.headers,
)
- for m in msgs
+ for m in cmd.batch_bodies
]
visitor = ListVisitor()
- for handler in self.broker._subscribers.values(): # pragma: no branch
- if visitor.visit(list=list, sub=handler):
- casted_handler = cast(_ListHandlerMixin, handler)
+ for handler in self.broker._subscribers: # pragma: no branch
+ if visitor.visit(list=cmd.destination, sub=handler):
+ casted_handler = cast("_ListHandlerMixin", handler)
if casted_handler.list_sub.batch:
- msg = visitor.get_message(list, data_to_send, casted_handler)
+ msg = visitor.get_message(
+ cmd.destination, data_to_send, casted_handler
+ )
await self._execute_handler(msg, handler)
- return None
-
async def _execute_handler(
- self, msg: Any, handler: "LogicSubscriber"
+ self,
+ msg: Any,
+ handler: "LogicSubscriber",
) -> "PubSubMessage":
result = await handler.process_message(msg)
@@ -246,13 +216,12 @@ def build_message(
reply_to: str = "",
headers: Optional["AnyDict"] = None,
) -> bytes:
- data = RawMessage.encode(
+ return RawMessage.encode(
message=message,
reply_to=reply_to,
headers=headers,
correlation_id=correlation_id,
)
- return data
class Visitor(Protocol):
@@ -288,7 +257,7 @@ def visit(
re.match(
sub_channel.name.replace(".", "\\.").replace("*", ".*"),
channel or "",
- )
+ ),
)
) or channel == sub_channel.name:
return channel
@@ -336,15 +305,14 @@ def get_message( # type: ignore[override]
return BatchListMessage(
type="blist",
channel=channel,
- data=body if isinstance(body, List) else [body],
+ data=body if isinstance(body, list) else [body],
)
- else:
- return DefaultListMessage(
- type="list",
- channel=channel,
- data=body,
- )
+ return DefaultListMessage(
+ type="list",
+ channel=channel,
+ data=body,
+ )
class StreamVisitor(Visitor):
@@ -378,13 +346,12 @@ def get_message( # type: ignore[override]
message_ids=[],
)
- else:
- return DefaultStreamMessage(
- type="stream",
- channel=channel,
- data={bDATA_KEY: body},
- message_ids=[],
- )
+ return DefaultStreamMessage(
+ type="stream",
+ channel=channel,
+ data={bDATA_KEY: body},
+ message_ids=[],
+ )
class _DestinationKwargs(TypedDict, total=False):
@@ -393,18 +360,14 @@ class _DestinationKwargs(TypedDict, total=False):
stream: str
-def _make_destionation_kwargs(
- channel: Optional[str],
- list: Optional[str],
- stream: Optional[str],
-) -> _DestinationKwargs:
+def _make_destionation_kwargs(cmd: RedisPublishCommand) -> _DestinationKwargs:
destination: _DestinationKwargs = {}
- if channel:
- destination["channel"] = channel
- if list:
- destination["list"] = list
- if stream:
- destination["stream"] = stream
+ if cmd.destination_type is DestinationType.Channel:
+ destination["channel"] = cmd.destination
+ if cmd.destination_type is DestinationType.List:
+ destination["list"] = cmd.destination
+ if cmd.destination_type is DestinationType.Stream:
+ destination["stream"] = cmd.destination
if len(destination) != 1:
raise SetupError(INCORRECT_SETUP_MSG)
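
For illustration, a minimal sketch of the single-destination invariant the reworked `_make_destionation_kwargs` enforces; `DestinationType` and the command object below are stand-ins, since the real `RedisPublishCommand` lives in FastStream's Redis internals and carries more state:

```python
# Sketch only: mirrors the dispatch above with stand-in types.
from dataclasses import dataclass
from enum import Enum


class DestinationType(str, Enum):
    Channel = "channel"
    List = "list"
    Stream = "stream"


@dataclass
class FakeRedisCommand:  # hypothetical stand-in for RedisPublishCommand
    destination: str
    destination_type: DestinationType


def make_destination_kwargs(cmd: FakeRedisCommand) -> dict:
    destination: dict = {}
    if cmd.destination_type is DestinationType.Channel:
        destination["channel"] = cmd.destination
    if cmd.destination_type is DestinationType.List:
        destination["list"] = cmd.destination
    if cmd.destination_type is DestinationType.Stream:
        destination["stream"] = cmd.destination
    # Exactly one of channel/list/stream must be resolved per command.
    if len(destination) != 1:
        raise ValueError("exactly one Redis destination is required")
    return destination


print(make_destination_kwargs(FakeRedisCommand("logs", DestinationType.Stream)))
# {'stream': 'logs'}
```
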
diff --git a/faststream/response/__init__.py b/faststream/response/__init__.py
new file mode 100644
index 0000000000..36c071f516
--- /dev/null
+++ b/faststream/response/__init__.py
@@ -0,0 +1,11 @@
+from .publish_type import PublishType
+from .response import BatchPublishCommand, PublishCommand, Response
+from .utils import ensure_response
+
+__all__ = (
+ "BatchPublishCommand",
+ "PublishCommand",
+ "PublishType",
+ "Response",
+ "ensure_response",
+)
diff --git a/faststream/response/publish_type.py b/faststream/response/publish_type.py
new file mode 100644
index 0000000000..ad74910a1e
--- /dev/null
+++ b/faststream/response/publish_type.py
@@ -0,0 +1,12 @@
+from enum import Enum
+
+
+class PublishType(str, Enum):
+ PUBLISH = "PUBLISH"
+ """Regular `broker/publisher.publish(...)` call."""
+
+ REPLY = "REPLY"
+ """Response to RPC/Reply-To request."""
+
+ REQUEST = "REQUEST"
+ """RPC request call."""
diff --git a/faststream/response/response.py b/faststream/response/response.py
new file mode 100644
index 0000000000..111a54149f
--- /dev/null
+++ b/faststream/response/response.py
@@ -0,0 +1,132 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional
+
+from .publish_type import PublishType
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+
+
+class Response:
+ def __init__(
+ self,
+ body: Any,
+ *,
+ headers: Optional["AnyDict"] = None,
+ correlation_id: Optional[str] = None,
+ ) -> None:
+ """Initialize a handler."""
+ self.body = body
+ self.headers = headers or {}
+ self.correlation_id = correlation_id
+
+ def as_publish_command(self) -> "PublishCommand":
+ """Method to transform handlers' Response result to DTO for publishers."""
+ return PublishCommand(
+ body=self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.PUBLISH,
+ )
+
+
+class PublishCommand(Response):
+ def __init__(
+ self,
+ body: Any,
+ *,
+ _publish_type: PublishType,
+ reply_to: str = "",
+ destination: str = "",
+ correlation_id: Optional[str] = None,
+ headers: Optional["AnyDict"] = None,
+ ) -> None:
+ super().__init__(
+ body,
+ headers=headers,
+ correlation_id=correlation_id,
+ )
+
+ self.destination = destination
+ self.reply_to = reply_to
+
+ self.publish_type = _publish_type
+
+ @property
+ def batch_bodies(self) -> tuple["Any", ...]:
+ if self.body is not None:
+ return (self.body,)
+ return ()
+
+ def add_headers(
+ self,
+ headers: "AnyDict",
+ *,
+ override: bool = True,
+ ) -> None:
+ if override:
+ self.headers |= headers
+ else:
+ self.headers = headers | self.headers
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: "PublishCommand",
+ ) -> "PublishCommand":
+ raise NotImplementedError
+
+
+class BatchPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ body: Any,
+ /,
+ *bodies: Any,
+ _publish_type: PublishType,
+ reply_to: str = "",
+ destination: str = "",
+ correlation_id: Optional[str] = None,
+ headers: Optional["AnyDict"] = None,
+ ) -> None:
+ super().__init__(
+ body,
+ headers=headers,
+ correlation_id=correlation_id,
+ destination=destination,
+ reply_to=reply_to,
+ _publish_type=_publish_type,
+ )
+ self.extra_bodies = bodies
+
+ @property
+ def batch_bodies(self) -> tuple["Any", ...]:
+ return (*super().batch_bodies, *self.extra_bodies)
+
+ @batch_bodies.setter
+ def batch_bodies(self, value: Sequence["Any"]) -> None:
+ if len(value) == 0:
+ self.body = None
+ self.extra_bodies = ()
+ else:
+ self.body = value[0]
+ self.extra_bodies = tuple(value[1:])
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: "PublishCommand",
+ *,
+ batch: bool = False,
+ ) -> "BatchPublishCommand":
+ raise NotImplementedError
+
+ @staticmethod
+ def _parse_bodies(body: Any, *, batch: bool = False) -> tuple[Any, tuple[Any, ...]]:
+ extra_bodies = []
+ if batch and isinstance(body, Sequence) and not isinstance(body, (str, bytes)):
+ if body:
+ body, extra_bodies = body[0], body[1:]
+ else:
+ body = None
+ return body, tuple(extra_bodies)
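
Assuming this revision is installed, the batch-body semantics above can be exercised directly: `batch_bodies` folds `body` and `extra_bodies` into one tuple, its setter splits a sequence back apart, and `_parse_bodies` only unpacks non-string sequences in batch mode:

```python
from faststream.response import BatchPublishCommand, PublishType

cmd = BatchPublishCommand(
    {"id": 1},
    {"id": 2},
    {"id": 3},
    destination="events",
    _publish_type=PublishType.PUBLISH,
)
assert cmd.batch_bodies == ({"id": 1}, {"id": 2}, {"id": 3})

cmd.batch_bodies = ()  # the setter clears body and extra_bodies together
assert cmd.body is None and cmd.extra_bodies == ()

# Strings and bytes are never unpacked, even in batch mode:
assert BatchPublishCommand._parse_bodies([1, 2, 3], batch=True) == (1, (2, 3))
assert BatchPublishCommand._parse_bodies("abc", batch=True) == ("abc", ())
```
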
diff --git a/faststream/response/utils.py b/faststream/response/utils.py
new file mode 100644
index 0000000000..c29f366a5d
--- /dev/null
+++ b/faststream/response/utils.py
@@ -0,0 +1,10 @@
+from typing import Any, Union
+
+from .response import Response
+
+
+def ensure_response(response: Union[Response, Any]) -> Response:
+ if isinstance(response, Response):
+ return response
+
+ return Response(response)
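
A short usage sketch: `ensure_response` lets the dispatch layer treat raw handler return values and explicit `Response` objects uniformly:

```python
from faststream.response import Response, ensure_response

# Plain return values get wrapped...
resp = ensure_response({"ok": True})
assert isinstance(resp, Response) and resp.body == {"ok": True}

# ...while an explicit Response passes through unchanged.
explicit = Response("done", headers={"x-trace": "1"})
assert ensure_response(explicit) is explicit
```
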
diff --git a/faststream/security.py b/faststream/security.py
index cc693057c6..e208d3023e 100644
--- a/faststream/security.py
+++ b/faststream/security.py
@@ -1,9 +1,9 @@
-from typing import TYPE_CHECKING, Dict, List, Optional
+from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from ssl import SSLContext
- from faststream.types import AnyDict
+ from faststream._internal.basic_types import AnyDict
class BaseSecurity:
@@ -30,11 +30,11 @@ def __init__(
self.use_ssl = use_ssl
self.ssl_context = ssl_context
- def get_requirement(self) -> List["AnyDict"]:
+ def get_requirement(self) -> list["AnyDict"]:
"""Get the security requirements."""
return []
- def get_schema(self) -> Dict[str, Dict[str, str]]:
+ def get_schema(self) -> dict[str, dict[str, str]]:
"""Get the security schema."""
return {}
@@ -68,11 +68,11 @@ def __init__(
self.username = username
self.password = password
- def get_requirement(self) -> List["AnyDict"]:
+ def get_requirement(self) -> list["AnyDict"]:
"""Get the security requirements for SASL/PLAINTEXT authentication."""
return [{"user-password": []}]
- def get_schema(self) -> Dict[str, Dict[str, str]]:
+ def get_schema(self) -> dict[str, dict[str, str]]:
"""Get the security schema for SASL/PLAINTEXT authentication."""
return {"user-password": {"type": "userPassword"}}
@@ -106,11 +106,11 @@ def __init__(
self.username = username
self.password = password
- def get_requirement(self) -> List["AnyDict"]:
+ def get_requirement(self) -> list["AnyDict"]:
"""Get the security requirements for SASL/SCRAM-SHA-256 authentication."""
return [{"scram256": []}]
- def get_schema(self) -> Dict[str, Dict[str, str]]:
+ def get_schema(self) -> dict[str, dict[str, str]]:
"""Get the security schema for SASL/SCRAM-SHA-256 authentication."""
return {"scram256": {"type": "scramSha256"}}
@@ -144,11 +144,11 @@ def __init__(
self.username = username
self.password = password
- def get_requirement(self) -> List["AnyDict"]:
+ def get_requirement(self) -> list["AnyDict"]:
"""Get the security requirements for SASL/SCRAM-SHA-512 authentication."""
return [{"scram512": []}]
- def get_schema(self) -> Dict[str, Dict[str, str]]:
+ def get_schema(self) -> dict[str, dict[str, str]]:
"""Get the security schema for SASL/SCRAM-SHA-512 authentication."""
return {"scram512": {"type": "scramSha512"}}
@@ -161,11 +161,11 @@ class SASLOAuthBearer(BaseSecurity):
__slots__ = ("ssl_context", "use_ssl")
- def get_requirement(self) -> List["AnyDict"]:
+ def get_requirement(self) -> list["AnyDict"]:
"""Get the security requirements for SASL/OAUTHBEARER authentication."""
return [{"oauthbearer": []}]
- def get_schema(self) -> Dict[str, Dict[str, str]]:
+ def get_schema(self) -> dict[str, dict[str, str]]:
"""Get the security schema for SASL/OAUTHBEARER authentication."""
return {"oauthbearer": {"type": "oauth2", "$ref": ""}}
@@ -178,10 +178,10 @@ class SASLGSSAPI(BaseSecurity):
__slots__ = ("ssl_context", "use_ssl")
- def get_requirement(self) -> List["AnyDict"]:
+ def get_requirement(self) -> list["AnyDict"]:
"""Get the security requirements for SASL/GSSAPI authentication."""
return [{"gssapi": []}]
- def get_schema(self) -> Dict[str, Dict[str, str]]:
+ def get_schema(self) -> dict[str, dict[str, str]]:
"""Get the security schema for SASL/GSSAPI authentication."""
return {"gssapi": {"type": "gssapi"}}
diff --git a/faststream/specification/__init__.py b/faststream/specification/__init__.py
new file mode 100644
index 0000000000..7738408d36
--- /dev/null
+++ b/faststream/specification/__init__.py
@@ -0,0 +1,10 @@
+from .asyncapi.factory import AsyncAPI
+from .schema.extra import Contact, ExternalDocs, License, Tag
+
+__all__ = (
+ "AsyncAPI",
+ "Contact",
+ "ExternalDocs",
+ "License",
+ "Tag",
+)
diff --git a/faststream/specification/asyncapi/__init__.py b/faststream/specification/asyncapi/__init__.py
new file mode 100644
index 0000000000..fe93b5941d
--- /dev/null
+++ b/faststream/specification/asyncapi/__init__.py
@@ -0,0 +1,10 @@
+"""AsyncAPI related functions."""
+
+from faststream.specification.asyncapi.site import get_asyncapi_html
+
+from .factory import AsyncAPI
+
+__all__ = (
+ "AsyncAPI",
+ "get_asyncapi_html",
+)
diff --git a/faststream/specification/asyncapi/factory.py b/faststream/specification/asyncapi/factory.py
new file mode 100644
index 0000000000..cb3657e6fd
--- /dev/null
+++ b/faststream/specification/asyncapi/factory.py
@@ -0,0 +1,72 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Literal, Optional, Union
+
+from faststream.specification.base.specification import Specification
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, AnyHttpUrl
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream.specification.schema import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+ )
+
+
+def AsyncAPI( # noqa: N802
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ title: str = "FastStream",
+ app_version: str = "0.1.0",
+ schema_version: Union[Literal["3.0.0", "2.6.0"], str] = "3.0.0",
+ description: str = "",
+ terms_of_service: Optional["AnyHttpUrl"] = None,
+ license: Optional[Union["License", "LicenseDict", "AnyDict"]] = None,
+ contact: Optional[Union["Contact", "ContactDict", "AnyDict"]] = None,
+ tags: Sequence[Union["Tag", "TagDict", "AnyDict"]] = (),
+ external_docs: Optional[
+ Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]
+ ] = None,
+ identifier: Optional[str] = None,
+) -> Specification:
+ if schema_version.startswith("3.0."):
+ from .v3_0_0.facade import AsyncAPI3
+
+ return AsyncAPI3(
+ broker,
+ title=title,
+ app_version=app_version,
+ schema_version=schema_version,
+ description=description,
+ terms_of_service=terms_of_service,
+ contact=contact,
+ license=license,
+ identifier=identifier,
+ tags=tags,
+ external_docs=external_docs,
+ )
+
+ if schema_version.startswith("2.6."):
+ from .v2_6_0.facade import AsyncAPI2
+
+ return AsyncAPI2(
+ broker,
+ title=title,
+ app_version=app_version,
+ schema_version=schema_version,
+ description=description,
+ terms_of_service=terms_of_service,
+ contact=contact,
+ license=license,
+ identifier=identifier,
+ tags=tags,
+ external_docs=external_docs,
+ )
+
+ msg = f"Unsupported schema version: {schema_version}"
+ raise NotImplementedError(msg)
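
Usage is a single call; dispatch happens on the version-string prefix, and anything outside `3.0.*`/`2.6.*` raises `NotImplementedError`. A sketch, assuming a Kafka broker (any `BrokerUsecase` works):

```python
from faststream.kafka import KafkaBroker
from faststream.specification import AsyncAPI

broker = KafkaBroker("localhost:9092")

docs = AsyncAPI(
    broker,
    title="OrdersService",
    schema_version="2.6.0",  # "3.0.0" would route to AsyncAPI3 instead
)
print(docs.to_yaml())
```
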
diff --git a/faststream/specification/asyncapi/message.py b/faststream/specification/asyncapi/message.py
new file mode 100644
index 0000000000..fe692d7085
--- /dev/null
+++ b/faststream/specification/asyncapi/message.py
@@ -0,0 +1,139 @@
+from collections.abc import Sequence
+from inspect import isclass
+from typing import TYPE_CHECKING, Optional, overload
+
+from pydantic import BaseModel, create_model
+
+from faststream._internal._compat import (
+ DEF_KEY,
+ PYDANTIC_V2,
+ get_model_fields,
+ model_schema,
+)
+from faststream._internal.basic_types import AnyDict
+
+if TYPE_CHECKING:
+ from fast_depends.core import CallModel
+
+
+def parse_handler_params(call: "CallModel", prefix: str = "") -> AnyDict:
+ """Parses the handler parameters."""
+ model = getattr(call, "serializer", call).model
+ assert model # nosec B101
+
+ body = get_model_schema(
+ create_model(
+ model.__name__,
+ **{p.field_name: (p.field_type, p.default_value) for p in call.flat_params},
+ ),
+ prefix=prefix,
+ exclude=tuple(call.custom_fields.keys()),
+ )
+
+ if body is None:
+ return {"title": "EmptyPayload", "type": "null"}
+
+ return body
+
+
+@overload
+def get_response_schema(call: None, prefix: str = "") -> None: ...
+
+
+@overload
+def get_response_schema(call: "CallModel", prefix: str = "") -> AnyDict: ...
+
+
+def get_response_schema(
+ call: Optional["CallModel"],
+ prefix: str = "",
+) -> Optional[AnyDict]:
+ """Get the response schema for a given call."""
+ return get_model_schema(
+ getattr(
+ call,
+ "response_model",
+ None,
+ ), # NOTE: FastAPI Dependant object compatibility
+ prefix=prefix,
+ )
+
+
+@overload
+def get_model_schema(
+ call: None,
+ prefix: str = "",
+ exclude: Sequence[str] = (),
+) -> None: ...
+
+
+@overload
+def get_model_schema(
+ call: type[BaseModel],
+ prefix: str = "",
+ exclude: Sequence[str] = (),
+) -> AnyDict: ...
+
+
+def get_model_schema(
+ call: Optional[type[BaseModel]],
+ prefix: str = "",
+ exclude: Sequence[str] = (),
+) -> Optional[AnyDict]:
+ """Get the schema of a model."""
+ if call is None:
+ return None
+
+ params = {k: v for k, v in get_model_fields(call).items() if k not in exclude}
+ params_number = len(params)
+
+ if params_number == 0:
+ return None
+
+ model = None
+ use_original_model = False
+ if params_number == 1:
+ name, param = next(iter(params.items()))
+ if (
+ param.annotation
+ and isclass(param.annotation)
+ and issubclass(param.annotation, BaseModel) # NOTE: 3.7-3.10 compatibility
+ ):
+ model = param.annotation
+ use_original_model = True
+
+ if model is None:
+ model = call
+
+ body: AnyDict = model_schema(model)
+ body["properties"] = body.get("properties", {})
+ for i in exclude:
+ body["properties"].pop(i, None)
+ if required := body.get("required"):
+ body["required"] = list(filter(lambda x: x not in exclude, required))
+
+ if params_number == 1 and not use_original_model:
+ param_body: AnyDict = body.get("properties", {})
+ param_body = param_body[name]
+
+ if defs := body.get(DEF_KEY):
+ # single argument with useless reference
+ if param_body.get("$ref"):
+ ref_obj: AnyDict = next(iter(defs.values()))
+ return ref_obj
+ param_body[DEF_KEY] = defs
+
+ original_title = param.title if PYDANTIC_V2 else param.field_info.title
+
+ if original_title:
+ use_original_model = True
+ param_body["title"] = original_title
+ else:
+ param_body["title"] = name
+
+ body = param_body
+
+ if not use_original_model:
+ body["title"] = f"{prefix}:Payload"
+
+ return body
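
To make the single-field unwrapping concrete, a sketch against plain Pydantic models (assuming Pydantic v2 field introspection; the outputs in the comments are expected values, not verified here):

```python
from pydantic import BaseModel

from faststream.specification.asyncapi.message import get_model_schema


class User(BaseModel):
    name: str
    age: int


class Wrapper(BaseModel):
    user: User  # a single BaseModel field triggers use_original_model


# Multi-field model: the schema is retitled "<prefix>:Payload".
print(get_model_schema(User, prefix="OnUser")["title"])  # OnUser:Payload

# Single BaseModel field: the original model's schema (and title) is kept.
print(get_model_schema(Wrapper)["title"])  # User
```
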
diff --git a/faststream/asyncapi/site.py b/faststream/specification/asyncapi/site.py
similarity index 88%
rename from faststream/asyncapi/site.py
rename to faststream/specification/asyncapi/site.py
index 8cc837c69e..65013c3480 100644
--- a/faststream/asyncapi/site.py
+++ b/faststream/specification/asyncapi/site.py
@@ -1,24 +1,24 @@
from functools import partial
from http import server
-from typing import TYPE_CHECKING, Any, Dict
+from typing import TYPE_CHECKING, Any
from urllib.parse import parse_qs, urlparse
-from faststream._compat import json_dumps
-from faststream.log import logger
+from faststream._internal._compat import json_dumps
+from faststream._internal.log import logger
if TYPE_CHECKING:
- from faststream.asyncapi.schema import Schema
+ from faststream.specification.base.specification import Specification
-ASYNCAPI_JS_DEFAULT_URL = "https://unpkg.com/@asyncapi/react-component@1.0.0-next.47/browser/standalone/index.js"
+ASYNCAPI_JS_DEFAULT_URL = "https://unpkg.com/@asyncapi/react-component@1.0.0-next.54/browser/standalone/index.js"
ASYNCAPI_CSS_DEFAULT_URL = (
- "https://unpkg.com/@asyncapi/react-component@1.0.0-next.46/styles/default.min.css"
+ "https://unpkg.com/@asyncapi/react-component@1.0.0-next.54/styles/default.min.css"
)
def get_asyncapi_html(
- schema: "Schema",
+ schema: "Specification",
sidebar: bool = True,
info: bool = True,
servers: bool = True,
@@ -27,7 +27,6 @@ def get_asyncapi_html(
schemas: bool = True,
errors: bool = True,
expand_message_examples: bool = True,
- title: str = "FastStream",
asyncapi_js_url: str = ASYNCAPI_JS_DEFAULT_URL,
asyncapi_css_url: str = ASYNCAPI_CSS_DEFAULT_URL,
) -> str:
@@ -63,7 +62,7 @@ def get_asyncapi_html(
"""
f"""
- {title} AsyncAPI
+ {schema.schema.info.title} AsyncAPI
"""
"""
@@ -103,7 +102,7 @@ def get_asyncapi_html(
def serve_app(
- schema: "Schema",
+ schema: "Specification",
host: str,
port: int,
) -> None:
@@ -121,13 +120,13 @@ class _Handler(server.BaseHTTPRequestHandler):
def __init__(
self,
*args: Any,
- schema: "Schema",
+ schema: "Specification",
**kwargs: Any,
) -> None:
self.schema = schema
super().__init__(*args, **kwargs)
- def get_query_params(self) -> Dict[str, bool]:
+ def get_query_params(self) -> dict[str, bool]:
return {
i: _str_to_bool(next(iter(j))) if j else False
for i, j in parse_qs(urlparse(self.path).query).items()
@@ -148,7 +147,6 @@ def do_GET(self) -> None: # noqa: N802
schemas=query_dict.get("schemas", True),
errors=query_dict.get("errors", True),
expand_message_examples=query_dict.get("expandMessageExamples", True),
- title=self.schema.info.title,
)
body = html.encode(encoding=encoding)
@@ -160,4 +158,4 @@ def do_GET(self) -> None: # noqa: N802
def _str_to_bool(v: str) -> bool:
- return v.lower() in ("1", "t", "true", "y", "yes")
+ return v.lower() in {"1", "t", "true", "y", "yes"}
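
Since the page title is now read from `schema.schema.info.title`, callers no longer pass `title=`. A rendering sketch (the broker choice is arbitrary):

```python
from pathlib import Path

from faststream.kafka import KafkaBroker
from faststream.specification import AsyncAPI
from faststream.specification.asyncapi import get_asyncapi_html

schema = AsyncAPI(KafkaBroker("localhost:9092"), title="OrdersService")

html = get_asyncapi_html(schema, sidebar=True, expand_message_examples=False)
Path("asyncapi.html").write_text(html)
```
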
diff --git a/faststream/specification/asyncapi/utils.py b/faststream/specification/asyncapi/utils.py
new file mode 100644
index 0000000000..7f16a215dc
--- /dev/null
+++ b/faststream/specification/asyncapi/utils.py
@@ -0,0 +1,84 @@
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+
+
+def to_camelcase(*names: str) -> str:
+ return " ".join(names).replace("_", " ").title().replace(" ", "")
+
+
+def resolve_payloads(
+ payloads: list[tuple["AnyDict", str]],
+ extra: str = "",
+ served_words: int = 1,
+) -> "AnyDict":
+ ln = len(payloads)
+ payload: AnyDict
+ if ln > 1:
+ one_of_payloads = {}
+
+ for body, handler_name in payloads:
+ title = body["title"]
+ words = title.split(":")
+
+ if len(words) > 1: # not pydantic model case
+ body["title"] = title = ":".join(
+ filter(
+ bool,
+ (
+ handler_name,
+ extra if extra not in words else "",
+ *words[served_words:],
+ ),
+ ),
+ )
+
+ one_of_payloads[title] = body
+
+ payload = {"oneOf": one_of_payloads}
+
+ elif ln == 1:
+ payload = payloads[0][0]
+
+ else:
+ payload = {}
+
+ return payload
+
+
+def clear_key(key: str) -> str:
+ return key.replace("/", ".")
+
+
+def move_pydantic_refs(
+ original: Any,
+ key: str,
+) -> Any:
+ """Remove pydantic references and replacem them by real schemas."""
+ if not isinstance(original, dict):
+ return original
+
+ data = original.copy()
+
+ for k in data:
+ item = data[k]
+
+ if isinstance(item, str):
+ if key in item:
+ data[k] = data[k].replace(key, "components/schemas")
+
+ elif isinstance(item, dict):
+ data[k] = move_pydantic_refs(data[k], key)
+
+ elif isinstance(item, list):
+ for i in range(len(data[k])):
+ data[k][i] = move_pydantic_refs(item[i], key)
+
+ if (
+ isinstance(discriminator := data.get("discriminator"), dict)
+ and "propertyName" in discriminator
+ ):
+ data["discriminator"] = discriminator["propertyName"]
+
+ return data
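
These helpers are pure and easy to sanity-check in isolation; expected values are shown as comments:

```python
from faststream.specification.asyncapi.utils import (
    clear_key,
    resolve_payloads,
    to_camelcase,
)

print(to_camelcase("user_created", "event"))  # UserCreatedEvent
print(clear_key("user/created"))              # user.created

# One payload passes through unchanged...
single = [({"title": "A:Payload", "type": "object"}, "HandlerA")]
print(resolve_payloads(single))  # {'title': 'A:Payload', 'type': 'object'}

# ...while several are merged under oneOf, retitled per handler.
many = [({"title": "A:Payload"}, "HandlerA"), ({"title": "B:Payload"}, "HandlerB")]
print(sorted(resolve_payloads(many)["oneOf"]))  # ['HandlerA:Payload', 'HandlerB:Payload']
```
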
diff --git a/faststream/specification/asyncapi/v2_6_0/__init__.py b/faststream/specification/asyncapi/v2_6_0/__init__.py
new file mode 100644
index 0000000000..dd1af249b3
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/__init__.py
@@ -0,0 +1,7 @@
+from .facade import AsyncAPI2
+from .generate import get_app_schema
+
+__all__ = (
+ "AsyncAPI2",
+ "get_app_schema",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/facade.py b/faststream/specification/asyncapi/v2_6_0/facade.py
new file mode 100644
index 0000000000..fcb3f66f38
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/facade.py
@@ -0,0 +1,77 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from faststream.specification.base.specification import Specification
+
+from .generate import get_app_schema
+from .schema import ApplicationSchema
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, AnyHttpUrl
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream.specification.schema import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+ )
+
+
+class AsyncAPI2(Specification):
+ def __init__(
+ self,
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ title: str = "FastStream",
+ app_version: str = "0.1.0",
+ schema_version: str = "3.0.0",
+ description: str = "",
+ terms_of_service: Optional["AnyHttpUrl"] = None,
+ contact: Optional[Union["Contact", "ContactDict", "AnyDict"]] = None,
+ license: Optional[Union["License", "LicenseDict", "AnyDict"]] = None,
+ identifier: Optional[str] = None,
+ tags: Sequence[Union["Tag", "TagDict", "AnyDict"]] = (),
+ external_docs: Optional[
+ Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]
+ ] = None,
+ ) -> None:
+ self.broker = broker
+ self.title = title
+ self.app_version = app_version
+ self.schema_version = schema_version
+ self.description = description
+ self.terms_of_service = terms_of_service
+ self.contact = contact
+ self.license = license
+ self.identifier = identifier
+ self.tags = tags
+ self.external_docs = external_docs
+
+ def to_json(self) -> str:
+ return self.schema.to_json()
+
+ def to_jsonable(self) -> Any:
+ return self.schema.to_jsonable()
+
+ def to_yaml(self) -> str:
+ return self.schema.to_yaml()
+
+ @property
+ def schema(self) -> ApplicationSchema:
+ return get_app_schema(
+ self.broker,
+ title=self.title,
+ app_version=self.app_version,
+ schema_version=self.schema_version,
+ description=self.description,
+ terms_of_service=self.terms_of_service,
+ contact=self.contact,
+ license=self.license,
+ identifier=self.identifier,
+ tags=self.tags,
+ external_docs=self.external_docs,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/generate.py b/faststream/specification/asyncapi/v2_6_0/generate.py
new file mode 100644
index 0000000000..139c1ff4d0
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/generate.py
@@ -0,0 +1,234 @@
+import warnings
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from faststream._internal._compat import DEF_KEY
+from faststream._internal.basic_types import AnyDict, AnyHttpUrl
+from faststream._internal.constants import ContentTypes
+from faststream.specification.asyncapi.utils import clear_key, move_pydantic_refs
+from faststream.specification.asyncapi.v2_6_0.schema import (
+ ApplicationInfo,
+ ApplicationSchema,
+ Channel,
+ Components,
+ Contact,
+ ExternalDocs,
+ License,
+ Message,
+ Reference,
+ Server,
+ Tag,
+)
+
+if TYPE_CHECKING:
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream._internal.types import ConnectionType, MsgType
+ from faststream.specification.schema.extra import (
+ Contact as SpecContact,
+ ContactDict,
+ ExternalDocs as SpecDocs,
+ ExternalDocsDict,
+ License as SpecLicense,
+ LicenseDict,
+ Tag as SpecTag,
+ TagDict,
+ )
+
+
+def get_app_schema(
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ title: str,
+ app_version: str,
+ schema_version: str,
+ description: str,
+ terms_of_service: Optional["AnyHttpUrl"],
+ contact: Optional[Union["SpecContact", "ContactDict", "AnyDict"]],
+ license: Optional[Union["SpecLicense", "LicenseDict", "AnyDict"]],
+ identifier: Optional[str],
+ tags: Sequence[Union["SpecTag", "TagDict", "AnyDict"]],
+ external_docs: Optional[Union["SpecDocs", "ExternalDocsDict", "AnyDict"]],
+) -> ApplicationSchema:
+ """Get the application schema."""
+ broker._setup()
+
+ servers = get_broker_server(broker)
+ channels = get_broker_channels(broker)
+
+ messages: dict[str, Message] = {}
+ payloads: dict[str, AnyDict] = {}
+
+ for channel in channels.values():
+ channel.servers = list(servers.keys())
+
+ for channel_name, ch in channels.items():
+ resolve_channel_messages(ch, channel_name, payloads, messages)
+
+ return ApplicationSchema(
+ info=ApplicationInfo(
+ title=title,
+ version=app_version,
+ description=description,
+ termsOfService=terms_of_service,
+ contact=Contact.from_spec(contact),
+ license=License.from_spec(license),
+ ),
+ tags=[Tag.from_spec(tag) for tag in tags] or None,
+ externalDocs=ExternalDocs.from_spec(external_docs),
+ asyncapi=schema_version,
+ defaultContentType=ContentTypes.JSON.value,
+ id=identifier,
+ servers=servers,
+ channels=channels,
+ components=Components(
+ messages=messages,
+ schemas=payloads,
+ securitySchemes=None
+ if broker.security is None
+ else broker.security.get_schema(),
+ ),
+ )
+
+
+def resolve_channel_messages(
+ channel: Channel,
+ channel_name: str,
+ payloads: dict[str, AnyDict],
+ messages: dict[str, Message],
+) -> None:
+ if channel.subscribe is not None:
+ assert isinstance(channel.subscribe.message, Message)
+
+ channel.subscribe.message = _resolve_msg_payloads(
+ channel.subscribe.message,
+ channel_name,
+ payloads,
+ messages,
+ )
+
+ if channel.publish is not None:
+ assert isinstance(channel.publish.message, Message)
+
+ channel.publish.message = _resolve_msg_payloads(
+ channel.publish.message,
+ channel_name,
+ payloads,
+ messages,
+ )
+
+
+def get_broker_server(
+ broker: "BrokerUsecase[MsgType, ConnectionType]",
+) -> dict[str, Server]:
+ """Get the broker server for an application."""
+ servers = {}
+
+ broker_meta: AnyDict = {
+ "protocol": broker.protocol,
+ "protocolVersion": broker.protocol_version,
+ "description": broker.description,
+ "tags": [Tag.from_spec(tag) for tag in broker.tags] or None,
+ "security": broker.security.get_requirement() if broker.security else None,
+ # TODO
+ # "variables": "",
+ # "bindings": "",
+ }
+
+ urls = broker.url if isinstance(broker.url, list) else [broker.url]
+
+ for i, url in enumerate(urls, 1):
+ server_name = "development" if len(urls) == 1 else f"Server{i}"
+ servers[server_name] = Server(url=url, **broker_meta)
+
+ return servers
+
+
+def get_broker_channels(
+ broker: "BrokerUsecase[MsgType, ConnectionType]",
+) -> dict[str, Channel]:
+ """Get the broker channels for an application."""
+ channels = {}
+
+ for h in broker._subscribers:
+ for key, sub in h.schema().items():
+ if key in channels:
+ warnings.warn(
+ f"Overwrite channel handler, channels have the same names: `{key}`",
+ RuntimeWarning,
+ stacklevel=1,
+ )
+
+ channels[key] = Channel.from_sub(sub)
+
+ for p in broker._publishers:
+ for key, pub in p.schema().items():
+ if key in channels:
+ warnings.warn(
+ f"Overwrite channel handler, channels have the same names: `{key}`",
+ RuntimeWarning,
+ stacklevel=1,
+ )
+
+ channels[key] = Channel.from_pub(pub)
+
+ return channels
+
+
+def _resolve_msg_payloads(
+ m: Message,
+ channel_name: str,
+ payloads: AnyDict,
+ messages: AnyDict,
+) -> Reference:
+ """Replace message payload by reference and normalize payloads.
+
+ Payloads and messages are editable dicts to store schemas for reference in AsyncAPI.
+ """
+ one_of_list: list[Reference] = []
+ m.payload = move_pydantic_refs(m.payload, DEF_KEY)
+
+ if DEF_KEY in m.payload:
+ payloads.update(m.payload.pop(DEF_KEY))
+
+ one_of = m.payload.get("oneOf")
+ if isinstance(one_of, dict):
+ for p_title, p in one_of.items():
+ formatted_payload_title = clear_key(p_title)
+ payloads.update(p.pop(DEF_KEY, {}))
+ if formatted_payload_title not in payloads:
+ payloads[formatted_payload_title] = p
+ one_of_list.append(
+ Reference(**{"$ref": f"#/components/schemas/{formatted_payload_title}"})
+ )
+
+ elif one_of is not None:
+ # Discriminator case
+ for p in one_of:
+ p_value = next(iter(p.values()))
+ p_title = p_value.split("/")[-1]
+ p_title = clear_key(p_title)
+ if p_title not in payloads:
+ payloads[p_title] = p
+ one_of_list.append(Reference(**{"$ref": f"#/components/schemas/{p_title}"}))
+
+ if not one_of_list:
+ payloads.update(m.payload.pop(DEF_KEY, {}))
+ p_title = m.payload.get("title", f"{channel_name}Payload")
+ p_title = clear_key(p_title)
+ if p_title in payloads and payloads[p_title] != m.payload:
+ warnings.warn(
+ f"Overwriting the message schema, data types have the same name: `{p_title}`",
+ RuntimeWarning,
+ stacklevel=1,
+ )
+
+ payloads[p_title] = m.payload
+ m.payload = {"$ref": f"#/components/schemas/{p_title}"}
+
+ else:
+ m.payload["oneOf"] = one_of_list
+
+ assert m.title # nosec B101
+ message_title = clear_key(m.title)
+ messages[message_title] = m
+ return Reference(**{"$ref": f"#/components/messages/{message_title}"})
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/__init__.py
new file mode 100644
index 0000000000..e0cbcbd7b2
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/__init__.py
@@ -0,0 +1,30 @@
+from .channels import Channel
+from .components import Components
+from .contact import Contact
+from .docs import ExternalDocs
+from .info import ApplicationInfo
+from .license import License
+from .message import CorrelationId, Message
+from .operations import Operation
+from .schema import ApplicationSchema
+from .servers import Server, ServerVariable
+from .tag import Tag
+from .utils import Parameter, Reference
+
+__all__ = (
+ "ApplicationInfo",
+ "ApplicationSchema",
+ "Channel",
+ "Channel",
+ "Components",
+ "Contact",
+ "CorrelationId",
+ "ExternalDocs",
+ "License",
+ "Message",
+ "Operation",
+ "Parameter",
+ "Reference",
+ "Server",
+ "ServerVariable",
+ "Tag",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/__init__.py
new file mode 100644
index 0000000000..84b0fa22e8
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/__init__.py
@@ -0,0 +1,6 @@
+from .main import ChannelBinding, OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py
new file mode 100644
index 0000000000..aa729dce29
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py
@@ -0,0 +1,144 @@
+"""AsyncAPI AMQP bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/amqp
+"""
+
+from typing import Literal, Optional, overload
+
+from pydantic import BaseModel, Field
+from typing_extensions import Self
+
+from faststream.specification.schema.bindings import amqp
+
+
+class Queue(BaseModel):
+ """A class to represent a queue.
+
+ Attributes:
+ name : name of the queue
+ durable : indicates if the queue is durable
+ exclusive : indicates if the queue is exclusive
+ autoDelete : indicates if the queue should be automatically deleted
+ vhost : virtual host of the queue (default is "/")
+ """
+
+ name: str
+ durable: bool
+ exclusive: bool
+ autoDelete: bool
+ vhost: str = "/"
+
+ @overload
+ @classmethod
+ def from_spec(cls, binding: None, vhost: str) -> None: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, binding: amqp.Queue, vhost: str) -> Self: ...
+
+ @classmethod
+ def from_spec(cls, binding: Optional[amqp.Queue], vhost: str) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ name=binding.name,
+ durable=binding.durable,
+ exclusive=binding.exclusive,
+ autoDelete=binding.auto_delete,
+ vhost=vhost,
+ )
+
+
+class Exchange(BaseModel):
+ """A class to represent an exchange.
+
+ Attributes:
+ name : name of the exchange (optional)
+ type : type of the exchange, can be one of "default", "direct", "topic", "fanout", "headers"
+ durable : whether the exchange is durable (optional)
+ autoDelete : whether the exchange is automatically deleted (optional)
+ vhost : virtual host of the exchange, default is "/"
+ """
+
+ name: Optional[str] = None
+ type: Literal[
+ "default",
+ "direct",
+ "topic",
+ "fanout",
+ "headers",
+ "x-delayed-message",
+ "x-consistent-hash",
+ "x-modulus-hash",
+ ]
+ durable: Optional[bool] = None
+ autoDelete: Optional[bool] = None
+ vhost: str = "/"
+
+ @overload
+ @classmethod
+ def from_spec(cls, binding: None, vhost: str) -> None: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, binding: amqp.Exchange, vhost: str) -> Self: ...
+
+ @classmethod
+ def from_spec(cls, binding: Optional[amqp.Exchange], vhost: str) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ name=binding.name,
+ type=binding.type,
+ durable=binding.durable,
+ autoDelete=binding.auto_delete,
+ vhost=vhost,
+ )
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel binding.
+
+ Attributes:
+ is_ : Type of binding, can be "queue" or "routingKey"
+ bindingVersion : Version of the binding
+ queue : Optional queue object
+ exchange : Optional exchange object
+ """
+
+ is_: Literal["queue", "routingKey"] = Field(..., alias="is")
+ bindingVersion: str = "0.2.0"
+ queue: Optional[Queue] = None
+ exchange: Optional[Exchange] = None
+
+ @classmethod
+ def from_sub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ **{
+ "is": "routingKey",
+ "queue": Queue.from_spec(binding.queue, binding.virtual_host)
+ if binding.exchange.is_respect_routing_key
+ else None,
+ "exchange": Exchange.from_spec(binding.exchange, binding.virtual_host),
+ },
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ **{
+ "is": "routingKey",
+ "queue": Queue.from_spec(binding.queue, binding.virtual_host)
+ if binding.exchange.is_respect_routing_key and binding.queue.name
+ else None,
+ "exchange": Exchange.from_spec(binding.exchange, binding.virtual_host),
+ },
+ )
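
Because the AMQP binding field is literally named `is` (a Python keyword), the model stores it as `is_` behind a Pydantic alias and populates it via the alias, as the `from_sub`/`from_pub` constructors above do. A sketch assuming Pydantic v2 (`.dict(by_alias=True)` under v1):

```python
from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp import (
    ChannelBinding,
)

binding = ChannelBinding(**{"is": "routingKey"})
print(binding.is_)                              # routingKey
print(binding.model_dump(by_alias=True)["is"])  # routingKey
```
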
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py
new file mode 100644
index 0000000000..47ed19af93
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py
@@ -0,0 +1,59 @@
+"""AsyncAPI AMQP bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/amqp
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel, PositiveInt
+from typing_extensions import Self
+
+from faststream.specification.schema.bindings import amqp
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ cc : optional string representing the cc
+ ack : boolean indicating if the operation is acknowledged
+ replyTo : optional dictionary representing the replyTo
+ bindingVersion : string representing the binding version
+ """
+
+ cc: Optional[str] = None
+ ack: bool
+ replyTo: Optional[str] = None
+ deliveryMode: Optional[int] = None
+ mandatory: Optional[bool] = None
+ priority: Optional[PositiveInt] = None
+
+ bindingVersion: str = "0.2.0"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ cc=binding.routing_key if binding.exchange.is_respect_routing_key else None,
+ ack=binding.ack,
+ replyTo=binding.reply_to,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
+ mandatory=binding.mandatory,
+ priority=binding.priority,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ cc=binding.routing_key if binding.exchange.is_respect_routing_key else None,
+ ack=binding.ack,
+ replyTo=binding.reply_to,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
+ mandatory=binding.mandatory,
+ priority=binding.priority,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/channel.py
new file mode 100644
index 0000000000..1f304410ba
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/channel.py
@@ -0,0 +1,52 @@
+"""AsyncAPI Kafka bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/kafka
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel, PositiveInt
+from typing_extensions import Self
+
+from faststream.specification.schema.bindings import kafka
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent a channel binding.
+
+ Attributes:
+ topic : optional string representing the topic
+ partitions : optional positive integer representing the number of partitions
+ replicas : optional positive integer representing the number of replicas
+ bindingVersion : string representing the binding version
+ """
+
+ topic: Optional[str] = None
+ partitions: Optional[PositiveInt] = None
+ replicas: Optional[PositiveInt] = None
+ bindingVersion: str = "0.4.0"
+
+ # TODO:
+ # topicConfiguration
+
+ @classmethod
+ def from_sub(cls, binding: Optional[kafka.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ topic=binding.topic,
+ partitions=binding.partitions,
+ replicas=binding.replicas,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[kafka.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ topic=binding.topic,
+ partitions=binding.partitions,
+ replicas=binding.replicas,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/operation.py
new file mode 100644
index 0000000000..4155ce220e
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/kafka/operation.py
@@ -0,0 +1,50 @@
+"""AsyncAPI Kafka bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/kafka
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.bindings import kafka
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ groupId : optional dictionary representing the group ID
+ clientId : optional dictionary representing the client ID
+ replyTo : optional dictionary representing the reply-to
+ bindingVersion : version of the binding (default: "0.4.0")
+ """
+
+ groupId: Optional[AnyDict] = None
+ clientId: Optional[AnyDict] = None
+ replyTo: Optional[AnyDict] = None
+ bindingVersion: str = "0.4.0"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[kafka.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ groupId=binding.group_id,
+ clientId=binding.client_id,
+ replyTo=binding.reply_to,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[kafka.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ groupId=binding.group_id,
+ clientId=binding.client_id,
+ replyTo=binding.reply_to,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py
new file mode 100644
index 0000000000..bf4b7dbd98
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py
@@ -0,0 +1,116 @@
+from typing import Optional, overload
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.asyncapi.v2_6_0.schema.bindings import (
+ amqp as amqp_bindings,
+ kafka as kafka_bindings,
+ nats as nats_bindings,
+ redis as redis_bindings,
+ sqs as sqs_bindings,
+)
+from faststream.specification.schema.bindings import ChannelBinding as SpecBinding
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel bindings.
+
+ Attributes:
+ amqp : AMQP channel binding (optional)
+ kafka : Kafka channel binding (optional)
+ sqs : SQS channel binding (optional)
+ nats : NATS channel binding (optional)
+ redis : Redis channel binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.ChannelBinding] = None
+ kafka: Optional[kafka_bindings.ChannelBinding] = None
+ sqs: Optional[sqs_bindings.ChannelBinding] = None
+ nats: Optional[nats_bindings.ChannelBinding] = None
+ redis: Optional[redis_bindings.ChannelBinding] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @overload
+ @classmethod
+ def from_sub(cls, binding: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_sub(cls, binding: SpecBinding) -> Self: ...
+
+ @classmethod
+ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
+
+ @overload
+ @classmethod
+ def from_pub(cls, binding: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_pub(cls, binding: SpecBinding) -> Self: ...
+
+ @classmethod
+ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py
new file mode 100644
index 0000000000..7367b7921f
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py
@@ -0,0 +1,116 @@
+from typing import Optional, overload
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.asyncapi.v2_6_0.schema.bindings import (
+ amqp as amqp_bindings,
+ kafka as kafka_bindings,
+ nats as nats_bindings,
+ redis as redis_bindings,
+ sqs as sqs_bindings,
+)
+from faststream.specification.schema.bindings import OperationBinding as SpecBinding
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ amqp : AMQP operation binding (optional)
+ kafka : Kafka operation binding (optional)
+ sqs : SQS operation binding (optional)
+ nats : NATS operation binding (optional)
+ redis : Redis operation binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.OperationBinding] = None
+ kafka: Optional[kafka_bindings.OperationBinding] = None
+ sqs: Optional[sqs_bindings.OperationBinding] = None
+ nats: Optional[nats_bindings.OperationBinding] = None
+ redis: Optional[redis_bindings.OperationBinding] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @overload
+ @classmethod
+ def from_sub(cls, binding: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_sub(cls, binding: SpecBinding) -> Self: ...
+
+ @classmethod
+ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
+
+ @overload
+ @classmethod
+ def from_pub(cls, binding: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_pub(cls, binding: SpecBinding) -> Self: ...
+
+ @classmethod
+ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/channel.py
new file mode 100644
index 0000000000..4cc83faddb
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/channel.py
@@ -0,0 +1,47 @@
+"""AsyncAPI NATS bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/nats
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream.specification.schema.bindings import nats
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel binding.
+
+ Attributes:
+ subject : subject of the channel binding
+ queue : optional queue for the channel binding
+ bindingVersion : version of the channel binding, default is "custom"
+ """
+
+ subject: str
+ queue: Optional[str] = None
+ bindingVersion: str = "custom"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[nats.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ subject=binding.subject,
+ queue=binding.queue,
+ bindingVersion="custom",
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[nats.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ subject=binding.subject,
+ queue=binding.queue,
+ bindingVersion="custom",
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/operation.py
new file mode 100644
index 0000000000..5e1514fcba
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/nats/operation.py
@@ -0,0 +1,42 @@
+"""AsyncAPI NATS bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/nats
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.bindings import nats
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ replyTo : optional dictionary containing reply information
+ bindingVersion : version of the binding (default is "custom")
+ """
+
+ replyTo: Optional[AnyDict] = None
+ bindingVersion: str = "custom"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[nats.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ replyTo=binding.reply_to,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[nats.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ replyTo=binding.reply_to,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/channel.py
new file mode 100644
index 0000000000..abc5bf96d6
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/channel.py
@@ -0,0 +1,51 @@
+"""AsyncAPI Redis bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/redis
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream.specification.schema.bindings import redis
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel binding.
+
+ Attributes:
+ channel : the channel name
+ method : the method used for binding (ssubscribe, psubscribe, subscribe)
+ bindingVersion : the version of the binding
+ """
+
+ channel: str
+ method: Optional[str] = None
+ groupName: Optional[str] = None
+ consumerName: Optional[str] = None
+ bindingVersion: str = "custom"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[redis.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ channel=binding.channel,
+ method=binding.method,
+ groupName=binding.group_name,
+ consumerName=binding.consumer_name,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[redis.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ channel=binding.channel,
+ method=binding.method,
+ groupName=binding.group_name,
+ consumerName=binding.consumer_name,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/operation.py
new file mode 100644
index 0000000000..cce0316160
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/redis/operation.py
@@ -0,0 +1,42 @@
+"""AsyncAPI Redis bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/redis
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.bindings import redis
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ replyTo : optional dictionary containing reply information
+ bindingVersion : version of the binding (default is "custom")
+ """
+
+ replyTo: Optional[AnyDict] = None
+ bindingVersion: str = "custom"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[redis.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ replyTo=binding.reply_to,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[redis.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ replyTo=binding.reply_to,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/__init__.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/__init__.py
new file mode 100644
index 0000000000..33cdca3a8b
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/__init__.py
@@ -0,0 +1,4 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = ("ChannelBinding", "OperationBinding")
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/channel.py
new file mode 100644
index 0000000000..3145805c65
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/channel.py
@@ -0,0 +1,36 @@
+"""AsyncAPI SQS bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/sqs
+"""
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.bindings import sqs
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel binding.
+
+ Attributes:
+ queue : a dictionary representing the queue
+ bindingVersion : a string representing the binding version (default: "custom")
+ """
+
+ queue: AnyDict
+ bindingVersion: str = "custom"
+
+ @classmethod
+ def from_pub(cls, binding: sqs.ChannelBinding) -> Self:
+ return cls(
+ queue=binding.queue,
+ bindingVersion=binding.bindingVersion,
+ )
+
+ @classmethod
+ def from_sub(cls, binding: sqs.ChannelBinding) -> Self:
+ return cls(
+ queue=binding.queue,
+ bindingVersion=binding.bindingVersion,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/operation.py
new file mode 100644
index 0000000000..dca688bf95
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/sqs/operation.py
@@ -0,0 +1,38 @@
+"""AsyncAPI SQS bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/sqs
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.bindings import sqs
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ replyTo : optional dictionary containing reply information
+ bindingVersion : version of the binding, default is "custom"
+ """
+
+ replyTo: Optional[AnyDict] = None
+ bindingVersion: str = "custom"
+
+ @classmethod
+ def from_pub(cls, binding: sqs.OperationBinding) -> Self:
+ return cls(
+ replyTo=binding.replyTo,
+ bindingVersion=binding.bindingVersion,
+ )
+
+ @classmethod
+ def from_sub(cls, binding: sqs.OperationBinding) -> Self:
+ return cls(
+ replyTo=binding.replyTo,
+ bindingVersion=binding.bindingVersion,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/channels.py b/faststream/specification/asyncapi/v2_6_0/schema/channels.py
new file mode 100644
index 0000000000..5310578554
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/channels.py
@@ -0,0 +1,63 @@
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.schema import PublisherSpec, SubscriberSpec
+
+from .bindings import ChannelBinding
+from .operations import Operation
+
+
+class Channel(BaseModel):
+ """A class to represent a channel.
+
+ Attributes:
+ description : optional description of the channel
+ servers : optional list of servers associated with the channel
+ bindings : optional channel binding
+ subscribe : optional operation for subscribing to the channel
+ publish : optional operation for publishing to the channel
+
+ Configurations:
+ model_config : configuration for the model (only applicable for Pydantic version 2)
+ Config : configuration for the class (only applicable for Pydantic version 1)
+ """
+
+ description: Optional[str] = None
+ servers: Optional[list[str]] = None
+ bindings: Optional[ChannelBinding] = None
+ subscribe: Optional[Operation] = None
+ publish: Optional[Operation] = None
+
+ # TODO:
+ # parameters: Optional[Parameter] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(cls, subscriber: SubscriberSpec) -> Self:
+ return cls(
+ description=subscriber.description,
+ servers=None,
+ bindings=ChannelBinding.from_sub(subscriber.bindings),
+ subscribe=None,
+ publish=Operation.from_sub(subscriber.operation),
+ )
+
+ @classmethod
+ def from_pub(cls, publisher: PublisherSpec) -> Self:
+ return cls(
+ description=publisher.description,
+ servers=None,
+ bindings=ChannelBinding.from_pub(publisher.bindings),
+ subscribe=Operation.from_pub(publisher.operation),
+ publish=None,
+ )
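
One AsyncAPI 2.x subtlety worth calling out here: operations are written from the perspective of a client talking to the documented application, so a FastStream subscriber (the app receives) lands under the channel's `publish` operation and a publisher under `subscribe`. A minimal duck-typed check of that inversion (assuming, as the code above suggests, that the nested binding converters accept `None` and that the spec message only needs `title` and `payload`):

```python
from types import SimpleNamespace

from faststream.specification.asyncapi.v2_6_0.schema.channels import Channel

spec_sub = SimpleNamespace(
    description="demo subscriber",
    bindings=None,  # ChannelBinding.from_sub(None) is assumed to return None
    operation=SimpleNamespace(
        message=SimpleNamespace(title="Demo:Message", payload={"type": "string"}),
        bindings=None,
    ),
)

channel = Channel.from_sub(spec_sub)
# The subscriber is documented as the channel's *publish* operation.
assert channel.publish is not None and channel.subscribe is None
```
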
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/components.py b/faststream/specification/asyncapi/v2_6_0/schema/components.py
new file mode 100644
index 0000000000..a80c3420d0
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/components.py
@@ -0,0 +1,51 @@
+from typing import (
+ Optional,
+)
+
+from pydantic import BaseModel
+
+from faststream._internal._compat import (
+ PYDANTIC_V2,
+)
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema.message import Message
+
+
+class Components(BaseModel):
+ """A class to represent components in a system.
+
+ Attributes:
+ messages : Optional dictionary of messages
+ schemas : Optional dictionary of schemas
+
+ Note:
+ The following attributes are not implemented yet:
+ - servers
+ - serverVariables
+ - channels
+ - securitySchemes
+ - parameters
+ - correlationIds
+ - operationTraits
+ - messageTraits
+ - serverBindings
+ - channelBindings
+ - operationBindings
+ - messageBindings
+ """
+
+ messages: Optional[dict[str, Message]] = None
+ schemas: Optional[dict[str, AnyDict]] = None
+ securitySchemes: Optional[dict[str, AnyDict]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/contact.py b/faststream/specification/asyncapi/v2_6_0/schema/contact.py
new file mode 100644
index 0000000000..d71cbdb781
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/contact.py
@@ -0,0 +1,73 @@
+from typing import Optional, Union, cast, overload
+
+from pydantic import AnyHttpUrl, BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2, EmailStr
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.utils.data import filter_by_dict
+from faststream.specification.schema.extra import (
+ Contact as SpecContact,
+ ContactDict,
+)
+
+
+class Contact(BaseModel):
+ """A class to represent a contact.
+
+ Attributes:
+ name : name of the contact (str)
+ url : URL of the contact (Optional[AnyHttpUrl])
+ email : email of the contact (Optional[EmailStr])
+ """
+
+ name: str
+    # Use default values to be able to build from a dict
+ url: Optional[AnyHttpUrl] = None
+ email: Optional[EmailStr] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @overload
+ @classmethod
+ def from_spec(cls, contact: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, contact: SpecContact) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, contact: ContactDict) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, contact: AnyDict) -> AnyDict: ...
+
+ @classmethod
+ def from_spec(
+ cls, contact: Union[SpecContact, ContactDict, AnyDict, None]
+ ) -> Union[Self, AnyDict, None]:
+ if contact is None:
+ return None
+
+ if isinstance(contact, SpecContact):
+ return cls(
+ name=contact.name,
+ url=contact.url,
+ email=contact.email,
+ )
+
+ contact = cast("AnyDict", contact)
+ contact_data, custom_data = filter_by_dict(ContactDict, contact)
+
+ if custom_data:
+ return contact
+
+ return cls(**contact_data)
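
The overloads encode a three-way contract: `None` passes through, a spec object or a dict of only known keys becomes a `Contact` model, and a dict carrying extra keys is returned untouched so custom fields survive serialization. A sketch of the dict paths (assuming `filter_by_dict` splits a `TypedDict`'s declared keys from the extras, which is how it is used above):

```python
from faststream.specification.asyncapi.v2_6_0.schema.contact import Contact

assert Contact.from_spec(None) is None

# Only documented keys -> coerced into the pydantic model.
model = Contact.from_spec({"name": "Support"})
assert isinstance(model, Contact)

# Any unknown key -> the original dict is handed back as-is.
raw = {"name": "Support", "x-channel": "slack"}
assert Contact.from_spec(raw) is raw
```
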
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/docs.py b/faststream/specification/asyncapi/v2_6_0/schema/docs.py
new file mode 100644
index 0000000000..0bbb933f6f
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/docs.py
@@ -0,0 +1,67 @@
+from typing import Optional, Union, cast, overload
+
+from pydantic import AnyHttpUrl, BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.utils.data import filter_by_dict
+from faststream.specification.schema.extra import (
+ ExternalDocs as SpecDocs,
+ ExternalDocsDict,
+)
+
+
+class ExternalDocs(BaseModel):
+ """A class to represent external documentation.
+
+ Attributes:
+ url : URL of the external documentation
+ description : optional description of the external documentation
+ """
+
+ url: AnyHttpUrl
+    # Use default values to be able to build from a dict
+ description: Optional[str] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @overload
+ @classmethod
+ def from_spec(cls, docs: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, docs: SpecDocs) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, docs: ExternalDocsDict) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, docs: AnyDict) -> AnyDict: ...
+
+ @classmethod
+ def from_spec(
+ cls, docs: Union[SpecDocs, ExternalDocsDict, AnyDict, None]
+ ) -> Union[Self, AnyDict, None]:
+ if docs is None:
+ return None
+
+ if isinstance(docs, SpecDocs):
+ return cls(url=docs.url, description=docs.description)
+
+ docs = cast("AnyDict", docs)
+ docs_data, custom_data = filter_by_dict(ExternalDocsDict, docs)
+
+ if custom_data:
+ return docs
+
+ return cls(**docs_data)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/info.py b/faststream/specification/asyncapi/v2_6_0/schema/info.py
new file mode 100644
index 0000000000..50f79fa026
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/info.py
@@ -0,0 +1,28 @@
+from typing import (
+ Optional,
+ Union,
+)
+
+from pydantic import AnyHttpUrl
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema.contact import Contact
+from faststream.specification.asyncapi.v2_6_0.schema.license import License
+from faststream.specification.base.info import BaseApplicationInfo
+
+
+class ApplicationInfo(BaseApplicationInfo):
+ """A class to represent application information.
+
+ Attributes:
+        title : application title
+        version : application version
+        description : application description
+        termsOfService : link to the application's terms of service
+        contact : application contact information
+        license : application license information
+ """
+
+ termsOfService: Optional[AnyHttpUrl] = None
+ contact: Optional[Union[Contact, AnyDict]] = None
+ license: Optional[Union[License, AnyDict]] = None
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/license.py b/faststream/specification/asyncapi/v2_6_0/schema/license.py
new file mode 100644
index 0000000000..fee3db4012
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/license.py
@@ -0,0 +1,73 @@
+from typing import Optional, Union, cast, overload
+
+from pydantic import AnyHttpUrl, BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.utils.data import filter_by_dict
+from faststream.specification.schema.extra import (
+ License as SpecLicense,
+ LicenseDict,
+)
+
+
+class License(BaseModel):
+ """A class to represent a license.
+
+ Attributes:
+ name : name of the license
+ url : URL of the license (optional)
+
+    Config:
+        extra : additional attributes are allowed (under both Pydantic v1 and v2)
+ """
+
+ name: str
+    # Use default values to be able to build from a dict
+ url: Optional[AnyHttpUrl] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @overload
+ @classmethod
+ def from_spec(cls, license: None) -> None: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, license: SpecLicense) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, license: LicenseDict) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, license: AnyDict) -> AnyDict: ...
+
+ @classmethod
+ def from_spec(
+ cls, license: Union[SpecLicense, LicenseDict, AnyDict, None]
+ ) -> Union[Self, AnyDict, None]:
+ if license is None:
+ return None
+
+ if isinstance(license, SpecLicense):
+ return cls(
+ name=license.name,
+ url=license.url,
+ )
+
+ license = cast("AnyDict", license)
+ license_data, custom_data = filter_by_dict(LicenseDict, license)
+
+ if custom_data:
+ return license
+
+ return cls(**license_data)
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/message.py b/faststream/specification/asyncapi/v2_6_0/schema/message.py
new file mode 100644
index 0000000000..5f56df156c
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/message.py
@@ -0,0 +1,91 @@
+from typing import Optional, Union
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema.tag import Tag
+from faststream.specification.schema.message import Message as SpecMessage
+
+
+class CorrelationId(BaseModel):
+ """A class to represent a correlation ID.
+
+ Attributes:
+ description : optional description of the correlation ID
+ location : location of the correlation ID
+
+ Configurations:
+ extra : allows extra fields in the correlation ID model
+ """
+
+ location: str
+ description: Optional[str] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+
+class Message(BaseModel):
+ """A class to represent a message.
+
+ Attributes:
+ title : title of the message
+ name : name of the message
+ summary : summary of the message
+ description : description of the message
+ messageId : ID of the message
+ correlationId : correlation ID of the message
+ contentType : content type of the message
+ payload : dictionary representing the payload of the message
+ tags : list of tags associated with the message
+ """
+
+ title: Optional[str] = None
+ name: Optional[str] = None
+ summary: Optional[str] = None
+ description: Optional[str] = None
+ messageId: Optional[str] = None
+ correlationId: Optional[CorrelationId] = None
+ contentType: Optional[str] = None
+
+ payload: AnyDict
+ # TODO:
+ # headers
+ # schemaFormat
+ # bindings
+ # examples
+ # traits
+
+ tags: Optional[list[Union[Tag, AnyDict]]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_spec(cls, message: SpecMessage) -> Self:
+ return cls(
+ title=message.title,
+ payload=message.payload,
+ correlationId=CorrelationId(
+ description=None,
+ location="$message.header#/correlation_id",
+ ),
+ name=None,
+ summary=None,
+ description=None,
+ messageId=None,
+ contentType=None,
+ tags=None,
+ )
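
`from_spec` keeps only the title and payload from the spec message and pins every generated message's correlation id to the same header location. A quick duck-typed check (a namespace stands in for the spec-side message, since only `title` and `payload` are read):

```python
from types import SimpleNamespace

from faststream.specification.asyncapi.v2_6_0.schema.message import Message

msg = Message.from_spec(
    SimpleNamespace(title="UserCreated:Message", payload={"type": "object"})
)

assert msg.title == "UserCreated:Message"
assert msg.correlationId is not None
assert msg.correlationId.location == "$message.header#/correlation_id"
```
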
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/operations.py b/faststream/specification/asyncapi/v2_6_0/schema/operations.py
new file mode 100644
index 0000000000..c837c844d7
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/operations.py
@@ -0,0 +1,74 @@
+from typing import Optional, Union
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.operation import Operation as OperationSpec
+
+from .bindings import OperationBinding
+from .message import Message
+from .tag import Tag
+from .utils import Reference
+
+
+class Operation(BaseModel):
+ """A class to represent an operation.
+
+ Attributes:
+ operationId : ID of the operation
+ summary : summary of the operation
+ description : description of the operation
+ bindings : bindings of the operation
+ message : message of the operation
+ security : security details of the operation
+ tags : tags associated with the operation
+ """
+
+ operationId: Optional[str] = None
+ summary: Optional[str] = None
+ description: Optional[str] = None
+
+ bindings: Optional[OperationBinding] = None
+
+ message: Union[Message, Reference]
+
+ security: Optional[dict[str, list[str]]] = None
+
+ # TODO
+ # traits
+
+ tags: Optional[list[Union[Tag, AnyDict]]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(cls, operation: OperationSpec) -> Self:
+ return cls(
+ message=Message.from_spec(operation.message),
+ bindings=OperationBinding.from_sub(operation.bindings),
+ operationId=None,
+ summary=None,
+ description=None,
+ tags=None,
+ security=None,
+ )
+
+ @classmethod
+ def from_pub(cls, operation: OperationSpec) -> Self:
+ return cls(
+ message=Message.from_spec(operation.message),
+ bindings=OperationBinding.from_pub(operation.bindings),
+ operationId=None,
+ summary=None,
+ description=None,
+ tags=None,
+ security=None,
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/schema.py b/faststream/specification/asyncapi/v2_6_0/schema/schema.py
new file mode 100644
index 0000000000..8f4a70a701
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/schema.py
@@ -0,0 +1,37 @@
+from typing import Literal, Optional, Union
+
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema.channels import Channel
+from faststream.specification.asyncapi.v2_6_0.schema.components import Components
+from faststream.specification.asyncapi.v2_6_0.schema.docs import ExternalDocs
+from faststream.specification.asyncapi.v2_6_0.schema.info import ApplicationInfo
+from faststream.specification.asyncapi.v2_6_0.schema.servers import Server
+from faststream.specification.asyncapi.v2_6_0.schema.tag import Tag
+from faststream.specification.base.schema import BaseApplicationSchema
+
+
+class ApplicationSchema(BaseApplicationSchema):
+ """A class to represent an application schema.
+
+ Attributes:
+        asyncapi : version of the AsyncAPI specification
+ id : optional ID
+ defaultContentType : optional default content type
+ info : information about the schema
+ servers : optional dictionary of servers
+ channels : dictionary of channels
+ components : optional components of the schema
+ tags : optional list of tags
+ externalDocs : optional external documentation
+ """
+
+ info: ApplicationInfo
+
+ asyncapi: Union[Literal["2.6.0"], str]
+ id: Optional[str] = None
+ defaultContentType: Optional[str] = None
+ servers: Optional[dict[str, Server]] = None
+ channels: dict[str, Channel]
+ components: Optional[Components] = None
+ tags: Optional[list[Union[Tag, AnyDict]]] = None
+ externalDocs: Optional[Union[ExternalDocs, AnyDict]] = None
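
Since the schema is a plain pydantic model, a minimal document can be built directly; beyond the version string, `info` and `channels` are the only required fields. A sketch (assuming `BaseApplicationSchema` exposes the `to_json()` helper the facades call):

```python
from faststream.specification.asyncapi.v2_6_0.schema.info import ApplicationInfo
from faststream.specification.asyncapi.v2_6_0.schema.schema import ApplicationSchema

schema = ApplicationSchema(
    asyncapi="2.6.0",
    info=ApplicationInfo(title="Demo", version="0.1.0", description=""),
    channels={},
)
print(schema.to_json())  # assumed helper from BaseApplicationSchema
```
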
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/servers.py b/faststream/specification/asyncapi/v2_6_0/schema/servers.py
new file mode 100644
index 0000000000..cae721cfd1
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/servers.py
@@ -0,0 +1,69 @@
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema.tag import Tag
+from faststream.specification.asyncapi.v2_6_0.schema.utils import Reference
+
+SecurityRequirement = list[dict[str, list[str]]]
+
+
+class ServerVariable(BaseModel):
+ """A class to represent a server variable.
+
+ Attributes:
+ enum : list of possible values for the server variable (optional)
+ default : default value for the server variable (optional)
+ description : description of the server variable (optional)
+ examples : list of example values for the server variable (optional)
+ """
+
+ enum: Optional[list[str]] = None
+ default: Optional[str] = None
+ description: Optional[str] = None
+ examples: Optional[list[str]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+
+class Server(BaseModel):
+ """A class to represent a server.
+
+ Attributes:
+ url : URL of the server
+ protocol : protocol used by the server
+ description : optional description of the server
+ protocolVersion : optional version of the protocol used by the server
+ tags : optional list of tags associated with the server
+ security : optional security requirement for the server
+ variables : optional dictionary of server variables
+
+    Note:
+        `variables` defaults to `None`; the other optional attributes are declared
+        without defaults, so under Pydantic v2 they must be passed explicitly
+        (`None` is accepted).
+ """
+
+ url: str
+ protocol: str
+ protocolVersion: Optional[str]
+ description: Optional[str]
+ tags: Optional[list[Union[Tag, AnyDict]]]
+ security: Optional[SecurityRequirement]
+
+ variables: Optional[dict[str, Union[ServerVariable, Reference]]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/tag.py b/faststream/specification/asyncapi/v2_6_0/schema/tag.py
new file mode 100644
index 0000000000..86dff2e613
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/tag.py
@@ -0,0 +1,69 @@
+from typing import Optional, Union, cast, overload
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream._internal.utils.data import filter_by_dict
+from faststream.specification.asyncapi.v2_6_0.schema.docs import ExternalDocs
+from faststream.specification.schema.extra import (
+ Tag as SpecTag,
+ TagDict,
+)
+
+
+class Tag(BaseModel):
+ """A class to represent a tag.
+
+ Attributes:
+ name : name of the tag
+ description : description of the tag (optional)
+ externalDocs : external documentation for the tag (optional)
+ """
+
+ name: str
+    # Use default values to be able to build from a dict
+ description: Optional[str] = None
+ externalDocs: Optional[ExternalDocs] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @overload
+ @classmethod
+ def from_spec(cls, tag: SpecTag) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, tag: TagDict) -> Self: ...
+
+ @overload
+ @classmethod
+ def from_spec(cls, tag: AnyDict) -> AnyDict: ...
+
+ @classmethod
+ def from_spec(cls, tag: Union[SpecTag, TagDict, AnyDict]) -> Union[Self, AnyDict]:
+ if isinstance(tag, SpecTag):
+ return cls(
+ name=tag.name,
+ description=tag.description,
+ externalDocs=ExternalDocs.from_spec(tag.external_docs),
+ )
+
+ tag = cast("AnyDict", tag)
+ tag_data, custom_data = filter_by_dict(TagDict, tag)
+
+ if custom_data:
+ return tag
+
+ return cls(
+ name=tag_data.get("name"),
+ description=tag_data.get("description"),
+ externalDocs=tag_data.get("external_docs"),
+ )
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/utils.py b/faststream/specification/asyncapi/v2_6_0/schema/utils.py
new file mode 100644
index 0000000000..6d492ffeb5
--- /dev/null
+++ b/faststream/specification/asyncapi/v2_6_0/schema/utils.py
@@ -0,0 +1,17 @@
+from pydantic import BaseModel, Field
+
+
+class Reference(BaseModel):
+ """A class to represent a reference.
+
+ Attributes:
+ ref : the reference string
+ """
+
+ ref: str = Field(..., alias="$ref")
+
+
+class Parameter(BaseModel):
+ """A class to represent a parameter."""
+
+ # TODO
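
Because `$ref` is not a valid Python identifier, the model stores it as `ref` and relies on the alias for population and dumping. Worth noting since the `by_alias` flag is easy to forget (pydantic v2 spelling shown; v1 would use `ref.dict(by_alias=True)`):

```python
from faststream.specification.asyncapi.v2_6_0.schema.utils import Reference

ref = Reference(**{"$ref": "#/components/messages/Demo"})
assert ref.ref == "#/components/messages/Demo"

# Without by_alias=True the dump would use the Python name "ref".
assert ref.model_dump(by_alias=True) == {"$ref": "#/components/messages/Demo"}
```
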
diff --git a/faststream/specification/asyncapi/v3_0_0/__init__.py b/faststream/specification/asyncapi/v3_0_0/__init__.py
new file mode 100644
index 0000000000..490431d760
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/__init__.py
@@ -0,0 +1,7 @@
+from .facade import AsyncAPI3
+from .generate import get_app_schema
+
+__all__ = (
+ "AsyncAPI3",
+ "get_app_schema",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/facade.py b/faststream/specification/asyncapi/v3_0_0/facade.py
new file mode 100644
index 0000000000..31542ab11b
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/facade.py
@@ -0,0 +1,77 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from faststream.specification.base.specification import Specification
+
+from .generate import get_app_schema
+from .schema import ApplicationSchema
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, AnyHttpUrl
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream.specification.schema.extra import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+ )
+
+
+class AsyncAPI3(Specification):
+    """Facade that renders an AsyncAPI 3.0.0 schema from a broker."""
+
+ def __init__(
+ self,
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ title: str = "FastStream",
+ app_version: str = "0.1.0",
+ schema_version: str = "3.0.0",
+ description: str = "",
+ terms_of_service: Optional["AnyHttpUrl"] = None,
+ contact: Optional[Union["Contact", "ContactDict", "AnyDict"]] = None,
+ license: Optional[Union["License", "LicenseDict", "AnyDict"]] = None,
+ identifier: Optional[str] = None,
+ tags: Sequence[Union["Tag", "TagDict", "AnyDict"]] = (),
+ external_docs: Optional[
+ Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]
+ ] = None,
+ ) -> None:
+ self.broker = broker
+ self.title = title
+ self.app_version = app_version
+ self.schema_version = schema_version
+ self.description = description
+ self.terms_of_service = terms_of_service
+ self.contact = contact
+ self.license = license
+ self.identifier = identifier
+ self.tags = tags
+ self.external_docs = external_docs
+
+ def to_json(self) -> str:
+ return self.schema.to_json()
+
+ def to_jsonable(self) -> Any:
+ return self.schema.to_jsonable()
+
+ def to_yaml(self) -> str:
+ return self.schema.to_yaml()
+
+ @property
+ def schema(self) -> ApplicationSchema:
+ return get_app_schema(
+ self.broker,
+ title=self.title,
+ app_version=self.app_version,
+ schema_version=self.schema_version,
+ description=self.description,
+ terms_of_service=self.terms_of_service,
+ contact=self.contact,
+ license=self.license,
+ identifier=self.identifier,
+ tags=self.tags,
+ external_docs=self.external_docs,
+ )
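
End to end, the facade is constructed with a broker and defers all work to `get_app_schema` on each `schema` access. A hypothetical wiring (the broker class and URL are illustrative assumptions; any `BrokerUsecase` implementation should do):

```python
from faststream.rabbit import RabbitBroker  # example broker, not required
from faststream.specification.asyncapi.v3_0_0 import AsyncAPI3

broker = RabbitBroker("amqp://guest:guest@localhost:5672/")
docs = AsyncAPI3(broker, title="Orders", app_version="1.0.0")

print(docs.to_json())  # the schema is rebuilt from the broker on every access
```
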
diff --git a/faststream/specification/asyncapi/v3_0_0/generate.py b/faststream/specification/asyncapi/v3_0_0/generate.py
new file mode 100644
index 0000000000..0ef22bf909
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/generate.py
@@ -0,0 +1,258 @@
+import warnings
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
+from urllib.parse import urlparse
+
+from faststream._internal._compat import DEF_KEY
+from faststream._internal.basic_types import AnyDict, AnyHttpUrl
+from faststream._internal.constants import ContentTypes
+from faststream.specification.asyncapi.utils import clear_key, move_pydantic_refs
+from faststream.specification.asyncapi.v3_0_0.schema import (
+ ApplicationInfo,
+ ApplicationSchema,
+ Channel,
+ Components,
+ Contact,
+ ExternalDocs,
+ License,
+ Message,
+ Operation,
+ Reference,
+ Server,
+ Tag,
+)
+
+if TYPE_CHECKING:
+ from faststream._internal.broker.broker import BrokerUsecase
+ from faststream._internal.types import ConnectionType, MsgType
+ from faststream.specification.schema.extra import (
+ Contact as SpecContact,
+ ContactDict,
+ ExternalDocs as SpecDocs,
+ ExternalDocsDict,
+ License as SpecLicense,
+ LicenseDict,
+ Tag as SpecTag,
+ TagDict,
+ )
+
+
+def get_app_schema(
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ title: str,
+ app_version: str,
+ schema_version: str,
+ description: str,
+ terms_of_service: Optional["AnyHttpUrl"],
+ contact: Optional[Union["SpecContact", "ContactDict", "AnyDict"]],
+ license: Optional[Union["SpecLicense", "LicenseDict", "AnyDict"]],
+ identifier: Optional[str],
+ tags: Optional[Sequence[Union["SpecTag", "TagDict", "AnyDict"]]],
+ external_docs: Optional[Union["SpecDocs", "ExternalDocsDict", "AnyDict"]],
+) -> ApplicationSchema:
+ """Get the application schema."""
+ broker._setup()
+
+ servers = get_broker_server(broker)
+ channels, operations = get_broker_channels(broker)
+
+ messages: dict[str, Message] = {}
+ payloads: dict[str, AnyDict] = {}
+
+ for channel in channels.values():
+ channel.servers = [
+ {"$ref": f"#/servers/{server_name}"} for server_name in list(servers.keys())
+ ]
+
+ for channel_name, channel in channels.items():
+ msgs: dict[str, Union[Message, Reference]] = {}
+ for message_name, message in channel.messages.items():
+ assert isinstance(message, Message)
+
+ msgs[message_name] = _resolve_msg_payloads(
+ message_name,
+ message,
+ channel_name,
+ payloads,
+ messages,
+ )
+
+ channel.messages = msgs
+
+ return ApplicationSchema(
+ info=ApplicationInfo(
+ title=title,
+ version=app_version,
+ description=description,
+ termsOfService=terms_of_service,
+ contact=Contact.from_spec(contact),
+ license=License.from_spec(license),
+            tags=[Tag.from_spec(tag) for tag in tags] if tags else None,
+ externalDocs=ExternalDocs.from_spec(external_docs),
+ ),
+ asyncapi=schema_version,
+ defaultContentType=ContentTypes.JSON.value,
+ id=identifier,
+ servers=servers,
+ channels=channels,
+ operations=operations,
+ components=Components(
+ messages=messages,
+ schemas=payloads,
+ securitySchemes=None
+ if broker.security is None
+ else broker.security.get_schema(),
+ ),
+ )
+
+
+def get_broker_server(
+ broker: "BrokerUsecase[MsgType, ConnectionType]",
+) -> dict[str, Server]:
+ """Get the broker server for an application."""
+ servers = {}
+
+ tags: Optional[list[Union[Tag, AnyDict]]] = None
+ if broker.tags:
+ tags = [Tag.from_spec(tag) for tag in broker.tags]
+
+ broker_meta: AnyDict = {
+ "protocol": broker.protocol,
+ "protocolVersion": broker.protocol_version,
+ "description": broker.description,
+ "tags": tags,
+ # TODO
+ # "variables": "",
+ # "bindings": "",
+ }
+
+ if broker.security is not None:
+ broker_meta["security"] = broker.security.get_requirement()
+
+ urls = broker.url if isinstance(broker.url, list) else [broker.url]
+
+ for i, broker_url in enumerate(urls, 1):
+ server_url = broker_url if "://" in broker_url else f"//{broker_url}"
+
+ parsed_url = urlparse(server_url)
+ server_name = "development" if len(urls) == 1 else f"Server{i}"
+ servers[server_name] = Server(
+ host=parsed_url.netloc,
+ pathname=parsed_url.path,
+ **broker_meta,
+ )
+
+ return servers
+
+
+def get_broker_channels(
+ broker: "BrokerUsecase[MsgType, ConnectionType]",
+) -> tuple[dict[str, Channel], dict[str, Operation]]:
+ """Get the broker channels for an application."""
+ channels = {}
+ operations = {}
+
+ for sub in broker._subscribers:
+ for sub_key, sub_channel in sub.schema().items():
+ channel_obj = Channel.from_sub(sub_key, sub_channel)
+
+ channel_key = clear_key(sub_key)
+ if channel_key in channels:
+ warnings.warn(
+ f"Overwrite channel handler, channels have the same names: `{channel_key}`",
+ RuntimeWarning,
+ stacklevel=1,
+ )
+
+ channels[channel_key] = channel_obj
+
+ operations[f"{channel_key}Subscribe"] = Operation.from_sub(
+ messages=[
+ Reference(**{
+ "$ref": f"#/channels/{channel_key}/messages/{msg_name}"
+ })
+ for msg_name in channel_obj.messages
+ ],
+ channel=Reference(**{"$ref": f"#/channels/{channel_key}"}),
+ operation=sub_channel.operation,
+ )
+
+ for pub in broker._publishers:
+ for pub_key, pub_channel in pub.schema().items():
+ channel_obj = Channel.from_pub(pub_key, pub_channel)
+
+ channel_key = clear_key(pub_key)
+ if channel_key in channels:
+ warnings.warn(
+ f"Overwrite channel handler, channels have the same names: `{channel_key}`",
+ RuntimeWarning,
+ stacklevel=1,
+ )
+ channels[channel_key] = channel_obj
+
+ operations[channel_key] = Operation.from_pub(
+ messages=[
+ Reference(**{
+ "$ref": f"#/channels/{channel_key}/messages/{msg_name}"
+ })
+ for msg_name in channel_obj.messages
+ ],
+ channel=Reference(**{"$ref": f"#/channels/{channel_key}"}),
+ operation=pub_channel.operation,
+ )
+
+ return channels, operations
+
+
+def _resolve_msg_payloads(
+ message_name: str,
+ m: Message,
+ channel_name: str,
+ payloads: AnyDict,
+ messages: AnyDict,
+) -> Reference:
+ assert isinstance(m.payload, dict)
+
+ m.payload = move_pydantic_refs(m.payload, DEF_KEY)
+
+ message_name = clear_key(message_name)
+ channel_name = clear_key(channel_name)
+
+ if DEF_KEY in m.payload:
+ payloads.update(m.payload.pop(DEF_KEY))
+
+ one_of = m.payload.get("oneOf", None)
+ if isinstance(one_of, dict):
+ one_of_list = []
+ processed_payloads: dict[str, AnyDict] = {}
+ for name, payload in one_of.items():
+ processed_payloads[clear_key(name)] = payload
+ one_of_list.append(Reference(**{"$ref": f"#/components/schemas/{name}"}))
+
+ payloads.update(processed_payloads)
+ m.payload["oneOf"] = one_of_list
+ assert m.title
+ messages[clear_key(m.title)] = m
+ return Reference(
+ **{"$ref": f"#/components/messages/{channel_name}:{message_name}"},
+ )
+
+ payloads.update(m.payload.pop(DEF_KEY, {}))
+ payload_name = m.payload.get("title", f"{channel_name}:{message_name}:Payload")
+ payload_name = clear_key(payload_name)
+
+ if payload_name in payloads and payloads[payload_name] != m.payload:
+ warnings.warn(
+ f"Overwriting the message schema, data types have the same name: `{payload_name}`",
+ RuntimeWarning,
+ stacklevel=1,
+ )
+
+ payloads[payload_name] = m.payload
+ m.payload = {"$ref": f"#/components/schemas/{payload_name}"}
+ assert m.title
+ messages[clear_key(m.title)] = m
+ return Reference(
+ **{"$ref": f"#/components/messages/{channel_name}:{message_name}"},
+ )
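
Two details of `get_broker_server` are easy to miss: bare `host:port` URLs get a `//` prefix so `urlparse` treats them as network locations, and the server key is `development` for a single URL but `Server1`, `Server2`, ... for several. A self-contained sketch of just that logic:

```python
from urllib.parse import urlparse


def server_names(urls: list[str]) -> dict[str, str]:
    """Mirror the naming and parsing rule from get_broker_server above."""
    out = {}
    for i, url in enumerate(urls, 1):
        parsed = urlparse(url if "://" in url else f"//{url}")
        name = "development" if len(urls) == 1 else f"Server{i}"
        out[name] = parsed.netloc
    return out


assert server_names(["localhost:5672"]) == {"development": "localhost:5672"}
assert list(server_names(["a:1", "b:2"])) == ["Server1", "Server2"]
```
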
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/__init__.py
new file mode 100644
index 0000000000..e0cbcbd7b2
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/__init__.py
@@ -0,0 +1,31 @@
+from .channels import Channel
+from .components import Components
+from .contact import Contact
+from .docs import ExternalDocs
+from .info import ApplicationInfo
+from .license import License
+from .message import CorrelationId, Message
+from .operations import Operation
+from .schema import ApplicationSchema
+from .servers import Server, ServerVariable
+from .tag import Tag
+from .utils import Parameter, Reference
+
+__all__ = (
+ "ApplicationInfo",
+ "ApplicationSchema",
+ "Channel",
+ "Channel",
+ "Components",
+ "Contact",
+ "CorrelationId",
+ "ExternalDocs",
+ "License",
+ "Message",
+ "Operation",
+ "Parameter",
+ "Reference",
+ "Server",
+ "ServerVariable",
+ "Tag",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py
new file mode 100644
index 0000000000..c304608c5b
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py
@@ -0,0 +1,9 @@
+from .main import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py
new file mode 100644
index 0000000000..bfadb4c0f4
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py
@@ -0,0 +1,40 @@
+from typing import Optional
+
+from typing_extensions import Self
+
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp import (
+ ChannelBinding as V2Binding,
+)
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp.channel import (
+ Exchange,
+ Queue,
+)
+from faststream.specification.schema.bindings import amqp
+
+
+class ChannelBinding(V2Binding):
+    """AMQP channel binding for AsyncAPI 3.0.0, reusing the 2.6.0 model."""
+
+ bindingVersion: str = "0.3.0"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ **{
+ "is": "queue",
+ "queue": Queue.from_spec(binding.queue, binding.virtual_host),
+ },
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ return cls(
+ **{
+ "is": "routingKey",
+ "exchange": Exchange.from_spec(binding.exchange, binding.virtual_host),
+ },
+ )
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py
new file mode 100644
index 0000000000..d6f95b68e8
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py
@@ -0,0 +1,54 @@
+"""AsyncAPI AMQP bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/amqp
+"""
+
+from typing import Optional
+
+from pydantic import BaseModel, PositiveInt
+from typing_extensions import Self
+
+from faststream.specification.schema.bindings import amqp
+
+
+class OperationBinding(BaseModel):
+    """AMQP operation binding for AsyncAPI 3.0.0."""
+
+ cc: Optional[list[str]] = None
+ ack: bool
+ replyTo: Optional[str] = None
+ deliveryMode: Optional[int] = None
+ mandatory: Optional[bool] = None
+ priority: Optional[PositiveInt] = None
+
+ bindingVersion: str = "0.3.0"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ cc=[binding.routing_key]
+ if (binding.routing_key and binding.exchange.is_respect_routing_key)
+ else None,
+ ack=binding.ack,
+ replyTo=binding.reply_to,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
+ mandatory=binding.mandatory,
+ priority=binding.priority,
+ )
+
+ @classmethod
+ def from_pub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
+ return cls(
+ cc=None
+ if (not binding.routing_key or not binding.exchange.is_respect_routing_key)
+ else [binding.routing_key],
+ ack=binding.ack,
+ replyTo=binding.reply_to,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
+ mandatory=binding.mandatory,
+ priority=binding.priority,
+ )
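
The `deliveryMode` line packs the whole persistence mapping into one expression: `None` stays unset, `False` becomes 1 (transient) and `True` becomes 2 (persistent), matching AMQP's delivery-mode values. Spelled out:

```python
# persist flag -> AMQP deliveryMode, as computed above
for persist, expected in ((None, None), (False, 1), (True, 2)):
    delivery_mode = None if persist is None else int(persist) + 1
    assert delivery_mode == expected
```
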
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/kafka.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/kafka.py
new file mode 100644
index 0000000000..5605abeefa
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/kafka.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.kafka import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py
new file mode 100644
index 0000000000..8555fd981a
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py
@@ -0,0 +1,7 @@
+from .channel import ChannelBinding
+from .operation import OperationBinding
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py
new file mode 100644
index 0000000000..c7552a11d1
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py
@@ -0,0 +1,100 @@
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.asyncapi.v3_0_0.schema.bindings import (
+ amqp as amqp_bindings,
+ kafka as kafka_bindings,
+ nats as nats_bindings,
+ redis as redis_bindings,
+ sqs as sqs_bindings,
+)
+from faststream.specification.schema.bindings import ChannelBinding as SpecBinding
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel bindings.
+
+ Attributes:
+ amqp : AMQP channel binding (optional)
+ kafka : Kafka channel binding (optional)
+ sqs : SQS channel binding (optional)
+ nats : NATS channel binding (optional)
+ redis : Redis channel binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.ChannelBinding] = None
+ kafka: Optional[kafka_bindings.ChannelBinding] = None
+ sqs: Optional[sqs_bindings.ChannelBinding] = None
+ nats: Optional[nats_bindings.ChannelBinding] = None
+ redis: Optional[redis_bindings.ChannelBinding] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
+
+ @classmethod
+ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
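
Note that both converters return a binding for at most one transport, probed in the fixed order amqp, kafka, nats, redis, sqs: the first spec binding that converts successfully wins and the rest are dropped. A duck-typed check (the stand-ins only need the attributes the v2.6.0 converters read):

```python
from types import SimpleNamespace

from faststream.specification.asyncapi.v3_0_0.schema.bindings.main.channel import (
    ChannelBinding,
)

spec = SimpleNamespace(
    amqp=None,
    kafka=None,
    nats=None,
    redis=SimpleNamespace(
        channel="logs", method="subscribe", group_name=None, consumer_name=None
    ),
    sqs=SimpleNamespace(queue={"name": "q"}, bindingVersion="custom"),
)

binding = ChannelBinding.from_sub(spec)
# redis is probed before sqs, so only the redis binding survives.
assert binding is not None and binding.redis is not None and binding.sqs is None
```
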
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py
new file mode 100644
index 0000000000..fc37c3dc75
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py
@@ -0,0 +1,100 @@
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.asyncapi.v3_0_0.schema.bindings import (
+ amqp as amqp_bindings,
+ kafka as kafka_bindings,
+ nats as nats_bindings,
+ redis as redis_bindings,
+ sqs as sqs_bindings,
+)
+from faststream.specification.schema.bindings import OperationBinding as SpecBinding
+
+
+class OperationBinding(BaseModel):
+ """A class to represent an operation binding.
+
+ Attributes:
+ amqp : AMQP operation binding (optional)
+ kafka : Kafka operation binding (optional)
+ sqs : SQS operation binding (optional)
+ nats : NATS operation binding (optional)
+ redis : Redis operation binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.OperationBinding] = None
+ kafka: Optional[kafka_bindings.OperationBinding] = None
+ sqs: Optional[sqs_bindings.OperationBinding] = None
+ nats: Optional[nats_bindings.OperationBinding] = None
+ redis: Optional[redis_bindings.OperationBinding] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
+
+ @classmethod
+ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/nats.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/nats.py
new file mode 100644
index 0000000000..21d5c46926
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/nats.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.nats import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/redis.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/redis.py
new file mode 100644
index 0000000000..26d44644f7
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/redis.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.redis import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/sqs.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/sqs.py
new file mode 100644
index 0000000000..e437a1cc58
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/sqs.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.sqs import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/channels.py b/faststream/specification/asyncapi/v3_0_0/schema/channels.py
new file mode 100644
index 0000000000..c0a2dbe553
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/channels.py
@@ -0,0 +1,74 @@
+from typing import Optional, Union
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.asyncapi.v3_0_0.schema.bindings import ChannelBinding
+from faststream.specification.asyncapi.v3_0_0.schema.message import Message
+from faststream.specification.schema import PublisherSpec, SubscriberSpec
+
+from .utils import Reference
+
+
+class Channel(BaseModel):
+ """A class to represent a channel.
+
+ Attributes:
+        address : string representation of this channel's address
+        description : optional description of the channel
+        servers : optional list of servers associated with the channel
+        messages : mapping of message names to messages or references
+        bindings : optional channel binding
+
+ Configurations:
+ model_config : configuration for the model (only applicable for Pydantic version 2)
+ Config : configuration for the class (only applicable for Pydantic version 1)
+ """
+
+ address: str
+ description: Optional[str] = None
+ servers: Optional[list[dict[str, str]]] = None
+ messages: dict[str, Union[Message, Reference]]
+ bindings: Optional[ChannelBinding] = None
+
+ # TODO:
+ # parameters: Optional[Parameter] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(cls, address: str, subscriber: SubscriberSpec) -> Self:
+ message = subscriber.operation.message
+ assert message.title
+
+ *left, right = message.title.split(":")
+ message.title = ":".join((*left, f"Subscribe{right}"))
+
+ return cls(
+ description=subscriber.description,
+ address=address,
+ messages={
+ "SubscribeMessage": Message.from_spec(message),
+ },
+ bindings=ChannelBinding.from_sub(subscriber.bindings),
+ servers=None,
+ )
+
+ @classmethod
+ def from_pub(cls, address: str, publisher: PublisherSpec) -> Self:
+ return cls(
+ description=publisher.description,
+ address=address,
+ messages={
+ "Message": Message.from_spec(publisher.operation.message),
+ },
+ bindings=ChannelBinding.from_pub(publisher.bindings),
+ servers=None,
+ )
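
`from_sub` retitles the spec message, presumably so subscribe-side and publish-side messages of the same handler cannot collide: only the segment after the last `:` gets the `Subscribe` prefix. The splice in isolation:

```python
# "user:created:Message" -> "user:created:SubscribeMessage"
title = "user:created:Message"
*left, right = title.split(":")
assert ":".join((*left, f"Subscribe{right}")) == "user:created:SubscribeMessage"
```
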
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/components.py b/faststream/specification/asyncapi/v3_0_0/schema/components.py
new file mode 100644
index 0000000000..04f2957a7c
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/components.py
@@ -0,0 +1,56 @@
+from typing import Optional
+
+from pydantic import BaseModel
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema.message import Message
+
+
+class Components(BaseModel):
+ """A class to represent components in a system.
+
+ Attributes:
+ messages : Optional dictionary of messages
+ schemas : Optional dictionary of schemas
+
+ Note:
+ The following attributes are not implemented yet:
+ - servers
+ - serverVariables
+ - channels
+ - securitySchemes
+ - parameters
+ - correlationIds
+ - operationTraits
+ - messageTraits
+ - serverBindings
+ - channelBindings
+ - operationBindings
+ - messageBindings
+    """
+
+ messages: Optional[dict[str, Message]] = None
+ schemas: Optional[dict[str, AnyDict]] = None
+ securitySchemes: Optional[dict[str, AnyDict]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/contact.py b/faststream/specification/asyncapi/v3_0_0/schema/contact.py
new file mode 100644
index 0000000000..c42e750b28
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/contact.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import Contact
+
+__all__ = ("Contact",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/docs.py b/faststream/specification/asyncapi/v3_0_0/schema/docs.py
new file mode 100644
index 0000000000..0a71688697
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/docs.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import ExternalDocs
+
+__all__ = ("ExternalDocs",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/info.py b/faststream/specification/asyncapi/v3_0_0/schema/info.py
new file mode 100644
index 0000000000..c9303e690c
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/info.py
@@ -0,0 +1,35 @@
+from typing import (
+ Optional,
+ Union,
+)
+
+from pydantic import AnyHttpUrl
+
+from faststream._internal.basic_types import (
+ AnyDict,
+)
+from faststream.specification.asyncapi.v2_6_0.schema import (
+ Contact,
+ ExternalDocs,
+ License,
+ Tag,
+)
+from faststream.specification.base.info import BaseApplicationInfo
+
+
+class ApplicationInfo(BaseApplicationInfo):
+ """A class to represent application information.
+
+ Attributes:
+        termsOfService : link to the application's terms of service
+        contact : application contact information
+        license : application license information
+ tags : optional list of tags
+ externalDocs : optional external documentation
+ """
+
+ termsOfService: Optional[AnyHttpUrl] = None
+ contact: Optional[Union[Contact, AnyDict]] = None
+ license: Optional[Union[License, AnyDict]] = None
+ tags: Optional[list[Union["Tag", "AnyDict"]]] = None
+ externalDocs: Optional[Union["ExternalDocs", "AnyDict"]] = None
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/license.py b/faststream/specification/asyncapi/v3_0_0/schema/license.py
new file mode 100644
index 0000000000..44ee4b2813
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/license.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import License
+
+__all__ = ("License",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/message.py b/faststream/specification/asyncapi/v3_0_0/schema/message.py
new file mode 100644
index 0000000000..fa665082e9
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/message.py
@@ -0,0 +1,6 @@
+from faststream.specification.asyncapi.v2_6_0.schema.message import (
+ CorrelationId,
+ Message,
+)
+
+__all__ = ("CorrelationId", "Message")
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/operations.py b/faststream/specification/asyncapi/v3_0_0/schema/operations.py
new file mode 100644
index 0000000000..8afff3c5c6
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/operations.py
@@ -0,0 +1,94 @@
+from enum import Enum
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.schema.operation import Operation as OperationSpec
+
+from .bindings import OperationBinding
+from .channels import Channel
+from .tag import Tag
+from .utils import Reference
+
+
+class Action(str, Enum):
+ SEND = "send"
+ RECEIVE = "receive"
+
+
+class Operation(BaseModel):
+ """A class to represent an operation.
+
+ Attributes:
+        action : whether the application sends or receives via this operation
+        channel : channel (or reference) the operation is attached to
+        summary : summary of the operation
+        description : description of the operation
+        bindings : bindings of the operation
+        messages : message references consumed or produced by the operation
+        security : security details of the operation
+        tags : tags associated with the operation
+ """
+
+ action: Action
+ channel: Union[Channel, Reference]
+
+ summary: Optional[str] = None
+ description: Optional[str] = None
+
+ bindings: Optional[OperationBinding] = None
+
+ messages: list[Reference] = Field(default_factory=list)
+
+ security: Optional[dict[str, list[str]]] = None
+
+ # TODO
+ # traits
+
+ tags: Optional[list[Union[Tag, AnyDict]]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(
+ cls,
+ messages: list[Reference],
+ channel: Reference,
+ operation: OperationSpec,
+ ) -> Self:
+ return cls(
+ action=Action.RECEIVE,
+ messages=messages,
+ channel=channel,
+ bindings=OperationBinding.from_sub(operation.bindings),
+ summary=None,
+ description=None,
+ security=None,
+ tags=None,
+ )
+
+ @classmethod
+ def from_pub(
+ cls,
+ messages: list[Reference],
+ channel: Reference,
+ operation: OperationSpec,
+ ) -> Self:
+ return cls(
+ action=Action.SEND,
+ messages=messages,
+ channel=channel,
+ bindings=OperationBinding.from_pub(operation.bindings),
+ summary=None,
+ description=None,
+ security=None,
+ tags=None,
+ )
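
The v3 operation drops v2's publish/subscribe inversion in favor of an explicit `action`: `from_sub` documents the application receiving, `from_pub` the application sending. A duck-typed check (the spec operation stand-in only needs a `bindings` attribute):

```python
from types import SimpleNamespace

from faststream.specification.asyncapi.v3_0_0.schema.operations import Action, Operation
from faststream.specification.asyncapi.v3_0_0.schema.utils import Reference

op = Operation.from_sub(
    messages=[Reference(**{"$ref": "#/channels/demo/messages/Msg"})],
    channel=Reference(**{"$ref": "#/channels/demo"}),
    operation=SimpleNamespace(bindings=None),
)
assert op.action is Action.RECEIVE

op = Operation.from_pub(
    messages=[],
    channel=Reference(**{"$ref": "#/channels/demo"}),
    operation=SimpleNamespace(bindings=None),
)
assert op.action is Action.SEND
```
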
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/schema.py b/faststream/specification/asyncapi/v3_0_0/schema/schema.py
new file mode 100644
index 0000000000..dc894ecb4e
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/schema.py
@@ -0,0 +1,34 @@
+from typing import Literal, Optional, Union
+
+from pydantic import Field
+
+from faststream.specification.asyncapi.v3_0_0.schema.channels import Channel
+from faststream.specification.asyncapi.v3_0_0.schema.components import Components
+from faststream.specification.asyncapi.v3_0_0.schema.info import ApplicationInfo
+from faststream.specification.asyncapi.v3_0_0.schema.operations import Operation
+from faststream.specification.asyncapi.v3_0_0.schema.servers import Server
+from faststream.specification.base.schema import BaseApplicationSchema
+
+
+class ApplicationSchema(BaseApplicationSchema):
+ """A class to represent an application schema.
+
+ Attributes:
+        asyncapi : version of the AsyncAPI specification
+        id : optional ID
+        defaultContentType : optional default content type
+        info : information about the schema
+        servers : optional dictionary of servers
+        channels : dictionary of channels
+        operations : dictionary of operations
+        components : optional components of the schema
+ """
+
+ info: ApplicationInfo
+
+ asyncapi: Union[Literal["3.0.0"], str] = "3.0.0"
+ id: Optional[str] = None
+ defaultContentType: Optional[str] = None
+ servers: Optional[dict[str, Server]] = None
+ channels: dict[str, Channel] = Field(default_factory=dict)
+ operations: dict[str, Operation] = Field(default_factory=dict)
+ components: Optional[Components] = None
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/servers.py b/faststream/specification/asyncapi/v3_0_0/schema/servers.py
new file mode 100644
index 0000000000..902ebc8f9f
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/servers.py
@@ -0,0 +1,56 @@
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.basic_types import AnyDict
+from faststream.specification.asyncapi.v2_6_0.schema import ServerVariable, Tag
+from faststream.specification.asyncapi.v2_6_0.schema.utils import Reference
+
+SecurityRequirement = list[dict[str, list[str]]]
+
+
+__all__ = (
+ "Server",
+ "ServerVariable",
+)
+
+
+class Server(BaseModel):
+ """A class to represent a server.
+
+ Attributes:
+ host : host of the server
+ pathname : pathname of the server
+ protocol : protocol used by the server
+ description : optional description of the server
+ protocolVersion : optional version of the protocol used by the server
+ tags : optional list of tags associated with the server
+ security : optional security requirement for the server
+ variables : optional dictionary of server variables
+
+ Note:
+        The attributes `description`, `protocolVersion`, `tags`, `security`, and `variables` are all optional.
+
+ Configurations:
+ If `PYDANTIC_V2` is True, the model configuration is set to allow extra attributes.
+ Otherwise, the `Config` class is defined with the `extra` attribute set to "allow".
+
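+    Example:
+        A minimal, illustrative instance (placeholder values):
+
+        >>> server = Server(host="localhost:9092", pathname="/", protocol="kafka")
+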
+ """
+
+ host: str
+ pathname: str
+ protocol: str
+ description: Optional[str] = None
+ protocolVersion: Optional[str] = None
+ tags: Optional[list[Union[Tag, AnyDict]]] = None
+ security: Optional[SecurityRequirement] = None
+ variables: Optional[dict[str, Union[ServerVariable, Reference]]] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/tag.py b/faststream/specification/asyncapi/v3_0_0/schema/tag.py
new file mode 100644
index 0000000000..e16c4f61cd
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/tag.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import Tag
+
+__all__ = ("Tag",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/utils.py b/faststream/specification/asyncapi/v3_0_0/schema/utils.py
new file mode 100644
index 0000000000..c53f3ce1a0
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/utils.py
@@ -0,0 +1,6 @@
+from faststream.specification.asyncapi.v2_6_0.schema import Parameter, Reference
+
+__all__ = (
+ "Parameter",
+ "Reference",
+)
diff --git a/faststream/cli/utils/__init__.py b/faststream/specification/base/__init__.py
similarity index 100%
rename from faststream/cli/utils/__init__.py
rename to faststream/specification/base/__init__.py
diff --git a/faststream/specification/base/info.py b/faststream/specification/base/info.py
new file mode 100644
index 0000000000..6e282dc19e
--- /dev/null
+++ b/faststream/specification/base/info.py
@@ -0,0 +1,25 @@
+from pydantic import BaseModel
+
+from faststream._internal._compat import PYDANTIC_V2
+
+
+class BaseApplicationInfo(BaseModel):
+ """A class to represent basic application information.
+
+ Attributes:
+ title : application title
+ version : application version
+ description : application description
+ """
+
+ title: str
+ version: str
+ description: str
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
diff --git a/faststream/specification/base/schema.py b/faststream/specification/base/schema.py
new file mode 100644
index 0000000000..828e1699b7
--- /dev/null
+++ b/faststream/specification/base/schema.py
@@ -0,0 +1,48 @@
+from typing import Any
+
+from pydantic import BaseModel
+
+from faststream._internal._compat import model_to_json, model_to_jsonable
+
+from .info import BaseApplicationInfo
+
+
+class BaseApplicationSchema(BaseModel):
+ """A class to represent a Pydantic-serializable application schema.
+
+ Attributes:
+ info : information about the schema
+
+ Methods:
+ to_jsonable() -> Any: Convert the schema to a JSON-serializable object.
+ to_json() -> str: Convert the schema to a JSON string.
+ to_yaml() -> str: Convert the schema to a YAML string.
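+
+    Example:
+        A minimal sketch (placeholder info values):
+
+        >>> s = BaseApplicationSchema(
+        ...     info=BaseApplicationInfo(title="t", version="0.1.0", description=""),
+        ... )
+        >>> isinstance(s.to_json(), str)
+        True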
+ """
+
+ info: BaseApplicationInfo
+
+ def to_jsonable(self) -> Any:
+ """Convert the schema to a JSON-serializable object."""
+ return model_to_jsonable(
+ self,
+ by_alias=True,
+ exclude_none=True,
+ )
+
+ def to_json(self) -> str:
+ """Convert the schema to a JSON string."""
+ return model_to_json(
+ self,
+ by_alias=True,
+ exclude_none=True,
+ )
+
+ def to_yaml(self) -> str:
+ """Convert the schema to a YAML string."""
+ from io import StringIO
+
+ import yaml
+
+ io = StringIO(initial_value="", newline="\n")
+ yaml.dump(self.to_jsonable(), io, sort_keys=False)
+ return io.getvalue()
diff --git a/faststream/specification/base/specification.py b/faststream/specification/base/specification.py
new file mode 100644
index 0000000000..e8e674b25e
--- /dev/null
+++ b/faststream/specification/base/specification.py
@@ -0,0 +1,20 @@
+from abc import abstractmethod
+from typing import Any, Protocol, runtime_checkable
+
+from .schema import BaseApplicationSchema
+
+
+@runtime_checkable
+class Specification(Protocol):
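+    """Protocol for objects exposing a serializable application schema.
+
+    Since the protocol is ``runtime_checkable``, ``isinstance`` only verifies
+    that a ``schema`` member exists (a structural, illustrative sketch):
+
+    >>> class _Dummy:
+    ...     schema = None  # stands in for a BaseApplicationSchema
+    >>> isinstance(_Dummy(), Specification)
+    True
+    """
+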
+ @property
+ @abstractmethod
+ def schema(self) -> BaseApplicationSchema: ...
+
+ def to_json(self) -> str:
+ return self.schema.to_json()
+
+ def to_jsonable(self) -> Any:
+ return self.schema.to_jsonable()
+
+ def to_yaml(self) -> str:
+ return self.schema.to_yaml()
diff --git a/faststream/specification/proto/__init__.py b/faststream/specification/proto/__init__.py
new file mode 100644
index 0000000000..3189e7cc8f
--- /dev/null
+++ b/faststream/specification/proto/__init__.py
@@ -0,0 +1,4 @@
+from .broker import ServerSpecification
+from .endpoint import EndpointSpecification
+
+__all__ = ("EndpointSpecification", "ServerSpecification")
diff --git a/faststream/specification/proto/broker.py b/faststream/specification/proto/broker.py
new file mode 100644
index 0000000000..225393b24e
--- /dev/null
+++ b/faststream/specification/proto/broker.py
@@ -0,0 +1,14 @@
+from collections.abc import Iterable
+from typing import Optional, Protocol, Union
+
+from faststream.security import BaseSecurity
+from faststream.specification.schema.extra import Tag, TagDict
+
+
+class ServerSpecification(Protocol):
+ url: Union[str, list[str]]
+ protocol: Optional[str]
+ protocol_version: Optional[str]
+ description: Optional[str]
+ tags: Iterable[Union[Tag, TagDict]]
+ security: Optional[BaseSecurity]
diff --git a/faststream/specification/proto/endpoint.py b/faststream/specification/proto/endpoint.py
new file mode 100644
index 0000000000..b0991d43f8
--- /dev/null
+++ b/faststream/specification/proto/endpoint.py
@@ -0,0 +1,65 @@
+from abc import abstractmethod
+from typing import Any, Generic, Optional, TypeVar
+
+from faststream._internal.proto import EndpointWrapper
+from faststream._internal.types import MsgType
+
+T = TypeVar("T")
+
+
+class EndpointSpecification(EndpointWrapper[MsgType], Generic[MsgType, T]):
+ """A class representing an asynchronous API operation: Pub or Sub."""
+
+ title_: Optional[str]
+ description_: Optional[str]
+ include_in_schema: bool
+
+ def __init__(
+ self,
+ *args: Any,
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ **kwargs: Any,
+ ) -> None:
+ self.title_ = title_
+ self.description_ = description_
+ self.include_in_schema = include_in_schema
+
+        # Continue the cooperative __init__ chain (next base class in the MRO)
+ super().__init__(*args, **kwargs)
+
+ @property
+ def name(self) -> str:
+ """Returns the name of the API operation."""
+ return self.title_ or self.get_default_name()
+
+ @abstractmethod
+ def get_default_name(self) -> str:
+ """Name property fallback."""
+ raise NotImplementedError
+
+ @property
+ def description(self) -> Optional[str]:
+ """Returns the description of the API operation."""
+ return self.description_ or self.get_default_description()
+
+ def get_default_description(self) -> Optional[str]:
+ """Description property fallback."""
+ return None
+
+ def schema(self) -> dict[str, T]:
+ """Returns the schema of the API operation as a dictionary of channel names and channel objects."""
+ if self.include_in_schema:
+ return self.get_schema()
+ return {}
+
+ @abstractmethod
+ def get_schema(self) -> dict[str, T]:
+ """Generate AsyncAPI schema."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def get_payloads(self) -> Any:
+ """Generate AsyncAPI payloads."""
+ raise NotImplementedError
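+
+
+# Note (illustrative): for an endpoint created without ``title_``, ``name``
+# falls back to ``get_default_name()``; ``description`` falls back to
+# ``get_default_description()`` the same way, and endpoints excluded from the
+# schema simply return an empty mapping from ``schema()``.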
diff --git a/faststream/specification/schema/__init__.py b/faststream/specification/schema/__init__.py
new file mode 100644
index 0000000000..009a6a63d7
--- /dev/null
+++ b/faststream/specification/schema/__init__.py
@@ -0,0 +1,29 @@
+from .extra import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+)
+from .message import Message
+from .operation import Operation
+from .publisher import PublisherSpec
+from .subscriber import SubscriberSpec
+
+__all__ = (
+ "Contact",
+ "ContactDict",
+ "ExternalDocs",
+ "ExternalDocsDict",
+ "License",
+ "LicenseDict",
+ "Message",
+ "Operation",
+ "PublisherSpec",
+ "SubscriberSpec",
+ "Tag",
+ "TagDict",
+)
diff --git a/faststream/specification/schema/bindings/__init__.py b/faststream/specification/schema/bindings/__init__.py
new file mode 100644
index 0000000000..c304608c5b
--- /dev/null
+++ b/faststream/specification/schema/bindings/__init__.py
@@ -0,0 +1,9 @@
+from .main import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/schema/bindings/amqp.py b/faststream/specification/schema/bindings/amqp.py
new file mode 100644
index 0000000000..f15201bb8e
--- /dev/null
+++ b/faststream/specification/schema/bindings/amqp.py
@@ -0,0 +1,79 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Literal, Optional
+
+if TYPE_CHECKING:
+ from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
+
+
+@dataclass
+class Queue:
+ name: str
+ durable: bool
+ exclusive: bool
+ auto_delete: bool
+
+ @classmethod
+ def from_queue(cls, queue: "RabbitQueue") -> "Queue":
+ return cls(
+ name=queue.name,
+ durable=queue.durable,
+ exclusive=queue.exclusive,
+ auto_delete=queue.auto_delete,
+ )
+
+
+@dataclass
+class Exchange:
+ type: Literal[
+ "default",
+ "direct",
+ "topic",
+ "fanout",
+ "headers",
+ "x-delayed-message",
+ "x-consistent-hash",
+ "x-modulus-hash",
+ ]
+
+ name: Optional[str] = None
+ durable: Optional[bool] = None
+ auto_delete: Optional[bool] = None
+
+ @classmethod
+ def from_exchange(cls, exchange: "RabbitExchange") -> "Exchange":
+ if not exchange.name:
+ return cls(type="default")
+ return cls(
+ type=exchange.type.value,
+ name=exchange.name,
+ durable=exchange.durable,
+ auto_delete=exchange.auto_delete,
+ )
+
+ @property
+ def is_respect_routing_key(self) -> bool:
+ """Is exchange respects routing key or not."""
+ return self.type in {
+ "default",
+ "direct",
+ "topic",
+ }
+
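+# For example (illustrative): ``Exchange(type="topic").is_respect_routing_key``
+# is ``True``, while ``Exchange(type="fanout").is_respect_routing_key`` is ``False``.
+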
+
+@dataclass
+class ChannelBinding:
+ queue: Queue
+ exchange: Exchange
+ virtual_host: str
+
+
+@dataclass
+class OperationBinding:
+ routing_key: Optional[str]
+ queue: Queue
+ exchange: Exchange
+ ack: bool
+ reply_to: Optional[str]
+ persist: Optional[bool]
+ mandatory: Optional[bool]
+ priority: Optional[int]
diff --git a/faststream/specification/schema/bindings/kafka.py b/faststream/specification/schema/bindings/kafka.py
new file mode 100644
index 0000000000..fc9d0867c8
--- /dev/null
+++ b/faststream/specification/schema/bindings/kafka.py
@@ -0,0 +1,40 @@
+"""AsyncAPI Kafka bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/kafka
+"""
+
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass
+class ChannelBinding:
+ """A class to represent a channel binding.
+
+ Attributes:
+ topic : optional string representing the topic
+ partitions : optional positive integer representing the number of partitions
+ replicas : optional positive integer representing the number of replicas
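+
+    Example (illustrative values):
+
+    >>> binding = ChannelBinding(topic="logs", partitions=3, replicas=1)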
+ """
+
+ topic: Optional[str]
+ partitions: Optional[int]
+ replicas: Optional[int]
+
+ # TODO:
+ # topicConfiguration
+
+
+@dataclass
+class OperationBinding:
+ """A class to represent an operation binding.
+
+ Attributes:
+ group_id : optional dictionary representing the group ID
+ client_id : optional dictionary representing the client ID
+ reply_to : optional dictionary representing the reply-to
+ """
+
+ group_id: Optional[dict[str, Any]]
+ client_id: Optional[dict[str, Any]]
+ reply_to: Optional[dict[str, Any]]
diff --git a/faststream/specification/schema/bindings/main.py b/faststream/specification/schema/bindings/main.py
new file mode 100644
index 0000000000..20402db1f7
--- /dev/null
+++ b/faststream/specification/schema/bindings/main.py
@@ -0,0 +1,48 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from faststream.specification.schema.bindings import (
+ amqp as amqp_bindings,
+ kafka as kafka_bindings,
+ nats as nats_bindings,
+ redis as redis_bindings,
+ sqs as sqs_bindings,
+)
+
+
+@dataclass
+class ChannelBinding:
+ """A class to represent channel bindings.
+
+ Attributes:
+ amqp : AMQP channel binding (optional)
+ kafka : Kafka channel binding (optional)
+ sqs : SQS channel binding (optional)
+        nats : NATS channel binding (optional)
+ redis : Redis channel binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.ChannelBinding] = None
+ kafka: Optional[kafka_bindings.ChannelBinding] = None
+ sqs: Optional[sqs_bindings.ChannelBinding] = None
+ nats: Optional[nats_bindings.ChannelBinding] = None
+ redis: Optional[redis_bindings.ChannelBinding] = None
+
+
+@dataclass
+class OperationBinding:
+ """A class to represent an operation binding.
+
+ Attributes:
+ amqp : AMQP operation binding (optional)
+ kafka : Kafka operation binding (optional)
+ sqs : SQS operation binding (optional)
+ nats : NATS operation binding (optional)
+ redis : Redis operation binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.OperationBinding] = None
+ kafka: Optional[kafka_bindings.OperationBinding] = None
+ sqs: Optional[sqs_bindings.OperationBinding] = None
+ nats: Optional[nats_bindings.OperationBinding] = None
+ redis: Optional[redis_bindings.OperationBinding] = None
diff --git a/faststream/specification/schema/bindings/nats.py b/faststream/specification/schema/bindings/nats.py
new file mode 100644
index 0000000000..412f29d557
--- /dev/null
+++ b/faststream/specification/schema/bindings/nats.py
@@ -0,0 +1,31 @@
+"""AsyncAPI NATS bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/nats
+"""
+
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass
+class ChannelBinding:
+ """A class to represent channel binding.
+
+ Attributes:
+ subject : subject of the channel binding
+ queue : optional queue for the channel binding
+ """
+
+ subject: str
+ queue: Optional[str]
+
+
+@dataclass
+class OperationBinding:
+ """A class to represent an operation binding.
+
+ Attributes:
+ reply_to : optional dictionary containing reply information
+ """
+
+ reply_to: Optional[dict[str, Any]]
diff --git a/faststream/specification/schema/bindings/redis.py b/faststream/specification/schema/bindings/redis.py
new file mode 100644
index 0000000000..17287aa5e4
--- /dev/null
+++ b/faststream/specification/schema/bindings/redis.py
@@ -0,0 +1,33 @@
+"""AsyncAPI Redis bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/redis
+"""
+
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass
+class ChannelBinding:
+ """A class to represent channel binding.
+
+ Attributes:
+        channel : the channel name
+        method : the method used for binding (ssubscribe, psubscribe, subscribe)
+        group_name : optional consumer group name (Redis streams)
+        consumer_name : optional consumer name within the group (Redis streams)
+ """
+
+ channel: str
+ method: Optional[str] = None
+ group_name: Optional[str] = None
+ consumer_name: Optional[str] = None
+
+
+@dataclass
+class OperationBinding:
+ """A class to represent an operation binding.
+
+ Attributes:
+ reply_to : optional dictionary containing reply information
+ """
+
+ reply_to: Optional[dict[str, Any]] = None
diff --git a/faststream/specification/schema/bindings/sqs.py b/faststream/specification/schema/bindings/sqs.py
new file mode 100644
index 0000000000..b516c4819a
--- /dev/null
+++ b/faststream/specification/schema/bindings/sqs.py
@@ -0,0 +1,33 @@
+"""AsyncAPI SQS bindings.
+
+References: https://github.com/asyncapi/bindings/tree/master/sqs
+"""
+
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass
+class ChannelBinding:
+ """A class to represent channel binding.
+
+ Attributes:
+ queue : a dictionary representing the queue
+ bindingVersion : a string representing the binding version (default: "custom")
+ """
+
+ queue: dict[str, Any]
+ bindingVersion: str = "custom"
+
+
+@dataclass
+class OperationBinding:
+ """A class to represent an operation binding.
+
+ Attributes:
+ replyTo : optional dictionary containing reply information
+ bindingVersion : version of the binding, default is "custom"
+ """
+
+ replyTo: Optional[dict[str, Any]] = None
+ bindingVersion: str = "custom"
diff --git a/faststream/specification/schema/extra/__init__.py b/faststream/specification/schema/extra/__init__.py
new file mode 100644
index 0000000000..f2417a905f
--- /dev/null
+++ b/faststream/specification/schema/extra/__init__.py
@@ -0,0 +1,15 @@
+from .contact import Contact, ContactDict
+from .external_docs import ExternalDocs, ExternalDocsDict
+from .license import License, LicenseDict
+from .tag import Tag, TagDict
+
+__all__ = (
+ "Contact",
+ "ContactDict",
+ "ExternalDocs",
+ "ExternalDocsDict",
+ "License",
+ "LicenseDict",
+ "Tag",
+ "TagDict",
+)
diff --git a/faststream/specification/schema/extra/contact.py b/faststream/specification/schema/extra/contact.py
new file mode 100644
index 0000000000..dfabbbacb3
--- /dev/null
+++ b/faststream/specification/schema/extra/contact.py
@@ -0,0 +1,20 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from pydantic import AnyHttpUrl
+from typing_extensions import Required, TypedDict
+
+from faststream._internal._compat import EmailStr
+
+
+class ContactDict(TypedDict, total=False):
+ name: Required[str]
+ url: AnyHttpUrl
+ email: EmailStr
+
+
+@dataclass
+class Contact:
+ name: str
+ url: Optional[AnyHttpUrl] = None
+ email: Optional[EmailStr] = None
diff --git a/faststream/specification/schema/extra/external_docs.py b/faststream/specification/schema/extra/external_docs.py
new file mode 100644
index 0000000000..600a6d3a95
--- /dev/null
+++ b/faststream/specification/schema/extra/external_docs.py
@@ -0,0 +1,15 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from typing_extensions import Required, TypedDict
+
+
+class ExternalDocsDict(TypedDict, total=False):
+ url: Required[str]
+ description: str
+
+
+@dataclass
+class ExternalDocs:
+ url: str
+ description: Optional[str] = None
diff --git a/faststream/specification/schema/extra/license.py b/faststream/specification/schema/extra/license.py
new file mode 100644
index 0000000000..7bd4039621
--- /dev/null
+++ b/faststream/specification/schema/extra/license.py
@@ -0,0 +1,16 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from pydantic import AnyHttpUrl
+from typing_extensions import Required, TypedDict
+
+
+class LicenseDict(TypedDict, total=False):
+ name: Required[str]
+ url: AnyHttpUrl
+
+
+@dataclass
+class License:
+ name: str
+ url: Optional[AnyHttpUrl] = None
diff --git a/faststream/specification/schema/extra/tag.py b/faststream/specification/schema/extra/tag.py
new file mode 100644
index 0000000000..1d62ed7491
--- /dev/null
+++ b/faststream/specification/schema/extra/tag.py
@@ -0,0 +1,19 @@
+from dataclasses import dataclass
+from typing import Optional, Union
+
+from typing_extensions import Required, TypedDict
+
+from .external_docs import ExternalDocs, ExternalDocsDict
+
+
+class TagDict(TypedDict, total=False):
+ name: Required[str]
+ description: str
+ external_docs: Union[ExternalDocs, ExternalDocsDict]
+
+
+@dataclass
+class Tag:
+ name: str
+ description: Optional[str] = None
+ external_docs: Optional[Union[ExternalDocs, ExternalDocsDict]] = None
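+
+
+# Both spellings are interchangeable wherever ``Union[Tag, TagDict]`` is
+# accepted, e.g. (illustrative): ``Tag(name="kafka")`` or ``{"name": "kafka"}``.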
diff --git a/faststream/specification/schema/message/__init__.py b/faststream/specification/schema/message/__init__.py
new file mode 100644
index 0000000000..6221895ab5
--- /dev/null
+++ b/faststream/specification/schema/message/__init__.py
@@ -0,0 +1,3 @@
+from .model import Message
+
+__all__ = ("Message",)
diff --git a/faststream/specification/schema/message/model.py b/faststream/specification/schema/message/model.py
new file mode 100644
index 0000000000..8b8c37f24a
--- /dev/null
+++ b/faststream/specification/schema/message/model.py
@@ -0,0 +1,11 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from faststream._internal.basic_types import AnyDict
+
+
+@dataclass
+class Message:
+ payload: AnyDict # JSON Schema
+
+ title: Optional[str]
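+
+
+# Illustrative: ``Message(payload={"type": "string"}, title="UserName")``
+# wraps the JSON Schema of a message body.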
diff --git a/faststream/specification/schema/operation/__init__.py b/faststream/specification/schema/operation/__init__.py
new file mode 100644
index 0000000000..85cbafe10a
--- /dev/null
+++ b/faststream/specification/schema/operation/__init__.py
@@ -0,0 +1,3 @@
+from .model import Operation
+
+__all__ = ("Operation",)
diff --git a/faststream/specification/schema/operation/model.py b/faststream/specification/schema/operation/model.py
new file mode 100644
index 0000000000..2e72e523e9
--- /dev/null
+++ b/faststream/specification/schema/operation/model.py
@@ -0,0 +1,11 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from faststream.specification.schema.bindings import OperationBinding
+from faststream.specification.schema.message import Message
+
+
+@dataclass
+class Operation:
+ message: Message
+ bindings: Optional[OperationBinding]
diff --git a/faststream/specification/schema/publisher.py b/faststream/specification/schema/publisher.py
new file mode 100644
index 0000000000..9f199e99bd
--- /dev/null
+++ b/faststream/specification/schema/publisher.py
@@ -0,0 +1,12 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from .bindings import ChannelBinding
+from .operation import Operation
+
+
+@dataclass
+class PublisherSpec:
+ description: Optional[str]
+ operation: Operation
+ bindings: Optional[ChannelBinding]
diff --git a/faststream/specification/schema/subscriber.py b/faststream/specification/schema/subscriber.py
new file mode 100644
index 0000000000..9d41177b4f
--- /dev/null
+++ b/faststream/specification/schema/subscriber.py
@@ -0,0 +1,12 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from .bindings import ChannelBinding
+from .operation import Operation
+
+
+@dataclass
+class SubscriberSpec:
+ description: Optional[str]
+ operation: Operation
+ bindings: Optional[ChannelBinding]
diff --git a/faststream/testing/__init__.py b/faststream/testing/__init__.py
deleted file mode 100644
index f1a3c33c12..0000000000
--- a/faststream/testing/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from faststream.testing.app import TestApp
-
-__all__ = ("TestApp",)
diff --git a/faststream/testing/app.py b/faststream/testing/app.py
deleted file mode 100644
index c9fc4aa632..0000000000
--- a/faststream/testing/app.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from contextlib import ExitStack
-from functools import partial
-from typing import TYPE_CHECKING, Any, Dict, Optional, Type, TypeVar
-
-from anyio.from_thread import start_blocking_portal
-
-from faststream.broker.core.usecase import BrokerUsecase
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.app import FastStream
- from faststream.types import SettingField
-
-Broker = TypeVar("Broker", bound=BrokerUsecase[Any, Any])
-
-
-class TestApp:
- """A class to represent a test application."""
-
- __test__ = False
-
- app: "FastStream"
- _extra_options: Dict[str, "SettingField"]
-
- def __init__(
- self,
- app: "FastStream",
- run_extra_options: Optional[Dict[str, "SettingField"]] = None,
- ) -> None:
- self.app = app
- self._extra_options = run_extra_options or {}
-
- def __enter__(self) -> "FastStream":
- with ExitStack() as stack:
- portal = stack.enter_context(start_blocking_portal())
-
- lifespan_context = self.app.lifespan_context(**self._extra_options)
- stack.enter_context(portal.wrap_async_context_manager(lifespan_context))
- portal.call(partial(self.app.start, **self._extra_options))
-
- @stack.callback
- def wait_shutdown() -> None:
- portal.call(self.app.stop)
-
- self.exit_stack = stack.pop_all()
-
- return self.app
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> None:
- self.exit_stack.close()
-
- async def __aenter__(self) -> "FastStream":
- self.lifespan_scope = self.app.lifespan_context(**self._extra_options)
- await self.lifespan_scope.__aenter__()
- await self.app.start(**self._extra_options)
- return self.app
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> None:
- """Exit the asynchronous context manager."""
- await self.app.stop()
- await self.lifespan_scope.__aexit__(exc_type, exc_val, exc_tb)
diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py
deleted file mode 100644
index d27eacf1f0..0000000000
--- a/faststream/testing/broker.py
+++ /dev/null
@@ -1,200 +0,0 @@
-import warnings
-from abc import abstractmethod
-from contextlib import asynccontextmanager, contextmanager
-from functools import partial
-from typing import (
- TYPE_CHECKING,
- Any,
- AsyncGenerator,
- Generator,
- Generic,
- List,
- Optional,
- Tuple,
- Type,
- TypeVar,
-)
-from unittest import mock
-from unittest.mock import MagicMock
-
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.testing.app import TestApp
-from faststream.utils.ast import is_contains_context_name
-from faststream.utils.functions import sync_fake_context
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream.broker.subscriber.proto import SubscriberProto
-
-
-Broker = TypeVar("Broker", bound=BrokerUsecase[Any, Any])
-
-
-class TestBroker(Generic[Broker]):
- """A class to represent a test broker."""
-
- # This is set so pytest ignores this class
- __test__ = False
-
- def __init__(
- self,
- broker: Broker,
- with_real: bool = False,
- connect_only: Optional[bool] = None,
- ) -> None:
- self.with_real = with_real
- self.broker = broker
-
- if connect_only is None:
- try:
- connect_only = is_contains_context_name(
- self.__class__.__name__,
- TestApp.__name__,
- )
- except Exception: # pragma: no cover
- warnings.warn(
- (
- "\nError `{e!r}` occurred at `{self.__class__.__name__}` AST parsing."
- "\n`connect_only` is set to `False` by default."
- ),
- category=RuntimeWarning,
- stacklevel=1,
- )
-
- connect_only = False
-
- self.connect_only = connect_only
- self._fake_subscribers: List[SubscriberProto[Any]] = []
-
- async def __aenter__(self) -> Broker:
- self._ctx = self._create_ctx()
- return await self._ctx.__aenter__()
-
- async def __aexit__(self, *args: Any) -> None:
- await self._ctx.__aexit__(*args)
-
- @asynccontextmanager
- async def _create_ctx(self) -> AsyncGenerator[Broker, None]:
- if self.with_real:
- self._fake_start(self.broker)
- context = sync_fake_context()
- else:
- context = self._patch_broker(self.broker)
-
- with context:
- async with self.broker:
- try:
- if not self.connect_only:
- await self.broker.start()
- yield self.broker
- finally:
- self._fake_close(self.broker)
-
- @contextmanager
- def _patch_broker(self, broker: Broker) -> Generator[None, None, None]:
- with mock.patch.object(
- broker,
- "start",
- wraps=partial(self._fake_start, broker),
- ), mock.patch.object(
- broker,
- "_connect",
- wraps=partial(self._fake_connect, broker),
- ), mock.patch.object(
- broker,
- "close",
- ), mock.patch.object(
- broker,
- "_connection",
- new=None,
- ), mock.patch.object(
- broker,
- "_producer",
- new=None,
- ), mock.patch.object(
- broker,
- "ping",
- return_value=True,
- ):
- yield
-
- def _fake_start(self, broker: Broker, *args: Any, **kwargs: Any) -> None:
- broker.setup()
-
- patch_broker_calls(broker)
-
- for p in broker._publishers.values():
- if getattr(p, "_fake_handler", None):
- continue
-
- sub, is_real = self.create_publisher_fake_subscriber(broker, p)
-
- if not is_real:
- self._fake_subscribers.append(sub)
-
- if not sub.calls:
-
- @sub
- async def publisher_response_subscriber(msg: Any) -> None:
- pass
-
- broker.setup_subscriber(sub)
-
- if is_real:
- mock = MagicMock()
- p.set_test(mock=mock, with_fake=False) # type: ignore[attr-defined]
- for h in sub.calls:
- h.handler.set_test()
- assert h.handler.mock # nosec B101
- h.handler.mock.side_effect = mock
-
- else:
- handler = sub.calls[0].handler
- handler.set_test()
- assert handler.mock # nosec B101
- p.set_test(mock=handler.mock, with_fake=True) # type: ignore[attr-defined]
-
- for subscriber in broker._subscribers.values():
- subscriber.running = True
-
- def _fake_close(
- self,
- broker: Broker,
- exc_type: Optional[Type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> None:
- for p in broker._publishers.values():
- if getattr(p, "_fake_handler", None):
- p.reset_test() # type: ignore[attr-defined]
-
- for sub in self._fake_subscribers:
- self.broker._subscribers.pop(hash(sub), None) # type: ignore[attr-defined]
- self._fake_subscribers = []
-
- for h in broker._subscribers.values():
- h.running = False
- for call in h.calls:
- call.handler.reset_test()
-
- @staticmethod
- @abstractmethod
- def create_publisher_fake_subscriber(
- broker: Broker, publisher: Any
- ) -> Tuple["SubscriberProto[Any]", bool]:
- raise NotImplementedError
-
- @staticmethod
- @abstractmethod
- async def _fake_connect(broker: Broker, *args: Any, **kwargs: Any) -> None:
- raise NotImplementedError
-
-
-def patch_broker_calls(broker: "BrokerUsecase[Any, Any]") -> None:
- """Patch broker calls."""
- broker._abc_start()
-
- for handler in broker._subscribers.values():
- for h in handler.calls:
- h.handler.set_test()
diff --git a/faststream/types.py b/faststream/types.py
deleted file mode 100644
index ae34858025..0000000000
--- a/faststream/types.py
+++ /dev/null
@@ -1,120 +0,0 @@
-from datetime import datetime
-from decimal import Decimal
-from typing import (
- Any,
- AsyncContextManager,
- Awaitable,
- Callable,
- ClassVar,
- Dict,
- List,
- Mapping,
- Optional,
- Protocol,
- Sequence,
- TypeVar,
- Union,
-)
-
-from typing_extensions import ParamSpec, TypeAlias
-
-AnyDict: TypeAlias = Dict[str, Any]
-AnyHttpUrl: TypeAlias = str
-
-F_Return = TypeVar("F_Return")
-F_Spec = ParamSpec("F_Spec")
-
-AnyCallable: TypeAlias = Callable[..., Any]
-NoneCallable: TypeAlias = Callable[..., None]
-AsyncFunc: TypeAlias = Callable[..., Awaitable[Any]]
-AsyncFuncAny: TypeAlias = Callable[[Any], Awaitable[Any]]
-
-DecoratedCallable: TypeAlias = AnyCallable
-DecoratedCallableNone: TypeAlias = NoneCallable
-
-Decorator: TypeAlias = Callable[[AnyCallable], AnyCallable]
-
-JsonArray: TypeAlias = Sequence["DecodedMessage"]
-
-JsonTable: TypeAlias = Dict[str, "DecodedMessage"]
-
-JsonDecodable: TypeAlias = Union[
- bool,
- bytes,
- bytearray,
- float,
- int,
- str,
- None,
-]
-
-DecodedMessage: TypeAlias = Union[
- JsonDecodable,
- JsonArray,
- JsonTable,
-]
-
-SendableArray: TypeAlias = Sequence["BaseSendableMessage"]
-
-SendableTable: TypeAlias = Dict[str, "BaseSendableMessage"]
-
-
-class StandardDataclass(Protocol):
- """Protocol to check type is dataclass."""
-
- __dataclass_fields__: ClassVar[Dict[str, Any]]
-
-
-BaseSendableMessage: TypeAlias = Union[
- JsonDecodable,
- Decimal,
- datetime,
- StandardDataclass,
- SendableTable,
- SendableArray,
- None,
-]
-
-try:
- from faststream._compat import BaseModel
-
- SendableMessage: TypeAlias = Union[
- BaseModel,
- BaseSendableMessage,
- ]
-
-except ImportError:
- SendableMessage: TypeAlias = BaseSendableMessage # type: ignore[no-redef,misc]
-
-SettingField: TypeAlias = Union[
- bool,
- str,
- List[Union[bool, str]],
- List[str],
- List[bool],
-]
-
-Lifespan: TypeAlias = Callable[..., AsyncContextManager[None]]
-
-
-class LoggerProto(Protocol):
- def log(
- self,
- level: int,
- msg: Any,
- /,
- *,
- exc_info: Any = None,
- extra: Optional[Mapping[str, Any]] = None,
- ) -> None: ...
-
-
-class _EmptyPlaceholder:
- def __repr__(self) -> str:
- return "EMPTY"
-
- def __eq__(self, other: object) -> bool:
- return isinstance(other, _EmptyPlaceholder)
-
-
-EMPTY: Any = _EmptyPlaceholder()
diff --git a/faststream/utils/__init__.py b/faststream/utils/__init__.py
deleted file mode 100644
index 18f6b4c7f5..0000000000
--- a/faststream/utils/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from fast_depends import Depends
-from fast_depends import inject as apply_types
-
-from faststream.utils.context import Context, ContextRepo, Header, Path, context
-from faststream.utils.no_cast import NoCast
-
-__all__ = (
- "Context",
- "ContextRepo",
- "Depends",
- "Header",
- "NoCast",
- "Path",
- "apply_types",
- "context",
-)
diff --git a/faststream/utils/ast.py b/faststream/utils/ast.py
deleted file mode 100644
index 6063dbeb7b..0000000000
--- a/faststream/utils/ast.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import ast
-import traceback
-from functools import lru_cache
-from pathlib import Path
-from typing import Iterator, List, Optional, Union, cast
-
-
-def is_contains_context_name(scip_name: str, name: str) -> bool:
- stack = traceback.extract_stack()[-3]
- tree = read_source_ast(stack.filename)
- node = cast(Union[ast.With, ast.AsyncWith], find_ast_node(tree, stack.lineno))
- context_calls = get_withitem_calls(node)
-
- try:
- pos = context_calls.index(scip_name)
- except ValueError:
- pos = 1
-
- return name in context_calls[pos:]
-
-
-@lru_cache
-def read_source_ast(filename: str) -> ast.Module:
- return ast.parse(Path(filename).read_text())
-
-
-def find_ast_node(module: ast.Module, lineno: Optional[int]) -> Optional[ast.AST]:
- if lineno is not None: # pragma: no branch
- for i in getattr(module, "body", ()):
- if i.lineno == lineno:
- return cast(ast.AST, i)
-
- r = find_ast_node(i, lineno)
- if r is not None:
- return r
-
- return None
-
-
-def find_withitems(node: Union[ast.With, ast.AsyncWith]) -> Iterator[ast.withitem]:
- if isinstance(node, (ast.With, ast.AsyncWith)):
- yield from node.items
-
- for i in getattr(node, "body", ()):
- yield from find_withitems(i)
-
-
-def get_withitem_calls(node: Union[ast.With, ast.AsyncWith]) -> List[str]:
- return [
- id
- for i in find_withitems(node)
- if (id := getattr(i.context_expr.func, "id", None)) # type: ignore[attr-defined]
- ]
diff --git a/faststream/utils/classes.py b/faststream/utils/classes.py
deleted file mode 100644
index 1bf053cbce..0000000000
--- a/faststream/utils/classes.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from typing import Any, ClassVar, Optional
-
-from typing_extensions import Self
-
-
-class Singleton:
- """A class to implement the Singleton design pattern.
-
- Attributes:
- _instance : the single instance of the class
-
- Methods:
- __new__ : creates a new instance of the class if it doesn't exist, otherwise returns the existing instance
- _drop : sets the instance to None, allowing a new instance to be created
- """
-
- _instance: ClassVar[Optional[Self]] = None
-
- def __new__(cls, *args: Any, **kwargs: Any) -> Self:
- """Create a singleton instance of a class.
-
- Args:
- *args: Variable length argument list
- **kwargs: Arbitrary keyword arguments
-
- Returns:
- The singleton instance of the class
- """
- if cls._instance is None:
- cls._instance = super().__new__(cls)
- return cls._instance
-
- @classmethod
- def _drop(cls) -> None:
- """Drop the instance of a class.
-
- Returns:
- None
- """
- cls._instance = None
diff --git a/faststream/utils/context/__init__.py b/faststream/utils/context/__init__.py
deleted file mode 100644
index 054ce3f1a5..0000000000
--- a/faststream/utils/context/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from faststream.utils.context.builders import Context, Header, Path
-from faststream.utils.context.repository import ContextRepo, context
-
-__all__ = (
- "Context",
- "ContextRepo",
- "Header",
- "Path",
- "context",
-)
diff --git a/faststream/utils/context/builders.py b/faststream/utils/context/builders.py
deleted file mode 100644
index 76e7499ba3..0000000000
--- a/faststream/utils/context/builders.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import Any, Callable, Optional
-
-from faststream.types import EMPTY
-from faststream.utils.context.types import Context as Context_
-
-
-def Context( # noqa: N802
- real_name: str = "",
- *,
- cast: bool = False,
- default: Any = EMPTY,
- initial: Optional[Callable[..., Any]] = None,
-) -> Any:
- return Context_(
- real_name=real_name,
- cast=cast,
- default=default,
- initial=initial,
- )
-
-
-def Header( # noqa: N802
- real_name: str = "",
- *,
- cast: bool = True,
- default: Any = EMPTY,
-) -> Any:
- return Context_(
- real_name=real_name,
- cast=cast,
- default=default,
- prefix="message.headers.",
- )
-
-
-def Path( # noqa: N802
- real_name: str = "",
- *,
- cast: bool = True,
- default: Any = EMPTY,
-) -> Any:
- return Context_(
- real_name=real_name,
- cast=cast,
- default=default,
- prefix="message.path.",
- )
diff --git a/faststream/utils/context/types.py b/faststream/utils/context/types.py
deleted file mode 100644
index 5ca17d7ff3..0000000000
--- a/faststream/utils/context/types.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from typing import Any, Callable, Optional
-
-from fast_depends.library import CustomField
-
-from faststream.types import EMPTY, AnyDict
-from faststream.utils.context.repository import context
-
-
-class Context(CustomField):
- """A class to represent a context.
-
- Attributes:
- param_name : name of the parameter
-
- Methods:
- __init__ : constructor method
- use : method to use the context
- """
-
- param_name: str
-
- def __init__(
- self,
- real_name: str = "",
- *,
- default: Any = EMPTY,
- initial: Optional[Callable[..., Any]] = None,
- cast: bool = False,
- prefix: str = "",
- ) -> None:
- """Initialize the object.
-
- Args:
- real_name: The real name of the object.
- default: The default value of the object.
- initial: The initial value builder.
- cast: Whether to cast the object.
- prefix: The prefix to be added to the name of the object.
-
- Raises:
- TypeError: If the default value is not provided.
- """
- self.name = real_name
- self.default = default
- self.prefix = prefix
- self.initial = initial
- super().__init__(
- cast=cast,
- required=(default is EMPTY),
- )
-
- def use(self, /, **kwargs: Any) -> AnyDict:
- """Use the given keyword arguments.
-
- Args:
- **kwargs: Keyword arguments to be used
-
- Returns:
- A dictionary containing the updated keyword arguments
- """
- name = f"{self.prefix}{self.name or self.param_name}"
-
- if EMPTY != ( # noqa: SIM300
- v := resolve_context_by_name(
- name=name,
- default=self.default,
- initial=self.initial,
- )
- ):
- kwargs[self.param_name] = v
-
- else:
- kwargs.pop(self.param_name, None)
-
- return kwargs
-
-
-def resolve_context_by_name(
- name: str,
- default: Any,
- initial: Optional[Callable[..., Any]],
-) -> Any:
- value: Any = EMPTY
-
- try:
- value = context.resolve(name)
-
- except (KeyError, AttributeError):
- if EMPTY != default: # noqa: SIM300
- value = default
-
- elif initial is not None:
- value = initial()
- context.set_global(name, value)
-
- return value
diff --git a/faststream/utils/data.py b/faststream/utils/data.py
deleted file mode 100644
index cf00f649ef..0000000000
--- a/faststream/utils/data.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from typing import Type, TypeVar
-
-from faststream.types import AnyDict
-
-TypedDictCls = TypeVar("TypedDictCls")
-
-
-def filter_by_dict(typed_dict: Type[TypedDictCls], data: AnyDict) -> TypedDictCls:
- """Filter a dictionary based on a typed dictionary.
-
- Args:
- typed_dict: The typed dictionary to filter by.
- data: The dictionary to filter.
-
- Returns:
- A new instance of the typed dictionary with only the keys present in the data dictionary.
- """
- annotations = typed_dict.__annotations__
- return typed_dict( # type: ignore
- {k: v for k, v in data.items() if k in annotations}
- )
diff --git a/faststream/utils/functions.py b/faststream/utils/functions.py
deleted file mode 100644
index 5e4ce4b2c8..0000000000
--- a/faststream/utils/functions.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from contextlib import asynccontextmanager, contextmanager
-from functools import wraps
-from typing import (
- Any,
- AsyncIterator,
- Awaitable,
- Callable,
- ContextManager,
- Iterator,
- Optional,
- Union,
- overload,
-)
-
-import anyio
-from fast_depends.core import CallModel
-from fast_depends.utils import run_async as call_or_await
-
-from faststream.types import F_Return, F_Spec
-
-__all__ = (
- "call_or_await",
- "drop_response_type",
- "fake_context",
- "timeout_scope",
- "to_async",
-)
-
-
-@overload
-def to_async(
- func: Callable[F_Spec, Awaitable[F_Return]],
-) -> Callable[F_Spec, Awaitable[F_Return]]: ...
-
-
-@overload
-def to_async(
- func: Callable[F_Spec, F_Return],
-) -> Callable[F_Spec, Awaitable[F_Return]]: ...
-
-
-def to_async(
- func: Union[
- Callable[F_Spec, F_Return],
- Callable[F_Spec, Awaitable[F_Return]],
- ],
-) -> Callable[F_Spec, Awaitable[F_Return]]:
- """Converts a synchronous function to an asynchronous function."""
-
- @wraps(func)
- async def to_async_wrapper(*args: F_Spec.args, **kwargs: F_Spec.kwargs) -> F_Return:
- """Wraps a function to make it asynchronous."""
- return await call_or_await(func, *args, **kwargs)
-
- return to_async_wrapper
-
-
-def timeout_scope(
- timeout: Optional[float] = 30,
- raise_timeout: bool = False,
-) -> ContextManager[anyio.CancelScope]:
- scope: Callable[[Optional[float]], ContextManager[anyio.CancelScope]]
- scope = anyio.fail_after if raise_timeout else anyio.move_on_after
-
- return scope(timeout)
-
-
-@asynccontextmanager
-async def fake_context(*args: Any, **kwargs: Any) -> AsyncIterator[None]:
- yield None
-
-
-@contextmanager
-def sync_fake_context(*args: Any, **kwargs: Any) -> Iterator[None]:
- yield None
-
-
-def drop_response_type(
- model: CallModel[F_Spec, F_Return],
-) -> CallModel[F_Spec, F_Return]:
- model.response_model = None
- return model
-
-
-async def return_input(x: Any) -> Any:
- return x
diff --git a/faststream/utils/no_cast.py b/faststream/utils/no_cast.py
deleted file mode 100644
index 6a96fbd029..0000000000
--- a/faststream/utils/no_cast.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from typing import Any
-
-from fast_depends.library import CustomField
-
-from faststream.types import AnyDict
-
-
-class NoCast(CustomField):
- """A class that represents a custom field without casting.
-
- You can use it to annotate fields, that should not be casted.
-
- Usage:
-
- `data: Annotated[..., NoCast()]`
- """
-
- def __init__(self) -> None:
- super().__init__(cast=False)
-
- def use(self, **kwargs: Any) -> AnyDict:
- return kwargs
diff --git a/pyproject.toml b/pyproject.toml
index 3adecc6dd5..9ba8380b18 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,7 +20,7 @@ keywords = [
"message brokers",
]
-requires-python = ">=3.8"
+requires-python = ">=3.9"
classifiers = [
"Development Status :: 5 - Production/Stable",
@@ -29,7 +29,6 @@ classifiers = [
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
- "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
@@ -57,7 +56,7 @@ dynamic = ["version"]
dependencies = [
"anyio>=3.7.1,<5",
- "fast-depends>=2.4.0b0,<3.0.0",
+ "fast-depends[pydantic]>=3.0.0a3,<4.0.0",
"typing-extensions>=4.8.0",
]
@@ -134,7 +133,6 @@ test-core = [
"pytest-asyncio==0.25.0; python_version >= '3.9'",
"pytest-asyncio==0.24.0; python_version == '3.8'",
"dirty-equals==0.8.0",
- "typing-extensions>=4.8.0,<4.12.1; python_version < '3.9'", # to fix dirty-equals
]
testing = [
@@ -148,8 +146,7 @@ testing = [
dev = [
"faststream[optionals,lint,testing,devdocs]",
- "pre-commit==3.5.0; python_version < '3.9'",
- "pre-commit==4.0.1; python_version >= '3.9'",
+ "pre-commit==4.0.1",
"detect-secrets==1.5.0",
]
@@ -173,10 +170,8 @@ exclude = ["/tests", "/docs"]
[tool.mypy]
files = ["faststream", "tests/mypy"]
strict = true
-python_version = "3.8"
+python_version = "3.9"
ignore_missing_imports = true
-install_types = true
-non_interactive = true
plugins = ["pydantic.mypy"]
# from https://blog.wolt.com/engineering/2021/09/30/professional-grade-mypy-configuration/
@@ -191,112 +186,6 @@ disallow_incomplete_defs = true
disallow_untyped_decorators = true
disallow_any_unimported = false
-[tool.ruff]
-fix = true
-line-length = 88
-target-version = "py38"
-include = [
- "faststream/**/*.py",
- "faststream/**/*.pyi",
- "tests/**/*.py",
- "docs/**/*.py",
- "pyproject.toml",
-]
-exclude = ["docs/docs_src"]
-
-[tool.ruff.lint]
-select = [
- "E", # pycodestyle errors https://docs.astral.sh/ruff/rules/#error-e
- "W", # pycodestyle warnings https://docs.astral.sh/ruff/rules/#warning-w
- "C90", # mccabe https://docs.astral.sh/ruff/rules/#mccabe-c90
- "N", # pep8-naming https://docs.astral.sh/ruff/rules/#pep8-naming-n
- "D", # pydocstyle https://docs.astral.sh/ruff/rules/#pydocstyle-d
- "I", # isort https://docs.astral.sh/ruff/rules/#isort-i
- "F", # pyflakes https://docs.astral.sh/ruff/rules/#pyflakes-f
- "ASYNC", # flake8-async https://docs.astral.sh/ruff/rules/#flake8-async-async
- "C4", # flake8-comprehensions https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4
- "B", # flake8-bugbear https://docs.astral.sh/ruff/rules/#flake8-bugbear-b
- "Q", # flake8-quotes https://docs.astral.sh/ruff/rules/#flake8-quotes-q
- "T20", # flake8-print https://docs.astral.sh/ruff/rules/#flake8-print-t20
- "SIM", # flake8-simplify https://docs.astral.sh/ruff/rules/#flake8-simplify-sim
- "PT", # flake8-pytest-style https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt
- "PTH", # flake8-use-pathlib https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
- "TCH", # flake8-type-checking https://docs.astral.sh/ruff/rules/#flake8-type-checking-tch
- "RUF", # Ruff-specific rules https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf
- "PERF", # Perflint https://docs.astral.sh/ruff/rules/#perflint-perf
- "UP", # pyupgrade https://docs.astral.sh/ruff/rules/#pyupgrade-up
-]
-
-ignore = [
- "ASYNC109", # own timeout implementation
-
- "E501", # line too long, handled by formatter later
- "C901", # too complex
-
- # todo pep8-naming
- "N817", # CamelCase `*` imported as acronym `*`
- "N815", # Variable `*` in class scope should not be mixedCase
- "N803", # Argument name `expandMessageExamples` should be lowercase
-
- # todo pydocstyle
- "D100", # missing docstring in public module
- "D101",
- "D102",
- "D103",
- "D104", # missing docstring in public package
- "D105", # missing docstring in magic methods
- "D106", # missing docstring in public nested class
- "D107", # missing docstring in __init__
-]
-
-[tool.ruff.lint.per-file-ignores]
-"tests/**" = [
- "D101", # docstrings
- "D102",
- "D103",
- "PLR2004", # magic-value-comparison
- "S101", # use assert
-]
-
-"docs/*.py" = [
- "D101", # docstrings
- "D102",
- "D103",
-]
-
-
-[tool.ruff.lint.isort]
-case-sensitive = true
-
-[tool.ruff.format]
-docstring-code-format = true
-
-[tool.ruff.lint.pydocstyle]
-convention = "google"
-
-[tool.ruff.lint.flake8-bugbear]
-extend-immutable-calls = [
- "faststream.Depends",
- "faststream.Context",
- "faststream.broker.fastapi.context.Context",
- "faststream.Header",
- "faststream.Path",
- "faststream.utils.Header",
- "faststream.utils.Path",
- "faststream.utils.Depends",
- "faststream.utils.Context",
- "faststream.utils.context.Depends",
- "faststream.utils.context.Context",
- "typer.Argument",
- "typer.Option",
- "pydantic.Field",
- "rocketry.args.Arg",
- "fastapi.Depends",
- "fastapi.Header",
- "fastapi.datastructures.Default",
- "kafka.partitioner.default.DefaultPartitioner",
-]
-
[tool.pytest.ini_options]
minversion = "7.0"
addopts = "-q -m 'not slow'"
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 0000000000..f3ad63fd76
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,165 @@
+# Configuration file example: https://docs.astral.sh/ruff/configuration/
+# All settings: https://docs.astral.sh/ruff/settings/
+
+fix = true
+preview = true
+
+line-length = 88
+indent-width = 4
+
+target-version = "py39"
+
+include = [
+ "faststream/**/*.py",
+ "faststream/**/*.pyi",
+ "tests/**/*.py",
+ "docs/**/*.py",
+ "pyproject.toml",
+]
+
+exclude = [
+ "docs/docs_src"
+]
+
+[format]
+quote-style = "double"
+indent-style = "space"
+docstring-code-format = false
+
+[lint]
+select = [
+ "ALL",
+]
+
+ignore = [
+ "A",
+ "FA",
+ "TD",
+ "FIX",
+ "SLOT",
+ "ARG",
+ "EXE",
+
+ "ASYNC109",
+ "ANN401",
+ "COM812",
+ "ISC001",
+ "TRY301",
+ "S101",
+ "SLF001",
+ "PLR0911",
+ "PLR0912",
+ "PLR0913",
+ "PLR2004",
+ "PYI036",
+ "PYI051",
+ "G004",
+
+ "E501", # line too long, handled by formatter later
+ "C901", # too complex
+
+ # preview
+ "CPY",
+ "PLC0415",
+ "PLC2701", # FIXME
+ "PLC2801",
+ "PLR6301",
+ "PLW1641",
+ "RUF029",
+
+ # pep8-naming
+ "N817", # CamelCase `*` imported as acronym `*`
+
+ # FIXME pydocstyle
+ "D100", # missing docstring in public module
+ "D101",
+ "D102",
+ "D103",
+ "D104", # missing docstring in public package
+ "D105", # missing docstring in magic methods
+ "D106", # missing docstring in public nested class
+ "D107", # missing docstring in __init__
+ "DOC201",
+ "DOC202",
+ "DOC402",
+ "DOC501",
+ "DOC502",
+
+ "FBT", # FIXME
+ "PLW2901", # FIXME
+ "BLE001", # FIXME
+ "S110", # FIXME
+ "PLR0917" # FIXME
+]
+
+[lint.per-file-ignores]
+"faststream/specification/**/*.py" = [
+ "ERA001",
+ "N815", # Variable `*` in class scope should not be mixedCase
+]
+
+# FIXME
+# "faststream/specification/asyncapi/**/*.py" = [
+# "ERA001",
+# "N815", # Variable `*` in class scope should not be mixedCase
+# ]
+
+"**/fastapi/**/*.py" = [
+ "N803", # Argument name `expandMessageExamples` should be lowercase
+]
+
+"**/_compat.py" = [
+ "PYI063",
+ "PLW3201",
+]
+
+"tests/**/*.py" = [
+ "ANN", # FIXME
+ "S",
+ "PLR0904",
+ "PLR0914",
+ "PLC1901",
+]
+
+"docs/*.py" = [
+ "ALL", # FIXME
+]
+
+[lint.isort]
+case-sensitive = true
+combine-as-imports = true
+force-wrap-aliases = true
+
+[lint.pydocstyle]
+convention = "google"
+ignore-decorators = ["typing.overload"]
+
+[lint.flake8-bugbear]
+extend-immutable-calls = [
+ "faststream.Header",
+ "faststream.Path",
+ "faststream.Depends",
+ "faststream.Context",
+ "faststream.Depends",
+ "faststream.params.Header",
+ "faststream.params.Path",
+ "faststream.params.Depends",
+ "faststream.params.Context",
+ "faststream.params.Depends",
+ "faststream._internal.fastapi.context.Context",
+ "typer.Argument",
+ "typer.Option",
+ "pydantic.Field",
+ "rocketry.args.Arg",
+ "fastapi.Depends",
+ "fastapi.Header",
+ "fastapi.datastructures.Default",
+ "kafka.partitioner.default.DefaultPartitioner",
+]
+
+[lint.flake8-pytest-style]
+fixture-parentheses = true
+mark-parentheses = true
+parametrize-names-type = "tuple"
+parametrize-values-type = "tuple"
+parametrize-values-row-type = "tuple"
diff --git a/tests/a_docs/confluent/ack/test_errors.py b/tests/a_docs/confluent/ack/test_errors.py
deleted file mode 100644
index 08017ba472..0000000000
--- a/tests/a_docs/confluent/ack/test_errors.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from unittest.mock import patch
-
-import pytest
-
-from faststream.confluent import TestApp, TestKafkaBroker
-from faststream.confluent.client import AsyncConfluentConsumer
-from tests.tools import spy_decorator
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-@pytest.mark.slow
-async def test_ack_exc():
- from docs.docs_src.confluent.ack.errors import app, broker, handle
-
- with patch.object(
- AsyncConfluentConsumer, "commit", spy_decorator(AsyncConfluentConsumer.commit)
- ) as m:
- async with TestKafkaBroker(broker, with_real=True), TestApp(app):
- await handle.wait_call(20)
-
- assert m.mock.call_count
diff --git a/tests/a_docs/confluent/additional_config/test_app.py b/tests/a_docs/confluent/additional_config/test_app.py
deleted file mode 100644
index b93aec1667..0000000000
--- a/tests/a_docs/confluent/additional_config/test_app.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pytest
-
-from docs.docs_src.confluent.additional_config.app import (
- HelloWorld,
- broker,
- on_hello_world,
-)
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish(HelloWorld(msg="First Hello"), "hello_world")
- on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello")))
diff --git a/tests/a_docs/confluent/basic/test_basic.py b/tests/a_docs/confluent/basic/test_basic.py
deleted file mode 100644
index 60828d8564..0000000000
--- a/tests/a_docs/confluent/basic/test_basic.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pytest
-
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_basic():
- from docs.docs_src.confluent.basic.basic import broker, on_input_data
-
- publisher = list(broker._publishers.values())[0] # noqa: RUF015
-
- async with TestKafkaBroker(broker) as br:
- await br.publish({"data": 1.0}, "input_data")
- on_input_data.mock.assert_called_once_with({"data": 1.0})
- publisher.mock.assert_called_once_with({"data": 2.0})
diff --git a/tests/a_docs/confluent/basic/test_cmd_run.py b/tests/a_docs/confluent/basic/test_cmd_run.py
deleted file mode 100644
index 46a63b4257..0000000000
--- a/tests/a_docs/confluent/basic/test_cmd_run.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import asyncio
-from unittest.mock import Mock
-
-import pytest
-from typer.testing import CliRunner
-
-from faststream.app import FastStream
-from faststream.cli.main import cli
-
-
-@pytest.fixture
-def confluent_basic_project():
- return "docs.docs_src.confluent.basic.basic:app"
-
-
-@pytest.mark.confluent
-def test_run_cmd(
- runner: CliRunner,
- mock: Mock,
- event: asyncio.Event,
- monkeypatch: pytest.MonkeyPatch,
- confluent_basic_project,
-):
- async def patched_run(self: FastStream, *args, **kwargs):
- await self.start()
- await self.stop()
- mock()
-
- with monkeypatch.context() as m:
- m.setattr(FastStream, "run", patched_run)
- r = runner.invoke(
- cli,
- [
- "run",
- confluent_basic_project,
- ],
- )
-
- assert r.exit_code == 0
- mock.assert_called_once()
diff --git a/tests/a_docs/confluent/batch_consuming_pydantic/test_app.py b/tests/a_docs/confluent/batch_consuming_pydantic/test_app.py
deleted file mode 100644
index 66539c1f06..0000000000
--- a/tests/a_docs/confluent/batch_consuming_pydantic/test_app.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import pytest
-
-from docs.docs_src.confluent.batch_consuming_pydantic.app import (
- HelloWorld,
- broker,
- handle_batch,
-)
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_me():
- async with TestKafkaBroker(broker):
- await broker.publish_batch(
- HelloWorld(msg="First Hello"),
- HelloWorld(msg="Second Hello"),
- topic="test_batch",
- )
- handle_batch.mock.assert_called_with(
- [dict(HelloWorld(msg="First Hello")), dict(HelloWorld(msg="Second Hello"))]
- )
diff --git a/tests/a_docs/confluent/consumes_basics/test_app.py b/tests/a_docs/confluent/consumes_basics/test_app.py
deleted file mode 100644
index ae1016cafd..0000000000
--- a/tests/a_docs/confluent/consumes_basics/test_app.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pytest
-
-from docs.docs_src.confluent.consumes_basics.app import (
- HelloWorld,
- broker,
- on_hello_world,
-)
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish(HelloWorld(msg="First Hello"), "hello_world")
- on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello")))
diff --git a/tests/a_docs/confluent/publish_batch/test_app.py b/tests/a_docs/confluent/publish_batch/test_app.py
deleted file mode 100644
index 9e3b3ecbf3..0000000000
--- a/tests/a_docs/confluent/publish_batch/test_app.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import pytest
-
-from docs.docs_src.confluent.publish_batch.app import (
- Data,
- broker,
- decrease_and_increase,
- on_input_data_1,
- on_input_data_2,
-)
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_batch_publish_decorator():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=2.0), "input_data_1")
-
- on_input_data_1.mock.assert_called_once_with(dict(Data(data=2.0)))
- decrease_and_increase.mock.assert_called_once_with(
- [dict(Data(data=1.0)), dict(Data(data=4.0))]
- )
-
-
-@pytest.mark.asyncio
-async def test_batch_publish_call():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=2.0), "input_data_2")
-
- on_input_data_2.mock.assert_called_once_with(dict(Data(data=2.0)))
- decrease_and_increase.mock.assert_called_once_with(
- [dict(Data(data=1.0)), dict(Data(data=4.0))]
- )
diff --git a/tests/a_docs/confluent/publish_batch/test_issues.py b/tests/a_docs/confluent/publish_batch/test_issues.py
deleted file mode 100644
index 1cbace9a89..0000000000
--- a/tests/a_docs/confluent/publish_batch/test_issues.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from typing import List
-
-import pytest
-
-from faststream import FastStream
-from faststream.confluent import KafkaBroker, TestKafkaBroker
-
-broker = KafkaBroker()
-batch_producer = broker.publisher("response", batch=True)
-
-
-@batch_producer
-@broker.subscriber("test")
-async def handle(msg: str) -> List[int]:
- return [1, 2, 3]
-
-
-app = FastStream(broker)
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish("", "test")
diff --git a/tests/a_docs/confluent/publish_example/test_app.py b/tests/a_docs/confluent/publish_example/test_app.py
deleted file mode 100644
index 84d0d61bd8..0000000000
--- a/tests/a_docs/confluent/publish_example/test_app.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import pytest
-
-from docs.docs_src.confluent.publish_example.app import (
- Data,
- broker,
- on_input_data,
- to_output_data,
-)
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=0.2), "input_data")
-
- on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
- to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
diff --git a/tests/a_docs/confluent/publish_with_partition_key/test_app.py b/tests/a_docs/confluent/publish_with_partition_key/test_app.py
deleted file mode 100644
index 8d2b98324a..0000000000
--- a/tests/a_docs/confluent/publish_with_partition_key/test_app.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import pytest
-
-from docs.docs_src.confluent.publish_with_partition_key.app import (
- Data,
- broker,
- on_input_data,
- to_output_data,
-)
-from faststream.confluent import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_app():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
-
- on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
- to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
-
-
-@pytest.mark.skip("we are not checking the key")
-@pytest.mark.asyncio
-async def test_keys():
- async with TestKafkaBroker(broker):
- # we should be able to publish a message with the key
- await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
-
- # we need to check the key as well
- on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)), key=b"my_key")
- to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)), key=b"key")
diff --git a/tests/a_docs/confluent/test_security.py b/tests/a_docs/confluent/test_security.py
deleted file mode 100644
index b77c91eb8a..0000000000
--- a/tests/a_docs/confluent/test_security.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import pytest
-
-from tests.brokers.confluent.test_security import patch_aio_consumer_and_producer
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_base_security():
- from docs.docs_src.confluent.security.basic import broker as basic_broker
-
- with patch_aio_consumer_and_producer() as producer:
- async with basic_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_scram256():
- from docs.docs_src.confluent.security.sasl_scram256 import (
- broker as scram256_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with scram256_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "SCRAM-SHA-256"
- call_kwargs["sasl_plain_username"] = "admin"
- call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert (
- producer_call_kwargs["security_protocol"]
- == call_kwargs["security_protocol"]
- )
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_scram512():
- from docs.docs_src.confluent.security.sasl_scram512 import (
- broker as scram512_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with scram512_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "SCRAM-SHA-512"
- call_kwargs["sasl_plain_username"] = "admin"
- call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert (
- producer_call_kwargs["security_protocol"]
- == call_kwargs["security_protocol"]
- )
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_plaintext():
- from docs.docs_src.confluent.security.plaintext import (
- broker as plaintext_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with plaintext_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "PLAIN"
- call_kwargs["sasl_plain_username"] = "admin"
- call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert (
- producer_call_kwargs["security_protocol"]
- == call_kwargs["security_protocol"]
- )
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_oathbearer():
- from docs.docs_src.confluent.security.sasl_oauthbearer import (
- broker as oauthbearer_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with oauthbearer_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "OAUTHBEARER"
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert (
- producer_call_kwargs["security_protocol"]
- == call_kwargs["security_protocol"]
- )
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_gssapi():
- from docs.docs_src.confluent.security.sasl_gssapi import (
- broker as gssapi_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with gssapi_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {
- "sasl_mechanism": "GSSAPI",
- "security_protocol": "SASL_SSL",
- }
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert (
- producer_call_kwargs["security_protocol"]
- == call_kwargs["security_protocol"]
- )
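All of the deleted security tests share one assertion idiom that is easy to misread: comparing dict item views with <= checks that every expected key/value pair occurs in the actual producer kwargs while ignoring any extras. A standalone sketch of the idiom (the values here are illustrative):

expected = {"sasl_mechanism": "PLAIN", "security_protocol": "SASL_SSL"}
actual = {
    "sasl_mechanism": "PLAIN",
    "security_protocol": "SASL_SSL",
    "bootstrap_servers": "localhost:9092",  # extra keys do not fail the check
}

# dict .items() views are set-like, so subset comparison works as long
# as the values are hashable.
assert expected.items() <= actual.items()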
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_basic.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_basic.py
deleted file mode 100644
index a2167425a5..0000000000
--- a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_basic.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from docs.docs_src.getting_started.asyncapi.asyncapi_customization.basic import app
-from faststream.asyncapi.generate import get_app_schema
-
-
-def test_basic_customization():
- schema = get_app_schema(app).to_jsonable()
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {
- "input_data:OnInputData": {
- "bindings": {
- "kafka": {"bindingVersion": "0.4.0", "topic": "input_data"}
- },
- "servers": ["development"],
- "subscribe": {
- "message": {
- "$ref": "#/components/messages/input_data:OnInputData:Message"
- }
- },
- },
- "output_data:Publisher": {
- "bindings": {
- "kafka": {"bindingVersion": "0.4.0", "topic": "output_data"}
- },
- "publish": {
- "message": {
- "$ref": "#/components/messages/output_data:Publisher:Message"
- }
- },
- "servers": ["development"],
- },
- },
- "components": {
- "messages": {
- "input_data:OnInputData:Message": {
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {
- "$ref": "#/components/schemas/OnInputData:Message:Payload"
- },
- "title": "input_data:OnInputData:Message",
- },
- "output_data:Publisher:Message": {
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {
- "$ref": "#/components/schemas/output_data:PublisherPayload"
- },
- "title": "output_data:Publisher:Message",
- },
- },
- "schemas": {
- "OnInputData:Message:Payload": {"title": "OnInputData:Message:Payload"},
- "output_data:PublisherPayload": {},
- },
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "localhost:9092",
- }
- },
- }
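The deleted test pinned the entire generated AsyncAPI document, which is strict but brittle. A smaller sketch of the same snapshot style asserts only the stable parts; the broker URL and handler name are illustrative, and the "input_data:OnInputData" channel key follows the naming visible in the deleted snapshot:

from faststream import FastStream
from faststream.asyncapi.generate import get_app_schema
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


@broker.subscriber("input_data")
async def on_input_data(msg: dict) -> None: ...


def test_schema_sketch() -> None:
    schema = get_app_schema(app).to_jsonable()

    # Pin only the fields that should survive unrelated schema changes.
    assert schema["asyncapi"] == "2.6.0"
    assert "input_data:OnInputData" in schema["channels"]
    assert schema["servers"]["development"]["protocol"] == "kafka"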
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_broker.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_broker.py
deleted file mode 100644
index 25c886853d..0000000000
--- a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_broker.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from docs.docs_src.getting_started.asyncapi.asyncapi_customization.custom_broker import (
- app,
-)
-from faststream.asyncapi.generate import get_app_schema
-
-
-def test_broker_customization():
- schema = get_app_schema(app).to_jsonable()
-
- assert schema["servers"] == {
- "development": {
- "url": "non-sensitive-url:9092",
- "protocol": "kafka",
- "description": "Kafka broker running locally",
- "protocolVersion": "auto",
- }
- }
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
deleted file mode 100644
index 31eead7a3b..0000000000
--- a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from dirty_equals import IsPartialDict
-
-from docs.docs_src.getting_started.asyncapi.asyncapi_customization.custom_handler import (
- app,
-)
-from faststream.asyncapi.generate import get_app_schema
-
-
-def test_handler_customization():
- schema = get_app_schema(app).to_jsonable()
-
- (subscriber_key, subscriber_value), (publisher_key, publisher_value) = schema[
- "channels"
- ].items()
-
- assert subscriber_key == "input_data:Consume", subscriber_key
- assert subscriber_value == IsPartialDict(
- {
- "servers": ["development"],
- "bindings": {"kafka": {"topic": "input_data", "bindingVersion": "0.4.0"}},
- "subscribe": {
- "message": {"$ref": "#/components/messages/input_data:Consume:Message"}
- },
- }
- ), subscriber_value
- desc = subscriber_value["description"]
- assert ( # noqa: PT018
- "Consumer function\n\n" in desc
- and "Args:\n" in desc
- and " msg: input msg" in desc
- ), desc
-
- assert publisher_key == "output_data:Produce", publisher_key
- assert publisher_value == {
- "description": "My publisher description",
- "servers": ["development"],
- "bindings": {"kafka": {"topic": "output_data", "bindingVersion": "0.4.0"}},
- "publish": {
- "message": {"$ref": "#/components/messages/output_data:Produce:Message"}
- },
- }
diff --git a/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py b/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py
deleted file mode 100644
index fa686ad864..0000000000
--- a/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-from faststream.confluent import TestKafkaBroker
-from tests.marks import pydantic_v2
-from tests.mocks import mock_pydantic_settings_env
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-async def test():
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.cli.confluent_context import app, broker
-
- async with TestKafkaBroker(broker), TestApp(app, {"env": ""}):
- assert context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py b/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py
deleted file mode 100644
index 8f1cb886c9..0000000000
--- a/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-from faststream.kafka import TestKafkaBroker
-from tests.marks import pydantic_v2
-from tests.mocks import mock_pydantic_settings_env
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-async def test():
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.cli.kafka_context import app, broker
-
- async with TestKafkaBroker(broker), TestApp(app, {"env": ""}):
- assert context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/cli/nats/test_nats_context.py b/tests/a_docs/getting_started/cli/nats/test_nats_context.py
deleted file mode 100644
index 3f6764861b..0000000000
--- a/tests/a_docs/getting_started/cli/nats/test_nats_context.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-from faststream.nats import TestNatsBroker
-from tests.marks import pydantic_v2
-from tests.mocks import mock_pydantic_settings_env
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-async def test():
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.cli.nats_context import app, broker
-
- async with TestNatsBroker(broker), TestApp(app, {"env": ""}):
- assert context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py b/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py
deleted file mode 100644
index 71d55dcb34..0000000000
--- a/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-from faststream.rabbit import TestRabbitBroker
-from tests.marks import pydantic_v2
-from tests.mocks import mock_pydantic_settings_env
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-async def test():
- with mock_pydantic_settings_env(
- {"host": "amqp://guest:guest@localhost:5673/"} # pragma: allowlist secret
- ):
- from docs.docs_src.getting_started.cli.rabbit_context import app, broker
-
- async with TestRabbitBroker(broker), TestApp(app, {"env": ".env"}):
- assert (
- context.get("settings").host
- == "amqp://guest:guest@localhost:5673/" # pragma: allowlist secret
- )
diff --git a/tests/a_docs/getting_started/cli/redis/test_redis_context.py b/tests/a_docs/getting_started/cli/redis/test_redis_context.py
deleted file mode 100644
index 1696bbcf61..0000000000
--- a/tests/a_docs/getting_started/cli/redis/test_redis_context.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-from faststream.redis import TestRedisBroker
-from tests.marks import pydantic_v2
-from tests.mocks import mock_pydantic_settings_env
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-async def test():
- with mock_pydantic_settings_env({"host": "redis://localhost:6380"}):
- from docs.docs_src.getting_started.cli.redis_context import app, broker
-
- async with TestRedisBroker(broker), TestApp(app, {"env": ".env"}):
- assert context.get("settings").host == "redis://localhost:6380"
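The five CLI-context tests deleted above are variations of a single pattern: fake the environment before importing the docs module, so pydantic-settings reads the mocked values at import time, then hand TestApp the extra CLI options that "faststream run" would normally forward. A sketch using the redis variant; mock_pydantic_settings_env is the repo-internal helper these tests imported from tests.mocks:

import pytest

from faststream import TestApp, context
from faststream.redis import TestRedisBroker
from tests.marks import pydantic_v2
from tests.mocks import mock_pydantic_settings_env


@pydantic_v2
@pytest.mark.asyncio
async def test_cli_context_sketch():
    with mock_pydantic_settings_env({"host": "redis://localhost:6380"}):
        # Imported lazily so the settings object is built from the mock.
        from docs.docs_src.getting_started.cli.redis_context import app, broker

        async with TestRedisBroker(broker), TestApp(app, {"env": ".env"}):
            assert context.get("settings").host == "redis://localhost:6380"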
diff --git a/tests/a_docs/getting_started/context/test_annotated.py b/tests/a_docs/getting_started/context/test_annotated.py
deleted file mode 100644
index 4e07a231d1..0000000000
--- a/tests/a_docs/getting_started/context/test_annotated.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import pytest
-
-from tests.marks import (
- python39,
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@python39
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_annotated_kafka():
- from docs.docs_src.getting_started.context.kafka.annotated import (
- base_handler,
- broker,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@python39
-@pytest.mark.asyncio
-@require_confluent
-async def test_annotated_confluent():
- from docs.docs_src.getting_started.context.confluent.annotated import (
- base_handler,
- broker,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@python39
-@pytest.mark.asyncio
-@require_aiopika
-async def test_annotated_rabbit():
- from docs.docs_src.getting_started.context.rabbit.annotated import (
- base_handler,
- broker,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@python39
-@pytest.mark.asyncio
-@require_nats
-async def test_annotated_nats():
- from docs.docs_src.getting_started.context.nats.annotated import (
- base_handler,
- broker,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@python39
-@pytest.mark.asyncio
-@require_redis
-async def test_annotated_redis():
- from docs.docs_src.getting_started.context.redis.annotated import (
- base_handler,
- broker,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/context/test_base.py b/tests/a_docs/getting_started/context/test_base.py
deleted file mode 100644
index d2fa65ebe2..0000000000
--- a/tests/a_docs/getting_started/context/test_base.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_base_kafka():
- from docs.docs_src.getting_started.context.kafka.base import base_handler, broker
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_base_confluent():
- from docs.docs_src.getting_started.context.confluent.base import (
- base_handler,
- broker,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_base_rabbit():
- from docs.docs_src.getting_started.context.rabbit.base import base_handler, broker
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_base_nats():
- from docs.docs_src.getting_started.context.nats.base import base_handler, broker
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_base_redis():
- from docs.docs_src.getting_started.context.redis.base import base_handler, broker
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br:
- await br.publish("Hi!", "test")
-
- base_handler.mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/context/test_initial.py b/tests/a_docs/getting_started/context/test_initial.py
deleted file mode 100644
index 799bfd173a..0000000000
--- a/tests/a_docs/getting_started/context/test_initial.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import pytest
-
-from faststream import context
-from tests.marks import (
- python39,
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@python39
-@require_aiokafka
-async def test_kafka():
- from docs.docs_src.getting_started.context.kafka.initial import broker
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish("", "test-topic")
- await br.publish("", "test-topic")
-
- assert context.get("collector") == ["", ""]
- context.clear()
-
-
-@pytest.mark.asyncio
-@python39
-@require_confluent
-async def test_confluent():
- from docs.docs_src.getting_started.context.confluent.initial import broker
- from faststream.confluent import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish("", "test-topic")
- await br.publish("", "test-topic")
-
- assert context.get("collector") == ["", ""]
- context.clear()
-
-
-@pytest.mark.asyncio
-@python39
-@require_aiopika
-async def test_rabbit():
- from docs.docs_src.getting_started.context.rabbit.initial import broker
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("", "test-queue")
- await br.publish("", "test-queue")
-
- assert context.get("collector") == ["", ""]
- context.clear()
-
-
-@pytest.mark.asyncio
-@python39
-@require_nats
-async def test_nats():
- from docs.docs_src.getting_started.context.nats.initial import broker
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br:
- await br.publish("", "test-subject")
- await br.publish("", "test-subject")
-
- assert context.get("collector") == ["", ""]
- context.clear()
-
-
-@pytest.mark.asyncio
-@python39
-@require_redis
-async def test_redis():
- from docs.docs_src.getting_started.context.redis.initial import broker
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br:
- await br.publish("", "test-channel")
- await br.publish("", "test-channel")
-
- assert context.get("collector") == ["", ""]
- context.clear()
diff --git a/tests/a_docs/getting_started/dependencies/basic/test_depends.py b/tests/a_docs/getting_started/dependencies/basic/test_depends.py
deleted file mode 100644
index 5c9027d5eb..0000000000
--- a/tests/a_docs/getting_started/dependencies/basic/test_depends.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_depends_kafka():
- from docs.docs_src.getting_started.dependencies.basic.kafka.depends import (
- broker,
- handler,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker):
- await broker.publish({}, "test")
- handler.mock.assert_called_once_with({})
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_depends_confluent():
- from docs.docs_src.getting_started.dependencies.basic.confluent.depends import (
- broker,
- handler,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker):
- await broker.publish({}, "test")
- handler.mock.assert_called_once_with({})
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_depends_rabbit():
- from docs.docs_src.getting_started.dependencies.basic.rabbit.depends import (
- broker,
- handler,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker):
- await broker.publish({}, "test")
- handler.mock.assert_called_once_with({})
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_depends_nats():
- from docs.docs_src.getting_started.dependencies.basic.nats.depends import (
- broker,
- handler,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker):
- await broker.publish({}, "test")
- handler.mock.assert_called_once_with({})
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_depends_redis():
- from docs.docs_src.getting_started.dependencies.basic.redis.depends import (
- broker,
- handler,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker):
- await broker.publish({}, "test")
- handler.mock.assert_called_once_with({})
diff --git a/tests/a_docs/getting_started/dependencies/test_basic.py b/tests/a_docs/getting_started/dependencies/test_basic.py
deleted file mode 100644
index 79add9edc5..0000000000
--- a/tests/a_docs/getting_started/dependencies/test_basic.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import require_aiokafka
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_basic_kafka():
- from docs.docs_src.getting_started.dependencies.basic_kafka import (
- app,
- broker,
- handle,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with(
- {
- "name": "John",
- "user_id": 1,
- }
- )
diff --git a/tests/a_docs/getting_started/index/test_basic.py b/tests/a_docs/getting_started/index/test_basic.py
deleted file mode 100644
index ed05ff61d4..0000000000
--- a/tests/a_docs/getting_started/index/test_basic.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_quickstart_index_kafka():
- from docs.docs_src.getting_started.index.base_kafka import base_handler, broker
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish("", "test")
-
- base_handler.mock.assert_called_once_with("")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_quickstart_index_confluent():
- from docs.docs_src.getting_started.index.base_confluent import base_handler, broker
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker) as br:
- await br.publish("", "test")
-
- base_handler.mock.assert_called_once_with("")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_quickstart_index_rabbit():
- from docs.docs_src.getting_started.index.base_rabbit import base_handler, broker
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("", "test")
-
- base_handler.mock.assert_called_once_with("")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_quickstart_index_nats():
- from docs.docs_src.getting_started.index.base_nats import base_handler, broker
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br:
- await br.publish("", "test")
-
- base_handler.mock.assert_called_once_with("")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_quickstart_index_redis():
- from docs.docs_src.getting_started.index.base_redis import base_handler, broker
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br:
- await br.publish("", "test")
-
- base_handler.mock.assert_called_once_with("")
diff --git a/tests/a_docs/getting_started/lifespan/test_basic.py b/tests/a_docs/getting_started/lifespan/test_basic.py
deleted file mode 100644
index e301441660..0000000000
--- a/tests/a_docs/getting_started/lifespan/test_basic.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-from tests.marks import (
- pydantic_v2,
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-from tests.mocks import mock_pydantic_settings_env
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-@require_aiopika
-async def test_rabbit_basic_lifespan():
- from faststream.rabbit import TestRabbitBroker
-
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.lifespan.rabbit.basic import app, broker
-
- async with TestRabbitBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_kafka_basic_lifespan():
- from faststream.kafka import TestKafkaBroker
-
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.lifespan.kafka.basic import app, broker
-
- async with TestKafkaBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-@require_confluent
-async def test_confluent_basic_lifespan():
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.lifespan.confluent.basic import app, broker
-
- async with TestConfluentKafkaBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-@require_nats
-async def test_nats_basic_lifespan():
- from faststream.nats import TestNatsBroker
-
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.lifespan.nats.basic import app, broker
-
- async with TestNatsBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
-
-
-@pydantic_v2
-@pytest.mark.asyncio
-@require_redis
-async def test_redis_basic_lifespan():
- from faststream.redis import TestRedisBroker
-
- with mock_pydantic_settings_env({"host": "localhost"}):
- from docs.docs_src.getting_started.lifespan.redis.basic import app, broker
-
- async with TestRedisBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/lifespan/test_ml.py b/tests/a_docs/getting_started/lifespan/test_ml.py
deleted file mode 100644
index 0060f0719a..0000000000
--- a/tests/a_docs/getting_started/lifespan/test_ml.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_rabbit_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.rabbit.ml import app, broker, predict
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_kafka_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.kafka.ml import app, broker, predict
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_confluent_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.confluent.ml import app, broker, predict
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_nats_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.nats.ml import app, broker, predict
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_redis_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.redis.ml import app, broker, predict
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
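Every lifespan ML test deleted here asserts the same RPC round trip: with the test broker active, publish(..., rpc=True) returns the handler's own return value, so state prepared on startup can be checked directly. A self-contained sketch with an inline handler standing in for the docs app (the real tests load a model in a lifespan hook; the handler below is deliberately simplified):

import pytest

from faststream import FastStream, TestApp
from faststream.kafka import KafkaBroker, TestKafkaBroker

broker = KafkaBroker()
app = FastStream(broker)


@broker.subscriber("test")
async def predict(x: float) -> dict:
    # Stands in for a model.predict(x) call against state that the
    # lifespan hook would normally prepare.
    return {"result": 42.0}


@pytest.mark.asyncio
async def test_rpc_round_trip():
    async with TestKafkaBroker(broker), TestApp(app):
        # rpc=True returns the subscriber's result to the publisher.
        assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
        predict.mock.assert_called_once_with(1.0)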
diff --git a/tests/a_docs/getting_started/lifespan/test_ml_context.py b/tests/a_docs/getting_started/lifespan/test_ml_context.py
deleted file mode 100644
index e239e831a9..0000000000
--- a/tests/a_docs/getting_started/lifespan/test_ml_context.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_rabbit_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.rabbit.ml_context import (
- app,
- broker,
- predict,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_kafka_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.kafka.ml_context import (
- app,
- broker,
- predict,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_confluent_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.confluent.ml_context import (
- app,
- broker,
- predict,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_nats_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.nats.ml_context import (
- app,
- broker,
- predict,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_redis_ml_lifespan():
- from docs.docs_src.getting_started.lifespan.redis.ml_context import (
- app,
- broker,
- predict,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker), TestApp(app):
- assert await broker.publish(1.0, "test", rpc=True) == {"result": 42.0}
-
- predict.mock.assert_called_once_with(1.0)
diff --git a/tests/a_docs/getting_started/lifespan/test_multi.py b/tests/a_docs/getting_started/lifespan/test_multi.py
deleted file mode 100644
index eb272f3e2a..0000000000
--- a/tests/a_docs/getting_started/lifespan/test_multi.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import pytest
-
-from faststream import TestApp, context
-
-
-@pytest.mark.asyncio
-async def test_multi_lifespan():
- from docs.docs_src.getting_started.lifespan.multiple import app
-
- async with TestApp(app):
- assert context.get("field") == 1
diff --git a/tests/a_docs/getting_started/lifespan/test_testing.py b/tests/a_docs/getting_started/lifespan/test_testing.py
deleted file mode 100644
index 796675e1f0..0000000000
--- a/tests/a_docs/getting_started/lifespan/test_testing.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import pytest
-
-from tests.marks import (
- python39,
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@python39
-@require_redis
-async def test_lifespan_redis():
- from docs.docs_src.getting_started.lifespan.redis.testing import (
- test_lifespan as _test_lifespan_red,
- )
-
- await _test_lifespan_red()
-
-
-@pytest.mark.asyncio
-@python39
-@require_confluent
-async def test_lifespan_confluent():
- from docs.docs_src.getting_started.lifespan.confluent.testing import (
- test_lifespan as _test_lifespan_confluent,
- )
-
- await _test_lifespan_confluent()
-
-
-@pytest.mark.asyncio
-@python39
-@require_aiokafka
-async def test_lifespan_kafka():
- from docs.docs_src.getting_started.lifespan.kafka.testing import (
- test_lifespan as _test_lifespan_k,
- )
-
- await _test_lifespan_k()
-
-
-@pytest.mark.asyncio
-@python39
-@require_aiopika
-async def test_lifespan_rabbit():
- from docs.docs_src.getting_started.lifespan.rabbit.testing import (
- test_lifespan as _test_lifespan_r,
- )
-
- await _test_lifespan_r()
-
-
-@pytest.mark.asyncio
-@python39
-@require_nats
-async def test_lifespan_nats():
- from docs.docs_src.getting_started.lifespan.nats.testing import (
- test_lifespan as _test_lifespan_n,
- )
-
- await _test_lifespan_n()
diff --git a/tests/a_docs/getting_started/publishing/test_broker.py b/tests/a_docs/getting_started/publishing/test_broker.py
deleted file mode 100644
index 94947dddd1..0000000000
--- a/tests/a_docs/getting_started/publishing/test_broker.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_broker_kafka():
- from docs.docs_src.getting_started.publishing.kafka.broker import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_broker_confluent():
- from docs.docs_src.getting_started.publishing.confluent.broker import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_broker_rabbit():
- from docs.docs_src.getting_started.publishing.rabbit.broker import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_broker_nats():
- from docs.docs_src.getting_started.publishing.nats.broker import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_broker_redis():
- from docs.docs_src.getting_started.publishing.redis.broker import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/publishing/test_decorator.py b/tests/a_docs/getting_started/publishing/test_decorator.py
deleted file mode 100644
index e97d65e567..0000000000
--- a/tests/a_docs/getting_started/publishing/test_decorator.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_decorator_kafka():
- from docs.docs_src.getting_started.publishing.kafka.decorator import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
- next(iter(broker._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_decorator_confluent():
- from docs.docs_src.getting_started.publishing.confluent.decorator import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
- next(iter(broker._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_decorator_rabbit():
- from docs.docs_src.getting_started.publishing.rabbit.decorator import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
- next(iter(broker._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_decorator_nats():
- from docs.docs_src.getting_started.publishing.nats.decorator import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
- next(iter(broker._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_decorator_redis():
- from docs.docs_src.getting_started.publishing.redis.decorator import (
- app,
- broker,
- handle,
- handle_next,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with("")
- handle_next.mock.assert_called_once_with("Hi!")
- next(iter(broker._publishers.values())).mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/publishing/test_direct.py b/tests/a_docs/getting_started/publishing/test_direct.py
deleted file mode 100644
index d7be3a59a0..0000000000
--- a/tests/a_docs/getting_started/publishing/test_direct.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_handle_kafka():
- from docs.docs_src.getting_started.publishing.kafka.direct_testing import (
- test_handle as test_handle_k,
- )
-
- await test_handle_k()
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_handle_confluent():
- from docs.docs_src.getting_started.publishing.confluent.direct_testing import (
- test_handle as test_handle_confluent,
- )
-
- await test_handle_confluent()
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_handle_rabbit():
- from docs.docs_src.getting_started.publishing.rabbit.direct_testing import (
- test_handle as test_handle_r,
- )
-
- await test_handle_r()
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_handle_nats():
- from docs.docs_src.getting_started.publishing.nats.direct_testing import (
- test_handle as test_handle_n,
- )
-
- await test_handle_n()
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_handle_redis():
- from docs.docs_src.getting_started.publishing.redis.direct_testing import (
- test_handle as test_handle_red,
- )
-
- await test_handle_red()
diff --git a/tests/a_docs/getting_started/publishing/test_object.py b/tests/a_docs/getting_started/publishing/test_object.py
deleted file mode 100644
index 48f3e4b0a9..0000000000
--- a/tests/a_docs/getting_started/publishing/test_object.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_handle_kafka():
- from docs.docs_src.getting_started.publishing.kafka.object_testing import (
- test_handle as test_handle_k,
- )
-
- await test_handle_k()
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_handle_confluent():
- from docs.docs_src.getting_started.publishing.confluent.object_testing import (
- test_handle as test_handle_confluent,
- )
-
- await test_handle_confluent()
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_handle_rabbit():
- from docs.docs_src.getting_started.publishing.rabbit.object_testing import (
- test_handle as test_handle_r,
- )
-
- await test_handle_r()
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_handle_nats():
- from docs.docs_src.getting_started.publishing.nats.object_testing import (
- test_handle as test_handle_n,
- )
-
- await test_handle_n()
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_handle_redis():
- from docs.docs_src.getting_started.publishing.redis.object_testing import (
- test_handle as test_handle_red,
- )
-
- await test_handle_red()
diff --git a/tests/a_docs/getting_started/routers/test_base.py b/tests/a_docs/getting_started/routers/test_base.py
deleted file mode 100644
index 1d77657cf2..0000000000
--- a/tests/a_docs/getting_started/routers/test_base.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_base_router_kafka():
- from docs.docs_src.getting_started.routers.kafka.router import (
- app,
- broker,
- handle,
- handle_response,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
- handle_response.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_base_router_confluent():
- from docs.docs_src.getting_started.routers.confluent.router import (
- app,
- broker,
- handle,
- handle_response,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
- handle_response.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_base_router_rabbit():
- from docs.docs_src.getting_started.routers.rabbit.router import (
- app,
- broker,
- handle,
- handle_response,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
- handle_response.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_base_router_nats():
- from docs.docs_src.getting_started.routers.nats.router import (
- app,
- broker,
- handle,
- handle_response,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
- handle_response.mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_base_router_redis():
- from docs.docs_src.getting_started.routers.redis.router import (
- app,
- broker,
- handle,
- handle_response,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker), TestApp(app):
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
- handle_response.mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/routers/test_delay.py b/tests/a_docs/getting_started/routers/test_delay.py
deleted file mode 100644
index 733c278fe5..0000000000
--- a/tests/a_docs/getting_started/routers/test_delay.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_delay_router_kafka():
- from docs.docs_src.getting_started.routers.kafka.router_delay import (
- app,
- broker,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_delay_router_confluent():
- from docs.docs_src.getting_started.routers.confluent.router_delay import (
- app,
- broker,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_delay_router_rabbit():
- from docs.docs_src.getting_started.routers.rabbit.router_delay import (
- app,
- broker,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_delay_router_nats():
- from docs.docs_src.getting_started.routers.nats.router_delay import (
- app,
- broker,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_delay_router_redis():
- from docs.docs_src.getting_started.routers.redis.router_delay import (
- app,
- broker,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/routers/test_delay_equal.py b/tests/a_docs/getting_started/routers/test_delay_equal.py
deleted file mode 100644
index 8e34b434fc..0000000000
--- a/tests/a_docs/getting_started/routers/test_delay_equal.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_delay_router_kafka():
- from docs.docs_src.getting_started.routers.kafka.delay_equal import (
- app,
- broker,
- )
- from docs.docs_src.getting_started.routers.kafka.router_delay import (
- broker as control_broker,
- )
- from faststream.kafka import TestKafkaBroker
-
- assert broker._subscribers.keys() == control_broker._subscribers.keys()
- assert broker._publishers.keys() == control_broker._publishers.keys()
-
- async with TestKafkaBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_delay_router_confluent():
- from docs.docs_src.getting_started.routers.confluent.delay_equal import (
- app,
- broker,
- )
- from docs.docs_src.getting_started.routers.confluent.router_delay import (
- broker as control_broker,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- assert broker._subscribers.keys() == control_broker._subscribers.keys()
- assert broker._publishers.keys() == control_broker._publishers.keys()
-
- async with TestConfluentKafkaBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_delay_router_rabbit():
- from docs.docs_src.getting_started.routers.rabbit.delay_equal import (
- app,
- broker,
- )
- from docs.docs_src.getting_started.routers.rabbit.router_delay import (
- broker as control_broker,
- )
- from faststream.rabbit import TestRabbitBroker
-
- assert broker._subscribers.keys() == control_broker._subscribers.keys()
- assert broker._publishers.keys() == control_broker._publishers.keys()
-
- async with TestRabbitBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_delay_router_nats():
- from docs.docs_src.getting_started.routers.nats.delay_equal import (
- app,
- broker,
- )
- from docs.docs_src.getting_started.routers.nats.router_delay import (
- broker as control_broker,
- )
- from faststream.nats import TestNatsBroker
-
- assert broker._subscribers.keys() == control_broker._subscribers.keys()
- assert broker._publishers.keys() == control_broker._publishers.keys()
-
- async with TestNatsBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_delay_router_redis():
- from docs.docs_src.getting_started.routers.redis.delay_equal import (
- app,
- broker,
- )
- from docs.docs_src.getting_started.routers.redis.router_delay import (
- broker as control_broker,
- )
- from faststream.redis import TestRedisBroker
-
- assert broker._subscribers.keys() == control_broker._subscribers.keys()
- assert broker._publishers.keys() == control_broker._publishers.keys()
-
- async with TestRedisBroker(broker) as br, TestApp(app):
- next(iter(br._subscribers.values())).calls[
- 0
- ].handler.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- next(iter(br._publishers.values())).mock.assert_called_once_with("Hi!")
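The delay-router tests above all dig the auto-registered handler and publisher out of private broker registries, splitting next(iter(...)) and .calls[0] awkwardly across lines. The same assertions read better with named temporaries; a sketch mirroring the kafka variant, with the same imports as the deleted test:

import pytest

from docs.docs_src.getting_started.routers.kafka.router_delay import app, broker
from faststream import TestApp
from faststream.kafka import TestKafkaBroker


@pytest.mark.asyncio
async def test_delay_router_sketch():
    async with TestKafkaBroker(broker) as br, TestApp(app):
        # Registration order is preserved, so the first entry of each
        # private registry is the router-registered object.
        subscriber = next(iter(br._subscribers.values()))
        publisher = next(iter(br._publishers.values()))

        subscriber.calls[0].handler.mock.assert_called_once_with(
            {"name": "John", "user_id": 1}
        )
        publisher.mock.assert_called_once_with("Hi!")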
diff --git a/tests/a_docs/getting_started/subscription/test_annotated.py b/tests/a_docs/getting_started/subscription/test_annotated.py
deleted file mode 100644
index e331c5f037..0000000000
--- a/tests/a_docs/getting_started/subscription/test_annotated.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import pytest
-from pydantic import ValidationError
-
-from tests.marks import (
- python39,
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@python39
-class BaseCase:
- async def test_handle(self, setup):
- broker, handle, test_class = setup
-
- async with test_class(broker) as br:
- await br.publish({"name": "John", "user_id": 1}, "test")
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
- assert handle.mock is None
-
- async def test_validation_error(self, setup):
- broker, handle, test_class = setup
-
- async with test_class(broker) as br:
- with pytest.raises(ValidationError):
- await br.publish("wrong message", "test")
-
- handle.mock.assert_called_once_with("wrong message")
-
-
-@require_aiokafka
-class TestKafka(BaseCase):
- @pytest.fixture(scope="class")
- def setup(self):
- from docs.docs_src.getting_started.subscription.kafka.pydantic_annotated_fields import (
- broker,
- handle,
- )
- from faststream.kafka import TestKafkaBroker
-
- return (broker, handle, TestKafkaBroker)
-
-
-@require_confluent
-class TestConfluent(BaseCase):
- @pytest.fixture(scope="class")
- def setup(self):
- from docs.docs_src.getting_started.subscription.confluent.pydantic_annotated_fields import (
- broker,
- handle,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- return (broker, handle, TestConfluentKafkaBroker)
-
-
-@require_aiopika
-class TestRabbit(BaseCase):
- @pytest.fixture(scope="class")
- def setup(self):
- from docs.docs_src.getting_started.subscription.rabbit.pydantic_annotated_fields import (
- broker,
- handle,
- )
- from faststream.rabbit import TestRabbitBroker
-
- return (broker, handle, TestRabbitBroker)
-
-
-@require_nats
-class TestNats(BaseCase):
- @pytest.fixture(scope="class")
- def setup(self):
- from docs.docs_src.getting_started.subscription.nats.pydantic_annotated_fields import (
- broker,
- handle,
- )
- from faststream.nats import TestNatsBroker
-
- return (broker, handle, TestNatsBroker)
-
-
-@require_redis
-class TestRedis(BaseCase):
- @pytest.fixture(scope="class")
- def setup(self):
- from docs.docs_src.getting_started.subscription.redis.pydantic_annotated_fields import (
- broker,
- handle,
- )
- from faststream.redis import TestRedisBroker
-
- return (broker, handle, TestRedisBroker)
diff --git a/tests/a_docs/getting_started/subscription/test_pydantic.py b/tests/a_docs/getting_started/subscription/test_pydantic.py
deleted file mode 100644
index 8a5ba2eac0..0000000000
--- a/tests/a_docs/getting_started/subscription/test_pydantic.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_pydantic_model_rabbit():
- from docs.docs_src.getting_started.subscription.rabbit.pydantic_model import (
- broker,
- handle,
- )
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br:
- await br.publish({"name": "John", "user_id": 1}, "test-queue")
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_pydantic_model_kafka():
- from docs.docs_src.getting_started.subscription.kafka.pydantic_model import (
- broker,
- handle,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish({"name": "John", "user_id": 1}, "test-topic")
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_pydantic_model_confluent():
- from docs.docs_src.getting_started.subscription.confluent.pydantic_model import (
- broker,
- handle,
- )
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker) as br:
- await br.publish({"name": "John", "user_id": 1}, "test-topic")
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_pydantic_model_nats():
- from docs.docs_src.getting_started.subscription.nats.pydantic_model import (
- broker,
- handle,
- )
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br:
- await br.publish({"name": "John", "user_id": 1}, "test-subject")
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_pydantic_model_redis():
- from docs.docs_src.getting_started.subscription.redis.pydantic_model import (
- broker,
- handle,
- )
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br:
- await br.publish({"name": "John", "user_id": 1}, "test-channel")
- handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
diff --git a/tests/a_docs/getting_started/subscription/test_real.py b/tests/a_docs/getting_started/subscription/test_real.py
deleted file mode 100644
index 2862cfa5dc..0000000000
--- a/tests/a_docs/getting_started/subscription/test_real.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.kafka
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_handle_kafka():
- from docs.docs_src.getting_started.subscription.kafka.real_testing import (
- test_handle as test_handle_k,
- )
-
- await test_handle_k()
-
-
-@pytest.mark.kafka
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_validate_kafka():
- from docs.docs_src.getting_started.subscription.kafka.real_testing import (
- test_validation_error as test_validation_error_k,
- )
-
- await test_validation_error_k()
-
-
-@pytest.mark.confluent
-@pytest.mark.asyncio
-@require_confluent
-async def test_handle_confluent():
- from docs.docs_src.getting_started.subscription.confluent.real_testing import (
- test_handle as test_handle_confluent,
- )
-
- await test_handle_confluent()
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-@require_confluent
-async def test_validate_confluent():
- from docs.docs_src.getting_started.subscription.confluent.real_testing import (
- test_validation_error as test_validation_error_confluent,
- )
-
- await test_validation_error_confluent()
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-@require_aiopika
-async def test_handle_rabbit():
- from docs.docs_src.getting_started.subscription.rabbit.real_testing import (
- test_handle as test_handle_r,
- )
-
- await test_handle_r()
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-@require_aiopika
-async def test_validate_rabbit():
- from docs.docs_src.getting_started.subscription.rabbit.real_testing import (
- test_validation_error as test_validation_error_r,
- )
-
- await test_validation_error_r()
-
-
-@pytest.mark.asyncio
-@pytest.mark.nats
-@require_nats
-async def test_handle_nats():
- from docs.docs_src.getting_started.subscription.nats.real_testing import (
- test_handle as test_handle_n,
- )
-
- await test_handle_n()
-
-
-@pytest.mark.asyncio
-@pytest.mark.nats
-@require_nats
-async def test_validate_nats():
- from docs.docs_src.getting_started.subscription.nats.real_testing import (
- test_validation_error as test_validation_error_n,
- )
-
- await test_validation_error_n()
-
-
-@pytest.mark.asyncio
-@pytest.mark.redis
-@require_redis
-async def test_handle_redis():
- from docs.docs_src.getting_started.subscription.redis.real_testing import (
- test_handle as test_handle_red,
- )
-
- await test_handle_red()
-
-
-@pytest.mark.asyncio
-@pytest.mark.redis
-@require_redis
-async def test_validate_redis():
- from docs.docs_src.getting_started.subscription.redis.real_testing import (
- test_validation_error as test_validation_error_red,
- )
-
- await test_validation_error_red()
diff --git a/tests/a_docs/getting_started/subscription/test_testing.py b/tests/a_docs/getting_started/subscription/test_testing.py
deleted file mode 100644
index bb60dcc318..0000000000
--- a/tests/a_docs/getting_started/subscription/test_testing.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.kafka
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_handle_kafka():
- from docs.docs_src.getting_started.subscription.kafka.testing import (
- test_handle as test_handle_k,
- )
-
- await test_handle_k()
-
-
-@pytest.mark.kafka
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_validate_kafka():
- from docs.docs_src.getting_started.subscription.kafka.testing import (
- test_validation_error as test_validation_error_k,
- )
-
- await test_validation_error_k()
-
-
-@pytest.mark.confluent
-@pytest.mark.asyncio
-@require_confluent
-async def test_handle_confluent():
- from docs.docs_src.getting_started.subscription.confluent.testing import (
- test_handle as test_handle_confluent,
- )
-
- await test_handle_confluent()
-
-
-@pytest.mark.asyncio
-@pytest.mark.confluent
-@require_confluent
-async def test_validate_confluent():
- from docs.docs_src.getting_started.subscription.confluent.testing import (
- test_validation_error as test_validation_error_confluent,
- )
-
- await test_validation_error_confluent()
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-@require_aiopika
-async def test_handle_rabbit():
- from docs.docs_src.getting_started.subscription.rabbit.testing import (
- test_handle as test_handle_r,
- )
-
- await test_handle_r()
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-@require_aiopika
-async def test_validate_rabbit():
- from docs.docs_src.getting_started.subscription.rabbit.testing import (
- test_validation_error as test_validation_error_r,
- )
-
- await test_validation_error_r()
-
-
-@pytest.mark.asyncio
-@pytest.mark.nats
-@require_nats
-async def test_handle_nats():
- from docs.docs_src.getting_started.subscription.nats.testing import (
- test_handle as test_handle_n,
- )
-
- await test_handle_n()
-
-
-@pytest.mark.asyncio
-@pytest.mark.nats
-@require_nats
-async def test_validate_nats():
- from docs.docs_src.getting_started.subscription.nats.testing import (
- test_validation_error as test_validation_error_n,
- )
-
- await test_validation_error_n()
-
-
-@pytest.mark.asyncio
-@pytest.mark.redis
-@require_redis
-async def test_handle_redis():
- from docs.docs_src.getting_started.subscription.redis.testing import (
- test_handle as test_handle_rd,
- )
-
- await test_handle_rd()
-
-
-@pytest.mark.asyncio
-@pytest.mark.redis
-@require_redis
-async def test_validate_redis():
- from docs.docs_src.getting_started.subscription.redis.testing import (
- test_validation_error as test_validation_error_rd,
- )
-
- await test_validation_error_rd()
diff --git a/tests/a_docs/index/test_basic.py b/tests/a_docs/index/test_basic.py
deleted file mode 100644
index e56f220300..0000000000
--- a/tests/a_docs/index/test_basic.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_index_kafka_base():
- from docs.docs_src.index.kafka.basic import broker, handle_msg
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker) as br:
- await br.publish({"user": "John", "user_id": 1}, "in-topic")
-
- handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
-
- list(br._publishers.values())[0].mock.assert_called_once_with( # noqa: RUF015
- "User: 1 - John registered"
- )
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_index_confluent_base():
- from docs.docs_src.index.confluent.basic import broker, handle_msg
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(broker) as br:
- await br.publish({"user": "John", "user_id": 1}, "in-topic")
-
- handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
-
- list(br._publishers.values())[0].mock.assert_called_once_with( # noqa: RUF015
- "User: 1 - John registered"
- )
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_index_rabbit_base():
- from docs.docs_src.index.rabbit.basic import broker, handle_msg
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(broker) as br:
- await br.publish({"user": "John", "user_id": 1}, "in-queue")
-
- handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
-
- list(br._publishers.values())[0].mock.assert_called_once_with( # noqa: RUF015
- "User: 1 - John registered"
- )
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_index_nats_base():
- from docs.docs_src.index.nats.basic import broker, handle_msg
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(broker) as br:
- await br.publish({"user": "John", "user_id": 1}, "in-subject")
-
- handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
-
- list(br._publishers.values())[0].mock.assert_called_once_with( # noqa: RUF015
- "User: 1 - John registered"
- )
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_index_redis_base():
- from docs.docs_src.index.redis.basic import broker, handle_msg
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(broker) as br:
- await br.publish({"user": "John", "user_id": 1}, "in-channel")
-
- handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
-
- list(br._publishers.values())[0].mock.assert_called_once_with( # noqa: RUF015
- "User: 1 - John registered"
- )
diff --git a/tests/a_docs/index/test_pydantic.py b/tests/a_docs/index/test_pydantic.py
deleted file mode 100644
index 977ff484c9..0000000000
--- a/tests/a_docs/index/test_pydantic.py
+++ /dev/null
@@ -1,93 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_kafka_correct():
- from docs.docs_src.index.kafka.test import test_correct as test_k_correct
-
- await test_k_correct()
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_kafka_invalid():
- from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid
-
- await test_k_invalid()
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_confluent_correct():
- from docs.docs_src.index.confluent.test import (
- test_correct as test_confluent_correct,
- )
-
- await test_confluent_correct()
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_confluent_invalid():
- from docs.docs_src.index.confluent.test import (
- test_invalid as test_confluent_invalid,
- )
-
- await test_confluent_invalid()
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_rabbit_correct():
- from docs.docs_src.index.rabbit.test import test_correct as test_r_correct
-
- await test_r_correct()
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_rabbit_invalid():
- from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid
-
- await test_r_invalid()
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_nats_correct():
- from docs.docs_src.index.nats.test import test_correct as test_n_correct
-
- await test_n_correct()
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_nats_invalid():
- from docs.docs_src.index.nats.test import test_invalid as test_n_invalid
-
- await test_n_invalid()
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_redis_correct():
- from docs.docs_src.index.redis.test import test_correct as test_red_correct
-
- await test_red_correct()
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_redis_invalid():
- from docs.docs_src.index.redis.test import test_invalid as test_red_invalid
-
- await test_red_invalid()
diff --git a/tests/a_docs/integration/fastapi/test_base.py b/tests/a_docs/integration/fastapi/test_base.py
deleted file mode 100644
index 205db8d785..0000000000
--- a/tests/a_docs/integration/fastapi/test_base.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import pytest
-from fastapi.testclient import TestClient
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_fastapi_kafka_base():
- from docs.docs_src.integrations.fastapi.kafka.base import app, hello, router
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(router.broker) as br:
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- await br.publish({"m": {}}, "test")
-
- hello.mock.assert_called_once_with({"m": {}})
-
- list(br._publishers.values())[0].mock.assert_called_with( # noqa: RUF015
- {"response": "Hello, Kafka!"}
- )
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_fastapi_confluent_base():
- from docs.docs_src.integrations.fastapi.confluent.base import app, hello, router
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- async with TestConfluentKafkaBroker(router.broker) as br:
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- await br.publish({"m": {}}, "test")
-
- hello.mock.assert_called_once_with({"m": {}})
-
- list(br._publishers.values())[0].mock.assert_called_with( # noqa: RUF015
- {"response": "Hello, Kafka!"}
- )
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_fastapi_rabbit_base():
- from docs.docs_src.integrations.fastapi.rabbit.base import app, hello, router
- from faststream.rabbit import TestRabbitBroker
-
- async with TestRabbitBroker(router.broker) as br:
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- await br.publish({"m": {}}, "test")
-
- hello.mock.assert_called_once_with({"m": {}})
-
- list(br._publishers.values())[0].mock.assert_called_with( # noqa: RUF015
- {"response": "Hello, Rabbit!"}
- )
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_fastapi_nats_base():
- from docs.docs_src.integrations.fastapi.nats.base import app, hello, router
- from faststream.nats import TestNatsBroker
-
- async with TestNatsBroker(router.broker) as br:
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- await br.publish({"m": {}}, "test")
-
- hello.mock.assert_called_once_with({"m": {}})
-
- list(br._publishers.values())[0].mock.assert_called_with( # noqa: RUF015
- {"response": "Hello, NATS!"}
- )
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_fastapi_redis_base():
- from docs.docs_src.integrations.fastapi.redis.base import app, hello, router
- from faststream.redis import TestRedisBroker
-
- async with TestRedisBroker(router.broker) as br:
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- await br.publish({"m": {}}, "test")
-
- hello.mock.assert_called_once_with({"m": {}})
-
- list(br._publishers.values())[0].mock.assert_called_with( # noqa: RUF015
- {"response": "Hello, Redis!"}
- )
diff --git a/tests/a_docs/integration/fastapi/test_depends.py b/tests/a_docs/integration/fastapi/test_depends.py
deleted file mode 100644
index 0f798fd922..0000000000
--- a/tests/a_docs/integration/fastapi/test_depends.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import pytest
-from fastapi.testclient import TestClient
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_fastapi_kafka_depends():
- from docs.docs_src.integrations.fastapi.kafka.depends import app, router
- from faststream.kafka import TestKafkaBroker
-
- @router.subscriber("test")
- async def handler(): ...
-
- async with TestKafkaBroker(router.broker):
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- handler.mock.assert_called_once_with("Hello, Kafka!")
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_fastapi_confluent_depends():
- from docs.docs_src.integrations.fastapi.confluent.depends import app, router
- from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
-
- @router.subscriber("test")
- async def handler(): ...
-
- async with TestConfluentKafkaBroker(router.broker):
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- handler.mock.assert_called_once_with("Hello, Kafka!")
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_fastapi_rabbit_depends():
- from docs.docs_src.integrations.fastapi.rabbit.depends import app, router
- from faststream.rabbit import TestRabbitBroker
-
- @router.subscriber("test")
- async def handler(): ...
-
- async with TestRabbitBroker(router.broker):
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- handler.mock.assert_called_once_with("Hello, Rabbit!")
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_fastapi_nats_depends():
- from docs.docs_src.integrations.fastapi.nats.depends import app, router
- from faststream.nats import TestNatsBroker
-
- @router.subscriber("test")
- async def handler(): ...
-
- async with TestNatsBroker(router.broker):
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- handler.mock.assert_called_once_with("Hello, NATS!")
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_fastapi_redis_depends():
- from docs.docs_src.integrations.fastapi.redis.depends import app, router
- from faststream.redis import TestRedisBroker
-
- @router.subscriber("test")
- async def handler(): ...
-
- async with TestRedisBroker(router.broker):
- with TestClient(app) as client:
- assert client.get("/").text == '"Hello, HTTP!"'
-
- handler.mock.assert_called_once_with("Hello, Redis!")
diff --git a/tests/a_docs/integration/fastapi/test_test.py b/tests/a_docs/integration/fastapi/test_test.py
deleted file mode 100644
index 1e79da205c..0000000000
--- a/tests/a_docs/integration/fastapi/test_test.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import pytest
-
-from tests.marks import (
- require_aiokafka,
- require_aiopika,
- require_confluent,
- require_nats,
- require_redis,
-)
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_kafka():
- from docs.docs_src.integrations.fastapi.kafka.test import test_router
-
- await test_router()
-
-
-@pytest.mark.asyncio
-@require_confluent
-async def test_confluent():
- from docs.docs_src.integrations.fastapi.confluent.test import test_router
-
- await test_router()
-
-
-@pytest.mark.asyncio
-@require_aiopika
-async def test_rabbit():
- from docs.docs_src.integrations.fastapi.rabbit.test import test_router
-
- await test_router()
-
-
-@pytest.mark.asyncio
-@require_nats
-async def test_nats():
- from docs.docs_src.integrations.fastapi.nats.test import test_router
-
- await test_router()
-
-
-@pytest.mark.asyncio
-@require_redis
-async def test_redis():
- from docs.docs_src.integrations.fastapi.redis.test import test_router
-
- await test_router()
diff --git a/tests/a_docs/integration/http/test_fastapi.py b/tests/a_docs/integration/http/test_fastapi.py
deleted file mode 100644
index b257f92501..0000000000
--- a/tests/a_docs/integration/http/test_fastapi.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import pytest
-from fastapi.testclient import TestClient
-
-from tests.marks import require_aiokafka
-
-
-@pytest.mark.asyncio
-@require_aiokafka
-async def test_fastapi_raw_integration():
- from docs.docs_src.integrations.http_frameworks_integrations.fastapi import (
- app,
- base_handler,
- broker,
- )
- from faststream.kafka import TestKafkaBroker
-
- async with TestKafkaBroker(broker):
- with TestClient(app) as client:
- assert client.get("/").json() == {"Hello": "World"}
-
- await broker.publish("", "test")
-
- base_handler.mock.assert_called_once_with("")
diff --git a/tests/a_docs/kafka/ack/test_errors.py b/tests/a_docs/kafka/ack/test_errors.py
deleted file mode 100644
index a17bb1ad46..0000000000
--- a/tests/a_docs/kafka/ack/test_errors.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from unittest.mock import patch
-
-import pytest
-from aiokafka import AIOKafkaConsumer
-
-from faststream.kafka import TestApp, TestKafkaBroker
-from tests.tools import spy_decorator
-
-
-@pytest.mark.asyncio
-@pytest.mark.kafka
-@pytest.mark.slow
-async def test_ack_exc():
- from docs.docs_src.kafka.ack.errors import app, broker, handle
-
- with patch.object(
- AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit)
- ) as m:
- async with TestKafkaBroker(broker, with_real=True), TestApp(app):
- await handle.wait_call(10)
-
- assert m.mock.call_count
diff --git a/tests/a_docs/kafka/basic/test_basic.py b/tests/a_docs/kafka/basic/test_basic.py
deleted file mode 100644
index 624cb73ca2..0000000000
--- a/tests/a_docs/kafka/basic/test_basic.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pytest
-
-from faststream.kafka import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_basic():
- from docs.docs_src.kafka.basic.basic import broker, on_input_data
-
- publisher = list(broker._publishers.values())[0] # noqa: RUF015
-
- async with TestKafkaBroker(broker) as br:
- await br.publish({"data": 1.0}, "input_data")
- on_input_data.mock.assert_called_once_with({"data": 1.0})
- publisher.mock.assert_called_once_with({"data": 2.0})
diff --git a/tests/a_docs/kafka/basic/test_cmd_run.py b/tests/a_docs/kafka/basic/test_cmd_run.py
deleted file mode 100644
index 0d7609faf6..0000000000
--- a/tests/a_docs/kafka/basic/test_cmd_run.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import asyncio
-from unittest.mock import Mock
-
-import pytest
-from typer.testing import CliRunner
-
-from faststream.app import FastStream
-from faststream.cli.main import cli
-
-
-@pytest.mark.kafka
-def test_run_cmd(
- runner: CliRunner,
- mock: Mock,
- event: asyncio.Event,
- monkeypatch: pytest.MonkeyPatch,
- kafka_basic_project,
-):
- async def patched_run(self: FastStream, *args, **kwargs):
- await self.start()
- await self.stop()
- mock()
-
- with monkeypatch.context() as m:
- m.setattr(FastStream, "run", patched_run)
- r = runner.invoke(
- cli,
- [
- "run",
- kafka_basic_project,
- ],
- )
-
- assert r.exit_code == 0
- mock.assert_called_once()
diff --git a/tests/a_docs/kafka/batch_consuming_pydantic/test_app.py b/tests/a_docs/kafka/batch_consuming_pydantic/test_app.py
deleted file mode 100644
index 4b39b471f5..0000000000
--- a/tests/a_docs/kafka/batch_consuming_pydantic/test_app.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import pytest
-
-from docs.docs_src.kafka.batch_consuming_pydantic.app import (
- HelloWorld,
- broker,
- handle_batch,
-)
-from faststream.kafka import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_me():
- async with TestKafkaBroker(broker):
- await broker.publish_batch(
- HelloWorld(msg="First Hello"),
- HelloWorld(msg="Second Hello"),
- topic="test_batch",
- )
- handle_batch.mock.assert_called_with(
- [dict(HelloWorld(msg="First Hello")), dict(HelloWorld(msg="Second Hello"))]
- )
diff --git a/tests/a_docs/kafka/consumes_basics/test_app.py b/tests/a_docs/kafka/consumes_basics/test_app.py
deleted file mode 100644
index bcf5c9f630..0000000000
--- a/tests/a_docs/kafka/consumes_basics/test_app.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pytest
-
-from docs.docs_src.kafka.consumes_basics.app import (
- HelloWorld,
- broker,
- on_hello_world,
-)
-from faststream.kafka import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish(HelloWorld(msg="First Hello"), "hello_world")
- on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello")))
diff --git a/tests/a_docs/kafka/publish_batch/test_app.py b/tests/a_docs/kafka/publish_batch/test_app.py
deleted file mode 100644
index 99bf043700..0000000000
--- a/tests/a_docs/kafka/publish_batch/test_app.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import pytest
-
-from docs.docs_src.kafka.publish_batch.app import (
- Data,
- broker,
- decrease_and_increase,
- on_input_data_1,
- on_input_data_2,
-)
-from faststream.kafka import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_batch_publish_decorator():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=2.0), "input_data_1")
-
- on_input_data_1.mock.assert_called_once_with(dict(Data(data=2.0)))
- decrease_and_increase.mock.assert_called_once_with(
- [dict(Data(data=1.0)), dict(Data(data=4.0))]
- )
-
-
-@pytest.mark.asyncio
-async def test_batch_publish_call():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=2.0), "input_data_2")
-
- on_input_data_2.mock.assert_called_once_with(dict(Data(data=2.0)))
- decrease_and_increase.mock.assert_called_once_with(
- [dict(Data(data=1.0)), dict(Data(data=4.0))]
- )
diff --git a/tests/a_docs/kafka/publish_batch/test_issues.py b/tests/a_docs/kafka/publish_batch/test_issues.py
deleted file mode 100644
index 65526eaeee..0000000000
--- a/tests/a_docs/kafka/publish_batch/test_issues.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from typing import List
-
-import pytest
-
-from faststream import FastStream
-from faststream.kafka import KafkaBroker, TestKafkaBroker
-
-broker = KafkaBroker()
-batch_producer = broker.publisher("response", batch=True)
-
-
-@batch_producer
-@broker.subscriber("test")
-async def handle(msg: str) -> List[int]:
- return [1, 2, 3]
-
-
-app = FastStream(broker)
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish("", "test")
diff --git a/tests/a_docs/kafka/publish_example/test_app.py b/tests/a_docs/kafka/publish_example/test_app.py
deleted file mode 100644
index 659e6c19a1..0000000000
--- a/tests/a_docs/kafka/publish_example/test_app.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import pytest
-
-from docs.docs_src.kafka.publish_example.app import (
- Data,
- broker,
- on_input_data,
- to_output_data,
-)
-from faststream.kafka import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_base_app():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=0.2), "input_data")
-
- on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
- to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
diff --git a/tests/a_docs/kafka/publish_with_partition_key/test_app.py b/tests/a_docs/kafka/publish_with_partition_key/test_app.py
deleted file mode 100644
index 8cf871f80c..0000000000
--- a/tests/a_docs/kafka/publish_with_partition_key/test_app.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import pytest
-
-from docs.docs_src.kafka.publish_with_partition_key.app import (
- Data,
- broker,
- on_input_data,
- to_output_data,
-)
-from faststream.kafka import TestKafkaBroker
-
-
-@pytest.mark.asyncio
-async def test_app():
- async with TestKafkaBroker(broker):
- await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
-
- on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
- to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
-
-
-@pytest.mark.skip("we are not checking the key")
-@pytest.mark.asyncio
-async def test_keys():
- async with TestKafkaBroker(broker):
- # we should be able to publish a message with the key
- await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
-
- # we need to check the key as well
- on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)), key=b"my_key")
- to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)), key=b"key")
diff --git a/tests/a_docs/kafka/test_security.py b/tests/a_docs/kafka/test_security.py
deleted file mode 100644
index 3fe39831b0..0000000000
--- a/tests/a_docs/kafka/test_security.py
+++ /dev/null
@@ -1,121 +0,0 @@
-import ssl
-from contextlib import contextmanager
-from typing import Tuple
-from unittest.mock import AsyncMock, MagicMock, patch
-
-import pytest
-
-
-@contextmanager
-def patch_aio_consumer_and_producer() -> Tuple[MagicMock, MagicMock]:
- try:
- producer = MagicMock(return_value=AsyncMock())
-
- with patch("aiokafka.AIOKafkaProducer", new=producer):
- yield producer
- finally:
- pass
-
-
-@pytest.mark.asyncio
-@pytest.mark.kafka
-async def test_base_security():
- from docs.docs_src.kafka.security.basic import broker as basic_broker
-
- with patch_aio_consumer_and_producer() as producer:
- async with basic_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["security_protocol"] = "SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
-
-
-@pytest.mark.asyncio
-@pytest.mark.kafka
-async def test_scram256():
- from docs.docs_src.kafka.security.sasl_scram256 import (
- broker as scram256_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with scram256_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "SCRAM-SHA-256"
- call_kwargs["sasl_plain_username"] = "admin"
- call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
-
-
-@pytest.mark.asyncio
-@pytest.mark.kafka
-async def test_scram512():
- from docs.docs_src.kafka.security.sasl_scram512 import (
- broker as scram512_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with scram512_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "SCRAM-SHA-512"
- call_kwargs["sasl_plain_username"] = "admin"
- call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
-
-
-@pytest.mark.asyncio
-@pytest.mark.kafka
-async def test_plaintext():
- from docs.docs_src.kafka.security.plaintext import (
- broker as plaintext_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with plaintext_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {}
- call_kwargs["sasl_mechanism"] = "PLAIN"
- call_kwargs["sasl_plain_username"] = "admin"
- call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
- call_kwargs["security_protocol"] = "SASL_SSL"
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
-
-
-@pytest.mark.kafka
-@pytest.mark.asyncio
-async def test_gssapi():
- from docs.docs_src.kafka.security.sasl_gssapi import (
- broker as gssapi_broker,
- )
-
- with patch_aio_consumer_and_producer() as producer:
- async with gssapi_broker:
- producer_call_kwargs = producer.call_args.kwargs
-
- call_kwargs = {
- "sasl_mechanism": "GSSAPI",
- "security_protocol": "SASL_SSL",
- }
-
- assert call_kwargs.items() <= producer_call_kwargs.items()
-
- assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
diff --git a/tests/a_docs/nats/ack/test_errors.py b/tests/a_docs/nats/ack/test_errors.py
deleted file mode 100644
index 32e4379c15..0000000000
--- a/tests/a_docs/nats/ack/test_errors.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from unittest.mock import patch
-
-import pytest
-from nats.aio.msg import Msg
-
-from faststream.nats import TestApp, TestNatsBroker
-from tests.tools import spy_decorator
-
-
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_ack_exc():
- from docs.docs_src.nats.ack.errors import app, broker, handle
-
- with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m:
- async with TestNatsBroker(broker, with_real=True), TestApp(app):
- await handle.wait_call(3)
-
- assert m.mock.call_count
diff --git a/tests/a_docs/nats/js/test_object.py b/tests/a_docs/nats/js/test_object.py
deleted file mode 100644
index 10f9fd99f8..0000000000
--- a/tests/a_docs/nats/js/test_object.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import pytest
-
-from faststream import TestApp
-from faststream.nats import TestNatsBroker
-
-
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_basic():
- from docs.docs_src.nats.js.object import app, broker, handler
-
- async with (
- TestNatsBroker(broker, with_real=True, connect_only=True),
- TestApp(app),
- ):
- await handler.wait_call(3.0)
- handler.mock.assert_called_once_with("file.txt")
diff --git a/tests/a_docs/nats/test_direct.py b/tests/a_docs/nats/test_direct.py
deleted file mode 100644
index d64e849fc8..0000000000
--- a/tests/a_docs/nats/test_direct.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import pytest
-
-from faststream.nats import TestApp, TestNatsBroker
-
-
-@pytest.mark.asyncio
-async def test_pattern():
- from docs.docs_src.nats.direct import (
- app,
- base_handler1,
- base_handler2,
- base_handler3,
- broker,
- )
-
- async with TestNatsBroker(broker), TestApp(app):
- assert base_handler1.mock.call_count == 2
- assert base_handler2.mock.call_count == 0
- assert base_handler3.mock.call_count == 1
diff --git a/tests/a_docs/rabbit/ack/test_errors.py b/tests/a_docs/rabbit/ack/test_errors.py
deleted file mode 100644
index 8e8f98e3c7..0000000000
--- a/tests/a_docs/rabbit/ack/test_errors.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from unittest.mock import patch
-
-import pytest
-from aio_pika import IncomingMessage
-
-from faststream.rabbit import TestApp, TestRabbitBroker
-from tests.tools import spy_decorator
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_ack_exc():
- from docs.docs_src.rabbit.ack.errors import app, broker, handle
-
- with patch.object(IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)) as m:
- async with TestRabbitBroker(broker, with_real=True), TestApp(app):
- await handle.wait_call(3)
-
- m.mock.assert_called_once()
diff --git a/tests/a_docs/rabbit/subscription/test_direct.py b/tests/a_docs/rabbit/subscription/test_direct.py
deleted file mode 100644
index aa13430de8..0000000000
--- a/tests/a_docs/rabbit/subscription/test_direct.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import pytest
-
-from faststream.rabbit import TestApp, TestRabbitBroker
-
-
-@pytest.mark.asyncio
-async def test_index():
- from docs.docs_src.rabbit.subscription.direct import (
- app,
- base_handler1,
- base_handler3,
- broker,
- )
-
- async with TestRabbitBroker(broker), TestApp(app):
- base_handler1.mock.assert_called_with(b"")
- base_handler3.mock.assert_called_once_with(b"")
diff --git a/tests/a_docs/rabbit/test_security.py b/tests/a_docs/rabbit/test_security.py
deleted file mode 100644
index 30572bf947..0000000000
--- a/tests/a_docs/rabbit/test_security.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import pytest
-from aiormq.exceptions import AMQPConnectionError
-
-from faststream.app import FastStream
-from faststream.asyncapi.generate import get_app_schema
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_base_security():
- from docs.docs_src.rabbit.security.basic import broker
-
- with pytest.raises(AMQPConnectionError):
- async with broker:
- pass
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "amqps",
- "protocolVersion": "0.9.1",
- "security": [],
- "url": "amqps://guest:guest@localhost:5672/", # pragma: allowlist secret
- }
- },
- }
-
-
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_plaintext_security():
- from docs.docs_src.rabbit.security.plaintext import broker
-
- with pytest.raises(AMQPConnectionError):
- async with broker:
- pass
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
- assert (
- schema
- == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {
- "messages": {},
- "schemas": {},
- "securitySchemes": {"user-password": {"type": "userPassword"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "amqps",
- "protocolVersion": "0.9.1",
- "security": [{"user-password": []}],
- "url": "amqps://admin:password@localhost:5672/", # pragma: allowlist secret
- }
- },
- }
- )
diff --git a/tests/a_docs/redis/list/test_list_pub.py b/tests/a_docs/redis/list/test_list_pub.py
deleted file mode 100644
index 0ef35761b4..0000000000
--- a/tests/a_docs/redis/list/test_list_pub.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pytest
-
-from faststream.redis import TestRedisBroker
-
-
-@pytest.mark.asyncio
-async def test_list_publisher():
- from docs.docs_src.redis.list.list_pub import broker, on_input_data
-
- publisher = list(broker._publishers.values())[0] # noqa: RUF015
-
- async with TestRedisBroker(broker) as br:
- await br.publish({"data": 1.0}, list="input-list")
- on_input_data.mock.assert_called_once_with({"data": 1.0})
- publisher.mock.assert_called_once_with({"data": 2.0})
diff --git a/tests/a_docs/redis/test_rpc.py b/tests/a_docs/redis/test_rpc.py
deleted file mode 100644
index 5f8a7ca580..0000000000
--- a/tests/a_docs/redis/test_rpc.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import pytest
-
-from faststream.redis import TestApp, TestRedisBroker
-
-
-@pytest.mark.asyncio
-async def test_rpc():
- from docs.docs_src.redis.rpc.app import (
- app,
- broker,
- )
-
- async with TestRedisBroker(broker), TestApp(app):
- pass
diff --git a/tests/a_docs/redis/test_security.py b/tests/a_docs/redis/test_security.py
deleted file mode 100644
index 1b7efecd3f..0000000000
--- a/tests/a_docs/redis/test_security.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from contextlib import contextmanager
-from typing import Tuple
-from unittest.mock import AsyncMock, MagicMock, patch
-
-import pytest
-from redis.exceptions import AuthenticationError
-
-from faststream.app import FastStream
-from faststream.asyncapi.generate import get_app_schema
-
-
-@contextmanager
-def patch_asyncio_open_connection() -> Tuple[MagicMock, MagicMock]:
- try:
- reader = MagicMock()
- reader.readline = AsyncMock(return_value=b":1\r\n")
- reader.read = AsyncMock(return_value=b"")
-
- writer = MagicMock()
- writer.drain = AsyncMock()
- writer.wait_closed = AsyncMock()
-
- open_connection = AsyncMock(return_value=(reader, writer))
-
- with patch("asyncio.open_connection", new=open_connection):
- yield open_connection
- finally:
- pass
-
-
-@pytest.mark.asyncio
-@pytest.mark.redis
-async def test_base_security():
- with patch_asyncio_open_connection() as connection:
- from docs.docs_src.redis.security.basic import broker
-
- async with broker:
- await broker.ping(3.0)
-
- assert connection.call_args.kwargs["ssl"]
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "redis",
- "protocolVersion": "custom",
- "security": [],
- "url": "redis://localhost:6379",
- }
- },
- }
-
-
-@pytest.mark.asyncio
-@pytest.mark.redis
-async def test_plaintext_security():
- with patch_asyncio_open_connection() as connection:
- from docs.docs_src.redis.security.plaintext import broker
-
- with pytest.raises(AuthenticationError):
- async with broker:
- await broker._connection.ping()
-
- assert connection.call_args.kwargs["ssl"]
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {
- "messages": {},
- "schemas": {},
- "securitySchemes": {"user-password": {"type": "userPassword"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "redis",
- "protocolVersion": "custom",
- "security": [{"user-password": []}],
- "url": "redis://localhost:6379",
- }
- },
- }
diff --git a/tests/a_docs/__init__.py b/tests/application/__init__.py
similarity index 100%
rename from tests/a_docs/__init__.py
rename to tests/application/__init__.py
diff --git a/tests/application/test_delayed_broker.py b/tests/application/test_delayed_broker.py
new file mode 100644
index 0000000000..c2eb32cf57
--- /dev/null
+++ b/tests/application/test_delayed_broker.py
@@ -0,0 +1,38 @@
+import pytest
+
+from faststream._internal.application import StartAbleApplication
+from faststream.exceptions import SetupError
+from faststream.rabbit import RabbitBroker
+
+
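+# StartAbleApplication starts with no broker bound; set_broker() attaches one afterwards.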
+def test_set_broker() -> None:
+ app = StartAbleApplication()
+
+ assert app.broker is None
+
+ broker = RabbitBroker()
+ app.set_broker(broker)
+
+ assert app.broker is broker
+
+
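+# Attaching a second broker must fail: multiple brokers stay unsupported until the 1.0.0 release.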
+def test_set_more_than_once_broker() -> None:
+ app = StartAbleApplication()
+ broker_1 = RabbitBroker()
+ broker_2 = RabbitBroker()
+
+ app.set_broker(broker_1)
+
+ with pytest.raises(
+ SetupError,
+ match=f"`{app}` already has a broker. You can't use multiple brokers until 1.0.0 release.",
+ ):
+ app.set_broker(broker_2)
+
+
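+# Starting the broker while none is attached should trip the internal setup assertion.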
+@pytest.mark.asyncio()
+async def test_start_not_setup_broker() -> None:
+ app = StartAbleApplication()
+
+ with pytest.raises(AssertionError, match="You should setup a broker"):
+ await app._start_broker()
diff --git a/tests/asgi/confluent/test_asgi.py b/tests/asgi/confluent/test_asgi.py
index 75e4b37254..bd37b1d58f 100644
--- a/tests/asgi/confluent/test_asgi.py
+++ b/tests/asgi/confluent/test_asgi.py
@@ -3,8 +3,8 @@
class TestConfluentAsgi(AsgiTestcase):
- def get_broker(self):
- return KafkaBroker()
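+ # get_broker() now forwards **kwargs so the shared testcase can request custom broker options.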
+ def get_broker(self, **kwargs) -> KafkaBroker:
+ return KafkaBroker(**kwargs)
- def get_test_broker(self, broker):
+ def get_test_broker(self, broker) -> TestKafkaBroker:
return TestKafkaBroker(broker)
diff --git a/tests/asgi/kafka/test_asgi.py b/tests/asgi/kafka/test_asgi.py
index cb26b402dc..c180e57c35 100644
--- a/tests/asgi/kafka/test_asgi.py
+++ b/tests/asgi/kafka/test_asgi.py
@@ -1,10 +1,12 @@
+from typing import Any
+
from faststream.kafka import KafkaBroker, TestKafkaBroker
from tests.asgi.testcase import AsgiTestcase
class TestKafkaAsgi(AsgiTestcase):
- def get_broker(self):
- return KafkaBroker()
+ def get_broker(self, **kwargs: Any) -> KafkaBroker:
+ return KafkaBroker(**kwargs)
- def get_test_broker(self, broker):
+ def get_test_broker(self, broker: KafkaBroker) -> TestKafkaBroker:
return TestKafkaBroker(broker)
diff --git a/tests/asgi/nats/test_asgi.py b/tests/asgi/nats/test_asgi.py
index f54f52b25a..2388c9dfea 100644
--- a/tests/asgi/nats/test_asgi.py
+++ b/tests/asgi/nats/test_asgi.py
@@ -1,10 +1,12 @@
+from typing import Any
+
from faststream.nats import NatsBroker, TestNatsBroker
from tests.asgi.testcase import AsgiTestcase
class TestNatsAsgi(AsgiTestcase):
- def get_broker(self):
- return NatsBroker()
+ def get_broker(self, **kwargs: Any) -> NatsBroker:
+ return NatsBroker(**kwargs)
- def get_test_broker(self, broker):
+ def get_test_broker(self, broker: NatsBroker) -> TestNatsBroker:
return TestNatsBroker(broker)
diff --git a/tests/asgi/rabbit/test_asgi.py b/tests/asgi/rabbit/test_asgi.py
index 9df4794225..11c6580f1f 100644
--- a/tests/asgi/rabbit/test_asgi.py
+++ b/tests/asgi/rabbit/test_asgi.py
@@ -1,10 +1,12 @@
+from typing import Any
+
from faststream.rabbit import RabbitBroker, TestRabbitBroker
from tests.asgi.testcase import AsgiTestcase
class TestRabbitAsgi(AsgiTestcase):
- def get_broker(self):
- return RabbitBroker()
+ def get_broker(self, **kwargs: Any) -> RabbitBroker:
+ return RabbitBroker(**kwargs)
- def get_test_broker(self, broker):
+ def get_test_broker(self, broker: RabbitBroker) -> TestRabbitBroker:
return TestRabbitBroker(broker)
diff --git a/tests/asgi/redis/test_asgi.py b/tests/asgi/redis/test_asgi.py
index 3b3e5a38be..e1ee6b28e0 100644
--- a/tests/asgi/redis/test_asgi.py
+++ b/tests/asgi/redis/test_asgi.py
@@ -1,10 +1,12 @@
+from typing import Any
+
from faststream.redis import RedisBroker, TestRedisBroker
from tests.asgi.testcase import AsgiTestcase
class TestRedisAsgi(AsgiTestcase):
- def get_broker(self):
- return RedisBroker()
+ def get_broker(self, **kwargs: Any) -> RedisBroker:
+ return RedisBroker(**kwargs)
- def get_test_broker(self, broker):
+ def get_test_broker(self, broker: RedisBroker) -> TestRedisBroker:
return TestRedisBroker(broker)
diff --git a/tests/asgi/testcase.py b/tests/asgi/testcase.py
index fccfedb7c2..438bd60d49 100644
--- a/tests/asgi/testcase.py
+++ b/tests/asgi/testcase.py
@@ -1,49 +1,58 @@
from typing import Any
+from unittest.mock import AsyncMock
import pytest
from starlette.testclient import TestClient
from starlette.websockets import WebSocketDisconnect
-from faststream.asgi import AsgiFastStream, AsgiResponse, get, make_ping_asgi
+from faststream.asgi import (
+ AsgiFastStream,
+ AsgiResponse,
+ get,
+ make_asyncapi_asgi,
+ make_ping_asgi,
+)
+from faststream.specification import AsyncAPI
class AsgiTestcase:
- def get_broker(self) -> Any:
- raise NotImplementedError()
+ def get_broker(self, **kwargs: Any) -> Any:
+ raise NotImplementedError
def get_test_broker(self, broker) -> Any:
- raise NotImplementedError()
+ raise NotImplementedError
- def test_not_found(self):
- app = AsgiFastStream()
+ def test_not_found(self) -> None:
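+ # AsgiFastStream now expects a broker as its first argument; an AsyncMock stands in for these broker-less tests.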
+ app = AsgiFastStream(AsyncMock())
with TestClient(app) as client:
response = client.get("/")
assert response.status_code == 404
- def test_ws_not_found(self):
- app = AsgiFastStream()
+ def test_ws_not_found(self) -> None:
+ app = AsgiFastStream(AsyncMock())
with TestClient(app) as client: # noqa: SIM117
with pytest.raises(WebSocketDisconnect):
with client.websocket_connect("/ws"): # raises error
pass
- def test_asgi_ping_unhealthy(self):
+ def test_asgi_ping_unhealthy(self) -> None:
broker = self.get_broker()
app = AsgiFastStream(
+ AsyncMock(),
asgi_routes=[
("/health", make_ping_asgi(broker, timeout=5.0)),
- ]
+ ],
)
with TestClient(app) as client:
response = client.get("/health")
- assert response.status_code == 500
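+ # The trailing expression surfaces the actual status code in the assertion failure message.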
+ assert response.status_code == 500, response.status_code
- @pytest.mark.asyncio
- async def test_asgi_ping_healthy(self):
+ @pytest.mark.asyncio()
+ async def test_asgi_ping_healthy(self) -> None:
broker = self.get_broker()
app = AsgiFastStream(
@@ -56,11 +65,14 @@ async def test_asgi_ping_healthy(self):
response = client.get("/health")
assert response.status_code == 204
- @pytest.mark.asyncio
- async def test_asyncapi_asgi(self):
+ @pytest.mark.asyncio()
+ async def test_asyncapi_asgi(self) -> None:
broker = self.get_broker()
- app = AsgiFastStream(broker, asyncapi_path="/docs")
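+ # The asyncapi_path shortcut is replaced by an explicit /docs route built with make_asyncapi_asgi.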
+ app = AsgiFastStream(
+ broker,
+ asgi_routes=[("/docs", make_asyncapi_asgi(AsyncAPI(broker)))],
+ )
async with self.get_test_broker(broker):
with TestClient(app) as client:
@@ -68,12 +80,12 @@ async def test_asyncapi_asgi(self):
assert response.status_code == 200
assert response.text
- def test_get_decorator(self):
+ def test_get_decorator(self) -> None:
@get
- async def some_handler(scope):
+ async def some_handler(scope) -> AsgiResponse:
return AsgiResponse(body=b"test", status_code=200)
- app = AsgiFastStream(asgi_routes=[("/test", some_handler)])
+ app = AsgiFastStream(AsyncMock(), asgi_routes=[("/test", some_handler)])
with TestClient(app) as client:
response = client.get("/test")
diff --git a/tests/asyncapi/base/arguments.py b/tests/asyncapi/base/arguments.py
deleted file mode 100644
index af334cdb34..0000000000
--- a/tests/asyncapi/base/arguments.py
+++ /dev/null
@@ -1,675 +0,0 @@
-from dataclasses import dataclass
-from enum import Enum
-from typing import Optional, Type, Union
-
-import pydantic
-from dirty_equals import IsDict, IsPartialDict, IsStr
-from fast_depends import Depends
-from fastapi import Depends as APIDepends
-from typing_extensions import Annotated, Literal
-
-from faststream import Context, FastStream
-from faststream._compat import PYDANTIC_V2
-from faststream.asyncapi.generate import get_app_schema
-from faststream.broker.core.usecase import BrokerUsecase
-from tests.marks import pydantic_v2
-
-
-class FastAPICompatible:
- broker_class: Type[BrokerUsecase]
- dependency_builder = staticmethod(APIDepends)
-
- def build_app(self, broker):
- """Patch it to test FastAPI scheme generation too."""
- return FastStream(broker)
-
- def test_custom_naming(self):
- broker = self.broker_class()
-
- @broker.subscriber("test", title="custom_name", description="test description")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert key == "custom_name"
- assert schema["channels"][key]["description"] == "test description"
-
- def test_docstring_description(self):
- broker = self.broker_class()
-
- @broker.subscriber("test", title="custom_name")
- async def handle(msg):
- """Test description."""
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert key == "custom_name"
- assert schema["channels"][key]["description"] == "Test description.", schema[
- "channels"
- ][key]["description"]
-
- def test_empty(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "EmptyPayload"
- assert v == {
- "title": key,
- "type": "null",
- }
-
- def test_no_type(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == {"title": key}
-
- def test_simple_type(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg: int): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
- assert next(iter(schema["channels"].values())).get("description") is None
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == {"title": key, "type": "integer"}
-
- def test_simple_optional_type(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg: Optional[int]): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == IsDict(
- {
- "anyOf": [{"type": "integer"}, {"type": "null"}],
- "title": key,
- }
- ) | IsDict(
- { # TODO: remove when deprecating PydanticV1
- "title": key,
- "type": "integer",
- }
- ), v
-
- def test_simple_type_with_default(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg: int = 1): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == {
- "default": 1,
- "title": key,
- "type": "integer",
- }
-
- def test_multi_args_no_type(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg, another): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == {
- "properties": {
- "another": {"title": "Another"},
- "msg": {"title": "Msg"},
- },
- "required": ["msg", "another"],
- "title": key,
- "type": "object",
- }
-
- def test_multi_args_with_type(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg: str, another: int): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == {
- "properties": {
- "another": {"title": "Another", "type": "integer"},
- "msg": {"title": "Msg", "type": "string"},
- },
- "required": ["msg", "another"],
- "title": key,
- "type": "object",
- }
-
- def test_multi_args_with_default(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg: str, another: Optional[int] = None): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
-
- assert v == {
- "properties": {
- "another": IsDict(
- {
- "anyOf": [{"type": "integer"}, {"type": "null"}],
- "default": None,
- "title": "Another",
- }
- )
- | IsDict(
- { # TODO: remove when deprecating PydanticV1
- "title": "Another",
- "type": "integer",
- }
- ),
- "msg": {"title": "Msg", "type": "string"},
- },
- "required": ["msg"],
- "title": key,
- "type": "object",
- }
-
- def test_dataclass(self):
- @dataclass
- class User:
- id: int
- name: str = ""
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: User): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "User"
- assert v == {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "name": {"default": "", "title": "Name", "type": "string"},
- },
- "required": ["id"],
- "title": key,
- "type": "object",
- }
-
- def test_pydantic_model(self):
- class User(pydantic.BaseModel):
- name: str = ""
- id: int
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: User): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "User"
- assert v == {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "name": {"default": "", "title": "Name", "type": "string"},
- },
- "required": ["id"],
- "title": key,
- "type": "object",
- }
-
- def test_pydantic_model_with_enum(self):
- class Status(str, Enum):
- registered = "registered"
- banned = "banned"
-
- class User(pydantic.BaseModel):
- name: str = ""
- id: int
- status: Status
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: User): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- assert payload == {
- "Status": IsPartialDict(
- {
- "enum": ["registered", "banned"],
- "title": "Status",
- "type": "string",
- }
- ),
- "User": {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "name": {"default": "", "title": "Name", "type": "string"},
- "status": {"$ref": "#/components/schemas/Status"},
- },
- "required": ["id", "status"],
- "title": "User",
- "type": "object",
- },
- }, payload
-
- def test_pydantic_model_mixed_regular(self):
- class Email(pydantic.BaseModel):
- addr: str
-
- class User(pydantic.BaseModel):
- name: str = ""
- id: int
- email: Email
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: User, description: str = ""): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- assert payload == {
- "Email": {
- "title": "Email",
- "type": "object",
- "properties": {"addr": {"title": "Addr", "type": "string"}},
- "required": ["addr"],
- },
- "User": {
- "title": "User",
- "type": "object",
- "properties": {
- "name": {"title": "Name", "default": "", "type": "string"},
- "id": {"title": "Id", "type": "integer"},
- "email": {"$ref": "#/components/schemas/Email"},
- },
- "required": ["id", "email"],
- },
- "Handle:Message:Payload": {
- "title": "Handle:Message:Payload",
- "type": "object",
- "properties": {
- "user": {"$ref": "#/components/schemas/User"},
- "description": {
- "title": "Description",
- "default": "",
- "type": "string",
- },
- },
- "required": ["user"],
- },
- }
-
- def test_pydantic_model_with_example(self):
- class User(pydantic.BaseModel):
- name: str = ""
- id: int
-
- if PYDANTIC_V2:
- model_config = {
- "json_schema_extra": {"examples": [{"name": "john", "id": 1}]}
- }
-
- else:
-
- class Config:
- schema_extra = {"examples": [{"name": "john", "id": 1}]} # noqa: RUF012
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: User): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "User"
- assert v == {
- "examples": [{"id": 1, "name": "john"}],
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "name": {"default": "", "title": "Name", "type": "string"},
- },
- "required": ["id"],
- "title": "User",
- "type": "object",
- }
-
- def test_pydantic_model_with_keyword_property(self):
- class TestModel(pydantic.BaseModel):
- discriminator: int = 0
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(model: TestModel): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "TestModel"
- assert v == {
- "properties": {
- "discriminator": {
- "default": 0,
- "title": "Discriminator",
- "type": "integer",
- },
- },
- "title": key,
- "type": "object",
- }
-
- def test_ignores_depends(self):
- broker = self.broker_class()
-
- def dep(name: str = ""):
- return name
-
- def dep2(name2: str):
- return name2
-
- dependencies = (self.dependency_builder(dep2),)
- message = self.dependency_builder(dep)
-
- @broker.subscriber("test", dependencies=dependencies)
- async def handle(id: int, message=message): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Handle:Message:Payload"
- assert v == {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "name": {"default": "", "title": "Name", "type": "string"},
- "name2": {"title": "Name2", "type": "string"},
- },
- "required": ["id", "name2"],
- "title": key,
- "type": "object",
- }, v
-
- @pydantic_v2
- def test_descriminator(self):
- class Sub2(pydantic.BaseModel):
- type: Literal["sub2"]
-
- class Sub(pydantic.BaseModel):
- type: Literal["sub"]
-
- descriminator = Annotated[
- Union[Sub2, Sub], pydantic.Field(discriminator="type")
- ]
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: descriminator): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- key = next(iter(schema["components"]["messages"].keys()))
- assert key == IsStr(regex=r"test[\w:]*:Handle:Message")
- assert schema["components"] == {
- "messages": {
- key: {
- "title": key,
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {
- "discriminator": "type",
- "oneOf": [
- {"$ref": "#/components/schemas/Sub2"},
- {"$ref": "#/components/schemas/Sub"},
- ],
- "title": "Handle:Message:Payload",
- },
- }
- },
- "schemas": {
- "Sub": {
- "properties": {
- "type": IsPartialDict({"const": "sub", "title": "Type"})
- },
- "required": ["type"],
- "title": "Sub",
- "type": "object",
- },
- "Sub2": {
- "properties": {
- "type": IsPartialDict({"const": "sub2", "title": "Type"})
- },
- "required": ["type"],
- "title": "Sub2",
- "type": "object",
- },
- },
- }, schema["components"]
-
- @pydantic_v2
- def test_nested_descriminator(self):
- class Sub2(pydantic.BaseModel):
- type: Literal["sub2"]
-
- class Sub(pydantic.BaseModel):
- type: Literal["sub"]
-
- class Model(pydantic.BaseModel):
- msg: Union[Sub2, Sub] = pydantic.Field(..., discriminator="type")
-
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(user: Model): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- key = next(iter(schema["components"]["messages"].keys()))
- assert key == IsStr(regex=r"test[\w:]*:Handle:Message")
- assert schema["components"] == {
- "messages": {
- key: {
- "title": key,
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {"$ref": "#/components/schemas/Model"},
- }
- },
- "schemas": {
- "Sub": {
- "properties": {
- "type": IsPartialDict({"const": "sub", "title": "Type"})
- },
- "required": ["type"],
- "title": "Sub",
- "type": "object",
- },
- "Sub2": {
- "properties": {
- "type": IsPartialDict({"const": "sub2", "title": "Type"})
- },
- "required": ["type"],
- "title": "Sub2",
- "type": "object",
- },
- "Model": {
- "properties": {
- "msg": {
- "discriminator": "type",
- "oneOf": [
- {"$ref": "#/components/schemas/Sub2"},
- {"$ref": "#/components/schemas/Sub"},
- ],
- "title": "Msg",
- }
- },
- "required": ["msg"],
- "title": "Model",
- "type": "object",
- },
- },
- }, schema["components"]
-
- def test_with_filter(self):
- class User(pydantic.BaseModel):
- name: str = ""
- id: int
-
- broker = self.broker_class()
-
- sub = broker.subscriber("test/one")
-
- @sub(
- filter=lambda m: m.content_type == "application/json",
- )
- async def handle(id: int): ...
-
- @sub
- async def handle_default(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- name, message = next(iter(schema["components"]["messages"].items()))
-
- assert name == IsStr(regex=r"test.one[\w:]*:Handle:Message"), name
-
- assert len(message["payload"]["oneOf"]) == 2
-
- payload = schema["components"]["schemas"]
-
- assert "Handle:Message:Payload" in list(payload.keys())
- assert "HandleDefault:Message:Payload" in list(payload.keys())
-
-
-class ArgumentsTestcase(FastAPICompatible):
- dependency_builder = staticmethod(Depends)
-
- def test_pydantic_field(self):
- broker = self.broker_class()
-
- @broker.subscriber("msg")
- async def msg(
- msg: pydantic.PositiveInt = pydantic.Field(
- 1,
- description="some field",
- title="Perfect",
- examples=[1],
- ),
- ): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert key == "Perfect"
-
- assert v == {
- "default": 1,
- "description": "some field",
- "examples": [1],
- "exclusiveMinimum": 0,
- "title": "Perfect",
- "type": "integer",
- }
-
- def test_ignores_custom_field(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(id: int, user: Optional[str] = None, message=Context()): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert v == IsDict(
- {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "user": {
- "anyOf": [{"type": "string"}, {"type": "null"}],
- "default": None,
- "title": "User",
- },
- },
- "required": ["id"],
- "title": key,
- "type": "object",
- }
- ) | IsDict( # TODO: remove when deprecating PydanticV1
- {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "user": {"title": "User", "type": "string"},
- },
- "required": ["id"],
- "title": "Handle:Message:Payload",
- "type": "object",
- }
- )
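
The `test_descriminator` cases above hinge on pydantic v2's handling of discriminated unions: annotating a `Union` with `Field(discriminator=...)` makes the generated JSON schema a `oneOf` with discriminator metadata, which the AsyncAPI payload then reflects. A minimal sketch of that behavior, independent of FastStream (the exact shape of raw pydantic's `discriminator` entry is an assumption; only the flattened form asserted in the tests is confirmed here):

```python
from typing import Annotated, Literal, Union

import pydantic
from pydantic import TypeAdapter  # pydantic v2 only

class Sub(pydantic.BaseModel):
    type: Literal["sub"]

class Sub2(pydantic.BaseModel):
    type: Literal["sub2"]

# The same annotated union the tests build for the handler argument.
Msg = Annotated[Union[Sub2, Sub], pydantic.Field(discriminator="type")]

schema = TypeAdapter(Msg).json_schema()
assert "oneOf" in schema  # rendered as oneOf + discriminator, not anyOf
print(schema)
```
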
diff --git a/tests/asyncapi/base/fastapi.py b/tests/asyncapi/base/fastapi.py
deleted file mode 100644
index 58ded71515..0000000000
--- a/tests/asyncapi/base/fastapi.py
+++ /dev/null
@@ -1,127 +0,0 @@
-from typing import Any, Callable, Type
-
-import pytest
-from dirty_equals import IsStr
-from fastapi import FastAPI
-from fastapi.testclient import TestClient
-
-from faststream.asyncapi.generate import get_app_schema
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.types import MsgType
-
-
-class FastAPITestCase:
- broker_class: Type[StreamRouter[MsgType]]
- broker_wrapper: Callable[[BrokerUsecase[MsgType, Any]], BrokerUsecase[MsgType, Any]]
-
- @pytest.mark.asyncio
- async def test_fastapi_full_information(self):
- broker = self.broker_class(
- protocol="custom",
- protocol_version="1.1.1",
- description="Test broker description",
- schema_url="/asyncapi_schema",
- asyncapi_tags=[{"name": "test"}],
- )
-
- app = FastAPI(
- title="CustomApp",
- version="1.1.1",
- description="Test description",
- contact={"name": "support", "url": "https://support.com"},
- license_info={"name": "some", "url": "https://some.com"},
- )
- app.include_router(broker)
-
- async with self.broker_wrapper(broker.broker):
- with TestClient(app) as client:
- response_json = client.get("/asyncapi_schema.json")
-
- assert response_json.json() == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {
- "title": "CustomApp",
- "version": "1.1.1",
- "description": "Test description",
- "contact": {
- "name": "support",
- "url": IsStr(regex=r"https\:\/\/support\.com\/?"),
- },
- "license": {
- "name": "some",
- "url": IsStr(regex=r"https\:\/\/some\.com\/?"),
- },
- },
- "servers": {
- "development": {
- "url": IsStr(),
- "protocol": "custom",
- "description": "Test broker description",
- "protocolVersion": "1.1.1",
- "tags": [{"name": "test"}],
- }
- },
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- }
-
- @pytest.mark.asyncio
- async def test_fastapi_asyncapi_routes(self):
- broker = self.broker_class(schema_url="/asyncapi_schema")
-
- @broker.subscriber("test")
- async def handler(): ...
-
- app = FastAPI()
- app.include_router(broker)
-
- async with self.broker_wrapper(broker.broker):
- with TestClient(app) as client:
- schema = get_app_schema(broker)
-
- response_json = client.get("/asyncapi_schema.json")
- assert response_json.json() == schema.to_jsonable()
-
- response_yaml = client.get("/asyncapi_schema.yaml")
- assert response_yaml.text == schema.to_yaml()
-
- response_html = client.get("/asyncapi_schema")
- assert response_html.status_code == 200
-
- @pytest.mark.asyncio
- async def test_fastapi_asyncapi_not_fount(self):
- broker = self.broker_class(include_in_schema=False)
-
- app = FastAPI()
- app.include_router(broker)
-
- async with self.broker_wrapper(broker.broker):
- with TestClient(app) as client:
- response_json = client.get("/asyncapi.json")
- assert response_json.status_code == 404
-
- response_yaml = client.get("/asyncapi.yaml")
- assert response_yaml.status_code == 404
-
- response_html = client.get("/asyncapi")
- assert response_html.status_code == 404
-
- @pytest.mark.asyncio
- async def test_fastapi_asyncapi_not_fount_by_url(self):
- broker = self.broker_class(schema_url=None)
-
- app = FastAPI()
- app.include_router(broker)
-
- async with self.broker_wrapper(broker.broker):
- with TestClient(app) as client:
- response_json = client.get("/asyncapi.json")
- assert response_json.status_code == 404
-
- response_yaml = client.get("/asyncapi.yaml")
- assert response_yaml.status_code == 404
-
- response_html = client.get("/asyncapi")
- assert response_html.status_code == 404
diff --git a/tests/asyncapi/base/naming.py b/tests/asyncapi/base/naming.py
deleted file mode 100644
index 0c3fc9454c..0000000000
--- a/tests/asyncapi/base/naming.py
+++ /dev/null
@@ -1,398 +0,0 @@
-from typing import Any, Type
-
-from dirty_equals import Contains, IsStr
-from pydantic import create_model
-
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.broker.core.usecase import BrokerUsecase
-
-
-class BaseNaming:
- broker_class: Type[BrokerUsecase[Any, Any]]
-
-
-class SubscriberNaming(BaseNaming):
- def test_subscriber_naming(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle_user_created(msg: str): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated")
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Message:Payload"
- ]
-
- def test_pydantic_subscriber_naming(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle_user_created(msg: create_model("SimpleModel")): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated")
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == ["SimpleModel"]
-
- def test_multi_subscribers_naming(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- @broker.subscriber("test2")
- async def handle_user_created(msg: str): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated"),
- IsStr(regex=r"test2[\w:]*:HandleUserCreated"),
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
- IsStr(regex=r"test2[\w:]*:HandleUserCreated:Message"),
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Message:Payload"
- ]
-
- def test_subscriber_naming_manual(self):
- broker = self.broker_class()
-
- @broker.subscriber("test", title="custom")
- async def handle_user_created(msg: str): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["custom"]
-
- assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "custom:Message:Payload"
- ]
-
- def test_subscriber_naming_default(self):
- broker = self.broker_class()
-
- broker.subscriber("test")
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:Subscriber")
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:Subscriber:Message")
- ]
-
- for key, v in schema["components"]["schemas"].items():
- assert key == "Subscriber:Message:Payload"
- assert v == {"title": key}
-
- def test_subscriber_naming_default_with_title(self):
- broker = self.broker_class()
-
- broker.subscriber("test", title="custom")
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["custom"]
-
- assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "custom:Message:Payload"
- ]
-
- assert schema["components"]["schemas"]["custom:Message:Payload"] == {
- "title": "custom:Message:Payload"
- }
-
- def test_multi_subscribers_naming_default(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle_user_created(msg: str): ...
-
- broker.subscriber("test2")
- broker.subscriber("test3")
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated"),
- IsStr(regex=r"test2[\w:]*:Subscriber"),
- IsStr(regex=r"test3[\w:]*:Subscriber"),
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
- IsStr(regex=r"test2[\w:]*:Subscriber:Message"),
- IsStr(regex=r"test3[\w:]*:Subscriber:Message"),
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Message:Payload",
- "Subscriber:Message:Payload",
- ]
-
- assert schema["components"]["schemas"]["Subscriber:Message:Payload"] == {
- "title": "Subscriber:Message:Payload"
- }
-
-
-class FilterNaming(BaseNaming):
- def test_subscriber_filter_base(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle_user_created(msg: str): ...
-
- @broker.subscriber("test")
- async def handle_user_id(msg: int): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated")
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Message:Payload",
- "HandleUserId:Message:Payload",
- ]
-
- def test_subscriber_filter_pydantic(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle_user_created(msg: create_model("SimpleModel")): ...
-
- @broker.subscriber("test")
- async def handle_user_id(msg: int): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated")
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:HandleUserCreated:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "SimpleModel",
- "HandleUserId:Message:Payload",
- ]
-
- def test_subscriber_filter_with_title(self):
- broker = self.broker_class()
-
- @broker.subscriber("test", title="custom")
- async def handle_user_created(msg: str): ...
-
- @broker.subscriber("test", title="custom")
- async def handle_user_id(msg: int): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["custom"]
-
- assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Message:Payload",
- "HandleUserId:Message:Payload",
- ]
-
-
-class PublisherNaming(BaseNaming):
- def test_publisher_naming_base(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle_user_created() -> str: ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher:Message:Payload")
- ]
-
- def test_publisher_naming_pydantic(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle_user_created() -> create_model("SimpleModel"): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "SimpleModel",
- ]
-
- def test_publisher_manual_naming(self):
- broker = self.broker_class()
-
- @broker.publisher("test", title="custom")
- async def handle_user_created() -> str: ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["custom"]
-
- assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "custom:Message:Payload"
- ]
-
- def test_publisher_with_schema_naming(self):
- broker = self.broker_class()
-
- @broker.publisher("test", schema=str)
- async def handle_user_created(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher:Message")
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher:Message:Payload")
- ]
-
- def test_publisher_manual_naming_with_schema(self):
- broker = self.broker_class()
-
- @broker.publisher("test", title="custom", schema=str)
- async def handle_user_created(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["custom"]
-
- assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "custom:Message:Payload"
- ]
-
- def test_multi_publishers_naming(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- @broker.publisher("test2")
- async def handle_user_created() -> str: ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- names = list(schema["channels"].keys())
- assert names == Contains(
- IsStr(regex=r"test2[\w:]*:Publisher"),
- IsStr(regex=r"test[\w:]*:Publisher"),
- ), names
-
- messages = list(schema["components"]["messages"].keys())
- assert messages == Contains(
- IsStr(regex=r"test2[\w:]*:Publisher:Message"),
- IsStr(regex=r"test[\w:]*:Publisher:Message"),
- ), messages
-
- payloads = list(schema["components"]["schemas"].keys())
- assert payloads == Contains(
- IsStr(regex=r"test2[\w:]*:Publisher:Message:Payload"),
- IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
- ), payloads
-
- def test_multi_publisher_usages(self):
- broker = self.broker_class()
-
- pub = broker.publisher("test")
-
- @pub
- async def handle_user_created() -> str: ...
-
- @pub
- async def handle() -> int: ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher"),
- ]
-
- assert list(schema["components"]["messages"].keys()) == [
- IsStr(regex=r"test[\w:]*:Publisher:Message"),
- ]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Publisher:Message:Payload",
- "Handle:Publisher:Message:Payload",
- ]
-
- def test_multi_publisher_usages_with_custom(self):
- broker = self.broker_class()
-
- pub = broker.publisher("test", title="custom")
-
- @pub
- async def handle_user_created() -> str: ...
-
- @pub
- async def handle() -> int: ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["custom"]
-
- assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
-
- assert list(schema["components"]["schemas"].keys()) == [
- "HandleUserCreated:Publisher:Message:Payload",
- "Handle:Publisher:Message:Payload",
- ]
-
-
-class NamingTestCase(SubscriberNaming, FilterNaming, PublisherNaming):
- pass
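
For readers skimming the regexes above: the naming scheme derives every identifier from the channel and the handler's function name. A minimal sketch with a hypothetical `KafkaBroker` (any broker implementing this base suite is expected to behave the same; the exact channel prefix is broker-specific, hence the `[\w:]*` in the assertions):

```python
from faststream import FastStream
from faststream.asyncapi.generate import get_app_schema
from faststream.kafka import KafkaBroker

broker = KafkaBroker()

@broker.subscriber("test")
async def handle_user_created(msg: str): ...

schema = get_app_schema(FastStream(broker)).to_jsonable()

# Channels: "<channel>:<HandlerName in CamelCase>", messages add ":Message",
# payload schemas add ":Message:Payload" (without the channel prefix).
print(list(schema["channels"]))                # e.g. ["test:HandleUserCreated"]
print(list(schema["components"]["messages"]))  # e.g. ["test:HandleUserCreated:Message"]
print(list(schema["components"]["schemas"]))   # ["HandleUserCreated:Message:Payload"]
```
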
diff --git a/tests/asyncapi/base/publisher.py b/tests/asyncapi/base/publisher.py
deleted file mode 100644
index 00b574e817..0000000000
--- a/tests/asyncapi/base/publisher.py
+++ /dev/null
@@ -1,154 +0,0 @@
-from typing import Type
-
-import pydantic
-
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.broker.core.usecase import BrokerUsecase
-
-
-class PublisherTestcase:
- broker_class: Type[BrokerUsecase]
-
- def build_app(self, broker):
- """Patch it to test FastAPI scheme generation too."""
- return FastStream(broker)
-
- def test_publisher_with_description(self):
- broker = self.broker_class()
-
- @broker.publisher("test", description="test description")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
- assert schema["channels"][key]["description"] == "test description"
-
- def test_basic_publisher(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
- assert schema["channels"][key].get("description") is None
- assert schema["channels"][key].get("publish") is not None
-
- payload = schema["components"]["schemas"]
- for v in payload.values():
- assert v == {}
-
- def test_none_publisher(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
- for v in payload.values():
- assert v == {}
-
- def test_typed_publisher(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg) -> int: ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
- for v in payload.values():
- assert v["type"] == "integer"
-
- def test_pydantic_model_publisher(self):
- class User(pydantic.BaseModel):
- name: str = ""
- id: int
-
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg) -> User: ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert v == {
- "properties": {
- "id": {"title": "Id", "type": "integer"},
- "name": {"default": "", "title": "Name", "type": "string"},
- },
- "required": ["id"],
- "title": key,
- "type": "object",
- }
-
- def test_delayed(self):
- broker = self.broker_class()
-
- pub = broker.publisher("test")
-
- @pub
- async def handle(msg) -> int: ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
- for v in payload.values():
- assert v["type"] == "integer"
-
- def test_with_schema(self):
- broker = self.broker_class()
-
- broker.publisher("test", title="Custom", schema=int)
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
- for v in payload.values():
- assert v["type"] == "integer"
-
- def test_not_include(self):
- broker = self.broker_class()
-
- @broker.publisher("test", include_in_schema=False)
- @broker.subscriber("in-test", include_in_schema=False)
- async def handler(msg: str):
- pass
-
- schema = get_app_schema(self.build_app(broker))
-
- assert schema.channels == {}, schema.channels
-
- def test_pydantic_model_with_keyword_property_publisher(self):
- class TestModel(pydantic.BaseModel):
- discriminator: int = 0
-
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg) -> TestModel: ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
-
- for key, v in payload.items():
- assert v == {
- "properties": {
- "discriminator": {
- "default": 0,
- "title": "Discriminator",
- "type": "integer",
- },
- },
- "title": key,
- "type": "object",
- }
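
The suite above exercises the two equivalent ways a publisher's payload type can be declared: via the return annotation of the decorated function, or via the explicit `schema=` argument when there is no function to annotate. A short sketch, again with a hypothetical `KafkaBroker`:

```python
from faststream import FastStream
from faststream.asyncapi.generate import get_app_schema
from faststream.kafka import KafkaBroker

broker = KafkaBroker()

@broker.publisher("annotated")
async def handle(msg) -> int: ...  # payload type read from the annotation

broker.publisher("explicit", schema=int)  # payload type passed explicitly

schema = get_app_schema(FastStream(broker)).to_jsonable()
for payload in schema["components"]["schemas"].values():
    assert payload["type"] == "integer"  # both publishers document an integer
```
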
diff --git a/tests/asyncapi/base/router.py b/tests/asyncapi/base/router.py
deleted file mode 100644
index 84996ccb06..0000000000
--- a/tests/asyncapi/base/router.py
+++ /dev/null
@@ -1,165 +0,0 @@
-from typing import Type
-
-from dirty_equals import IsStr
-
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-
-
-class RouterTestcase:
- broker_class: Type[BrokerUsecase]
- router_class: Type[BrokerRouter]
- publisher_class: Type[ArgsContainer]
- route_class: Type[SubscriberRoute]
-
- def test_delay_subscriber(self):
- broker = self.broker_class()
-
- async def handle(msg): ...
-
- router = self.router_class(
- handlers=(self.route_class(handle, "test"),),
- )
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- payload = schema["components"]["schemas"]
- key = list(payload.keys())[0] # noqa: RUF015
- assert payload[key]["title"] == key == "Handle:Message:Payload"
-
- def test_delay_publisher(self):
- broker = self.broker_class()
-
- async def handle(msg): ...
-
- router = self.router_class(
- handlers=(
- self.route_class(
- handle,
- "test",
- publishers=(self.publisher_class("test2", schema=int),),
- ),
- ),
- )
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker))
- schemas = schema.components.schemas
- del schemas["Handle:Message:Payload"]
-
- for i, j in schemas.items():
- assert (
- i == j["title"] == IsStr(regex=r"test2[\w:]*:Publisher:Message:Payload")
- )
- assert j["type"] == "integer"
-
- def test_not_include(self):
- broker = self.broker_class()
- router = self.router_class(include_in_schema=False)
-
- @router.subscriber("test")
- @router.publisher("test")
- async def handle(msg): ...
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker))
- assert schema.channels == {}, schema.channels
-
- def test_not_include_in_method(self):
- broker = self.broker_class()
- router = self.router_class()
-
- @router.subscriber("test")
- @router.publisher("test")
- async def handle(msg): ...
-
- broker.include_router(router, include_in_schema=False)
-
- schema = get_app_schema(FastStream(broker))
- assert schema.channels == {}, schema.channels
-
- def test_respect_subrouter(self):
- broker = self.broker_class()
- router = self.router_class()
- router2 = self.router_class(include_in_schema=False)
-
- @router2.subscriber("test")
- @router2.publisher("test")
- async def handle(msg): ...
-
- router.include_router(router2)
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker))
-
- assert schema.channels == {}, schema.channels
-
- def test_not_include_subrouter(self):
- broker = self.broker_class()
- router = self.router_class(include_in_schema=False)
- router2 = self.router_class()
-
- @router2.subscriber("test")
- @router2.publisher("test")
- async def handle(msg): ...
-
- router.include_router(router2)
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker))
-
- assert schema.channels == {}
-
- def test_not_include_subrouter_by_method(self):
- broker = self.broker_class()
- router = self.router_class()
- router2 = self.router_class()
-
- @router2.subscriber("test")
- @router2.publisher("test")
- async def handle(msg): ...
-
- router.include_router(router2, include_in_schema=False)
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker))
-
- assert schema.channels == {}
-
- def test_all_nested_routers_by_method(self):
- broker = self.broker_class()
- router = self.router_class()
- router2 = self.router_class()
-
- @router2.subscriber("test")
- @router2.publisher("test")
- async def handle(msg): ...
-
- router.include_router(router2)
- broker.include_router(router, include_in_schema=False)
-
- schema = get_app_schema(FastStream(broker))
-
- assert schema.channels == {}
-
- def test_include_subrouter(self):
- broker = self.broker_class()
- router = self.router_class()
- router2 = self.router_class()
-
- @router2.subscriber("test")
- @router2.publisher("test")
- async def handle(msg): ...
-
- router.include_router(router2)
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker))
-
- assert len(schema.channels) == 2
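
The router cases above all assert one rule: `include_in_schema=False` is honored wherever it appears — on the router constructor, on `include_router()`, or on the broker-level include — and it propagates through nested routers. A compact sketch of the single-level case (hypothetical Kafka classes):

```python
from faststream import FastStream
from faststream.asyncapi.generate import get_app_schema
from faststream.kafka import KafkaBroker, KafkaRouter

broker = KafkaBroker()
router = KafkaRouter()

@router.subscriber("test")
async def handle(msg): ...

# Disabling at include time hides every channel the router contributes.
broker.include_router(router, include_in_schema=False)

schema = get_app_schema(FastStream(broker))
assert schema.channels == {}
```
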
diff --git a/tests/a_docs/confluent/ack/__init__.py b/tests/asyncapi/base/v2_6_0/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/ack/__init__.py
rename to tests/asyncapi/base/v2_6_0/__init__.py
diff --git a/tests/asyncapi/base/v2_6_0/arguments.py b/tests/asyncapi/base/v2_6_0/arguments.py
new file mode 100644
index 0000000000..69d9f676f9
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/arguments.py
@@ -0,0 +1,744 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Annotated, Any, Optional, Union
+
+import pydantic
+import pytest
+from dirty_equals import IsDict, IsPartialDict, IsStr
+from fast_depends import Depends
+from typing_extensions import Literal
+
+from faststream import Context
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream.specification.asyncapi import AsyncAPI
+from tests.marks import pydantic_v2
+
+
+class FastAPICompatible:
+ broker_class: type[BrokerUsecase]
+ dependency_builder = staticmethod(Depends)
+
+ def build_app(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]:
+ """Patch it to test FastAPI scheme generation too."""
+ return broker
+
+ def test_custom_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", title="custom_name", description="test description")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert key == "custom_name"
+ assert schema["channels"][key]["description"] == "test description"
+
+ def test_slash_in_title(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", title="/")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ assert next(iter(schema["channels"].keys())) == "/"
+
+ assert next(iter(schema["components"]["messages"].keys())) == ".:Message"
+ assert schema["components"]["messages"][".:Message"]["title"] == "/:Message"
+
+ assert next(iter(schema["components"]["schemas"].keys())) == ".:Message:Payload"
+ assert (
+ schema["components"]["schemas"][".:Message:Payload"]["title"]
+ == "/:Message:Payload"
+ )
+
+ def test_docstring_description(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", title="custom_name")
+ async def handle(msg) -> None:
+ """Test description."""
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert key == "custom_name"
+ assert schema["channels"][key]["description"] == "Test description.", schema[
+ "channels"
+ ][key]
+
+ def test_empty(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "EmptyPayload"
+ assert v == {
+ "title": key,
+ "type": "null",
+ }
+
+ def test_no_type(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {"title": key}
+
+ def test_simple_type(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg: int) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ assert next(iter(schema["channels"].values())).get("description") is None
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {"title": key, "type": "integer"}
+
+ def test_simple_optional_type(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg: Optional[int]) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == IsDict(
+ {
+ "anyOf": [{"type": "integer"}, {"type": "null"}],
+ "title": key,
+ },
+ ) | IsDict(
+ { # TODO: remove when deprecating PydanticV1
+ "title": key,
+ "type": "integer",
+ },
+ ), v
+
+ def test_simple_type_with_default(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg: int = 1) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "default": 1,
+ "title": key,
+ "type": "integer",
+ }
+
+ def test_multi_args_no_type(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg, another) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "properties": {
+ "another": {"title": "Another"},
+ "msg": {"title": "Msg"},
+ },
+ "required": ["msg", "another"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_multi_args_with_type(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg: str, another: int) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "properties": {
+ "another": {"title": "Another", "type": "integer"},
+ "msg": {"title": "Msg", "type": "string"},
+ },
+ "required": ["msg", "another"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_multi_args_with_default(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg: str, another: Optional[int] = None) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+
+ assert v == {
+ "properties": {
+ "another": IsDict(
+ {
+ "anyOf": [{"type": "integer"}, {"type": "null"}],
+ "default": None,
+ "title": "Another",
+ },
+ )
+ | IsDict(
+ { # TODO: remove when deprecating PydanticV1
+ "title": "Another",
+ "type": "integer",
+ },
+ ),
+ "msg": {"title": "Msg", "type": "string"},
+ },
+ "required": ["msg"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_dataclass(self) -> None:
+ @dataclass
+ class User:
+ id: int
+ name: str = ""
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "User"
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_pydantic_model(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "User"
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_pydantic_model_with_enum(self) -> None:
+ class Status(str, Enum):
+ registered = "registered"
+ banned = "banned"
+
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+ status: Status
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ assert payload == {
+ "Status": IsPartialDict(
+ {
+ "enum": ["registered", "banned"],
+ "title": "Status",
+ "type": "string",
+ },
+ ),
+ "User": {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ "status": {"$ref": "#/components/schemas/Status"},
+ },
+ "required": ["id", "status"],
+ "title": "User",
+ "type": "object",
+ },
+ }, payload
+
+ def test_pydantic_model_mixed_regular(self) -> None:
+ class Email(pydantic.BaseModel):
+ addr: str
+
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+ email: Email
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: User, description: str = "") -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ assert payload == {
+ "Email": {
+ "title": "Email",
+ "type": "object",
+ "properties": {"addr": {"title": "Addr", "type": "string"}},
+ "required": ["addr"],
+ },
+ "User": {
+ "title": "User",
+ "type": "object",
+ "properties": {
+ "name": {"title": "Name", "default": "", "type": "string"},
+ "id": {"title": "Id", "type": "integer"},
+ "email": {"$ref": "#/components/schemas/Email"},
+ },
+ "required": ["id", "email"],
+ },
+ "Handle:Message:Payload": {
+ "title": "Handle:Message:Payload",
+ "type": "object",
+ "properties": {
+ "user": {"$ref": "#/components/schemas/User"},
+ "description": {
+ "title": "Description",
+ "default": "",
+ "type": "string",
+ },
+ },
+ "required": ["user"],
+ },
+ }
+
+ def test_pydantic_model_with_example(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ if PYDANTIC_V2:
+ model_config = {
+ "json_schema_extra": {"examples": [{"name": "john", "id": 1}]},
+ }
+
+ else:
+
+ class Config:
+ schema_extra = {"examples": [{"name": "john", "id": 1}]} # noqa: RUF012
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "User"
+ assert v == {
+ "examples": [{"id": 1, "name": "john"}],
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": "User",
+ "type": "object",
+ }
+
+ def test_pydantic_model_with_keyword_property(self) -> None:
+ class TestModel(pydantic.BaseModel):
+ discriminator: int = 0
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(model: TestModel) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "TestModel"
+ assert v == {
+ "properties": {
+ "discriminator": {
+ "default": 0,
+ "title": "Discriminator",
+ "type": "integer",
+ },
+ },
+ "title": key,
+ "type": "object",
+ }
+
+ def test_ignores_depends(self) -> None:
+ broker = self.broker_class()
+
+ def dep(name: str = "") -> str:
+ return name
+
+ def dep2(name2: str) -> str:
+ return name2
+
+ dependencies = (self.dependency_builder(dep2),)
+ message = self.dependency_builder(dep)
+
+ @broker.subscriber("test", dependencies=dependencies)
+ async def handle(id: int, message=message) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ "name2": {"title": "Name2", "type": "string"},
+ },
+ "required": ["id", "name2"],
+ "title": key,
+ "type": "object",
+ }, v
+
+ @pydantic_v2
+ def test_discriminator(self) -> None:
+ class Sub2(pydantic.BaseModel):
+ type: Literal["sub2"]
+
+ class Sub(pydantic.BaseModel):
+ type: Literal["sub"]
+
+ discriminator = Annotated[
+ Union[Sub2, Sub],
+ pydantic.Field(discriminator="type"),
+ ]
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: discriminator) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ key = next(iter(schema["components"]["messages"].keys()))
+ assert key == IsStr(regex=r"test[\w:]*:Handle:Message")
+ assert schema["components"] == {
+ "messages": {
+ key: {
+ "title": key,
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "discriminator": "type",
+ "oneOf": [
+ {"$ref": "#/components/schemas/Sub2"},
+ {"$ref": "#/components/schemas/Sub"},
+ ],
+ "title": "Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Sub": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub",
+ "type": "object",
+ },
+ "Sub2": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub2", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub2",
+ "type": "object",
+ },
+ },
+ }, schema["components"]
+
+ @pydantic_v2
+ def test_nested_discriminator(self) -> None:
+ class Sub2(pydantic.BaseModel):
+ type: Literal["sub2"]
+
+ class Sub(pydantic.BaseModel):
+ type: Literal["sub"]
+
+ class Model(pydantic.BaseModel):
+ msg: Union[Sub2, Sub] = pydantic.Field(..., discriminator="type")
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: Model) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ key = next(iter(schema["components"]["messages"].keys()))
+ assert key == IsStr(regex=r"test[\w:]*:Handle:Message")
+ assert schema["components"] == {
+ "messages": {
+ key: {
+ "title": key,
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/Model"},
+ },
+ },
+ "schemas": {
+ "Sub": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub",
+ "type": "object",
+ },
+ "Sub2": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub2", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub2",
+ "type": "object",
+ },
+ "Model": {
+ "properties": {
+ "msg": {
+ "discriminator": "type",
+ "oneOf": [
+ {"$ref": "#/components/schemas/Sub2"},
+ {"$ref": "#/components/schemas/Sub"},
+ ],
+ "title": "Msg",
+ },
+ },
+ "required": ["msg"],
+ "title": "Model",
+ "type": "object",
+ },
+ },
+ }, schema["components"]
+
+ def test_with_filter(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test")
+
+ @sub(
+ filter=lambda m: m.content_type == "application/json",
+ )
+ async def handle(id: int) -> None: ...
+
+ @sub
+ async def handle_default(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ assert (
+ len(
+ next(iter(schema["components"]["messages"].values()))["payload"][
+ "oneOf"
+ ],
+ )
+ == 2
+ )
+
+ payload = schema["components"]["schemas"]
+
+ assert "Handle:Message:Payload" in list(payload.keys())
+ assert "HandleDefault:Message:Payload" in list(payload.keys())
+
+
+class ArgumentsTestcase(FastAPICompatible):
+ dependency_builder = staticmethod(Depends)
+
+ def test_pydantic_field(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("msg")
+ async def msg(
+ msg: pydantic.PositiveInt = pydantic.Field(
+ 1,
+ description="some field",
+ title="Perfect",
+ examples=[1],
+ ),
+ ) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Perfect"
+
+ assert v == {
+ "default": 1,
+ "description": "some field",
+ "examples": [1],
+ "exclusiveMinimum": 0,
+ "title": "Perfect",
+ "type": "integer",
+ }
+
+ def test_ignores_custom_field(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(
+ id: int, user: Optional[str] = None, message=Context()
+ ) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert v == IsDict(
+ {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "user": {
+ "anyOf": [{"type": "string"}, {"type": "null"}],
+ "default": None,
+ "title": "User",
+ },
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ },
+ ) | IsDict( # TODO: remove when deprecating PydanticV1
+ {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "user": {"title": "User", "type": "string"},
+ },
+ "required": ["id"],
+ "title": "Handle:Message:Payload",
+ "type": "object",
+ },
+ )
+
+ def test_overwrite_schema(self) -> None:
+ @dataclass
+ class User:
+ id: int
+ name: str = ""
+
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ @dataclass
+ class User:
+ id: int
+ email: str = ""
+
+ @broker.subscriber("test2")
+ async def second_handle(user: User) -> None: ...
+
+ with pytest.warns(
+ RuntimeWarning,
+ match="Overwriting the message schema, data types have the same name",
+ ):
+ schema = AsyncAPI(
+ self.build_app(broker), schema_version="2.6.0"
+ ).to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ assert len(payload) == 1
+
+ key, value = next(iter(payload.items()))
+
+ assert key == "User"
+ assert value == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "email": {"default": "", "title": "Email", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
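
As the `build_app` docstring notes, these classes are abstract suites: a broker-specific test module supplies `broker_class` (and optionally overrides `build_app` to route schema generation through FastAPI). A sketch of the expected subclassing, using a hypothetical Kafka suite:

```python
from faststream.kafka import KafkaBroker

from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase

class TestKafkaArguments(ArgumentsTestcase):
    # Every test defined above now runs against KafkaBroker.
    broker_class = KafkaBroker
```
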
diff --git a/tests/asyncapi/base/v2_6_0/fastapi.py b/tests/asyncapi/base/v2_6_0/fastapi.py
new file mode 100644
index 0000000000..c6b1bd6a2d
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/fastapi.py
@@ -0,0 +1,131 @@
+from typing import Any, Callable
+
+import pytest
+from dirty_equals import IsStr
+from fastapi import Depends, FastAPI
+from fastapi.testclient import TestClient
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.fastapi.router import StreamRouter
+from faststream._internal.types import MsgType
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class FastAPITestCase:
+ router_class: type[StreamRouter[MsgType]]
+ broker_wrapper: Callable[[BrokerUsecase[MsgType, Any]], BrokerUsecase[MsgType, Any]]
+
+ dependency_builder = staticmethod(Depends)
+
+ @pytest.mark.skip()
+ @pytest.mark.asyncio()
+ async def test_fastapi_full_information(self) -> None:
+ router = self.router_class(
+ protocol="custom",
+ protocol_version="1.1.1",
+ description="Test broker description",
+ schema_url="/asyncapi_schema",
+ specification_tags=[{"name": "test"}],
+ )
+
+ app = FastAPI(
+ title="CustomApp",
+ version="1.1.1",
+ description="Test description",
+ contact={"name": "support", "url": "https://support.com"},
+ license_info={"name": "some", "url": "https://some.com"},
+ )
+ app.include_router(router)
+
+ async with self.broker_wrapper(router.broker):
+ with TestClient(app) as client:
+ response_json = client.get("/asyncapi_schema.json")
+
+ assert response_json.json() == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {
+ "title": "CustomApp",
+ "version": "1.1.1",
+ "description": "Test description",
+ "contact": {
+ "name": "support",
+ "url": IsStr(regex=r"https\:\/\/support\.com\/?"),
+ },
+ "license": {
+ "name": "some",
+ "url": IsStr(regex=r"https\:\/\/some\.com\/?"),
+ },
+ },
+ "servers": {
+ "development": {
+ "url": IsStr(),
+ "protocol": "custom",
+ "description": "Test broker description",
+ "protocolVersion": "1.1.1",
+ "tags": [{"name": "test"}],
+ },
+ },
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ }
+
+ @pytest.mark.skip()
+ @pytest.mark.asyncio()
+ async def test_fastapi_asyncapi_routes(self) -> None:
+ router = self.router_class(schema_url="/asyncapi_schema")
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ app = FastAPI()
+ app.include_router(router)
+
+ async with self.broker_wrapper(router.broker):
+ with TestClient(app) as client:
+ schema = AsyncAPI(router.broker, schema_version="2.6.0")
+
+ response_json = client.get("/asyncapi_schema.json")
+ assert response_json.json() == schema.to_jsonable()
+
+ response_yaml = client.get("/asyncapi_schema.yaml")
+ assert response_yaml.text == schema.to_yaml()
+
+ response_html = client.get("/asyncapi_schema")
+ assert response_html.status_code == 200
+
+ @pytest.mark.asyncio()
+ async def test_fastapi_asyncapi_not_found(self) -> None:
+ router = self.router_class(include_in_schema=False)
+
+ app = FastAPI()
+ app.include_router(router)
+
+ async with self.broker_wrapper(router.broker):
+ with TestClient(app) as client:
+ response_json = client.get("/asyncapi.json")
+ assert response_json.status_code == 404
+
+ response_yaml = client.get("/asyncapi.yaml")
+ assert response_yaml.status_code == 404
+
+ response_html = client.get("/asyncapi")
+ assert response_html.status_code == 404
+
+ @pytest.mark.asyncio()
+ async def test_fastapi_asyncapi_not_found_by_url(self) -> None:
+ router = self.router_class(schema_url=None)
+
+ app = FastAPI()
+ app.include_router(router)
+
+ async with self.broker_wrapper(router.broker):
+ with TestClient(app) as client:
+ response_json = client.get("/asyncapi.json")
+ assert response_json.status_code == 404
+
+ response_yaml = client.get("/asyncapi.yaml")
+ assert response_yaml.status_code == 404
+
+ response_html = client.get("/asyncapi")
+ assert response_html.status_code == 404
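
What these cases verify, end to end: including a FastStream `StreamRouter` in a FastAPI app exposes the AsyncAPI document under `schema_url` as JSON, YAML, and an HTML page, and returns 404 for all three when the routes are disabled. A runnable sketch (hypothetical Kafka classes; `TestKafkaBroker` stands in for `broker_wrapper`, so no real broker is needed):

```python
import asyncio

from fastapi import FastAPI
from fastapi.testclient import TestClient
from faststream.kafka import TestKafkaBroker
from faststream.kafka.fastapi import KafkaRouter

router = KafkaRouter(schema_url="/asyncapi")
app = FastAPI()
app.include_router(router)

async def main() -> None:
    # Patch the connection so the app can start without a Kafka instance.
    async with TestKafkaBroker(router.broker):
        with TestClient(app) as client:
            assert client.get("/asyncapi.json").status_code == 200  # JSON schema
            assert client.get("/asyncapi.yaml").status_code == 200  # YAML schema
            assert client.get("/asyncapi").status_code == 200       # HTML viewer

asyncio.run(main())
```
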
diff --git a/tests/a_docs/confluent/additional_config/__init__.py b/tests/asyncapi/base/v2_6_0/from_spec/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/additional_config/__init__.py
rename to tests/asyncapi/base/v2_6_0/from_spec/__init__.py
diff --git a/tests/asyncapi/base/v2_6_0/from_spec/test_contact.py b/tests/asyncapi/base/v2_6_0/from_spec/test_contact.py
new file mode 100644
index 0000000000..ad97d872ce
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/from_spec/test_contact.py
@@ -0,0 +1,59 @@
+from typing import Any
+
+import pytest
+
+from faststream.specification import Contact
+from faststream.specification.asyncapi.v2_6_0.schema import Contact as AsyncAPIContact
+
+
+@pytest.mark.parametrize(
+ ("arg", "result"),
+ (
+ pytest.param(
+ None,
+ None,
+ id="None",
+ ),
+ pytest.param(
+ Contact(
+ name="test",
+ url="http://contact.com",
+ email="support@gmail.com",
+ ),
+ AsyncAPIContact(
+ name="test",
+ url="http://contact.com",
+ email="support@gmail.com",
+ ),
+ id="Contact object",
+ ),
+ pytest.param(
+ {
+ "name": "test",
+ "url": "http://contact.com",
+ },
+ AsyncAPIContact(
+ name="test",
+ url="http://contact.com",
+ ),
+ id="Contact dict",
+ ),
+ pytest.param(
+ {
+ "name": "test",
+ "url": "http://contact.com",
+ "email": "support@gmail.com",
+ "extra": "test",
+ },
+ {
+ "name": "test",
+ "url": "http://contact.com",
+ "email": "support@gmail.com",
+ "extra": "test",
+ },
+ id="Unknown dict",
+ ),
+ ),
+)
+def test_contact_factory_method(arg: Any, result: Any) -> None:
+ assert AsyncAPIContact.from_spec(arg) == result
diff --git a/tests/asyncapi/base/v2_6_0/from_spec/test_external_docs.py b/tests/asyncapi/base/v2_6_0/from_spec/test_external_docs.py
new file mode 100644
index 0000000000..7b2ede38c8
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/from_spec/test_external_docs.py
@@ -0,0 +1,35 @@
+from typing import Any
+
+import pytest
+
+from faststream.specification import ExternalDocs
+from faststream.specification.asyncapi.v2_6_0.schema import ExternalDocs as AsyncAPIDocs
+
+
+@pytest.mark.parametrize(
+ ("arg", "result"),
+ (
+ pytest.param(
+ None,
+ None,
+ id="None",
+ ),
+ pytest.param(
+ ExternalDocs(description="test", url="http://docs.com"),
+ AsyncAPIDocs(description="test", url="http://docs.com"),
+ id="ExternalDocs object",
+ ),
+ pytest.param(
+ {"description": "test", "url": "http://docs.com"},
+ AsyncAPIDocs(description="test", url="http://docs.com"),
+ id="ExternalDocs dict",
+ ),
+ pytest.param(
+ {"description": "test", "url": "http://docs.com", "extra": "test"},
+ {"description": "test", "url": "http://docs.com", "extra": "test"},
+ id="Unknown dict",
+ ),
+ ),
+)
+def test_external_docs_factory_method(arg: Any, result: Any) -> None:
+ assert AsyncAPIDocs.from_spec(arg) == result
diff --git a/tests/asyncapi/base/v2_6_0/from_spec/test_license.py b/tests/asyncapi/base/v2_6_0/from_spec/test_license.py
new file mode 100644
index 0000000000..c6e2e9421b
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/from_spec/test_license.py
@@ -0,0 +1,35 @@
+from typing import Any
+
+import pytest
+
+from faststream.specification import License
+from faststream.specification.asyncapi.v2_6_0.schema import License as AsyncAPILicense
+
+
+@pytest.mark.parametrize(
+ ("arg", "result"),
+ (
+ pytest.param(
+ None,
+ None,
+ id="None",
+ ),
+ pytest.param(
+ License(name="test", url="http://license.com"),
+ AsyncAPILicense(name="test", url="http://license.com"),
+ id="License object",
+ ),
+ pytest.param(
+ {"name": "test", "url": "http://license.com"},
+ AsyncAPILicense(name="test", url="http://license.com"),
+ id="License dict",
+ ),
+ pytest.param(
+ {"name": "test", "url": "http://license.com", "extra": "test"},
+ {"name": "test", "url": "http://license.com", "extra": "test"},
+ id="Unknown dict",
+ ),
+ ),
+)
+def test_license_factory_method(arg: Any, result: Any) -> None:
+ assert AsyncAPILicense.from_spec(arg) == result
diff --git a/tests/asyncapi/base/v2_6_0/from_spec/test_tag.py b/tests/asyncapi/base/v2_6_0/from_spec/test_tag.py
new file mode 100644
index 0000000000..66eedcd811
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/from_spec/test_tag.py
@@ -0,0 +1,49 @@
+from typing import Any
+
+import pytest
+
+from faststream.specification import ExternalDocs, Tag
+from faststream.specification.asyncapi.v2_6_0.schema import (
+ ExternalDocs as AsyncAPIDocs,
+ Tag as AsyncAPITag,
+)
+
+
+@pytest.mark.parametrize(
+ ("arg", "result"),
+ (
+ pytest.param(
+ Tag(
+ name="test",
+ description="test",
+ external_docs=ExternalDocs(url="http://docs.com"),
+ ),
+ AsyncAPITag(
+ name="test",
+ description="test",
+ externalDocs=AsyncAPIDocs(url="http://docs.com"),
+ ),
+ id="Tag object",
+ ),
+ pytest.param(
+ {
+ "name": "test",
+ "description": "test",
+ "external_docs": {"url": "http://docs.com"},
+ },
+ AsyncAPITag(
+ name="test",
+ description="test",
+ externalDocs=AsyncAPIDocs(url="http://docs.com"),
+ ),
+ id="Tag dict",
+ ),
+ pytest.param(
+ {"name": "test", "description": "test", "extra": "test"},
+ {"name": "test", "description": "test", "extra": "test"},
+ id="Unknown dict",
+ ),
+ ),
+)
+def test_tag_factory_method(arg: Any, result: Any) -> None:
+ assert AsyncAPITag.from_spec(arg) == result
diff --git a/tests/asyncapi/base/v2_6_0/naming.py b/tests/asyncapi/base/v2_6_0/naming.py
new file mode 100644
index 0000000000..648217df2a
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/naming.py
@@ -0,0 +1,403 @@
+from typing import Any
+
+from dirty_equals import Contains, IsStr
+from pydantic import create_model
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class BaseNaming:
+ broker_class: type[BrokerUsecase[Any, Any]]
+
+
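+# NOTE: the `[\w:]*` part of the regexes below absorbs any broker-specific
+# decoration (exchange, group, etc.) that may appear between the channel
+# address and the handler name.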
+class SubscriberNaming(BaseNaming):
+ def test_subscriber_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle_user_created(msg: str) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ ]
+
+ def test_pydantic_subscriber_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle_user_created(msg: create_model("SimpleModel")) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == ["SimpleModel"]
+
+ def test_multi_subscribers_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ @broker.subscriber("test2")
+ async def handle_user_created(msg: str) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ IsStr(regex=r"test2[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
+ IsStr(regex=r"test2[\w:]*:HandleUserCreated:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ ]
+
+ def test_subscriber_naming_manual(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", title="custom")
+ async def handle_user_created(msg: str) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
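+ # A subscriber registered without a handler function falls back to the
+ # generic "Subscriber" name and an untyped payload.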
+ def test_subscriber_naming_default(self) -> None:
+ broker = self.broker_class()
+
+ broker.subscriber("test")
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Subscriber"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Subscriber:Message"),
+ ]
+
+ for key, v in schema["components"]["schemas"].items():
+ assert key == "Subscriber:Message:Payload"
+ assert v == {"title": key}
+
+ def test_subscriber_naming_default_with_title(self) -> None:
+ broker = self.broker_class()
+
+ broker.subscriber("test", title="custom")
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ assert schema["components"]["schemas"]["custom:Message:Payload"] == {
+ "title": "custom:Message:Payload",
+ }
+
+ def test_multi_subscribers_naming_default(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle_user_created(msg: str) -> None: ...
+
+ broker.subscriber("test2")
+ broker.subscriber("test3")
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ IsStr(regex=r"test2[\w:]*:Subscriber"),
+ IsStr(regex=r"test3[\w:]*:Subscriber"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
+ IsStr(regex=r"test2[\w:]*:Subscriber:Message"),
+ IsStr(regex=r"test3[\w:]*:Subscriber:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ "Subscriber:Message:Payload",
+ ]
+
+ assert schema["components"]["schemas"]["Subscriber:Message:Payload"] == {
+ "title": "Subscriber:Message:Payload",
+ }
+
+
+class FilterNaming(BaseNaming):
+ def test_subscriber_filter_base(self) -> None:
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test")
+
+ @sub
+ async def handle_user_created(msg: str) -> None: ...
+
+ @sub
+ async def handle_user_id(msg: int) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ "HandleUserId:Message:Payload",
+ ]
+
+ def test_subscriber_filter_pydantic(self) -> None:
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test")
+
+ @sub
+ async def handle_user_created(msg: create_model("SimpleModel")) -> None: ...
+
+ @sub
+ async def handle_user_id(msg: int) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "SimpleModel",
+ "HandleUserId:Message:Payload",
+ ]
+
+ def test_subscriber_filter_with_title(self) -> None:
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test", title="custom")
+
+ @sub
+ async def handle_user_created(msg: str) -> None: ...
+
+ @sub
+ async def handle_user_id(msg: int) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ "HandleUserId:Message:Payload",
+ ]
+
+
+class PublisherNaming(BaseNaming):
+ def test_publisher_naming_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle_user_created() -> str: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
+ ]
+
+ def test_publisher_naming_pydantic(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle_user_created() -> create_model("SimpleModel"): ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "SimpleModel",
+ ]
+
+ def test_publisher_manual_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", title="custom")
+ async def handle_user_created() -> str: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ def test_publisher_with_schema_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", schema=str)
+ async def handle_user_created() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
+ ]
+
+ def test_publisher_manual_naming_with_schema(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", title="custom", schema=str)
+ async def handle_user_created() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ def test_multi_publishers_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ @broker.publisher("test2")
+ async def handle_user_created() -> str: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ names = list(schema["channels"].keys())
+ assert names == Contains(
+ IsStr(regex=r"test2[\w:]*:Publisher"),
+ IsStr(regex=r"test[\w:]*:Publisher"),
+ ), names
+
+ messages = list(schema["components"]["messages"].keys())
+ assert messages == Contains(
+ IsStr(regex=r"test2[\w:]*:Publisher:Message"),
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ), messages
+
+ payloads = list(schema["components"]["schemas"].keys())
+ assert payloads == Contains(
+ IsStr(regex=r"test2[\w:]*:Publisher:Message:Payload"),
+ IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
+ ), payloads
+
+ def test_multi_publisher_usages(self) -> None:
+ broker = self.broker_class()
+
+ pub = broker.publisher("test")
+
+ @pub
+ async def handle_user_created() -> str: ...
+
+ @pub
+ async def handle() -> int: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Publisher:Message:Payload",
+ "Handle:Publisher:Message:Payload",
+ ]
+
+ def test_multi_publisher_usages_with_custom(self) -> None:
+ broker = self.broker_class()
+
+ pub = broker.publisher("test", title="custom")
+
+ @pub
+ async def handle_user_created() -> str: ...
+
+ @pub
+ async def handle() -> int: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Publisher:Message:Payload",
+ "Handle:Publisher:Message:Payload",
+ ]
+
+
+class NamingTestCase(SubscriberNaming, FilterNaming, PublisherNaming):
+ pass
diff --git a/tests/asyncapi/base/v2_6_0/publisher.py b/tests/asyncapi/base/v2_6_0/publisher.py
new file mode 100644
index 0000000000..d61baa2d19
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/publisher.py
@@ -0,0 +1,151 @@
+import pydantic
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class PublisherTestcase:
+ broker_class: type[BrokerUsecase]
+
+ def build_app(self, broker):
+ """Patch it to test FastAPI scheme generation too."""
+ return broker
+
+ def test_publisher_with_description(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", description="test description")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+ assert schema["channels"][key]["description"] == "test description"
+
+ def test_basic_publisher(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+ assert schema["channels"][key].get("description") is None
+ assert schema["channels"][key].get("subscribe") is not None
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v == {}
+
+ def test_none_publisher(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v == {}
+
+ def test_typed_publisher(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> int: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v["type"] == "integer"
+
+ def test_pydantic_model_publisher(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> User: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_delayed(self) -> None:
+ broker = self.broker_class()
+
+ pub = broker.publisher("test")
+
+ @pub
+ async def handle(msg) -> int: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v["type"] == "integer"
+
+ def test_with_schema(self) -> None:
+ broker = self.broker_class()
+
+ broker.publisher("test", title="Custom", schema=int)
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v["type"] == "integer"
+
+ def test_not_include(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", include_in_schema=False)
+ @broker.subscriber("in-test", include_in_schema=False)
+ async def handler(msg: str) -> None:
+ pass
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0")
+
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_pydantic_model_with_keyword_property_publisher(self) -> None:
+ class TestModel(pydantic.BaseModel):
+ discriminator: int = 0
+
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> TestModel: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert v == {
+ "properties": {
+ "discriminator": {
+ "default": 0,
+ "title": "Discriminator",
+ "type": "integer",
+ },
+ },
+ "title": key,
+ "type": "object",
+ }
diff --git a/tests/asyncapi/base/v2_6_0/router.py b/tests/asyncapi/base/v2_6_0/router.py
new file mode 100644
index 0000000000..bcbe99ce1e
--- /dev/null
+++ b/tests/asyncapi/base/v2_6_0/router.py
@@ -0,0 +1,166 @@
+from dirty_equals import IsStr
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class RouterTestcase:
+ broker_class: type[BrokerUsecase]
+ router_class: type[BrokerRouter]
+ publisher_class: type[ArgsContainer]
+ route_class: type[SubscriberRoute]
+
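+ # "Delayed" registration: the handler is passed to the router
+ # constructor via `handlers=` instead of the decorator API.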
+ def test_delay_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ async def handle(msg) -> None: ...
+
+ router = self.router_class(
+ handlers=(self.route_class(handle, "test"),),
+ )
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ key = list(payload.keys())[0] # noqa: RUF015
+ assert payload[key]["title"] == key == "Handle:Message:Payload"
+
+ def test_delay_publisher(self) -> None:
+ broker = self.broker_class()
+
+ async def handle(msg) -> None: ...
+
+ router = self.router_class(
+ handlers=(
+ self.route_class(
+ handle,
+ "test",
+ publishers=(self.publisher_class("test2", schema=int),),
+ ),
+ ),
+ )
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+ schemas = schema.to_jsonable()["components"]["schemas"]
+ del schemas["Handle:Message:Payload"]
+
+ for key, value in schemas.items():
+ assert (
+ key
+ == value["title"]
+ == IsStr(regex=r"test2[\w:]*:Publisher:Message:Payload")
+ )
+ assert value["type"] == "integer"
+
+ def test_not_include(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class(include_in_schema=False)
+
+ @router.subscriber("test")
+ @router.publisher("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_not_include_in_method(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+
+ @router.subscriber("test")
+ @router.publisher("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router, include_in_schema=False)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_respect_subrouter(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class(include_in_schema=False)
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_not_include_subrouter(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class(include_in_schema=False)
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+
+ assert schema.to_jsonable()["channels"] == {}
+
+ def test_not_include_subrouter_by_method(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2, include_in_schema=False)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+
+ assert schema.to_jsonable()["channels"] == {}
+
+ def test_all_nested_routers_by_method(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router, include_in_schema=False)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+
+ assert schema.to_jsonable()["channels"] == {}
+
+ def test_include_subrouter(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0")
+
+ assert len(schema.to_jsonable()["channels"]) == 2
diff --git a/tests/a_docs/confluent/basic/__init__.py b/tests/asyncapi/base/v3_0_0/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/basic/__init__.py
rename to tests/asyncapi/base/v3_0_0/__init__.py
diff --git a/tests/asyncapi/base/v3_0_0/arguments.py b/tests/asyncapi/base/v3_0_0/arguments.py
new file mode 100644
index 0000000000..a3afe8a6fb
--- /dev/null
+++ b/tests/asyncapi/base/v3_0_0/arguments.py
@@ -0,0 +1,728 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Annotated, Optional, Union
+
+import pydantic
+import pytest
+from dirty_equals import IsDict, IsPartialDict, IsStr
+from fast_depends import Depends
+from fastapi import Depends as APIDepends
+from typing_extensions import Literal
+
+from faststream import Context
+from faststream._internal._compat import PYDANTIC_V2
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.fastapi import StreamRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.marks import pydantic_v2
+
+
+class FastAPICompatible:
+ broker_factory: type[Union[BrokerUsecase, StreamRouter]]
+ dependency_builder = staticmethod(APIDepends)
+
+ def build_app(self, broker):
+ """Patch it to test FastAPI scheme generation too."""
+ return broker
+
+ def test_custom_naming(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test", title="custom_name", description="test description")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert key == "custom_name"
+ assert schema["channels"][key]["description"] == "test description"
+
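+ # The generator replaces "/" with "." in channel/message/schema keys,
+ # keeping the original "/" value in the titles and the channel address.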
+ def test_slash_in_title(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test", title="/")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ assert next(iter(schema["channels"].keys())) == "."
+ assert schema["channels"]["."]["address"] == "/"
+
+ assert next(iter(schema["operations"].keys())) == ".Subscribe"
+
+ assert (
+ next(iter(schema["components"]["messages"].keys())) == ".:SubscribeMessage"
+ )
+ assert (
+ schema["components"]["messages"][".:SubscribeMessage"]["title"]
+ == "/:SubscribeMessage"
+ )
+
+ assert next(iter(schema["components"]["schemas"].keys())) == ".:Message:Payload"
+ assert (
+ schema["components"]["schemas"][".:Message:Payload"]["title"]
+ == "/:Message:Payload"
+ )
+
+ def test_docstring_description(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test", title="custom_name")
+ async def handle(msg) -> None:
+ """Test description."""
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert key == "custom_name"
+ assert schema["channels"][key]["description"] == "Test description.", schema[
+ "channels"
+ ][key]["description"]
+
+ def test_empty(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "EmptyPayload"
+ assert v == {
+ "title": key,
+ "type": "null",
+ }
+
+ def test_no_type(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {"title": key}
+
+ def test_simple_type(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg: int) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ assert next(iter(schema["channels"].values())).get("description") is None
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {"title": key, "type": "integer"}
+
+ def test_simple_optional_type(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg: Optional[int]) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == IsDict(
+ {
+ "anyOf": [{"type": "integer"}, {"type": "null"}],
+ "title": key,
+ },
+ ) | IsDict(
+ { # TODO: remove when deprecating PydanticV1
+ "title": key,
+ "type": "integer",
+ },
+ ), v
+
+ def test_simple_type_with_default(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg: int = 1) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "default": 1,
+ "title": key,
+ "type": "integer",
+ }
+
+ def test_multi_args_no_type(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg, another) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "properties": {
+ "another": {"title": "Another"},
+ "msg": {"title": "Msg"},
+ },
+ "required": ["msg", "another"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_multi_args_with_type(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg: str, another: int) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "properties": {
+ "another": {"title": "Another", "type": "integer"},
+ "msg": {"title": "Msg", "type": "string"},
+ },
+ "required": ["msg", "another"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_multi_args_with_default(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg: str, another: Optional[int] = None) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+
+ assert v == {
+ "properties": {
+ "another": IsDict(
+ {
+ "anyOf": [{"type": "integer"}, {"type": "null"}],
+ "default": None,
+ "title": "Another",
+ },
+ )
+ | IsDict(
+ { # TODO: remove when deprecating PydanticV1
+ "title": "Another",
+ "type": "integer",
+ },
+ ),
+ "msg": {"title": "Msg", "type": "string"},
+ },
+ "required": ["msg"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_dataclass(self) -> None:
+ @dataclass
+ class User:
+ id: int
+ name: str = ""
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "User"
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_pydantic_model(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "User"
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_pydantic_model_with_enum(self) -> None:
+ class Status(str, Enum):
+ registered = "registered"
+ banned = "banned"
+
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+ status: Status
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ assert payload == {
+ "Status": IsPartialDict(
+ {
+ "enum": ["registered", "banned"],
+ "title": "Status",
+ "type": "string",
+ },
+ ),
+ "User": {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ "status": {"$ref": "#/components/schemas/Status"},
+ },
+ "required": ["id", "status"],
+ "title": "User",
+ "type": "object",
+ },
+ }, payload
+
+ def test_pydantic_model_mixed_regular(self) -> None:
+ class Email(pydantic.BaseModel):
+ addr: str
+
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+ email: Email
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: User, description: str = "") -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ assert payload == {
+ "Email": {
+ "title": "Email",
+ "type": "object",
+ "properties": {"addr": {"title": "Addr", "type": "string"}},
+ "required": ["addr"],
+ },
+ "User": {
+ "title": "User",
+ "type": "object",
+ "properties": {
+ "name": {"title": "Name", "default": "", "type": "string"},
+ "id": {"title": "Id", "type": "integer"},
+ "email": {"$ref": "#/components/schemas/Email"},
+ },
+ "required": ["id", "email"],
+ },
+ "Handle:Message:Payload": {
+ "title": "Handle:Message:Payload",
+ "type": "object",
+ "properties": {
+ "user": {"$ref": "#/components/schemas/User"},
+ "description": {
+ "title": "Description",
+ "default": "",
+ "type": "string",
+ },
+ },
+ "required": ["user"],
+ },
+ }
+
+ def test_pydantic_model_with_example(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ if PYDANTIC_V2:
+ model_config = {
+ "json_schema_extra": {"examples": [{"name": "john", "id": 1}]},
+ }
+
+ else:
+
+ class Config:
+ schema_extra = {"examples": [{"name": "john", "id": 1}]} # noqa: RUF012
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "User"
+ assert v == {
+ "examples": [{"id": 1, "name": "john"}],
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": "User",
+ "type": "object",
+ }
+
+ def test_with_filter(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ broker = self.broker_factory()
+
+ sub = broker.subscriber("test")
+
+ @sub( # pragma: no branch
+ filter=lambda m: m.content_type == "application/json",
+ )
+ async def handle(id: int) -> None: ...
+
+ @sub
+ async def handle_default(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ assert (
+ len(
+ next(iter(schema["components"]["messages"].values()))["payload"][
+ "oneOf"
+ ],
+ )
+ == 2
+ )
+
+ payload = schema["components"]["schemas"]
+
+ assert "Handle:Message:Payload" in list(payload.keys())
+ assert "HandleDefault:Message:Payload" in list(payload.keys())
+
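+ # Fields required by dependencies (`name`, `name2`) are merged into the
+ # message payload schema, while the dependency callables themselves are
+ # not documented.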
+ def test_ignores_depends(self) -> None:
+ broker = self.broker_factory()
+
+ def dep(name: str = ""):
+ return name
+
+ def dep2(name2: str):
+ return name2
+
+ dependencies = (self.dependency_builder(dep2),)
+ message = self.dependency_builder(dep)
+
+ @broker.subscriber("test", dependencies=dependencies)
+ async def handle(id: int, message=message) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Handle:Message:Payload"
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ "name2": {"title": "Name2", "type": "string"},
+ },
+ "required": ["id", "name2"],
+ "title": key,
+ "type": "object",
+ }, v
+
+ @pydantic_v2
+ def test_discriminator(self) -> None:
+ class Sub2(pydantic.BaseModel):
+ type: Literal["sub2"]
+
+ class Sub(pydantic.BaseModel):
+ type: Literal["sub"]
+
+ discriminator = Annotated[
+ Union[Sub2, Sub],
+ pydantic.Field(discriminator="type"),
+ ]
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: discriminator) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = next(iter(schema["components"]["messages"].keys()))
+ assert key == IsStr(regex=r"test[\w:]*:Handle:SubscribeMessage")
+
+ assert schema["components"] == {
+ "messages": {
+ key: {
+ "title": key,
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/Handle:Message:Payload"},
+ },
+ },
+ "schemas": {
+ "Sub": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub",
+ "type": "object",
+ },
+ "Sub2": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub2", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub2",
+ "type": "object",
+ },
+ "Handle:Message:Payload": {
+ "discriminator": "type",
+ "oneOf": [
+ {"$ref": "#/components/schemas/Sub2"},
+ {"$ref": "#/components/schemas/Sub"},
+ ],
+ "title": "Handle:Message:Payload",
+ },
+ },
+ }, schema["components"]
+
+ @pydantic_v2
+ def test_nested_discriminator(self) -> None:
+ class Sub2(pydantic.BaseModel):
+ type: Literal["sub2"]
+
+ class Sub(pydantic.BaseModel):
+ type: Literal["sub"]
+
+ class Model(pydantic.BaseModel):
+ msg: Union[Sub2, Sub] = pydantic.Field(..., discriminator="type")
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: Model) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ key = next(iter(schema["components"]["messages"].keys()))
+ assert key == IsStr(regex=r"test[\w:]*:Handle:SubscribeMessage")
+ assert schema["components"] == {
+ "messages": {
+ key: {
+ "title": key,
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/Model"},
+ },
+ },
+ "schemas": {
+ "Sub": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub",
+ "type": "object",
+ },
+ "Sub2": {
+ "properties": {
+ "type": IsPartialDict({"const": "sub2", "title": "Type"}),
+ },
+ "required": ["type"],
+ "title": "Sub2",
+ "type": "object",
+ },
+ "Model": {
+ "properties": {
+ "msg": {
+ "discriminator": "type",
+ "oneOf": [
+ {"$ref": "#/components/schemas/Sub2"},
+ {"$ref": "#/components/schemas/Sub"},
+ ],
+ "title": "Msg",
+ },
+ },
+ "required": ["msg"],
+ "title": "Model",
+ "type": "object",
+ },
+ },
+ }, schema["components"]
+
+
+class ArgumentsTestcase(FastAPICompatible):
+ dependency_builder = staticmethod(Depends)
+
+ def test_pydantic_field(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("msg")
+ async def msg(
+ msg: pydantic.PositiveInt = pydantic.Field(
+ 1,
+ description="some field",
+ title="Perfect",
+ examples=[1],
+ ),
+ ) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert key == "Perfect"
+
+ assert v == {
+ "default": 1,
+ "description": "some field",
+ "examples": [1],
+ "exclusiveMinimum": 0,
+ "title": "Perfect",
+ "type": "integer",
+ }
+
+ def test_ignores_custom_field(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(
+ id: int,
+ user: Optional[str] = None,
+ message=Context(),
+ ) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert v == IsDict(
+ {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "user": {
+ "anyOf": [{"type": "string"}, {"type": "null"}],
+ "default": None,
+ "title": "User",
+ },
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ },
+ ) | IsDict( # TODO: remove when deprecating PydanticV1
+ {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "user": {"title": "User", "type": "string"},
+ },
+ "required": ["id"],
+ "title": "Handle:Message:Payload",
+ "type": "object",
+ },
+ )
+
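+ # Two distinct classes sharing the name `User` collide in
+ # components/schemas: the last definition wins and a RuntimeWarning
+ # is emitted.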
+ def test_overwrite_schema(self) -> None:
+ @dataclass
+ class User:
+ id: int
+ name: str = ""
+
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(user: User) -> None: ...
+
+ @dataclass
+ class User:
+ id: int
+ email: str = ""
+
+ @broker.subscriber("test2")
+ async def second_handle(user: User) -> None: ...
+
+ with pytest.warns(
+ RuntimeWarning,
+ match="Overwriting the message schema, data types have the same name",
+ ):
+ schema = AsyncAPI(
+ self.build_app(broker),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ assert len(payload) == 1
+
+ key, value = next(iter(payload.items()))
+
+ assert key == "User"
+ assert value == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "email": {"default": "", "title": "Email", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
diff --git a/tests/asyncapi/base/v3_0_0/fastapi.py b/tests/asyncapi/base/v3_0_0/fastapi.py
new file mode 100644
index 0000000000..9920514b86
--- /dev/null
+++ b/tests/asyncapi/base/v3_0_0/fastapi.py
@@ -0,0 +1,139 @@
+from typing import Any, Callable
+
+import pytest
+from dirty_equals import IsStr
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.fastapi.router import StreamRouter
+from faststream._internal.types import MsgType
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class FastAPITestCase:
+ router_factory: type[StreamRouter[MsgType]]
+ broker_wrapper: Callable[[BrokerUsecase[MsgType, Any]], BrokerUsecase[MsgType, Any]]
+
+ @pytest.mark.asyncio()
+ async def test_fastapi_full_information(self) -> None:
+ broker = self.router_factory(
+ protocol="custom",
+ protocol_version="1.1.1",
+ description="Test broker description",
+ schema_url="/asyncapi_schema",
+ specification_tags=[{"name": "test"}],
+ )
+
+ app = FastAPI(
+ title="CustomApp",
+ version="1.1.1",
+ description="Test description",
+ contact={"name": "support", "url": "https://support.com"},
+ license_info={"name": "some", "url": "https://some.com"},
+ )
+ app.include_router(broker)
+
+ async with self.broker_wrapper(broker.broker):
+ with TestClient(app) as client:
+ response_json = client.get("/asyncapi_schema.json")
+
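+ # The optional "/?" in the URL regexes below tolerates the trailing
+ # slash that pydantic URL types may append during validation.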
+ assert response_json.json() == {
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "info": {
+ "title": "CustomApp",
+ "version": "1.1.1",
+ "description": "Test description",
+ "contact": {
+ "name": "support",
+ "url": IsStr(regex=r"https\:\/\/support\.com\/?"),
+ },
+ "license": {
+ "name": "some",
+ "url": IsStr(regex=r"https\:\/\/some\.com\/?"),
+ },
+ },
+ "servers": {
+ "development": {
+ "host": IsStr(),
+ "pathname": IsStr(),
+ "protocol": "custom",
+ "description": "Test broker description",
+ "protocolVersion": "1.1.1",
+ "tags": [{"name": "test"}],
+ },
+ },
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ }, response_json.json()
+
+ @pytest.mark.asyncio()
+ async def test_fastapi_asyncapi_routes(self) -> None:
+ router = self.router_factory(schema_url="/asyncapi_schema")
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ app = FastAPI()
+ app.include_router(router)
+
+ async with self.broker_wrapper(router.broker):
+ with TestClient(app) as client:
+ schema = AsyncAPI(
+ router.broker,
+ title=router.title,
+ description=router.description,
+ app_version=router.version,
+ contact=router.contact,
+ license=router.license,
+ schema_version="3.0.0",
+ )
+
+ response_json = client.get("/asyncapi_schema.json")
+ assert response_json.json() == schema.to_jsonable(), (
+ schema.to_jsonable()
+ )
+
+ response_yaml = client.get("/asyncapi_schema.yaml")
+ assert response_yaml.text == schema.to_yaml()
+
+ response_html = client.get("/asyncapi_schema")
+ assert response_html.status_code == 200
+
+ @pytest.mark.asyncio()
+ async def test_fastapi_asyncapi_not_found(self) -> None:
+ broker = self.router_factory(include_in_schema=False)
+
+ app = FastAPI()
+ app.include_router(broker)
+
+ async with self.broker_wrapper(broker.broker):
+ with TestClient(app) as client:
+ response_json = client.get("/asyncapi.json")
+ assert response_json.status_code == 404
+
+ response_yaml = client.get("/asyncapi.yaml")
+ assert response_yaml.status_code == 404
+
+ response_html = client.get("/asyncapi")
+ assert response_html.status_code == 404
+
+ @pytest.mark.asyncio()
+ async def test_fastapi_asyncapi_not_found_by_url(self) -> None:
+ broker = self.router_factory(schema_url=None)
+
+ app = FastAPI()
+ app.include_router(broker)
+
+ async with self.broker_wrapper(broker.broker):
+ with TestClient(app) as client:
+ response_json = client.get("/asyncapi.json")
+ assert response_json.status_code == 404
+
+ response_yaml = client.get("/asyncapi.yaml")
+ assert response_yaml.status_code == 404
+
+ response_html = client.get("/asyncapi")
+ assert response_html.status_code == 404
diff --git a/tests/asyncapi/base/v3_0_0/naming.py b/tests/asyncapi/base/v3_0_0/naming.py
new file mode 100644
index 0000000000..3ea08e7275
--- /dev/null
+++ b/tests/asyncapi/base/v3_0_0/naming.py
@@ -0,0 +1,409 @@
+from typing import Any
+
+from dirty_equals import Contains, IsStr
+from pydantic import create_model
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class BaseNaming:
+ broker_class: type[BrokerUsecase[Any, Any]]
+
+
+class SubscriberNaming(BaseNaming):
+ def test_subscriber_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle_user_created(msg: str) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:SubscribeMessage"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ ]
+
+ def test_pydantic_subscriber_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle_user_created(msg: create_model("SimpleModel")) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:SubscribeMessage"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == ["SimpleModel"]
+
+ def test_multi_subscribers_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ @broker.subscriber("test2")
+ async def handle_user_created(msg: str) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ IsStr(regex=r"test2[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:SubscribeMessage"),
+ IsStr(regex=r"test2[\w:]*:HandleUserCreated:SubscribeMessage"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ ]
+
+ def test_subscriber_naming_manual(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", title="custom")
+ async def handle_user_created(msg: str) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "custom:SubscribeMessage",
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ def test_subscriber_naming_default(self) -> None:
+ broker = self.broker_class()
+
+ broker.subscriber("test")
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Subscriber"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Subscriber:SubscribeMessage"),
+ ]
+
+ for key, v in schema["components"]["schemas"].items():
+ assert key == "Subscriber:Message:Payload"
+ assert v == {"title": key}
+
+ def test_subscriber_naming_default_with_title(self) -> None:
+ broker = self.broker_class()
+
+ broker.subscriber("test", title="custom")
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "custom:SubscribeMessage",
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ assert schema["components"]["schemas"]["custom:Message:Payload"] == {
+ "title": "custom:Message:Payload",
+ }
+
+ def test_multi_subscribers_naming_default(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle_user_created(msg: str) -> None: ...
+
+ broker.subscriber("test2")
+ broker.subscriber("test3")
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ IsStr(regex=r"test2[\w:]*:Subscriber"),
+ IsStr(regex=r"test3[\w:]*:Subscriber"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:SubscribeMessage"),
+ IsStr(regex=r"test2[\w:]*:Subscriber:SubscribeMessage"),
+ IsStr(regex=r"test3[\w:]*:Subscriber:SubscribeMessage"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ "Subscriber:Message:Payload",
+ ]
+
+ assert schema["components"]["schemas"]["Subscriber:Message:Payload"] == {
+ "title": "Subscriber:Message:Payload",
+ }
+
+
+class FilterNaming(BaseNaming):
+ def test_subscriber_filter_base(self) -> None:
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test")
+
+ @sub
+ async def handle_user_created(msg: str) -> None: ...
+
+ @sub
+ async def handle_user_id(msg: int) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:SubscribeMessage"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ "HandleUserId:Message:Payload",
+ ]
+
+ def test_subscriber_filter_pydantic(self) -> None:
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test")
+
+ @sub
+ async def handle_user_created(msg: create_model("SimpleModel")) -> None: ...
+
+ @sub
+ async def handle_user_id(msg: int) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:HandleUserCreated:SubscribeMessage"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "SimpleModel",
+ "HandleUserId:Message:Payload",
+ ]
+
+ def test_subscriber_filter_with_title(self) -> None:
+ broker = self.broker_class()
+
+ sub = broker.subscriber("test", title="custom")
+
+ @sub
+ async def handle_user_created(msg: str) -> None: ...
+
+ @sub
+ async def handle_user_id(msg: int) -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "custom:SubscribeMessage",
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Message:Payload",
+ "HandleUserId:Message:Payload",
+ ]
+
+
+class PublisherNaming(BaseNaming):
+ def test_publisher_naming_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle_user_created() -> str: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
+ ]
+
+ def test_publisher_naming_pydantic(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle_user_created() -> create_model("SimpleModel"): ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "SimpleModel",
+ ], list(schema["components"]["schemas"].keys())
+
+ def test_publisher_manual_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", title="custom")
+ async def handle_user_created() -> str: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ def test_publisher_with_schema_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", schema=str)
+ async def handle_user_created() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [IsStr(regex=r"test[\w:]*:Publisher")]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
+ ]
+
+ def test_publisher_manual_naming_with_schema(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", title="custom", schema=str)
+ async def handle_user_created() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "custom:Message:Payload",
+ ]
+
+ def test_multi_publishers_naming(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ @broker.publisher("test2")
+ async def handle_user_created() -> str: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ names = list(schema["channels"].keys())
+ assert names == Contains(
+ IsStr(regex=r"test2[\w:]*:Publisher"),
+ IsStr(regex=r"test[\w:]*:Publisher"),
+ ), names
+
+ messages = list(schema["components"]["messages"].keys())
+ assert messages == Contains(
+ IsStr(regex=r"test2[\w:]*:Publisher:Message"),
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ), messages
+
+ payloads = list(schema["components"]["schemas"].keys())
+ assert payloads == Contains(
+ IsStr(regex=r"test2[\w:]*:Publisher:Message:Payload"),
+ IsStr(regex=r"test[\w:]*:Publisher:Message:Payload"),
+ ), payloads
+
+ def test_multi_publisher_usages(self) -> None:
+ broker = self.broker_class()
+
+ pub = broker.publisher("test")
+
+ @pub
+ async def handle_user_created() -> str: ...
+
+ @pub
+ async def handle() -> int: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher"),
+ ]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ IsStr(regex=r"test[\w:]*:Publisher:Message"),
+ ]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Publisher:Message:Payload",
+ "Handle:Publisher:Message:Payload",
+ ], list(schema["components"]["schemas"].keys())
+
+ def test_multi_publisher_usages_with_custom(self) -> None:
+ broker = self.broker_class()
+
+ pub = broker.publisher("test", title="custom")
+
+ @pub
+ async def handle_user_created() -> str: ...
+
+ @pub
+ async def handle() -> int: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["custom"]
+
+ assert list(schema["components"]["messages"].keys()) == ["custom:Message"]
+
+ assert list(schema["components"]["schemas"].keys()) == [
+ "HandleUserCreated:Publisher:Message:Payload",
+ "Handle:Publisher:Message:Payload",
+ ]
+
+
+class NamingTestCase(SubscriberNaming, FilterNaming, PublisherNaming):
+ pass
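The naming suites above rely on dirty_equals matchers instead of exact string
comparison, since generated publisher names may carry broker-specific infixes.
A minimal, self-contained sketch of the two matchers (the literal names here
are illustrative only):

    from dirty_equals import Contains, IsStr

    # IsStr(regex=...) compares equal to any string fully matching the
    # pattern, so a generated "test:HandleUserCreated:Publisher" passes.
    assert "test:HandleUserCreated:Publisher" == IsStr(regex=r"test[\w:]*:Publisher")

    # Contains(...) compares equal to any collection holding the given
    # items, regardless of order - which keeps multi-publisher tests stable.
    names = ["test2:Handle:Publisher", "test:Handle:Publisher"]
    assert names == Contains(IsStr(regex=r"test2[\w:]*:Publisher"))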
diff --git a/tests/asyncapi/base/v3_0_0/publisher.py b/tests/asyncapi/base/v3_0_0/publisher.py
new file mode 100644
index 0000000000..a3270718f2
--- /dev/null
+++ b/tests/asyncapi/base/v3_0_0/publisher.py
@@ -0,0 +1,128 @@
+from typing import Callable, Union
+
+import pydantic
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.fastapi import StreamRouter
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class PublisherTestcase:
+ broker_factory: Callable[[], Union[BrokerUsecase, StreamRouter]]
+
+ def build_app(self, broker):
+ """Patch it to test FastAPI scheme generation too."""
+ return broker
+
+ def test_publisher_with_description(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test", description="test description")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+ assert schema["channels"][key]["description"] == "test description"
+
+ def test_basic_publisher(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+ assert schema["channels"][key].get("description") is None
+ assert schema["operations"][key] is not None
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v == {}
+
+ def test_none_publisher(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v == {}
+
+ def test_typed_publisher(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> int: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v["type"] == "integer"
+
+ def test_pydantic_model_publisher(self) -> None:
+ class User(pydantic.BaseModel):
+ name: str = ""
+ id: int
+
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> User: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+
+ for key, v in payload.items():
+ assert v == {
+ "properties": {
+ "id": {"title": "Id", "type": "integer"},
+ "name": {"default": "", "title": "Name", "type": "string"},
+ },
+ "required": ["id"],
+ "title": key,
+ "type": "object",
+ }
+
+ def test_delayed(self) -> None:
+ broker = self.broker_factory()
+
+ pub = broker.publisher("test")
+
+ @pub
+ async def handle(msg) -> int: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v["type"] == "integer"
+
+ def test_with_schema(self) -> None:
+ broker = self.broker_factory()
+
+ broker.publisher("test", title="Custom", schema=int)
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ for v in payload.values():
+ assert v["type"] == "integer"
+
+ def test_not_include(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test", include_in_schema=False)
+ @broker.subscriber("in-test", include_in_schema=False)
+ async def handler(msg: str) -> None:
+ pass
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0")
+
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
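PublisherTestcase is parametrized through `broker_factory`, so a concrete
broker gains the whole suite by subclassing and supplying the factory; FastAPI
routers reuse it by overriding `build_app`. The confluent suites below do
exactly this (the class name in this sketch is illustrative):

    from faststream.confluent import KafkaBroker
    from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase

    class TestKafkaPublisher(PublisherTestcase):
        # invoked as self.broker_factory() inside every test
        broker_factory = KafkaBroker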
diff --git a/tests/asyncapi/base/v3_0_0/router.py b/tests/asyncapi/base/v3_0_0/router.py
new file mode 100644
index 0000000000..cd141cf9a3
--- /dev/null
+++ b/tests/asyncapi/base/v3_0_0/router.py
@@ -0,0 +1,166 @@
+from dirty_equals import IsStr
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+
+class RouterTestcase:
+ broker_class: type[BrokerUsecase]
+ router_class: type[BrokerRouter]
+ publisher_class: type[ArgsContainer]
+ route_class: type[SubscriberRoute]
+
+ def test_delay_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ async def handle(msg) -> None: ...
+
+ router = self.router_class(
+ handlers=(self.route_class(handle, "test"),),
+ )
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ payload = schema["components"]["schemas"]
+ key = list(payload.keys())[0] # noqa: RUF015
+ assert payload[key]["title"] == key == "Handle:Message:Payload"
+
+ def test_delay_publisher(self) -> None:
+ broker = self.broker_class()
+
+ async def handle(msg) -> None: ...
+
+ router = self.router_class(
+ handlers=(
+ self.route_class(
+ handle,
+ "test",
+ publishers=(self.publisher_class("test2", schema=int),),
+ ),
+ ),
+ )
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+ schemas = schema.to_jsonable()["components"]["schemas"]
+ del schemas["Handle:Message:Payload"]
+
+ for i, j in schemas.items():
+ assert (
+ i == j["title"] == IsStr(regex=r"test2[\w:]*:Publisher:Message:Payload")
+ )
+ assert j["type"] == "integer"
+
+ def test_not_include(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class(include_in_schema=False)
+
+ @router.subscriber("test")
+ @router.publisher("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_not_include_in_method(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+
+ @router.subscriber("test")
+ @router.publisher("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router, include_in_schema=False)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_respect_subrouter(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class(include_in_schema=False)
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+
+ assert schema.to_jsonable()["channels"] == {}, schema.to_jsonable()["channels"]
+
+ def test_not_include_subrouter(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class(include_in_schema=False)
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+
+ assert schema.to_jsonable()["channels"] == {}
+
+ def test_not_include_subrouter_by_method(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2, include_in_schema=False)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+
+ assert schema.to_jsonable()["channels"] == {}
+
+ def test_all_nested_routers_by_method(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router, include_in_schema=False)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+
+ assert schema.to_jsonable()["channels"] == {}
+
+ def test_include_subrouter(self) -> None:
+ broker = self.broker_class()
+ router = self.router_class()
+ router2 = self.router_class()
+
+ @router2.subscriber("test")
+ @router2.publisher("test")
+ async def handle(msg) -> None: ...
+
+ router.include_router(router2)
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0")
+
+ assert len(schema.to_jsonable()["channels"]) == 2
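The deletions that follow remove the 2.6-only suites built on the old
`get_app_schema` helper; the replacement suites construct the schema object
directly from the broker. Side by side, both call shapes as they appear in
this diff:

    # old entry point (removed below): wrap the broker in an app first
    from faststream import FastStream
    from faststream.asyncapi.generate import get_app_schema

    schema = get_app_schema(FastStream(broker)).to_jsonable()

    # new entry point: pass the broker and select the schema version
    from faststream.specification.asyncapi import AsyncAPI

    schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()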
diff --git a/tests/asyncapi/confluent/test_arguments.py b/tests/asyncapi/confluent/test_arguments.py
deleted file mode 100644
index 2eb5c91cd1..0000000000
--- a/tests/asyncapi/confluent/test_arguments.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.confluent import KafkaBroker, TopicPartition
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-
-
-class TestArguments(ArgumentsTestcase):
- broker_class = KafkaBroker
-
- def test_subscriber_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "kafka": {"bindingVersion": "0.4.0", "topic": "test"}
- }
-
- def test_subscriber_with_one_topic_partitions(self):
- broker = self.broker_class()
-
- part1 = TopicPartition("topic_name", 1)
- part2 = TopicPartition("topic_name", 2)
-
- @broker.subscriber(partitions=[part1, part2])
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "kafka": {"bindingVersion": "0.4.0", "topic": "topic_name"}
- }
-
- def test_subscriber_with_multi_topics_partitions(self):
- broker = self.broker_class()
-
- part1 = TopicPartition("topic_name1", 1)
- part2 = TopicPartition("topic_name2", 2)
-
- @broker.subscriber(partitions=[part1, part2])
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key1 = tuple(schema["channels"].keys())[0] # noqa: RUF015
- key2 = tuple(schema["channels"].keys())[1]
-
- assert sorted(
- (
- schema["channels"][key1]["bindings"]["kafka"]["topic"],
- schema["channels"][key2]["bindings"]["kafka"]["topic"],
- )
- ) == sorted(("topic_name1", "topic_name2"))
diff --git a/tests/asyncapi/confluent/test_connection.py b/tests/asyncapi/confluent/test_connection.py
deleted file mode 100644
index d37cefbfa7..0000000000
--- a/tests/asyncapi/confluent/test_connection.py
+++ /dev/null
@@ -1,92 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Tag
-from faststream.confluent import KafkaBroker
-
-
-def test_base():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(
- "kafka:9092",
- protocol="plaintext",
- protocol_version="0.9.0",
- description="Test description",
- tags=(Tag(name="some-tag", description="experimental"),),
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "description": "Test description",
- "protocol": "plaintext",
- "protocolVersion": "0.9.0",
- "tags": [{"description": "experimental", "name": "some-tag"}],
- "url": "kafka:9092",
- }
- },
- }
-
-
-def test_multi():
- schema = get_app_schema(
- FastStream(KafkaBroker(["kafka:9092", "kafka:9093"]))
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "Server1": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9092",
- },
- "Server2": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9093",
- },
- },
- }
-
-
-def test_custom():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(
- ["kafka:9092", "kafka:9093"],
- asyncapi_url=["kafka:9094", "kafka:9095"],
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "Server1": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9094",
- },
- "Server2": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9095",
- },
- },
- }
diff --git a/tests/asyncapi/confluent/test_fastapi.py b/tests/asyncapi/confluent/test_fastapi.py
deleted file mode 100644
index adf7b5cb28..0000000000
--- a/tests/asyncapi/confluent/test_fastapi.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Type
-
-from faststream.asyncapi.generate import get_app_schema
-from faststream.confluent.fastapi import KafkaRouter
-from faststream.confluent.testing import TestKafkaBroker
-from faststream.security import SASLPlaintext
-from tests.asyncapi.base.arguments import FastAPICompatible
-from tests.asyncapi.base.fastapi import FastAPITestCase
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestRouterArguments(FastAPITestCase, FastAPICompatible):
- broker_class: Type[KafkaRouter] = KafkaRouter
- broker_wrapper = staticmethod(TestKafkaBroker)
-
- def build_app(self, router):
- return router
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = KafkaRouter
-
- def build_app(self, router):
- return router
-
-
-def test_fastapi_security_schema():
- security = SASLPlaintext(username="user", password="pass", use_ssl=False)
-
- broker = KafkaRouter("localhost:9092", security=security)
-
- schema = get_app_schema(broker).to_jsonable()
-
- assert schema["servers"]["development"] == {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "security": [{"user-password": []}],
- "url": "localhost:9092",
- }
- assert schema["components"]["securitySchemes"] == {
- "user-password": {"type": "userPassword"}
- }
diff --git a/tests/asyncapi/confluent/test_naming.py b/tests/asyncapi/confluent/test_naming.py
deleted file mode 100644
index cdcd3dc17c..0000000000
--- a/tests/asyncapi/confluent/test_naming.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.confluent import KafkaBroker
-from tests.asyncapi.base.naming import NamingTestCase
-
-
-class TestNaming(NamingTestCase):
- broker_class = KafkaBroker
-
- def test_base(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "localhost",
- "protocol": "kafka",
- "protocolVersion": "auto",
- }
- },
- "channels": {
- "test:Handle": {
- "servers": ["development"],
- "bindings": {"kafka": {"topic": "test", "bindingVersion": "0.4.0"}},
- "subscribe": {
- "message": {"$ref": "#/components/messages/test:Handle:Message"}
- },
- }
- },
- "components": {
- "messages": {
- "test:Handle:Message": {
- "title": "test:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {"$ref": "#/components/schemas/EmptyPayload"},
- }
- },
- "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
- },
- }
diff --git a/tests/asyncapi/confluent/test_publisher.py b/tests/asyncapi/confluent/test_publisher.py
deleted file mode 100644
index b6ee208854..0000000000
--- a/tests/asyncapi/confluent/test_publisher.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.confluent import KafkaBroker
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestArguments(PublisherTestcase):
- broker_class = KafkaBroker
-
- def test_publisher_bindings(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "kafka": {"bindingVersion": "0.4.0", "topic": "test"}
- }
diff --git a/tests/asyncapi/confluent/test_router.py b/tests/asyncapi/confluent/test_router.py
deleted file mode 100644
index 44424190d8..0000000000
--- a/tests/asyncapi/confluent/test_router.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.confluent import KafkaBroker, KafkaPublisher, KafkaRoute, KafkaRouter
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-from tests.asyncapi.base.publisher import PublisherTestcase
-from tests.asyncapi.base.router import RouterTestcase
-
-
-class TestRouter(RouterTestcase):
- broker_class = KafkaBroker
- router_class = KafkaRouter
- route_class = KafkaRoute
- publisher_class = KafkaPublisher
-
- def test_prefix(self):
- broker = self.broker_class()
-
- router = self.router_class(prefix="test_")
-
- @router.subscriber("test")
- async def handle(msg): ...
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "localhost",
- "protocol": "kafka",
- "protocolVersion": "auto",
- }
- },
- "channels": {
- "test_test:Handle": {
- "servers": ["development"],
- "bindings": {
- "kafka": {"topic": "test_test", "bindingVersion": "0.4.0"}
- },
- "subscribe": {
- "message": {
- "$ref": "#/components/messages/test_test:Handle:Message"
- }
- },
- }
- },
- "components": {
- "messages": {
- "test_test:Handle:Message": {
- "title": "test_test:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {
- "$ref": "#/components/schemas/Handle:Message:Payload"
- },
- }
- },
- "schemas": {
- "Handle:Message:Payload": {"title": "Handle:Message:Payload"}
- },
- },
- }
-
-
-class TestRouterArguments(ArgumentsTestcase):
- broker_class = KafkaRouter
-
- def build_app(self, router):
- broker = KafkaBroker()
- broker.include_router(router)
- return FastStream(broker)
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = KafkaRouter
-
- def build_app(self, router):
- broker = KafkaBroker()
- broker.include_router(router)
- return FastStream(broker)
diff --git a/tests/asyncapi/confluent/test_security.py b/tests/asyncapi/confluent/test_security.py
deleted file mode 100644
index 915621ab84..0000000000
--- a/tests/asyncapi/confluent/test_security.py
+++ /dev/null
@@ -1,221 +0,0 @@
-import ssl
-from copy import deepcopy
-
-from faststream.app import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.confluent import KafkaBroker
-from faststream.security import (
- SASLGSSAPI,
- BaseSecurity,
- SASLOAuthBearer,
- SASLPlaintext,
- SASLScram256,
- SASLScram512,
-)
-
-basic_schema = {
- "asyncapi": "2.6.0",
- "channels": {
- "test_1:TestTopic": {
- "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_1"}},
- "servers": ["development"],
- "subscribe": {
- "message": {"$ref": "#/components/messages/test_1:TestTopic:Message"}
- },
- },
- "test_2:Publisher": {
- "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_2"}},
- "publish": {
- "message": {"$ref": "#/components/messages/test_2:Publisher:Message"}
- },
- "servers": ["development"],
- },
- },
- "components": {
- "messages": {
- "test_1:TestTopic:Message": {
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {"$ref": "#/components/schemas/TestTopic:Message:Payload"},
- "title": "test_1:TestTopic:Message",
- },
- "test_2:Publisher:Message": {
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {
- "$ref": "#/components/schemas/test_2:Publisher:Message:Payload"
- },
- "title": "test_2:Publisher:Message",
- },
- },
- "schemas": {
- "TestTopic:Message:Payload": {
- "title": "TestTopic:Message:Payload",
- "type": "string",
- },
- "test_2:Publisher:Message:Payload": {
- "title": "test_2:Publisher:Message:Payload",
- "type": "string",
- },
- },
- "securitySchemes": {},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "kafka-secure",
- "protocolVersion": "auto",
- "security": [],
- "url": "localhost:9092",
- }
- },
-}
-
-
-def test_base_security_schema():
- ssl_context = ssl.create_default_context()
- security = BaseSecurity(ssl_context=ssl_context)
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- assert schema == basic_schema
-
-
-def test_plaintext_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLPlaintext(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- plaintext_security_schema = deepcopy(basic_schema)
- plaintext_security_schema["servers"]["development"]["security"] = [
- {"user-password": []}
- ]
- plaintext_security_schema["components"]["securitySchemes"] = {
- "user-password": {"type": "userPassword"}
- }
-
- assert schema == plaintext_security_schema
-
-
-def test_scram256_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLScram256(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- sasl256_security_schema = deepcopy(basic_schema)
- sasl256_security_schema["servers"]["development"]["security"] = [{"scram256": []}]
- sasl256_security_schema["components"]["securitySchemes"] = {
- "scram256": {"type": "scramSha256"}
- }
-
- assert schema == sasl256_security_schema
-
-
-def test_scram512_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLScram512(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- sasl512_security_schema = deepcopy(basic_schema)
- sasl512_security_schema["servers"]["development"]["security"] = [{"scram512": []}]
- sasl512_security_schema["components"]["securitySchemes"] = {
- "scram512": {"type": "scramSha512"}
- }
-
- assert schema == sasl512_security_schema
-
-
-def test_oauthbearer_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLOAuthBearer(
- ssl_context=ssl_context,
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- sasl_oauthbearer_security_schema = deepcopy(basic_schema)
- sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [
- {"oauthbearer": []}
- ]
- sasl_oauthbearer_security_schema["components"]["securitySchemes"] = {
- "oauthbearer": {"type": "oauth2", "$ref": ""}
- }
-
- assert schema == sasl_oauthbearer_security_schema
-
-
-def test_gssapi_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLGSSAPI(ssl_context=ssl_context)
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- gssapi_security_schema = deepcopy(basic_schema)
- gssapi_security_schema["servers"]["development"]["security"] = [{"gssapi": []}]
- gssapi_security_schema["components"]["securitySchemes"] = {
- "gssapi": {"type": "gssapi"}
- }
-
- assert schema == gssapi_security_schema
diff --git a/tests/a_docs/confluent/__init__.py b/tests/asyncapi/confluent/v2_6_0/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/__init__.py
rename to tests/asyncapi/confluent/v2_6_0/__init__.py
diff --git a/tests/asyncapi/confluent/v2_6_0/test_arguments.py b/tests/asyncapi/confluent/v2_6_0/test_arguments.py
new file mode 100644
index 0000000000..4bc16826e8
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_arguments.py
@@ -0,0 +1,54 @@
+from faststream.confluent import KafkaBroker, TopicPartition
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_class = KafkaBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
+
+ def test_subscriber_with_one_topic_partitions(self) -> None:
+ broker = self.broker_class()
+
+ part1 = TopicPartition("topic_name", 1)
+ part2 = TopicPartition("topic_name", 2)
+
+ @broker.subscriber(partitions=[part1, part2])
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "topic_name"}
+ }
+
+ def test_subscriber_with_multi_topics_partitions(self) -> None:
+ broker = self.broker_class()
+
+ part1 = TopicPartition("topic_name1", 1)
+ part2 = TopicPartition("topic_name2", 2)
+
+ @broker.subscriber(partitions=[part1, part2])
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key1 = tuple(schema["channels"].keys())[0] # noqa: RUF015
+ key2 = tuple(schema["channels"].keys())[1]
+
+ assert sorted((
+ schema["channels"][key1]["bindings"]["kafka"]["topic"],
+ schema["channels"][key2]["bindings"]["kafka"]["topic"],
+ )) == sorted(("topic_name1", "topic_name2"))
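As the two partition tests assert, a subscriber built from `TopicPartition`
objects derives its channels from the distinct topic names: two partitions of
one topic collapse into a single channel, while two different topics produce
two. Illustrative usage with the values from the tests:

    from faststream.confluent import KafkaBroker, TopicPartition

    broker = KafkaBroker()

    # a single "topic_name" channel: both partitions share the topic
    @broker.subscriber(
        partitions=[TopicPartition("topic_name", 1), TopicPartition("topic_name", 2)],
    )
    async def handle(msg) -> None: ...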
diff --git a/tests/asyncapi/confluent/v2_6_0/test_connection.py b/tests/asyncapi/confluent/v2_6_0/test_connection.py
new file mode 100644
index 0000000000..368bbc00dd
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_connection.py
@@ -0,0 +1,90 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ "kafka:9092",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "url": "kafka:9092",
+ },
+ },
+ }
+
+
+def test_multi() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(["kafka:9092", "kafka:9093"]),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9092",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9093",
+ },
+ },
+ }
+
+
+def test_custom() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ ["kafka:9092", "kafka:9093"],
+ specification_url=["kafka:9094", "kafka:9095"],
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9094",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9095",
+ },
+ },
+ }
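Note the parameter rename visible against the deleted counterpart of this
file: advertising different URLs in the generated document than the ones used
to connect changed from `asyncapi_url` to `specification_url`, with unchanged
semantics as far as these tests show:

    from faststream.confluent import KafkaBroker

    broker = KafkaBroker(
        ["kafka:9092", "kafka:9093"],                    # real bootstrap servers
        specification_url=["kafka:9094", "kafka:9095"],  # servers documented in the schema
    )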
diff --git a/tests/asyncapi/confluent/v2_6_0/test_fastapi.py b/tests/asyncapi/confluent/v2_6_0/test_fastapi.py
new file mode 100644
index 0000000000..30fb263bd2
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_fastapi.py
@@ -0,0 +1,41 @@
+from faststream.confluent.fastapi import KafkaRouter
+from faststream.confluent.testing import TestKafkaBroker
+from faststream.security import SASLPlaintext
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v2_6_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_class = staticmethod(lambda: KafkaRouter().broker)
+ router_class = KafkaRouter
+ broker_wrapper = staticmethod(TestKafkaBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = staticmethod(lambda: KafkaRouter().broker)
+
+ def build_app(self, router):
+ return router
+
+
+def test_fastapi_security_schema() -> None:
+ security = SASLPlaintext(username="user", password="pass", use_ssl=False)
+
+ router = KafkaRouter("localhost:9092", security=security)
+
+ schema = AsyncAPI(router.broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema["servers"]["development"] == {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "security": [{"user-password": []}],
+ "url": "localhost:9092",
+ }
+ assert schema["components"]["securitySchemes"] == {
+ "user-password": {"type": "userPassword"},
+ }
diff --git a/tests/asyncapi/confluent/v2_6_0/test_naming.py b/tests/asyncapi/confluent/v2_6_0/test_naming.py
new file mode 100644
index 0000000000..2fdbd64687
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_naming.py
@@ -0,0 +1,51 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = KafkaBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "localhost",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test:Handle": {
+ "servers": ["development"],
+ "bindings": {"kafka": {"topic": "test", "bindingVersion": "0.4.0"}},
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test:Handle:Message"
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:Message": {
+ "title": "test:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
diff --git a/tests/asyncapi/confluent/v2_6_0/test_publisher.py b/tests/asyncapi/confluent/v2_6_0/test_publisher.py
new file mode 100644
index 0000000000..353774d562
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_publisher.py
@@ -0,0 +1,20 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_class = KafkaBroker
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
diff --git a/tests/asyncapi/confluent/v2_6_0/test_router.py b/tests/asyncapi/confluent/v2_6_0/test_router.py
new file mode 100644
index 0000000000..c73885cddb
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_router.py
@@ -0,0 +1,84 @@
+from faststream.confluent import KafkaBroker, KafkaPublisher, KafkaRoute, KafkaRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v2_6_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = KafkaBroker
+ router_class = KafkaRouter
+ route_class = KafkaRoute
+ publisher_class = KafkaPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "localhost",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "servers": ["development"],
+ "bindings": {
+ "kafka": {"topic": "test_test", "bindingVersion": "0.4.0"},
+ },
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test_test:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:Message": {
+ "title": "test_test:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
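The `test_prefix` case pins down how router prefixes propagate into the
document: the subscriber declared on "test" surfaces as the `test_test:Handle`
channel, i.e. the prefix is applied to the topic before names are derived. In
short:

    router = KafkaRouter(prefix="test_")

    @router.subscriber("test")       # consumes "test_test" once included
    async def handle(msg) -> None: ...

    broker.include_router(router)    # channel key becomes "test_test:Handle"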
diff --git a/tests/asyncapi/confluent/v2_6_0/test_security.py b/tests/asyncapi/confluent/v2_6_0/test_security.py
new file mode 100644
index 0000000000..f3a1bcf913
--- /dev/null
+++ b/tests/asyncapi/confluent/v2_6_0/test_security.py
@@ -0,0 +1,214 @@
+import ssl
+from copy import deepcopy
+
+from faststream.confluent import KafkaBroker
+from faststream.security import (
+ SASLGSSAPI,
+ BaseSecurity,
+ SASLOAuthBearer,
+ SASLPlaintext,
+ SASLScram256,
+ SASLScram512,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+basic_schema = {
+ "asyncapi": "2.6.0",
+ "channels": {
+ "test_1:TestTopic": {
+ "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_1"}},
+ "servers": ["development"],
+ "publish": {
+ "message": {"$ref": "#/components/messages/test_1:TestTopic:Message"},
+ },
+ },
+ "test_2:Publisher": {
+ "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_2"}},
+ "subscribe": {
+ "message": {"$ref": "#/components/messages/test_2:Publisher:Message"},
+ },
+ "servers": ["development"],
+ },
+ },
+ "components": {
+ "messages": {
+ "test_1:TestTopic:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/TestTopic:Message:Payload"},
+ "title": "test_1:TestTopic:Message",
+ },
+ "test_2:Publisher:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/test_2:Publisher:Message:Payload",
+ },
+ "title": "test_2:Publisher:Message",
+ },
+ },
+ "schemas": {
+ "TestTopic:Message:Payload": {
+ "title": "TestTopic:Message:Payload",
+ "type": "string",
+ },
+ "test_2:Publisher:Message:Payload": {
+ "title": "test_2:Publisher:Message:Payload",
+ "type": "string",
+ },
+ },
+ "securitySchemes": {},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "kafka-secure",
+ "protocolVersion": "auto",
+ "security": [],
+ "url": "localhost:9092",
+ },
+ },
+}
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == basic_schema
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ plaintext_security_schema = deepcopy(basic_schema)
+ plaintext_security_schema["servers"]["development"]["security"] = [
+ {"user-password": []},
+ ]
+ plaintext_security_schema["components"]["securitySchemes"] = {
+ "user-password": {"type": "userPassword"},
+ }
+
+ assert schema == plaintext_security_schema
+
+
+def test_scram256_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram256(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ sasl256_security_schema = deepcopy(basic_schema)
+ sasl256_security_schema["servers"]["development"]["security"] = [{"scram256": []}]
+ sasl256_security_schema["components"]["securitySchemes"] = {
+ "scram256": {"type": "scramSha256"},
+ }
+
+ assert schema == sasl256_security_schema
+
+
+def test_scram512_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram512(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ sasl512_security_schema = deepcopy(basic_schema)
+ sasl512_security_schema["servers"]["development"]["security"] = [{"scram512": []}]
+ sasl512_security_schema["components"]["securitySchemes"] = {
+ "scram512": {"type": "scramSha512"},
+ }
+
+ assert schema == sasl512_security_schema
+
+
+def test_oauthbearer_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLOAuthBearer(
+ ssl_context=ssl_context,
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ sasl_oauthbearer_security_schema = deepcopy(basic_schema)
+ sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [
+ {"oauthbearer": []},
+ ]
+ sasl_oauthbearer_security_schema["components"]["securitySchemes"] = {
+ "oauthbearer": {"type": "oauth2", "$ref": ""}
+ }
+
+ assert schema == sasl_oauthbearer_security_schema
+
+
+def test_gssapi_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLGSSAPI(ssl_context=ssl_context)
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ gssapi_security_schema = deepcopy(basic_schema)
+ gssapi_security_schema["servers"]["development"]["security"] = [{"gssapi": []}]
+ gssapi_security_schema["components"]["securitySchemes"] = {
+ "gssapi": {"type": "gssapi"},
+ }
+
+ assert schema == gssapi_security_schema
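All of the security tests share one fixture pattern: deep-copy `basic_schema`
and overwrite only the security-related keys, so unrelated schema drift fails
every test at once. Condensed:

    from copy import deepcopy

    expected = deepcopy(basic_schema)
    expected["servers"]["development"]["security"] = [{"scram256": []}]
    expected["components"]["securitySchemes"] = {"scram256": {"type": "scramSha256"}}

    assert schema == expected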
diff --git a/tests/a_docs/getting_started/cli/confluent/__init__.py b/tests/asyncapi/confluent/v3_0_0/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/cli/confluent/__init__.py
rename to tests/asyncapi/confluent/v3_0_0/__init__.py
diff --git a/tests/asyncapi/confluent/v3_0_0/test_arguments.py b/tests/asyncapi/confluent/v3_0_0/test_arguments.py
new file mode 100644
index 0000000000..99b88175e2
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_arguments.py
@@ -0,0 +1,20 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_factory = KafkaBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
diff --git a/tests/asyncapi/confluent/v3_0_0/test_connection.py b/tests/asyncapi/confluent/v3_0_0/test_connection.py
new file mode 100644
index 0000000000..63b9c51da3
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_connection.py
@@ -0,0 +1,98 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ "kafka:9092",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "host": "kafka:9092",
+ "pathname": "",
+ },
+ },
+ }
+
+
+def test_multi() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(["kafka:9092", "kafka:9093"]),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9092",
+ "pathname": "",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9093",
+ "pathname": "",
+ },
+ },
+ }
+
+
+def test_custom() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ ["kafka:9092", "kafka:9093"],
+ specification_url=["kafka:9094", "kafka:9095"],
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9094",
+ "pathname": "",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9095",
+ "pathname": "",
+ },
+ },
+ }
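Compared with its v2_6_0 sibling above, this file captures the structural
differences between the two AsyncAPI versions as emitted here: 3.0.0 splits
the server `url` into `host` plus `pathname` and adds a top-level `operations`
map (empty for a bare broker):

    # 2.6.0 server entry, as asserted in the v2_6_0 suite
    {"url": "kafka:9092", "protocol": "kafka", "protocolVersion": "auto"}

    # 3.0.0 equivalent
    {"host": "kafka:9092", "pathname": "", "protocol": "kafka", "protocolVersion": "auto"}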
diff --git a/tests/asyncapi/confluent/v3_0_0/test_fastapi.py b/tests/asyncapi/confluent/v3_0_0/test_fastapi.py
new file mode 100644
index 0000000000..48a4ae5dda
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_fastapi.py
@@ -0,0 +1,42 @@
+from faststream.confluent.fastapi import KafkaRouter
+from faststream.confluent.testing import TestKafkaBroker
+from faststream.security import SASLPlaintext
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v3_0_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_factory = staticmethod(lambda: KafkaRouter().broker)
+ router_factory = KafkaRouter
+ broker_wrapper = staticmethod(TestKafkaBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_factory = staticmethod(lambda: KafkaRouter().broker)
+
+ def build_app(self, router):
+ return router
+
+
+def test_fastapi_security_schema() -> None:
+ security = SASLPlaintext(username="user", password="pass", use_ssl=False)
+
+ router = KafkaRouter("localhost:9092", security=security)
+
+ schema = AsyncAPI(router.broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema["servers"]["development"] == {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "security": [{"user-password": []}],
+ "host": "localhost:9092",
+ "pathname": "",
+ }
+ assert schema["components"]["securitySchemes"] == {
+ "user-password": {"type": "userPassword"},
+ }
diff --git a/tests/asyncapi/confluent/v3_0_0/test_naming.py b/tests/asyncapi/confluent/v3_0_0/test_naming.py
new file mode 100644
index 0000000000..54ee7f2703
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_naming.py
@@ -0,0 +1,70 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = KafkaBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "host": "localhost",
+ "pathname": "",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test:Handle": {
+ "address": "test:Handle",
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "bindings": {"kafka": {"topic": "test", "bindingVersion": "0.4.0"}},
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test:Handle:SubscribeMessage",
+ },
+ },
+ },
+ },
+ "operations": {
+ "test:HandleSubscribe": {
+ "action": "receive",
+ "channel": {
+ "$ref": "#/channels/test:Handle",
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:SubscribeMessage": {
+ "title": "test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
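In the 3.0.0 document a subscriber yields both a channel and a `receive`
operation, wired together by `$ref`s. A minimal sketch of walking that chain
on the `schema` dict asserted above:

    op = schema["operations"]["test:HandleSubscribe"]
    assert op["action"] == "receive"

    # "#/channels/test:Handle" -> the channel object itself
    channel_name = op["channel"]["$ref"].rsplit("/", 1)[-1]
    channel = schema["channels"][channel_name]

    # the channel-level message in turn points into components
    assert channel["messages"]["SubscribeMessage"]["$ref"] == (
        "#/components/messages/test:Handle:SubscribeMessage"
    )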
diff --git a/tests/asyncapi/confluent/v3_0_0/test_publisher.py b/tests/asyncapi/confluent/v3_0_0/test_publisher.py
new file mode 100644
index 0000000000..d6707c186d
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_publisher.py
@@ -0,0 +1,20 @@
+from faststream.confluent import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_factory = KafkaBroker
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
diff --git a/tests/asyncapi/confluent/v3_0_0/test_router.py b/tests/asyncapi/confluent/v3_0_0/test_router.py
new file mode 100644
index 0000000000..0cb1cf9bcd
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_router.py
@@ -0,0 +1,97 @@
+from faststream.confluent import KafkaBroker, KafkaPublisher, KafkaRoute, KafkaRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v3_0_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = KafkaBroker
+ router_class = KafkaRouter
+ route_class = KafkaRoute
+ publisher_class = KafkaPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "localhost",
+ "pathname": "",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "address": "test_test:Handle",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_test:Handle:SubscribeMessage",
+ },
+ },
+ "bindings": {
+ "kafka": {"topic": "test_test", "bindingVersion": "0.4.0"},
+ },
+ },
+ },
+ "operations": {
+ "test_test:HandleSubscribe": {
+ "action": "receive",
+ "messages": [
+ {
+ "$ref": "#/channels/test_test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ "channel": {"$ref": "#/channels/test_test:Handle"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:SubscribeMessage": {
+ "title": "test_test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/confluent/v3_0_0/test_security.py b/tests/asyncapi/confluent/v3_0_0/test_security.py
new file mode 100644
index 0000000000..2aa802e304
--- /dev/null
+++ b/tests/asyncapi/confluent/v3_0_0/test_security.py
@@ -0,0 +1,233 @@
+import ssl
+from copy import deepcopy
+
+from faststream.confluent import KafkaBroker
+from faststream.security import (
+ SASLGSSAPI,
+ BaseSecurity,
+ SASLOAuthBearer,
+ SASLPlaintext,
+ SASLScram256,
+ SASLScram512,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+basic_schema = {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "localhost:9092",
+ "pathname": "",
+ "protocol": "kafka-secure",
+ "protocolVersion": "auto",
+ "security": [],
+ },
+ },
+ "channels": {
+ "test_1:TestTopic": {
+ "address": "test_1:TestTopic",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_1:TestTopic:SubscribeMessage",
+ },
+ },
+ "bindings": {"kafka": {"topic": "test_1", "bindingVersion": "0.4.0"}},
+ },
+ "test_2:Publisher": {
+ "address": "test_2:Publisher",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "Message": {"$ref": "#/components/messages/test_2:Publisher:Message"},
+ },
+ "bindings": {"kafka": {"topic": "test_2", "bindingVersion": "0.4.0"}},
+ },
+ },
+ "operations": {
+ "test_1:TestTopicSubscribe": {
+ "action": "receive",
+ "messages": [
+ {"$ref": "#/channels/test_1:TestTopic/messages/SubscribeMessage"},
+ ],
+ "channel": {"$ref": "#/channels/test_1:TestTopic"},
+ },
+ "test_2:Publisher": {
+ "action": "send",
+ "messages": [{"$ref": "#/channels/test_2:Publisher/messages/Message"}],
+ "channel": {"$ref": "#/channels/test_2:Publisher"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_1:TestTopic:SubscribeMessage": {
+ "title": "test_1:TestTopic:SubscribeMessage",
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/TestTopic:Message:Payload"},
+ },
+ "test_2:Publisher:Message": {
+ "title": "test_2:Publisher:Message",
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/test_2:Publisher:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "TestTopic:Message:Payload": {
+ "title": "TestTopic:Message:Payload",
+ "type": "string",
+ },
+ "test_2:Publisher:Message:Payload": {
+ "title": "test_2:Publisher:Message:Payload",
+ "type": "string",
+ },
+ },
+ "securitySchemes": {},
+ },
+}
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
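+    # The str return annotation produces the publisher payload schema (type: string).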
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == basic_schema
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ plaintext_security_schema = deepcopy(basic_schema)
+ plaintext_security_schema["servers"]["development"]["security"] = [
+ {"user-password": []},
+ ]
+ plaintext_security_schema["components"]["securitySchemes"] = {
+ "user-password": {"type": "userPassword"},
+ }
+
+ assert schema == plaintext_security_schema
+
+
+def test_scram256_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram256(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ sasl256_security_schema = deepcopy(basic_schema)
+ sasl256_security_schema["servers"]["development"]["security"] = [{"scram256": []}]
+ sasl256_security_schema["components"]["securitySchemes"] = {
+ "scram256": {"type": "scramSha256"},
+ }
+
+ assert schema == sasl256_security_schema
+
+
+def test_scram512_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram512(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ sasl512_security_schema = deepcopy(basic_schema)
+ sasl512_security_schema["servers"]["development"]["security"] = [{"scram512": []}]
+ sasl512_security_schema["components"]["securitySchemes"] = {
+ "scram512": {"type": "scramSha512"},
+ }
+
+ assert schema == sasl512_security_schema
+
+
+def test_oauthbearer_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLOAuthBearer(
+ ssl_context=ssl_context,
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ sasl_oauthbearer_security_schema = deepcopy(basic_schema)
+ sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [
+ {"oauthbearer": []},
+ ]
+ sasl_oauthbearer_security_schema["components"]["securitySchemes"] = {
+        "oauthbearer": {"type": "oauth2", "$ref": ""},
+ }
+
+ assert schema == sasl_oauthbearer_security_schema
+
+
+def test_gssapi_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLGSSAPI(ssl_context=ssl_context)
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ gssapi_security_schema = deepcopy(basic_schema)
+ gssapi_security_schema["servers"]["development"]["security"] = [{"gssapi": []}]
+ gssapi_security_schema["components"]["securitySchemes"] = {
+ "gssapi": {"type": "gssapi"},
+ }
+
+ assert schema == gssapi_security_schema
diff --git a/tests/asyncapi/kafka/test_app.py b/tests/asyncapi/kafka/test_app.py
deleted file mode 100644
index 4a79ad1e96..0000000000
--- a/tests/asyncapi/kafka/test_app.py
+++ /dev/null
@@ -1,194 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Contact, ExternalDocs, License, Tag
-from faststream.kafka import KafkaBroker
-
-
-def test_base():
- schema = get_app_schema(FastStream(KafkaBroker())).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "localhost",
- }
- },
- }
-
-
-def test_with_name():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(),
- title="My App",
- version="1.0.0",
- description="Test description",
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {
- "description": "Test description",
- "title": "My App",
- "version": "1.0.0",
- },
- "servers": {
- "development": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "localhost",
- }
- },
- }
-
-
-def test_full():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(),
- title="My App",
- version="1.0.0",
- description="Test description",
- license=License(name="MIT", url="https://mit.com/"),
- terms_of_service="https://my-terms.com/",
- contact=Contact(name="support", url="https://help.com/"),
- tags=(Tag(name="some-tag", description="experimental"),),
- identifier="some-unique-uuid",
- external_docs=ExternalDocs(
- url="https://extra-docs.py/",
- ),
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "externalDocs": {"url": "https://extra-docs.py/"},
- "id": "some-unique-uuid",
- "info": {
- "contact": {"name": "support", "url": "https://help.com/"},
- "description": "Test description",
- "license": {"name": "MIT", "url": "https://mit.com/"},
- "termsOfService": "https://my-terms.com/",
- "title": "My App",
- "version": "1.0.0",
- },
- "servers": {
- "development": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "localhost",
- }
- },
- "tags": [{"description": "experimental", "name": "some-tag"}],
- }
-
-
-def test_full_dict():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(),
- title="My App",
- version="1.0.0",
- description="Test description",
- license={"name": "MIT", "url": "https://mit.com/"},
- terms_of_service="https://my-terms.com/",
- contact={"name": "support", "url": "https://help.com/"},
- tags=({"name": "some-tag", "description": "experimental"},),
- identifier="some-unique-uuid",
- external_docs={
- "url": "https://extra-docs.py/",
- },
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "externalDocs": {"url": "https://extra-docs.py/"},
- "id": "some-unique-uuid",
- "info": {
- "contact": {"name": "support", "url": "https://help.com/"},
- "description": "Test description",
- "license": {"name": "MIT", "url": "https://mit.com/"},
- "termsOfService": "https://my-terms.com/",
- "title": "My App",
- "version": "1.0.0",
- },
- "servers": {
- "development": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "localhost",
- }
- },
- "tags": [{"description": "experimental", "name": "some-tag"}],
- }
-
-
-def test_extra():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(),
- title="My App",
- version="1.0.0",
- description="Test description",
- license={"name": "MIT", "url": "https://mit.com/", "x-field": "extra"},
- terms_of_service="https://my-terms.com/",
- contact={"name": "support", "url": "https://help.com/", "x-field": "extra"},
- tags=(
- {"name": "some-tag", "description": "experimental", "x-field": "extra"},
- ),
- identifier="some-unique-uuid",
- external_docs={
- "url": "https://extra-docs.py/",
- "x-field": "extra",
- },
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "externalDocs": {"url": "https://extra-docs.py/", "x-field": "extra"},
- "id": "some-unique-uuid",
- "info": {
- "contact": {
- "name": "support",
- "url": "https://help.com/",
- "x-field": "extra",
- },
- "description": "Test description",
- "license": {"name": "MIT", "url": "https://mit.com/", "x-field": "extra"},
- "termsOfService": "https://my-terms.com/",
- "title": "My App",
- "version": "1.0.0",
- },
- "servers": {
- "development": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "localhost",
- }
- },
- "tags": [
- {"description": "experimental", "name": "some-tag", "x-field": "extra"}
- ],
- }
diff --git a/tests/asyncapi/kafka/test_arguments.py b/tests/asyncapi/kafka/test_arguments.py
deleted file mode 100644
index 5b289ffd97..0000000000
--- a/tests/asyncapi/kafka/test_arguments.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from aiokafka import TopicPartition
-
-from faststream.asyncapi.generate import get_app_schema
-from faststream.kafka import KafkaBroker
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-
-
-class TestArguments(ArgumentsTestcase):
- broker_class = KafkaBroker
-
- def test_subscriber_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "kafka": {"bindingVersion": "0.4.0", "topic": "test"}
- }
-
- def test_subscriber_with_one_topic_partitions(self):
- broker = self.broker_class()
-
- part1 = TopicPartition("topic_name", 1)
- part2 = TopicPartition("topic_name", 2)
-
- @broker.subscriber(partitions=[part1, part2])
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "kafka": {"bindingVersion": "0.4.0", "topic": "topic_name"}
- }
-
- def test_subscriber_with_multi_topics_partitions(self):
- broker = self.broker_class()
-
- part1 = TopicPartition("topic_name1", 1)
- part2 = TopicPartition("topic_name2", 2)
-
- @broker.subscriber(partitions=[part1, part2])
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key1 = tuple(schema["channels"].keys())[0] # noqa: RUF015
- key2 = tuple(schema["channels"].keys())[1]
-
- assert sorted(
- (
- schema["channels"][key1]["bindings"]["kafka"]["topic"],
- schema["channels"][key2]["bindings"]["kafka"]["topic"],
- )
- ) == sorted(("topic_name1", "topic_name2"))
diff --git a/tests/asyncapi/kafka/test_connection.py b/tests/asyncapi/kafka/test_connection.py
deleted file mode 100644
index 25eb392361..0000000000
--- a/tests/asyncapi/kafka/test_connection.py
+++ /dev/null
@@ -1,92 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Tag
-from faststream.kafka import KafkaBroker
-
-
-def test_base():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(
- "kafka:9092",
- protocol="plaintext",
- protocol_version="0.9.0",
- description="Test description",
- tags=(Tag(name="some-tag", description="experimental"),),
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "description": "Test description",
- "protocol": "plaintext",
- "protocolVersion": "0.9.0",
- "tags": [{"description": "experimental", "name": "some-tag"}],
- "url": "kafka:9092",
- }
- },
- }
-
-
-def test_multi():
- schema = get_app_schema(
- FastStream(KafkaBroker(["kafka:9092", "kafka:9093"]))
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "Server1": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9092",
- },
- "Server2": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9093",
- },
- },
- }
-
-
-def test_custom():
- schema = get_app_schema(
- FastStream(
- KafkaBroker(
- ["kafka:9092", "kafka:9093"],
- asyncapi_url=["kafka:9094", "kafka:9095"],
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "Server1": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9094",
- },
- "Server2": {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "url": "kafka:9095",
- },
- },
- }
diff --git a/tests/asyncapi/kafka/test_fastapi.py b/tests/asyncapi/kafka/test_fastapi.py
deleted file mode 100644
index 0991c3586d..0000000000
--- a/tests/asyncapi/kafka/test_fastapi.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Type
-
-from faststream.asyncapi.generate import get_app_schema
-from faststream.kafka.fastapi import KafkaRouter
-from faststream.kafka.testing import TestKafkaBroker
-from faststream.security import SASLPlaintext
-from tests.asyncapi.base.arguments import FastAPICompatible
-from tests.asyncapi.base.fastapi import FastAPITestCase
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestRouterArguments(FastAPITestCase, FastAPICompatible):
- broker_class: Type[KafkaRouter] = KafkaRouter
- broker_wrapper = staticmethod(TestKafkaBroker)
-
- def build_app(self, router):
- return router
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = KafkaRouter
-
- def build_app(self, router):
- return router
-
-
-def test_fastapi_security_schema():
- security = SASLPlaintext(username="user", password="pass", use_ssl=False)
-
- broker = KafkaRouter("localhost:9092", security=security)
-
- schema = get_app_schema(broker).to_jsonable()
-
- assert schema["servers"]["development"] == {
- "protocol": "kafka",
- "protocolVersion": "auto",
- "security": [{"user-password": []}],
- "url": "localhost:9092",
- }
- assert schema["components"]["securitySchemes"] == {
- "user-password": {"type": "userPassword"}
- }
diff --git a/tests/asyncapi/kafka/test_naming.py b/tests/asyncapi/kafka/test_naming.py
deleted file mode 100644
index ed8f18bd98..0000000000
--- a/tests/asyncapi/kafka/test_naming.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.kafka import KafkaBroker
-from tests.asyncapi.base.naming import NamingTestCase
-
-
-class TestNaming(NamingTestCase):
- broker_class = KafkaBroker
-
- def test_base(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "localhost",
- "protocol": "kafka",
- "protocolVersion": "auto",
- }
- },
- "channels": {
- "test:Handle": {
- "servers": ["development"],
- "bindings": {"kafka": {"topic": "test", "bindingVersion": "0.4.0"}},
- "subscribe": {
- "message": {"$ref": "#/components/messages/test:Handle:Message"}
- },
- }
- },
- "components": {
- "messages": {
- "test:Handle:Message": {
- "title": "test:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {"$ref": "#/components/schemas/EmptyPayload"},
- }
- },
- "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
- },
- }
diff --git a/tests/asyncapi/kafka/test_publisher.py b/tests/asyncapi/kafka/test_publisher.py
deleted file mode 100644
index 0b90bd6f4f..0000000000
--- a/tests/asyncapi/kafka/test_publisher.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.kafka import KafkaBroker
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestArguments(PublisherTestcase):
- broker_class = KafkaBroker
-
- def test_publisher_bindings(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "kafka": {"bindingVersion": "0.4.0", "topic": "test"}
- }
diff --git a/tests/asyncapi/kafka/test_router.py b/tests/asyncapi/kafka/test_router.py
deleted file mode 100644
index 5cb2cc8168..0000000000
--- a/tests/asyncapi/kafka/test_router.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.kafka import KafkaBroker, KafkaPublisher, KafkaRoute, KafkaRouter
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-from tests.asyncapi.base.publisher import PublisherTestcase
-from tests.asyncapi.base.router import RouterTestcase
-
-
-class TestRouter(RouterTestcase):
- broker_class = KafkaBroker
- router_class = KafkaRouter
- route_class = KafkaRoute
- publisher_class = KafkaPublisher
-
- def test_prefix(self):
- broker = self.broker_class()
-
- router = self.router_class(prefix="test_")
-
- @router.subscriber("test")
- async def handle(msg): ...
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "localhost",
- "protocol": "kafka",
- "protocolVersion": "auto",
- }
- },
- "channels": {
- "test_test:Handle": {
- "servers": ["development"],
- "bindings": {
- "kafka": {"topic": "test_test", "bindingVersion": "0.4.0"}
- },
- "subscribe": {
- "message": {
- "$ref": "#/components/messages/test_test:Handle:Message"
- }
- },
- }
- },
- "components": {
- "messages": {
- "test_test:Handle:Message": {
- "title": "test_test:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {
- "$ref": "#/components/schemas/Handle:Message:Payload"
- },
- }
- },
- "schemas": {
- "Handle:Message:Payload": {"title": "Handle:Message:Payload"}
- },
- },
- }
-
-
-class TestRouterArguments(ArgumentsTestcase):
- broker_class = KafkaRouter
-
- def build_app(self, router):
- broker = KafkaBroker()
- broker.include_router(router)
- return FastStream(broker)
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = KafkaRouter
-
- def build_app(self, router):
- broker = KafkaBroker()
- broker.include_router(router)
- return FastStream(broker)
diff --git a/tests/asyncapi/kafka/test_security.py b/tests/asyncapi/kafka/test_security.py
deleted file mode 100644
index 62e30e9ccf..0000000000
--- a/tests/asyncapi/kafka/test_security.py
+++ /dev/null
@@ -1,223 +0,0 @@
-import ssl
-from copy import deepcopy
-
-from faststream.app import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.kafka import KafkaBroker
-from faststream.security import (
- SASLGSSAPI,
- BaseSecurity,
- SASLOAuthBearer,
- SASLPlaintext,
- SASLScram256,
- SASLScram512,
-)
-
-basic_schema = {
- "asyncapi": "2.6.0",
- "channels": {
- "test_1:TestTopic": {
- "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_1"}},
- "servers": ["development"],
- "subscribe": {
- "message": {"$ref": "#/components/messages/test_1:TestTopic:Message"}
- },
- },
- "test_2:Publisher": {
- "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_2"}},
- "publish": {
- "message": {"$ref": "#/components/messages/test_2:Publisher:Message"}
- },
- "servers": ["development"],
- },
- },
- "components": {
- "messages": {
- "test_1:TestTopic:Message": {
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {"$ref": "#/components/schemas/TestTopic:Message:Payload"},
- "title": "test_1:TestTopic:Message",
- },
- "test_2:Publisher:Message": {
- "correlationId": {"location": "$message.header#/correlation_id"},
- "payload": {
- "$ref": "#/components/schemas/test_2:Publisher:Message:Payload"
- },
- "title": "test_2:Publisher:Message",
- },
- },
- "schemas": {
- "TestTopic:Message:Payload": {
- "title": "TestTopic:Message:Payload",
- "type": "string",
- },
- "test_2:Publisher:Message:Payload": {
- "title": "test_2:Publisher:Message:Payload",
- "type": "string",
- },
- },
- "securitySchemes": {},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "kafka-secure",
- "protocolVersion": "auto",
- "security": [],
- "url": "localhost:9092",
- }
- },
-}
-
-
-def test_base_security_schema():
- ssl_context = ssl.create_default_context()
- security = BaseSecurity(ssl_context=ssl_context)
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- assert schema == basic_schema
-
-
-def test_plaintext_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLPlaintext(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- plaintext_security_schema = deepcopy(basic_schema)
- plaintext_security_schema["servers"]["development"]["security"] = [
- {"user-password": []}
- ]
- plaintext_security_schema["components"]["securitySchemes"] = {
- "user-password": {"type": "userPassword"}
- }
-
- assert schema == plaintext_security_schema
-
-
-def test_scram256_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLScram256(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- sasl256_security_schema = deepcopy(basic_schema)
- sasl256_security_schema["servers"]["development"]["security"] = [{"scram256": []}]
- sasl256_security_schema["components"]["securitySchemes"] = {
- "scram256": {"type": "scramSha256"}
- }
-
- assert schema == sasl256_security_schema
-
-
-def test_scram512_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLScram512(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- sasl512_security_schema = deepcopy(basic_schema)
- sasl512_security_schema["servers"]["development"]["security"] = [{"scram512": []}]
- sasl512_security_schema["components"]["securitySchemes"] = {
- "scram512": {"type": "scramSha512"}
- }
-
- assert schema == sasl512_security_schema
-
-
-def test_oauthbearer_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLOAuthBearer(
- ssl_context=ssl_context,
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- sasl_oauthbearer_security_schema = deepcopy(basic_schema)
- sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [
- {"oauthbearer": []}
- ]
- sasl_oauthbearer_security_schema["components"]["securitySchemes"] = {
- "oauthbearer": {"type": "oauth2", "$ref": ""}
- }
-
- assert schema == sasl_oauthbearer_security_schema
-
-
-def test_gssapi_security_schema():
- ssl_context = ssl.create_default_context()
- security = SASLGSSAPI(
- ssl_context=ssl_context,
- )
-
- broker = KafkaBroker("localhost:9092", security=security)
- app = FastStream(broker)
-
- @broker.publisher("test_2")
- @broker.subscriber("test_1")
- async def test_topic(msg: str) -> str:
- pass
-
- schema = get_app_schema(app).to_jsonable()
-
- gssapi_security_schema = deepcopy(basic_schema)
- gssapi_security_schema["servers"]["development"]["security"] = [{"gssapi": []}]
- gssapi_security_schema["components"]["securitySchemes"] = {
- "gssapi": {"type": "gssapi"}
- }
-
- assert schema == gssapi_security_schema
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/__init__.py b/tests/asyncapi/kafka/v2_6_0/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/asyncapi/asyncapi_customization/__init__.py
rename to tests/asyncapi/kafka/v2_6_0/__init__.py
diff --git a/tests/asyncapi/kafka/v2_6_0/test_app.py b/tests/asyncapi/kafka/v2_6_0/test_app.py
new file mode 100644
index 0000000000..2bd9b5a916
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_app.py
@@ -0,0 +1,188 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification import Contact, ExternalDocs, License, Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
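+# These tests pin the 2.6.0 output of AsyncAPI(..., schema_version="2.6.0").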
+def test_base() -> None:
+ schema = AsyncAPI(KafkaBroker(), schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "localhost",
+ },
+ },
+ }
+
+
+def test_with_name() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(),
+ title="My App",
+ app_version="1.0.0",
+ description="Test description",
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {
+ "description": "Test description",
+ "title": "My App",
+ "version": "1.0.0",
+ },
+ "servers": {
+ "development": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "localhost",
+ },
+ },
+ }
+
+
+def test_full() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(),
+ title="My App",
+ app_version="1.0.0",
+ description="Test description",
+ license=License(name="MIT", url="https://mit.com/"),
+ terms_of_service="https://my-terms.com/",
+ contact=Contact(name="support", url="https://help.com/"),
+ tags=(Tag(name="some-tag", description="experimental"),),
+ identifier="some-unique-uuid",
+ external_docs=ExternalDocs(
+ url="https://extra-docs.py/",
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "externalDocs": {"url": "https://extra-docs.py/"},
+ "id": "some-unique-uuid",
+ "info": {
+ "contact": {"name": "support", "url": "https://help.com/"},
+ "description": "Test description",
+ "license": {"name": "MIT", "url": "https://mit.com/"},
+ "termsOfService": "https://my-terms.com/",
+ "title": "My App",
+ "version": "1.0.0",
+ },
+ "servers": {
+ "development": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "localhost",
+ },
+ },
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ }
+
+
+def test_full_dict() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(),
+ title="My App",
+ app_version="1.0.0",
+ description="Test description",
+ license={"name": "MIT", "url": "https://mit.com/"},
+ terms_of_service="https://my-terms.com/",
+ contact={"name": "support", "url": "https://help.com/"},
+ tags=({"name": "some-tag", "description": "experimental"},),
+ identifier="some-unique-uuid",
+ external_docs={
+ "url": "https://extra-docs.py/",
+ },
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "externalDocs": {"url": "https://extra-docs.py/"},
+ "id": "some-unique-uuid",
+ "info": {
+ "contact": {"name": "support", "url": "https://help.com/"},
+ "description": "Test description",
+ "license": {"name": "MIT", "url": "https://mit.com/"},
+ "termsOfService": "https://my-terms.com/",
+ "title": "My App",
+ "version": "1.0.0",
+ },
+ "servers": {
+ "development": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "localhost",
+ },
+ },
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ }
+
+
+def test_extra() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(),
+ title="My App",
+ app_version="1.0.0",
+ description="Test description",
+ license={"name": "MIT", "url": "https://mit.com/", "x-field": "extra"},
+ terms_of_service="https://my-terms.com/",
+ contact={"name": "support", "url": "https://help.com/", "x-field": "extra"},
+ tags=({"name": "some-tag", "description": "experimental", "x-field": "extra"},),
+ identifier="some-unique-uuid",
+ external_docs={
+ "url": "https://extra-docs.py/",
+ "x-field": "extra",
+ },
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "externalDocs": {"url": "https://extra-docs.py/", "x-field": "extra"},
+ "id": "some-unique-uuid",
+ "info": {
+ "contact": {
+ "name": "support",
+ "url": "https://help.com/",
+ "x-field": "extra",
+ },
+ "description": "Test description",
+ "license": {"name": "MIT", "url": "https://mit.com/", "x-field": "extra"},
+ "termsOfService": "https://my-terms.com/",
+ "title": "My App",
+ "version": "1.0.0",
+ },
+ "servers": {
+ "development": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "localhost",
+ },
+ },
+ "tags": [
+ {"description": "experimental", "name": "some-tag", "x-field": "extra"},
+ ],
+ }
diff --git a/tests/asyncapi/kafka/v2_6_0/test_arguments.py b/tests/asyncapi/kafka/v2_6_0/test_arguments.py
new file mode 100644
index 0000000000..c6af2e02ac
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_arguments.py
@@ -0,0 +1,55 @@
+from faststream.kafka import KafkaBroker, TopicPartition
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_class = KafkaBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
+
+ def test_subscriber_with_one_topic_partitions(self) -> None:
+ broker = self.broker_class()
+
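+        # Two partitions of the same topic collapse into a single documented channel.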
+ part1 = TopicPartition("topic_name", 1)
+ part2 = TopicPartition("topic_name", 2)
+
+ @broker.subscriber(partitions=[part1, part2])
+        async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+            "kafka": {"bindingVersion": "0.4.0", "topic": "topic_name"},
+ }
+
+ def test_subscriber_with_multi_topics_partitions(self) -> None:
+ broker = self.broker_class()
+
+ part1 = TopicPartition("topic_name1", 1)
+ part2 = TopicPartition("topic_name2", 2)
+
+ @broker.subscriber(partitions=[part1, part2])
+        async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key1 = tuple(schema["channels"].keys())[0] # noqa: RUF015
+ key2 = tuple(schema["channels"].keys())[1]
+
+ assert sorted((
+ schema["channels"][key1]["bindings"]["kafka"]["topic"],
+ schema["channels"][key2]["bindings"]["kafka"]["topic"],
+ )) == sorted(("topic_name1", "topic_name2"))
diff --git a/tests/asyncapi/kafka/v2_6_0/test_connection.py b/tests/asyncapi/kafka/v2_6_0/test_connection.py
new file mode 100644
index 0000000000..2107e3882b
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_connection.py
@@ -0,0 +1,91 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ "kafka:9092",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "url": "kafka:9092",
+ },
+ },
+ }
+
+
+def test_multi() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(["kafka:9092", "kafka:9093"]),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9092",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9093",
+ },
+ },
+ }
+
+
+def test_custom() -> None:
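+    # specification_url documents servers other than the ones actually connected to.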
+ schema = AsyncAPI(
+ KafkaBroker(
+ ["kafka:9092", "kafka:9093"],
+ specification_url=["kafka:9094", "kafka:9095"],
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9094",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "kafka:9095",
+ },
+ },
+ }
diff --git a/tests/asyncapi/kafka/v2_6_0/test_fastapi.py b/tests/asyncapi/kafka/v2_6_0/test_fastapi.py
new file mode 100644
index 0000000000..7e55fa1db3
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_fastapi.py
@@ -0,0 +1,42 @@
+from faststream.kafka.fastapi import KafkaRouter
+from faststream.kafka.testing import TestKafkaBroker
+from faststream.security import SASLPlaintext
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v2_6_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
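+    # Builds a fresh router per call and hands its inner broker to the shared tests.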
+ broker_class = staticmethod(lambda: KafkaRouter().broker)
+ router_class = KafkaRouter
+ broker_wrapper = staticmethod(TestKafkaBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = staticmethod(lambda: KafkaRouter().broker)
+
+ def build_app(self, router):
+ return router
+
+
+def test_fastapi_security_schema() -> None:
+ security = SASLPlaintext(username="user", password="pass", use_ssl=False)
+
+ router = KafkaRouter("localhost:9092", security=security)
+
+ schema = AsyncAPI(router.broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema["servers"]["development"] == {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "security": [{"user-password": []}],
+ "url": "localhost:9092",
+ }
+ assert schema["components"]["securitySchemes"] == {
+ "user-password": {"type": "userPassword"},
+ }
diff --git a/tests/asyncapi/kafka/v2_6_0/test_naming.py b/tests/asyncapi/kafka/v2_6_0/test_naming.py
new file mode 100644
index 0000000000..bba38e11b7
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_naming.py
@@ -0,0 +1,51 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = KafkaBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "localhost",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test:Handle": {
+ "servers": ["development"],
+ "bindings": {"kafka": {"topic": "test", "bindingVersion": "0.4.0"}},
+ "publish": {
+ "message": {
+                        "$ref": "#/components/messages/test:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:Message": {
+ "title": "test:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
diff --git a/tests/asyncapi/kafka/v2_6_0/test_publisher.py b/tests/asyncapi/kafka/v2_6_0/test_publisher.py
new file mode 100644
index 0000000000..da3098352a
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_publisher.py
@@ -0,0 +1,20 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_class = KafkaBroker
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
diff --git a/tests/asyncapi/kafka/v2_6_0/test_router.py b/tests/asyncapi/kafka/v2_6_0/test_router.py
new file mode 100644
index 0000000000..2fd0342eb0
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_router.py
@@ -0,0 +1,85 @@
+from faststream.kafka import KafkaBroker, KafkaPublisher, KafkaRoute, KafkaRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v2_6_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = KafkaBroker
+ router_class = KafkaRouter
+ route_class = KafkaRoute
+ publisher_class = KafkaPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
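+        # The prefix is prepended to the topic, so the channel is "test_test:Handle".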
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "localhost",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "servers": ["development"],
+ "bindings": {
+ "kafka": {"topic": "test_test", "bindingVersion": "0.4.0"},
+ },
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test_test:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:Message": {
+ "title": "test_test:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/kafka/v2_6_0/test_security.py b/tests/asyncapi/kafka/v2_6_0/test_security.py
new file mode 100644
index 0000000000..b1275e242d
--- /dev/null
+++ b/tests/asyncapi/kafka/v2_6_0/test_security.py
@@ -0,0 +1,217 @@
+import ssl
+from copy import deepcopy
+
+from faststream.kafka import KafkaBroker
+from faststream.security import (
+ SASLGSSAPI,
+ BaseSecurity,
+ SASLOAuthBearer,
+ SASLPlaintext,
+ SASLScram256,
+ SASLScram512,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
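+# Baseline 2.6.0 schema; security tests deepcopy() it and patch the security fields.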
+basic_schema = {
+ "asyncapi": "2.6.0",
+ "channels": {
+ "test_1:TestTopic": {
+ "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_1"}},
+ "servers": ["development"],
+ "publish": {
+ "message": {"$ref": "#/components/messages/test_1:TestTopic:Message"},
+ },
+ },
+ "test_2:Publisher": {
+ "bindings": {"kafka": {"bindingVersion": "0.4.0", "topic": "test_2"}},
+ "subscribe": {
+ "message": {"$ref": "#/components/messages/test_2:Publisher:Message"},
+ },
+ "servers": ["development"],
+ },
+ },
+ "components": {
+ "messages": {
+ "test_1:TestTopic:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/TestTopic:Message:Payload"},
+ "title": "test_1:TestTopic:Message",
+ },
+ "test_2:Publisher:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/test_2:Publisher:Message:Payload",
+ },
+ "title": "test_2:Publisher:Message",
+ },
+ },
+ "schemas": {
+ "TestTopic:Message:Payload": {
+ "title": "TestTopic:Message:Payload",
+ "type": "string",
+ },
+ "test_2:Publisher:Message:Payload": {
+ "title": "test_2:Publisher:Message:Payload",
+ "type": "string",
+ },
+ },
+ "securitySchemes": {},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "kafka-secure",
+ "protocolVersion": "auto",
+ "security": [],
+ "url": "localhost:9092",
+ },
+ },
+}
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == basic_schema
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ plaintext_security_schema = deepcopy(basic_schema)
+ plaintext_security_schema["servers"]["development"]["security"] = [
+ {"user-password": []},
+ ]
+ plaintext_security_schema["components"]["securitySchemes"] = {
+ "user-password": {"type": "userPassword"},
+ }
+
+ assert schema == plaintext_security_schema
+
+
+def test_scram256_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram256(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ sasl256_security_schema = deepcopy(basic_schema)
+ sasl256_security_schema["servers"]["development"]["security"] = [{"scram256": []}]
+ sasl256_security_schema["components"]["securitySchemes"] = {
+ "scram256": {"type": "scramSha256"},
+ }
+
+ assert schema == sasl256_security_schema
+
+
+def test_scram512_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram512(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ sasl512_security_schema = deepcopy(basic_schema)
+ sasl512_security_schema["servers"]["development"]["security"] = [{"scram512": []}]
+ sasl512_security_schema["components"]["securitySchemes"] = {
+ "scram512": {"type": "scramSha512"},
+ }
+
+ assert schema == sasl512_security_schema
+
+
+def test_oauthbearer_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLOAuthBearer(
+ ssl_context=ssl_context,
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ sasl_oauthbearer_security_schema = deepcopy(basic_schema)
+ sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [
+ {"oauthbearer": []},
+ ]
+ sasl_oauthbearer_security_schema["components"]["securitySchemes"] = {
+        "oauthbearer": {"type": "oauth2", "$ref": ""},
+ }
+
+ assert schema == sasl_oauthbearer_security_schema
+
+
+def test_gssapi_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLGSSAPI(
+ ssl_context=ssl_context,
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ gssapi_security_schema = deepcopy(basic_schema)
+ gssapi_security_schema["servers"]["development"]["security"] = [{"gssapi": []}]
+ gssapi_security_schema["components"]["securitySchemes"] = {
+ "gssapi": {"type": "gssapi"},
+ }
+
+ assert schema == gssapi_security_schema
diff --git a/tests/a_docs/confluent/batch_consuming_pydantic/__init__.py b/tests/asyncapi/kafka/v3_0_0/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/batch_consuming_pydantic/__init__.py
rename to tests/asyncapi/kafka/v3_0_0/__init__.py
diff --git a/tests/asyncapi/kafka/v3_0_0/test_arguments.py b/tests/asyncapi/kafka/v3_0_0/test_arguments.py
new file mode 100644
index 0000000000..6fa7a44400
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_arguments.py
@@ -0,0 +1,20 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_factory = KafkaBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
diff --git a/tests/asyncapi/kafka/v3_0_0/test_connection.py b/tests/asyncapi/kafka/v3_0_0/test_connection.py
new file mode 100644
index 0000000000..e1fb6cfaab
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_connection.py
@@ -0,0 +1,99 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
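+# 3.0.0 servers use separate "host"/"pathname" fields instead of a 2.x "url".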
+def test_base() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ "kafka:9092",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "host": "kafka:9092",
+ "pathname": "",
+ },
+ },
+ }
+
+
+def test_multi() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(["kafka:9092", "kafka:9093"]),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9092",
+ "pathname": "",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9093",
+ "pathname": "",
+ },
+ },
+ }
+
+
+def test_custom() -> None:
+ schema = AsyncAPI(
+ KafkaBroker(
+ ["kafka:9092", "kafka:9093"],
+ specification_url=["kafka:9094", "kafka:9095"],
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9094",
+ "pathname": "",
+ },
+ "Server2": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "host": "kafka:9095",
+ "pathname": "",
+ },
+ },
+ }
diff --git a/tests/asyncapi/kafka/v3_0_0/test_fastapi.py b/tests/asyncapi/kafka/v3_0_0/test_fastapi.py
new file mode 100644
index 0000000000..32ce017bed
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_fastapi.py
@@ -0,0 +1,42 @@
+from faststream.kafka.fastapi import KafkaRouter
+from faststream.kafka.testing import TestKafkaBroker
+from faststream.security import SASLPlaintext
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v3_0_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_factory = staticmethod(lambda: KafkaRouter().broker)
+ router_factory = KafkaRouter
+ broker_wrapper = staticmethod(TestKafkaBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_factory = staticmethod(lambda: KafkaRouter().broker)
+
+ def build_app(self, router):
+ return router
+
+
+def test_fastapi_security_schema() -> None:
+ security = SASLPlaintext(username="user", password="pass", use_ssl=False)
+
+ router = KafkaRouter("localhost:9092", security=security)
+
+ schema = AsyncAPI(router.broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema["servers"]["development"] == {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "security": [{"user-password": []}],
+ "host": "localhost:9092",
+ "pathname": "",
+ }
+ assert schema["components"]["securitySchemes"] == {
+ "user-password": {"type": "userPassword"},
+ }
diff --git a/tests/asyncapi/kafka/v3_0_0/test_naming.py b/tests/asyncapi/kafka/v3_0_0/test_naming.py
new file mode 100644
index 0000000000..e63d62cad9
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_naming.py
@@ -0,0 +1,71 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.naming import NamingTestCase
+
+
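+# In 3.0.0 a subscriber is rendered as a channel plus a separate "receive" operation.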
+class TestNaming(NamingTestCase):
+ broker_class = KafkaBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "host": "localhost",
+ "pathname": "",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test:Handle": {
+ "address": "test:Handle",
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "bindings": {"kafka": {"topic": "test", "bindingVersion": "0.4.0"}},
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test:Handle:SubscribeMessage",
+ },
+ },
+ },
+ },
+ "operations": {
+ "test:HandleSubscribe": {
+ "action": "receive",
+ "channel": {
+ "$ref": "#/channels/test:Handle",
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:SubscribeMessage": {
+ "title": "test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
diff --git a/tests/asyncapi/kafka/v3_0_0/test_publisher.py b/tests/asyncapi/kafka/v3_0_0/test_publisher.py
new file mode 100644
index 0000000000..1f6e6b8a08
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_publisher.py
@@ -0,0 +1,20 @@
+from faststream.kafka import KafkaBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_factory = KafkaBroker
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "test"},
+ }
diff --git a/tests/asyncapi/kafka/v3_0_0/test_router.py b/tests/asyncapi/kafka/v3_0_0/test_router.py
new file mode 100644
index 0000000000..ac1fed9404
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_router.py
@@ -0,0 +1,97 @@
+from faststream.kafka import KafkaBroker, KafkaPublisher, KafkaRoute, KafkaRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v3_0_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = KafkaBroker
+ router_class = KafkaRouter
+ route_class = KafkaRoute
+ publisher_class = KafkaPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "localhost",
+ "pathname": "",
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "address": "test_test:Handle",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_test:Handle:SubscribeMessage",
+ },
+ },
+ "bindings": {
+ "kafka": {"topic": "test_test", "bindingVersion": "0.4.0"},
+ },
+ },
+ },
+ "operations": {
+ "test_test:HandleSubscribe": {
+ "action": "receive",
+ "messages": [
+ {
+ "$ref": "#/channels/test_test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ "channel": {"$ref": "#/channels/test_test:Handle"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:SubscribeMessage": {
+ "title": "test_test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = KafkaRouter
+
+ def build_app(self, router):
+ broker = KafkaBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/kafka/v3_0_0/test_security.py b/tests/asyncapi/kafka/v3_0_0/test_security.py
new file mode 100644
index 0000000000..ddb06cce77
--- /dev/null
+++ b/tests/asyncapi/kafka/v3_0_0/test_security.py
@@ -0,0 +1,235 @@
+import ssl
+from copy import deepcopy
+
+from faststream.kafka import KafkaBroker
+from faststream.security import (
+ SASLGSSAPI,
+ BaseSecurity,
+ SASLOAuthBearer,
+ SASLPlaintext,
+ SASLScram256,
+ SASLScram512,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+basic_schema = {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "localhost:9092",
+ "pathname": "",
+ "protocol": "kafka-secure",
+ "protocolVersion": "auto",
+ "security": [],
+ },
+ },
+ "channels": {
+ "test_1:TestTopic": {
+ "address": "test_1:TestTopic",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_1:TestTopic:SubscribeMessage",
+ },
+ },
+ "bindings": {"kafka": {"topic": "test_1", "bindingVersion": "0.4.0"}},
+ },
+ "test_2:Publisher": {
+ "address": "test_2:Publisher",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "Message": {"$ref": "#/components/messages/test_2:Publisher:Message"},
+ },
+ "bindings": {"kafka": {"topic": "test_2", "bindingVersion": "0.4.0"}},
+ },
+ },
+ "operations": {
+ "test_1:TestTopicSubscribe": {
+ "action": "receive",
+ "messages": [
+ {"$ref": "#/channels/test_1:TestTopic/messages/SubscribeMessage"},
+ ],
+ "channel": {"$ref": "#/channels/test_1:TestTopic"},
+ },
+ "test_2:Publisher": {
+ "action": "send",
+ "messages": [{"$ref": "#/channels/test_2:Publisher/messages/Message"}],
+ "channel": {"$ref": "#/channels/test_2:Publisher"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_1:TestTopic:SubscribeMessage": {
+ "title": "test_1:TestTopic:SubscribeMessage",
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {"$ref": "#/components/schemas/TestTopic:Message:Payload"},
+ },
+ "test_2:Publisher:Message": {
+ "title": "test_2:Publisher:Message",
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/test_2:Publisher:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "TestTopic:Message:Payload": {
+ "title": "TestTopic:Message:Payload",
+ "type": "string",
+ },
+ "test_2:Publisher:Message:Payload": {
+ "title": "test_2:Publisher:Message:Payload",
+ "type": "string",
+ },
+ },
+ "securitySchemes": {},
+ },
+}
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
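+ # the handler body never runs: its str annotations produce the string payloads in basic_schema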
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == basic_schema
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ plaintext_security_schema = deepcopy(basic_schema)
+ plaintext_security_schema["servers"]["development"]["security"] = [
+ {"user-password": []},
+ ]
+ plaintext_security_schema["components"]["securitySchemes"] = {
+ "user-password": {"type": "userPassword"},
+ }
+
+ assert schema == plaintext_security_schema
+
+
+def test_scram256_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram256(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ sasl256_security_schema = deepcopy(basic_schema)
+ sasl256_security_schema["servers"]["development"]["security"] = [{"scram256": []}]
+ sasl256_security_schema["components"]["securitySchemes"] = {
+ "scram256": {"type": "scramSha256"},
+ }
+
+ assert schema == sasl256_security_schema
+
+
+def test_scram512_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLScram512(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ sasl512_security_schema = deepcopy(basic_schema)
+ sasl512_security_schema["servers"]["development"]["security"] = [{"scram512": []}]
+ sasl512_security_schema["components"]["securitySchemes"] = {
+ "scram512": {"type": "scramSha512"},
+ }
+
+ assert schema == sasl512_security_schema
+
+
+def test_oauthbearer_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLOAuthBearer(
+ ssl_context=ssl_context,
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ sasl_oauthbearer_security_schema = deepcopy(basic_schema)
+ sasl_oauthbearer_security_schema["servers"]["development"]["security"] = [
+ {"oauthbearer": []},
+ ]
+ sasl_oauthbearer_security_schema["components"]["securitySchemes"] = {
+ "oauthbearer": {"type": "oauth2", "$ref": ""}
+ }
+
+ assert schema == sasl_oauthbearer_security_schema
+
+
+def test_gssapi_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = SASLGSSAPI(
+ ssl_context=ssl_context,
+ )
+
+ broker = KafkaBroker("localhost:9092", security=security)
+
+ @broker.publisher("test_2")
+ @broker.subscriber("test_1")
+ async def test_topic(msg: str) -> str:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ gssapi_security_schema = deepcopy(basic_schema)
+ gssapi_security_schema["servers"]["development"]["security"] = [{"gssapi": []}]
+ gssapi_security_schema["components"]["securitySchemes"] = {
+ "gssapi": {"type": "gssapi"},
+ }
+
+ assert schema == gssapi_security_schema
diff --git a/tests/asyncapi/nats/test_arguments.py b/tests/asyncapi/nats/test_arguments.py
deleted file mode 100644
index 4749b85b5a..0000000000
--- a/tests/asyncapi/nats/test_arguments.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.nats import NatsBroker
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-
-
-class TestArguments(ArgumentsTestcase):
- broker_class = NatsBroker
-
- def test_subscriber_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "nats": {"bindingVersion": "custom", "subject": "test"}
- }
diff --git a/tests/asyncapi/nats/test_connection.py b/tests/asyncapi/nats/test_connection.py
deleted file mode 100644
index 0f1f5c057e..0000000000
--- a/tests/asyncapi/nats/test_connection.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Tag
-from faststream.nats import NatsBroker
-
-
-def test_base():
- schema = get_app_schema(
- FastStream(
- NatsBroker(
- "nats:9092",
- protocol="plaintext",
- protocol_version="0.9.0",
- description="Test description",
- tags=(Tag(name="some-tag", description="experimental"),),
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "description": "Test description",
- "protocol": "plaintext",
- "protocolVersion": "0.9.0",
- "tags": [{"description": "experimental", "name": "some-tag"}],
- "url": "nats:9092",
- }
- },
- }, schema
-
-
-def test_multi():
- schema = get_app_schema(
- FastStream(NatsBroker(["nats:9092", "nats:9093"]))
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "Server1": {
- "protocol": "nats",
- "protocolVersion": "custom",
- "url": "nats:9092",
- },
- "Server2": {
- "protocol": "nats",
- "protocolVersion": "custom",
- "url": "nats:9093",
- },
- },
- }
-
-
-def test_custom():
- schema = get_app_schema(
- FastStream(
- NatsBroker(
- ["nats:9092", "nats:9093"], asyncapi_url=["nats:9094", "nats:9095"]
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "Server1": {
- "protocol": "nats",
- "protocolVersion": "custom",
- "url": "nats:9094",
- },
- "Server2": {
- "protocol": "nats",
- "protocolVersion": "custom",
- "url": "nats:9095",
- },
- },
- }
diff --git a/tests/asyncapi/nats/test_fastapi.py b/tests/asyncapi/nats/test_fastapi.py
deleted file mode 100644
index 3b4a777523..0000000000
--- a/tests/asyncapi/nats/test_fastapi.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from typing import Type
-
-from faststream.nats import TestNatsBroker
-from faststream.nats.fastapi import NatsRouter
-from tests.asyncapi.base.arguments import FastAPICompatible
-from tests.asyncapi.base.fastapi import FastAPITestCase
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestRouterArguments(FastAPITestCase, FastAPICompatible):
- broker_class: Type[NatsRouter] = NatsRouter
- broker_wrapper = staticmethod(TestNatsBroker)
-
- def build_app(self, router):
- return router
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = NatsRouter
-
- def build_app(self, router):
- return router
diff --git a/tests/asyncapi/nats/test_kv_schema.py b/tests/asyncapi/nats/test_kv_schema.py
deleted file mode 100644
index 4b0edc1847..0000000000
--- a/tests/asyncapi/nats/test_kv_schema.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.nats import NatsBroker
-
-
-def test_kv_schema():
- broker = NatsBroker()
-
- @broker.subscriber("test", kv_watch="test")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema["channels"] == {}
diff --git a/tests/asyncapi/nats/test_naming.py b/tests/asyncapi/nats/test_naming.py
deleted file mode 100644
index 833289e8db..0000000000
--- a/tests/asyncapi/nats/test_naming.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.nats import NatsBroker
-from tests.asyncapi.base.naming import NamingTestCase
-
-
-class TestNaming(NamingTestCase):
- broker_class = NatsBroker
-
- def test_base(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "nats://localhost:4222",
- "protocol": "nats",
- "protocolVersion": "custom",
- }
- },
- "channels": {
- "test:Handle": {
- "servers": ["development"],
- "bindings": {
- "nats": {"subject": "test", "bindingVersion": "custom"}
- },
- "subscribe": {
- "message": {"$ref": "#/components/messages/test:Handle:Message"}
- },
- }
- },
- "components": {
- "messages": {
- "test:Handle:Message": {
- "title": "test:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {"$ref": "#/components/schemas/EmptyPayload"},
- }
- },
- "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
- },
- }
diff --git a/tests/asyncapi/nats/test_obj_schema.py b/tests/asyncapi/nats/test_obj_schema.py
deleted file mode 100644
index f7546cbc22..0000000000
--- a/tests/asyncapi/nats/test_obj_schema.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.nats import NatsBroker
-
-
-def test_obj_schema():
- broker = NatsBroker()
-
- @broker.subscriber("test", obj_watch=True)
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema["channels"] == {}
diff --git a/tests/asyncapi/nats/test_publisher.py b/tests/asyncapi/nats/test_publisher.py
deleted file mode 100644
index 5263a0dd99..0000000000
--- a/tests/asyncapi/nats/test_publisher.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.nats import NatsBroker
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestArguments(PublisherTestcase):
- broker_class = NatsBroker
-
- def test_publisher_bindings(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "nats": {"bindingVersion": "custom", "subject": "test"}
- }, schema["channels"][key]["bindings"]
diff --git a/tests/asyncapi/nats/test_router.py b/tests/asyncapi/nats/test_router.py
deleted file mode 100644
index 19087d14de..0000000000
--- a/tests/asyncapi/nats/test_router.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.nats import NatsBroker, NatsPublisher, NatsRoute, NatsRouter
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-from tests.asyncapi.base.publisher import PublisherTestcase
-from tests.asyncapi.base.router import RouterTestcase
-
-
-class TestRouter(RouterTestcase):
- broker_class = NatsBroker
- router_class = NatsRouter
- route_class = NatsRoute
- publisher_class = NatsPublisher
-
- def test_prefix(self):
- broker = self.broker_class()
-
- router = self.router_class(prefix="test_")
-
- @router.subscriber("test")
- async def handle(msg): ...
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "nats://localhost:4222",
- "protocol": "nats",
- "protocolVersion": "custom",
- }
- },
- "channels": {
- "test_test:Handle": {
- "servers": ["development"],
- "bindings": {
- "nats": {"subject": "test_test", "bindingVersion": "custom"}
- },
- "subscribe": {
- "message": {
- "$ref": "#/components/messages/test_test:Handle:Message"
- }
- },
- }
- },
- "components": {
- "messages": {
- "test_test:Handle:Message": {
- "title": "test_test:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {
- "$ref": "#/components/schemas/Handle:Message:Payload"
- },
- }
- },
- "schemas": {
- "Handle:Message:Payload": {"title": "Handle:Message:Payload"}
- },
- },
- }
-
-
-class TestRouterArguments(ArgumentsTestcase):
- broker_class = NatsRouter
-
- def build_app(self, router):
- broker = NatsBroker()
- broker.include_router(router)
- return FastStream(broker)
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = NatsRouter
-
- def build_app(self, router):
- broker = NatsBroker()
- broker.include_router(router)
- return FastStream(broker)
diff --git a/tests/a_docs/getting_started/cli/nats/__init__.py b/tests/asyncapi/nats/v2_6_0/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/cli/nats/__init__.py
rename to tests/asyncapi/nats/v2_6_0/__init__.py
diff --git a/tests/asyncapi/nats/v2_6_0/test_arguments.py b/tests/asyncapi/nats/v2_6_0/test_arguments.py
new file mode 100644
index 0000000000..5ad34a0001
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_arguments.py
@@ -0,0 +1,20 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_class = NatsBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "nats": {"bindingVersion": "custom", "subject": "test"},
+ }, schema["channels"][key]["bindings"]
diff --git a/tests/asyncapi/nats/v2_6_0/test_connection.py b/tests/asyncapi/nats/v2_6_0/test_connection.py
new file mode 100644
index 0000000000..486bbb8033
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_connection.py
@@ -0,0 +1,90 @@
+from faststream.nats import NatsBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ NatsBroker(
+ "nats:9092",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "url": "nats:9092",
+ },
+ },
+ }, schema
+
+
+def test_multi() -> None:
+ schema = AsyncAPI(
+ NatsBroker(["nats:9092", "nats:9093"]),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "url": "nats:9092",
+ },
+ "Server2": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "url": "nats:9093",
+ },
+ },
+ }
+
+
+def test_custom() -> None:
+ schema = AsyncAPI(
+ NatsBroker(
+ ["nats:9092", "nats:9093"],
+ specification_url=["nats:9094", "nats:9095"],
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "url": "nats:9094",
+ },
+ "Server2": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "url": "nats:9095",
+ },
+ },
+ }
diff --git a/tests/asyncapi/nats/v2_6_0/test_fastapi.py b/tests/asyncapi/nats/v2_6_0/test_fastapi.py
new file mode 100644
index 0000000000..6f7c8b3eb1
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_fastapi.py
@@ -0,0 +1,21 @@
+from faststream.nats import TestNatsBroker
+from faststream.nats.fastapi import NatsRouter
+from tests.asyncapi.base.v2_6_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v2_6_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_class = staticmethod(lambda: NatsRouter().broker)
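+ # the FastAPI router wraps a plain broker; the base testcase instantiates that inner broker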
+ router_class = NatsRouter
+ broker_wrapper = staticmethod(TestNatsBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = staticmethod(lambda: NatsRouter().broker)
+
+ def build_app(self, router):
+ return router
diff --git a/tests/asyncapi/nats/v2_6_0/test_kv_schema.py b/tests/asyncapi/nats/v2_6_0/test_kv_schema.py
new file mode 100644
index 0000000000..f069f9b476
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_kv_schema.py
@@ -0,0 +1,13 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_kv_schema() -> None:
+ broker = NatsBroker()
+
+ @broker.subscriber("test", kv_watch="test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
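+ # kv_watch subscribers are not documented, so no channels appear in the schema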
+ assert schema["channels"] == {}
diff --git a/tests/asyncapi/nats/v2_6_0/test_naming.py b/tests/asyncapi/nats/v2_6_0/test_naming.py
new file mode 100644
index 0000000000..9c0738f9de
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_naming.py
@@ -0,0 +1,53 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = NatsBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "nats://localhost:4222",
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ },
+ },
+ "channels": {
+ "test:Handle": {
+ "servers": ["development"],
+ "bindings": {
+ "nats": {"subject": "test", "bindingVersion": "custom"},
+ },
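+ # note: AsyncAPI 2.x documents consumed channels under "publish" operations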
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test:Handle:Message"
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:Message": {
+ "title": "test:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
diff --git a/tests/asyncapi/nats/v2_6_0/test_obj_schema.py b/tests/asyncapi/nats/v2_6_0/test_obj_schema.py
new file mode 100644
index 0000000000..51d5507bb4
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_obj_schema.py
@@ -0,0 +1,13 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_obj_schema() -> None:
+ broker = NatsBroker()
+
+ @broker.subscriber("test", obj_watch=True)
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema["channels"] == {}
diff --git a/tests/asyncapi/nats/v2_6_0/test_publisher.py b/tests/asyncapi/nats/v2_6_0/test_publisher.py
new file mode 100644
index 0000000000..cdf7291ab7
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_publisher.py
@@ -0,0 +1,20 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_class = NatsBroker
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "nats": {"bindingVersion": "custom", "subject": "test"},
+ }, schema["channels"][key]["bindings"]
diff --git a/tests/asyncapi/nats/v2_6_0/test_router.py b/tests/asyncapi/nats/v2_6_0/test_router.py
new file mode 100644
index 0000000000..7986cba82e
--- /dev/null
+++ b/tests/asyncapi/nats/v2_6_0/test_router.py
@@ -0,0 +1,84 @@
+from faststream.nats import NatsBroker, NatsPublisher, NatsRoute, NatsRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v2_6_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = NatsBroker
+ router_class = NatsRouter
+ route_class = NatsRoute
+ publisher_class = NatsPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "nats://localhost:4222",
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "servers": ["development"],
+ "bindings": {
+ "nats": {"subject": "test_test", "bindingVersion": "custom"},
+ },
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test_test:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:Message": {
+ "title": "test_test:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = NatsRouter
+
+ def build_app(self, router):
+ broker = NatsBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = NatsRouter
+
+ def build_app(self, router):
+ broker = NatsBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/a_docs/nats/__init__.py b/tests/asyncapi/nats/v3_0_0/__init__.py
similarity index 100%
rename from tests/a_docs/nats/__init__.py
rename to tests/asyncapi/nats/v3_0_0/__init__.py
diff --git a/tests/asyncapi/nats/v3_0_0/test_arguments.py b/tests/asyncapi/nats/v3_0_0/test_arguments.py
new file mode 100644
index 0000000000..837ad7bdcc
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_arguments.py
@@ -0,0 +1,20 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_factory = NatsBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "nats": {"bindingVersion": "custom", "subject": "test"},
+ }
diff --git a/tests/asyncapi/nats/v3_0_0/test_connection.py b/tests/asyncapi/nats/v3_0_0/test_connection.py
new file mode 100644
index 0000000000..f88fc0fb83
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_connection.py
@@ -0,0 +1,98 @@
+from faststream.nats import NatsBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ NatsBroker(
+ "nats:9092",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "host": "nats:9092",
+ "pathname": "",
+ },
+ },
+ }, schema
+
+
+def test_multi() -> None:
+ schema = AsyncAPI(
+ NatsBroker(["nats:9092", "nats:9093"]),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "host": "nats:9092",
+ "pathname": "",
+ },
+ "Server2": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "host": "nats:9093",
+ "pathname": "",
+ },
+ },
+ }
+
+
+def test_custom() -> None:
+ schema = AsyncAPI(
+ NatsBroker(
+ ["nats:9092", "nats:9093"],
+ specification_url=["nats:9094", "nats:9095"],
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "Server1": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "host": "nats:9094",
+ "pathname": "",
+ },
+ "Server2": {
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ "host": "nats:9095",
+ "pathname": "",
+ },
+ },
+ }
diff --git a/tests/asyncapi/nats/v3_0_0/test_fastapi.py b/tests/asyncapi/nats/v3_0_0/test_fastapi.py
new file mode 100644
index 0000000000..2bd8cc25c4
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_fastapi.py
@@ -0,0 +1,21 @@
+from faststream.nats import TestNatsBroker
+from faststream.nats.fastapi import NatsRouter
+from tests.asyncapi.base.v3_0_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v3_0_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_factory = staticmethod(lambda: NatsRouter().broker)
+ router_factory = NatsRouter
+ broker_wrapper = staticmethod(TestNatsBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_factory = staticmethod(lambda: NatsRouter().broker)
+
+ def build_app(self, router):
+ return router
diff --git a/tests/asyncapi/nats/v3_0_0/test_kv_schema.py b/tests/asyncapi/nats/v3_0_0/test_kv_schema.py
new file mode 100644
index 0000000000..bcd0c0e158
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_kv_schema.py
@@ -0,0 +1,13 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_kv_schema() -> None:
+ broker = NatsBroker()
+
+ @broker.subscriber("test", kv_watch="test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema["channels"] == {}
diff --git a/tests/asyncapi/nats/v3_0_0/test_naming.py b/tests/asyncapi/nats/v3_0_0/test_naming.py
new file mode 100644
index 0000000000..b28fea104f
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_naming.py
@@ -0,0 +1,72 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = NatsBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "host": "localhost:4222",
+ "pathname": "",
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ },
+ },
+ "channels": {
+ "test:Handle": {
+ "address": "test:Handle",
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "bindings": {
+ "nats": {"subject": "test", "bindingVersion": "custom"},
+ },
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test:Handle:SubscribeMessage",
+ },
+ },
+ },
+ },
+ "operations": {
+ "test:HandleSubscribe": {
+ "action": "receive",
+ "channel": {
+ "$ref": "#/channels/test:Handle",
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:SubscribeMessage": {
+ "title": "test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
diff --git a/tests/asyncapi/nats/v3_0_0/test_obj_schema.py b/tests/asyncapi/nats/v3_0_0/test_obj_schema.py
new file mode 100644
index 0000000000..d3b434ddee
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_obj_schema.py
@@ -0,0 +1,13 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_obj_schema() -> None:
+ broker = NatsBroker()
+
+ @broker.subscriber("test", obj_watch=True)
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema["channels"] == {}
diff --git a/tests/asyncapi/nats/v3_0_0/test_publisher.py b/tests/asyncapi/nats/v3_0_0/test_publisher.py
new file mode 100644
index 0000000000..9a83756c09
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_publisher.py
@@ -0,0 +1,20 @@
+from faststream.nats import NatsBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_factory = NatsBroker
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "nats": {"bindingVersion": "custom", "subject": "test"},
+ }, schema["channels"][key]["bindings"]
diff --git a/tests/asyncapi/nats/v3_0_0/test_router.py b/tests/asyncapi/nats/v3_0_0/test_router.py
new file mode 100644
index 0000000000..a075322b60
--- /dev/null
+++ b/tests/asyncapi/nats/v3_0_0/test_router.py
@@ -0,0 +1,97 @@
+from faststream.nats import NatsBroker, NatsPublisher, NatsRoute, NatsRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v3_0_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = NatsBroker
+ router_class = NatsRouter
+ route_class = NatsRoute
+ publisher_class = NatsPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "localhost:4222",
+ "pathname": "",
+ "protocol": "nats",
+ "protocolVersion": "custom",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "address": "test_test:Handle",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_test:Handle:SubscribeMessage",
+ },
+ },
+ "bindings": {
+ "nats": {"subject": "test_test", "bindingVersion": "custom"},
+ },
+ },
+ },
+ "operations": {
+ "test_test:HandleSubscribe": {
+ "action": "receive",
+ "messages": [
+ {
+ "$ref": "#/channels/test_test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ "channel": {"$ref": "#/channels/test_test:Handle"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:SubscribeMessage": {
+ "title": "test_test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = NatsRouter
+
+ def build_app(self, router):
+ broker = NatsBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = NatsRouter
+
+ def build_app(self, router):
+ broker = NatsBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/rabbit/test_arguments.py b/tests/asyncapi/rabbit/test_arguments.py
deleted file mode 100644
index f192b43766..0000000000
--- a/tests/asyncapi/rabbit/test_arguments.py
+++ /dev/null
@@ -1,170 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-
-
-class TestArguments(ArgumentsTestcase):
- broker_class = RabbitBroker
-
- def test_subscriber_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.TOPIC),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "topic",
- "vhost": "/",
- },
- "is": "routingKey",
- "queue": {
- "autoDelete": True,
- "durable": False,
- "exclusive": False,
- "name": "test",
- "vhost": "/",
- },
- }
- }
-
- def test_subscriber_fanout_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.FANOUT),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "fanout",
- "vhost": "/",
- },
- "is": "routingKey",
- }
- }
-
- def test_subscriber_headers_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.HEADERS),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "headers",
- "vhost": "/",
- },
- "is": "routingKey",
- }
- }
-
- def test_subscriber_xdelay_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.X_DELAYED_MESSAGE),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "x-delayed-message",
- "vhost": "/",
- },
- "is": "routingKey",
- }
- }
-
- def test_subscriber_consistent_hash_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.X_CONSISTENT_HASH),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "x-consistent-hash",
- "vhost": "/",
- },
- "is": "routingKey",
- }
- }
-
- def test_subscriber_modules_hash_bindings(self):
- broker = self.broker_class()
-
- @broker.subscriber(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.X_MODULUS_HASH),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "x-modulus-hash",
- "vhost": "/",
- },
- "is": "routingKey",
- }
- }
diff --git a/tests/asyncapi/rabbit/test_connection.py b/tests/asyncapi/rabbit/test_connection.py
deleted file mode 100644
index 4362e8ac48..0000000000
--- a/tests/asyncapi/rabbit/test_connection.py
+++ /dev/null
@@ -1,120 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Tag
-from faststream.rabbit import RabbitBroker
-
-
-def test_base():
- schema = get_app_schema(
- FastStream(
- RabbitBroker(
- "amqps://localhost",
- port=5673,
- protocol_version="0.9.0",
- description="Test description",
- tags=(Tag(name="some-tag", description="experimental"),),
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "description": "Test description",
- "protocol": "amqps",
- "protocolVersion": "0.9.0",
- "tags": [{"description": "experimental", "name": "some-tag"}],
- "url": "amqps://guest:guest@localhost:5673/", # pragma: allowlist secret
- }
- },
- }
-
-
-def test_kwargs():
- broker = RabbitBroker(
- "amqp://guest:guest@localhost:5672/?heartbeat=300", # pragma: allowlist secret
- host="127.0.0.1",
- )
-
- assert (
- broker.url
- == "amqp://guest:guest@127.0.0.1:5672/?heartbeat=300" # pragma: allowlist secret
- )
-
-
-def test_custom():
- broker = RabbitBroker(
- "amqps://localhost",
- asyncapi_url="amqp://guest:guest@127.0.0.1:5672/vh", # pragma: allowlist secret
- )
-
- broker.publisher("test")
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert (
- schema
- == {
- "asyncapi": "2.6.0",
- "channels": {
- "test:_:Publisher": {
- "bindings": {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {"type": "default", "vhost": "/vh"},
- "is": "routingKey",
- "queue": {
- "autoDelete": False,
- "durable": False,
- "exclusive": False,
- "name": "test",
- "vhost": "/vh",
- },
- }
- },
- "publish": {
- "bindings": {
- "amqp": {
- "ack": True,
- "bindingVersion": "0.2.0",
- "cc": "test",
- "deliveryMode": 1,
- "mandatory": True,
- }
- },
- "message": {
- "$ref": "#/components/messages/test:_:Publisher:Message"
- },
- },
- "servers": ["development"],
- }
- },
- "components": {
- "messages": {
- "test:_:Publisher:Message": {
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {
- "$ref": "#/components/schemas/test:_:PublisherPayload"
- },
- "title": "test:_:Publisher:Message",
- }
- },
- "schemas": {"test:_:PublisherPayload": {}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "amqp",
- "protocolVersion": "0.9.1",
- "url": "amqp://guest:guest@127.0.0.1:5672/vh", # pragma: allowlist secret
- }
- },
- }
- )
diff --git a/tests/asyncapi/rabbit/test_fastapi.py b/tests/asyncapi/rabbit/test_fastapi.py
deleted file mode 100644
index e205f9966e..0000000000
--- a/tests/asyncapi/rabbit/test_fastapi.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from typing import Type
-
-from faststream.asyncapi.generate import get_app_schema
-from faststream.rabbit.fastapi import RabbitRouter
-from faststream.rabbit.testing import TestRabbitBroker
-from faststream.security import SASLPlaintext
-from tests.asyncapi.base.arguments import FastAPICompatible
-from tests.asyncapi.base.fastapi import FastAPITestCase
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestRouterArguments(FastAPITestCase, FastAPICompatible):
- broker_class: Type[RabbitRouter] = RabbitRouter
- broker_wrapper = staticmethod(TestRabbitBroker)
-
- def build_app(self, router):
- return router
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = RabbitRouter
-
- def build_app(self, router):
- return router
-
-
-def test_fastapi_security_schema():
- security = SASLPlaintext(username="user", password="pass", use_ssl=False)
-
- broker = RabbitRouter(security=security)
-
- schema = get_app_schema(broker).to_jsonable()
-
- assert schema["servers"]["development"] == {
- "protocol": "amqp",
- "protocolVersion": "0.9.1",
- "security": [{"user-password": []}],
- "url": "amqp://user:pass@localhost:5672/", # pragma: allowlist secret
- }
- assert schema["components"]["securitySchemes"] == {
- "user-password": {"type": "userPassword"}
- }
diff --git a/tests/asyncapi/rabbit/test_naming.py b/tests/asyncapi/rabbit/test_naming.py
deleted file mode 100644
index b97965649c..0000000000
--- a/tests/asyncapi/rabbit/test_naming.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from typing import Type
-
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.rabbit import RabbitBroker
-from tests.asyncapi.base.naming import NamingTestCase
-
-
-class TestNaming(NamingTestCase):
- broker_class: Type[RabbitBroker] = RabbitBroker
-
- def test_subscriber_with_exchange(self):
- broker = self.broker_class()
-
- @broker.subscriber("test", "exchange")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["test:exchange:Handle"]
-
- assert list(schema["components"]["messages"].keys()) == [
- "test:exchange:Handle:Message"
- ]
-
- def test_publisher_with_exchange(self):
- broker = self.broker_class()
-
- @broker.publisher("test", "exchange")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert list(schema["channels"].keys()) == ["test:exchange:Publisher"]
-
- assert list(schema["components"]["messages"].keys()) == [
- "test:exchange:Publisher:Message"
- ]
-
- def test_base(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert (
- schema
- == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "amqp://guest:guest@localhost:5672/", # pragma: allowlist secret
- "protocol": "amqp",
- "protocolVersion": "0.9.1",
- }
- },
- "channels": {
- "test:_:Handle": {
- "servers": ["development"],
- "bindings": {
- "amqp": {
- "is": "routingKey",
- "bindingVersion": "0.2.0",
- "queue": {
- "name": "test",
- "durable": False,
- "exclusive": False,
- "autoDelete": False,
- "vhost": "/",
- },
- "exchange": {"type": "default", "vhost": "/"},
- }
- },
- "subscribe": {
- "bindings": {
- "amqp": {
- "cc": "test",
- "ack": True,
- "bindingVersion": "0.2.0",
- }
- },
- "message": {
- "$ref": "#/components/messages/test:_:Handle:Message"
- },
- },
- }
- },
- "components": {
- "messages": {
- "test:_:Handle:Message": {
- "title": "test:_:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {"$ref": "#/components/schemas/EmptyPayload"},
- }
- },
- "schemas": {
- "EmptyPayload": {"title": "EmptyPayload", "type": "null"}
- },
- },
- }
- )
diff --git a/tests/asyncapi/rabbit/test_publisher.py b/tests/asyncapi/rabbit/test_publisher.py
deleted file mode 100644
index bbe4faf3c8..0000000000
--- a/tests/asyncapi/rabbit/test_publisher.py
+++ /dev/null
@@ -1,188 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestArguments(PublisherTestcase):
- broker_class = RabbitBroker
-
- def test_just_exchange(self):
- broker = self.broker_class("amqp://guest:guest@localhost:5672/vhost")
-
- @broker.publisher(exchange="test-ex")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- assert schema["channels"] == {
- "_:test-ex:Publisher": {
- "bindings": {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "direct",
- "vhost": "/vhost",
- },
- "is": "routingKey",
- }
- },
- "publish": {
- "bindings": {
- "amqp": {
- "ack": True,
- "bindingVersion": "0.2.0",
- "deliveryMode": 1,
- "mandatory": True,
- }
- },
- "message": {
- "$ref": "#/components/messages/_:test-ex:Publisher:Message"
- },
- },
- "servers": ["development"],
- }
- }, schema["channels"]
-
- def test_publisher_bindings(self):
- broker = self.broker_class()
-
- @broker.publisher(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.TOPIC),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "topic",
- "vhost": "/",
- },
- "is": "routingKey",
- "queue": {
- "autoDelete": True,
- "durable": False,
- "exclusive": False,
- "name": "test",
- "vhost": "/",
- },
- }
- }
-
- def test_useless_queue_bindings(self):
- broker = self.broker_class()
-
- @broker.publisher(
- RabbitQueue("test", auto_delete=True),
- RabbitExchange("test-ex", type=ExchangeType.FANOUT),
- )
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- assert schema["channels"] == {
- "_:test-ex:Publisher": {
- "bindings": {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "fanout",
- "vhost": "/",
- },
- "is": "routingKey",
- }
- },
- "publish": {
- "message": {
- "$ref": "#/components/messages/_:test-ex:Publisher:Message"
- }
- },
- "servers": ["development"],
- }
- }
-
- def test_reusable_exchange(self):
- broker = self.broker_class("amqp://guest:guest@localhost:5672/vhost")
-
- @broker.publisher(exchange="test-ex", routing_key="key1")
- @broker.publisher(exchange="test-ex", routing_key="key2", priority=10)
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
-
- assert schema["channels"] == {
- "key1:test-ex:Publisher": {
- "bindings": {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "direct",
- "vhost": "/vhost",
- },
- "is": "routingKey",
- }
- },
- "publish": {
- "bindings": {
- "amqp": {
- "ack": True,
- "bindingVersion": "0.2.0",
- "cc": "key1",
- "deliveryMode": 1,
- "mandatory": True,
- }
- },
- "message": {
- "$ref": "#/components/messages/key1:test-ex:Publisher:Message"
- },
- },
- "servers": ["development"],
- },
- "key2:test-ex:Publisher": {
- "bindings": {
- "amqp": {
- "bindingVersion": "0.2.0",
- "exchange": {
- "autoDelete": False,
- "durable": False,
- "name": "test-ex",
- "type": "direct",
- "vhost": "/vhost",
- },
- "is": "routingKey",
- }
- },
- "publish": {
- "bindings": {
- "amqp": {
- "ack": True,
- "bindingVersion": "0.2.0",
- "cc": "key2",
- "deliveryMode": 1,
- "priority": 10,
- "mandatory": True,
- }
- },
- "message": {
- "$ref": "#/components/messages/key2:test-ex:Publisher:Message"
- },
- },
- "servers": ["development"],
- },
- }
diff --git a/tests/asyncapi/rabbit/test_router.py b/tests/asyncapi/rabbit/test_router.py
deleted file mode 100644
index 386f4960f5..0000000000
--- a/tests/asyncapi/rabbit/test_router.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.rabbit import (
- RabbitBroker,
- RabbitPublisher,
- RabbitQueue,
- RabbitRoute,
- RabbitRouter,
-)
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-from tests.asyncapi.base.publisher import PublisherTestcase
-from tests.asyncapi.base.router import RouterTestcase
-
-
-class TestRouter(RouterTestcase):
- broker_class = RabbitBroker
- router_class = RabbitRouter
- route_class = RabbitRoute
- publisher_class = RabbitPublisher
-
- def test_prefix(self):
- broker = self.broker_class()
-
- router = self.router_class(prefix="test_")
-
- @router.subscriber(RabbitQueue("test", routing_key="key"))
- async def handle(msg): ...
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert (
- schema
- == {
- "asyncapi": "2.6.0",
- "defaultContentType": "application/json",
- "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
- "servers": {
- "development": {
- "url": "amqp://guest:guest@localhost:5672/", # pragma: allowlist secret
- "protocol": "amqp",
- "protocolVersion": "0.9.1",
- }
- },
- "channels": {
- "test_test:_:Handle": {
- "servers": ["development"],
- "bindings": {
- "amqp": {
- "is": "routingKey",
- "bindingVersion": "0.2.0",
- "queue": {
- "name": "test_test",
- "durable": False,
- "exclusive": False,
- "autoDelete": False,
- "vhost": "/",
- },
- "exchange": {"type": "default", "vhost": "/"},
- }
- },
- "subscribe": {
- "bindings": {
- "amqp": {
- "cc": "test_key",
- "ack": True,
- "bindingVersion": "0.2.0",
- }
- },
- "message": {
- "$ref": "#/components/messages/test_test:_:Handle:Message"
- },
- },
- }
- },
- "components": {
- "messages": {
- "test_test:_:Handle:Message": {
- "title": "test_test:_:Handle:Message",
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {
- "$ref": "#/components/schemas/Handle:Message:Payload"
- },
- }
- },
- "schemas": {
- "Handle:Message:Payload": {"title": "Handle:Message:Payload"}
- },
- },
- }
- ), schema
-
-
-class TestRouterArguments(ArgumentsTestcase):
- broker_class = RabbitRouter
-
- def build_app(self, router):
- broker = RabbitBroker()
- broker.include_router(router)
- return FastStream(broker)
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = RabbitRouter
-
- def build_app(self, router):
- broker = RabbitBroker()
- broker.include_router(router)
- return FastStream(broker)
diff --git a/tests/asyncapi/rabbit/test_security.py b/tests/asyncapi/rabbit/test_security.py
deleted file mode 100644
index 88ea3f683c..0000000000
--- a/tests/asyncapi/rabbit/test_security.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import ssl
-
-from faststream.app import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.rabbit import RabbitBroker
-from faststream.security import (
- BaseSecurity,
- SASLPlaintext,
-)
-
-
-def test_base_security_schema():
- ssl_context = ssl.create_default_context()
- security = BaseSecurity(ssl_context=ssl_context)
-
- broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)
-
- assert (
- broker.url == "amqps://guest:guest@localhost:5672/" # pragma: allowlist secret
- ) # pragma: allowlist secret
- assert broker._connection_kwargs.get("ssl_context") is ssl_context
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "amqps",
- "protocolVersion": "0.9.1",
- "security": [],
- "url": "amqps://guest:guest@localhost:5672/", # pragma: allowlist secret
- }
- },
- }
-
-
-def test_plaintext_security_schema():
- ssl_context = ssl.create_default_context()
-
- security = SASLPlaintext(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = RabbitBroker("amqp://guest:guest@localhost/", security=security)
-
- assert (
- broker.url
- == "amqps://admin:password@localhost:5671/" # pragma: allowlist secret
- ) # pragma: allowlist secret
- assert broker._connection_kwargs.get("ssl_context") is ssl_context
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
- assert (
- schema
- == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {
- "messages": {},
- "schemas": {},
- "securitySchemes": {"user-password": {"type": "userPassword"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "amqps",
- "protocolVersion": "0.9.1",
- "security": [{"user-password": []}],
- "url": "amqps://admin:password@localhost:5671/", # pragma: allowlist secret
- }
- },
- }
- )
-
-
-def test_plaintext_security_schema_without_ssl():
- security = SASLPlaintext(
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)
-
- assert (
- broker.url
- == "amqp://admin:password@localhost:5672/" # pragma: allowlist secret
- ) # pragma: allowlist secret
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
- assert (
- schema
- == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {
- "messages": {},
- "schemas": {},
- "securitySchemes": {"user-password": {"type": "userPassword"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "amqp",
- "protocolVersion": "0.9.1",
- "security": [{"user-password": []}],
- "url": "amqp://admin:password@localhost:5672/", # pragma: allowlist secret
- }
- },
- }
- )
diff --git a/tests/a_docs/getting_started/cli/rabbit/__init__.py b/tests/asyncapi/rabbit/v2_6_0/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/cli/rabbit/__init__.py
rename to tests/asyncapi/rabbit/v2_6_0/__init__.py
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_arguments.py b/tests/asyncapi/rabbit/v2_6_0/test_arguments.py
new file mode 100644
index 0000000000..a8e2c4f745
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_arguments.py
@@ -0,0 +1,170 @@
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_class = RabbitBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.TOPIC),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "topic",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ "queue": {
+ "autoDelete": True,
+ "durable": False,
+ "exclusive": False,
+ "name": "test",
+ "vhost": "/",
+ },
+ },
+ }
+
+ def test_subscriber_fanout_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.FANOUT),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "fanout",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ }
+
+ def test_subscriber_headers_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.HEADERS),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "headers",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ }
+
+ def test_subscriber_xdelay_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.X_DELAYED_MESSAGE),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "x-delayed-message",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ }
+
+ def test_subscriber_consistent_hash_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.X_CONSISTENT_HASH),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "x-consistent-hash",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ }
+
+ def test_subscriber_modulus_hash_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.X_MODULUS_HASH),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "x-modulus-hash",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ }
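
Every migrated test above follows the same pattern: the deleted files built schemas via get_app_schema(FastStream(broker)), while the new ones call the AsyncAPI factory directly with an explicit schema_version. A minimal sketch of the new entry point, using only names that appear in the added imports (the queue name and handler are illustrative):

from faststream.rabbit import RabbitBroker
from faststream.specification.asyncapi import AsyncAPI

broker = RabbitBroker()

@broker.subscriber("test")
async def handle(msg) -> None: ...

# Previously: get_app_schema(FastStream(broker)).to_jsonable()
schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
assert schema["asyncapi"] == "2.6.0"
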
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_connection.py b/tests/asyncapi/rabbit/v2_6_0/test_connection.py
new file mode 100644
index 0000000000..15781dcf0e
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_connection.py
@@ -0,0 +1,118 @@
+from faststream.rabbit import RabbitBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ RabbitBroker(
+ "amqps://localhost",
+ port=5673,
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "amqps",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "url": "amqps://guest:guest@localhost:5673/", # pragma: allowlist secret
+ },
+ },
+ }
+
+
+def test_kwargs() -> None:
+ broker = RabbitBroker(
+ "amqp://guest:guest@localhost:5672/?heartbeat=300", # pragma: allowlist secret
+ host="127.0.0.1",
+ )
+
+ assert (
+ broker.url
+ == "amqp://guest:guest@127.0.0.1:5672/?heartbeat=300" # pragma: allowlist secret
+ )
+
+
+def test_custom() -> None:
+ broker = RabbitBroker(
+ "amqps://localhost",
+ specification_url="amqp://guest:guest@127.0.0.1:5672/vh", # pragma: allowlist secret
+ )
+
+ broker.publisher("test")
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert (
+ schema
+ == {
+ "asyncapi": "2.6.0",
+ "channels": {
+ "test:_:Publisher": {
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {"type": "default", "vhost": "/vh"},
+ "is": "routingKey",
+ "queue": {
+ "autoDelete": False,
+ "durable": False,
+ "exclusive": False,
+ "name": "test",
+ "vhost": "/vh",
+ },
+ },
+ },
+ "subscribe": {
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ "cc": "test",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/test:_:Publisher:Message",
+ },
+ },
+ "servers": ["development"],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:_:Publisher:Message": {
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/test:_:PublisherPayload",
+ },
+ "title": "test:_:Publisher:Message",
+ },
+ },
+ "schemas": {"test:_:PublisherPayload": {}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ "url": "amqp://guest:guest@127.0.0.1:5672/vh", # pragma: allowlist secret
+ },
+ },
+ }
+ ), schema
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_fastapi.py b/tests/asyncapi/rabbit/v2_6_0/test_fastapi.py
new file mode 100644
index 0000000000..29e46b3078
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_fastapi.py
@@ -0,0 +1,41 @@
+from faststream.rabbit.fastapi import RabbitRouter
+from faststream.rabbit.testing import TestRabbitBroker
+from faststream.security import SASLPlaintext
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v2_6_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_class = staticmethod(lambda: RabbitRouter().broker)
+ router_class = RabbitRouter
+ broker_wrapper = staticmethod(TestRabbitBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = staticmethod(lambda: RabbitRouter().broker)
+
+ def build_app(self, router):
+ return router
+
+
+def test_fastapi_security_schema() -> None:
+ security = SASLPlaintext(username="user", password="pass", use_ssl=False)
+
+ router = RabbitRouter(security=security)
+
+ schema = AsyncAPI(router.broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema["servers"]["development"] == {
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "url": "amqp://user:pass@localhost:5672/", # pragma: allowlist secret
+ }
+ assert schema["components"]["securitySchemes"] == {
+ "user-password": {"type": "userPassword"},
+ }
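
The FastAPI fixtures now expose the router's underlying broker via staticmethod(lambda: RabbitRouter().broker), because the AsyncAPI factory consumes a broker rather than a router. A minimal sketch of that wiring, assuming an illustrative handler and queue name:

from faststream.rabbit.fastapi import RabbitRouter
from faststream.specification.asyncapi import AsyncAPI

router = RabbitRouter()

@router.subscriber("test")  # illustrative queue name
async def handle(msg) -> None: ...

# The schema is generated from the broker behind the router.
schema = AsyncAPI(router.broker, schema_version="2.6.0").to_jsonable()
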
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_naming.py b/tests/asyncapi/rabbit/v2_6_0/test_naming.py
new file mode 100644
index 0000000000..2ee937f21e
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_naming.py
@@ -0,0 +1,104 @@
+from faststream.rabbit import RabbitBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class: type[RabbitBroker] = RabbitBroker
+
+ def test_subscriber_with_exchange(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", "exchange")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["test:exchange:Handle"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "test:exchange:Handle:Message",
+ ]
+
+ def test_publisher_with_exchange(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", "exchange")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["test:exchange:Publisher"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "test:exchange:Publisher:Message",
+ ]
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert (
+ schema
+ == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "amqp://guest:guest@localhost:5672/", # pragma: allowlist secret
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ },
+ },
+ "channels": {
+ "test:_:Handle": {
+ "servers": ["development"],
+ "bindings": {
+ "amqp": {
+ "is": "routingKey",
+ "bindingVersion": "0.2.0",
+ "queue": {
+ "name": "test",
+ "durable": False,
+ "exclusive": False,
+ "autoDelete": False,
+ "vhost": "/",
+ },
+ "exchange": {"type": "default", "vhost": "/"},
+ },
+ },
+ "publish": {
+ "bindings": {
+ "amqp": {
+ "cc": "test",
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/test:_:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test:_:Handle:Message": {
+ "title": "test:_:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {
+ "EmptyPayload": {"title": "EmptyPayload", "type": "null"},
+ },
+ },
+ }
+ )
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_publisher.py b/tests/asyncapi/rabbit/v2_6_0/test_publisher.py
new file mode 100644
index 0000000000..e24edd3fed
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_publisher.py
@@ -0,0 +1,196 @@
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_class = RabbitBroker
+
+ def test_just_exchange(self) -> None:
+ broker = self.broker_class("amqp://guest:guest@localhost:5672/vhost")
+
+ @broker.publisher(exchange="test-ex")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ assert schema["channels"] == {
+ "_:test-ex:Publisher": {
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "direct",
+ "vhost": "/vhost",
+ },
+ "is": "routingKey",
+ },
+ },
+ "subscribe": {
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/_:test-ex:Publisher:Message",
+ },
+ },
+ "servers": ["development"],
+ },
+ }, schema["channels"]
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.TOPIC),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "topic",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ "queue": {
+ "autoDelete": True,
+ "durable": False,
+ "exclusive": False,
+ "name": "test",
+ "vhost": "/",
+ },
+ },
+ }
+
+ def test_useless_queue_bindings(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.FANOUT),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ assert schema["channels"] == {
+ "_:test-ex:Publisher": {
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "fanout",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ },
+ "subscribe": {
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/_:test-ex:Publisher:Message",
+ },
+ },
+ "servers": ["development"],
+ },
+ }
+
+ def test_reusable_exchange(self) -> None:
+ broker = self.broker_class("amqp://guest:guest@localhost:5672/vhost")
+
+ @broker.publisher(exchange="test-ex", routing_key="key1")
+ @broker.publisher(exchange="test-ex", routing_key="key2", priority=10)
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+
+ assert schema["channels"] == {
+ "key1:test-ex:Publisher": {
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "direct",
+ "vhost": "/vhost",
+ },
+ "is": "routingKey",
+ },
+ },
+ "subscribe": {
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ "cc": "key1",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/key1:test-ex:Publisher:Message",
+ },
+ },
+ "servers": ["development"],
+ },
+ "key2:test-ex:Publisher": {
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.2.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "direct",
+ "vhost": "/vhost",
+ },
+ "is": "routingKey",
+ },
+ },
+ "subscribe": {
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ "cc": "key2",
+ "deliveryMode": 1,
+ "priority": 10,
+ "mandatory": True,
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/key2:test-ex:Publisher:Message",
+ },
+ },
+ "servers": ["development"],
+ },
+ }, schema["channels"]
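
Comparing the deleted 2.6.0 publisher tests with these regenerated ones shows one deliberate flip: publisher channels moved from a "publish" operation to "subscribe" (and handler channels from "subscribe" to "publish"), so the document now describes what external consumers subscribe to. A condensed sketch of what the new assertions pin down:

from faststream.rabbit import RabbitBroker
from faststream.specification.asyncapi import AsyncAPI

broker = RabbitBroker()

@broker.publisher("test")
async def handle(msg) -> None: ...

schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()

# The regenerated 2.6.0 document lists a publisher under "subscribe".
assert "subscribe" in schema["channels"]["test:_:Publisher"]
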
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_router.py b/tests/asyncapi/rabbit/v2_6_0/test_router.py
new file mode 100644
index 0000000000..8e042da398
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_router.py
@@ -0,0 +1,111 @@
+from faststream.rabbit import (
+ RabbitBroker,
+ RabbitPublisher,
+ RabbitQueue,
+ RabbitRoute,
+ RabbitRouter,
+)
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v2_6_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = RabbitBroker
+ router_class = RabbitRouter
+ route_class = RabbitRoute
+ publisher_class = RabbitPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber(RabbitQueue("test", routing_key="key"))
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert (
+ schema
+ == {
+ "asyncapi": "2.6.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "url": "amqp://guest:guest@localhost:5672/", # pragma: allowlist secret
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ },
+ },
+ "channels": {
+ "test_test:_:Handle": {
+ "servers": ["development"],
+ "bindings": {
+ "amqp": {
+ "is": "routingKey",
+ "bindingVersion": "0.2.0",
+ "queue": {
+ "name": "test_test",
+ "durable": False,
+ "exclusive": False,
+ "autoDelete": False,
+ "vhost": "/",
+ },
+ "exchange": {"type": "default", "vhost": "/"},
+ },
+ },
+ "publish": {
+ "bindings": {
+ "amqp": {
+ "cc": "test_key",
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ },
+ },
+ "message": {
+ "$ref": "#/components/messages/test_test:_:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:_:Handle:Message": {
+ "title": "test_test:_:Handle:Message",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+ ), schema
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = RabbitRouter
+
+ def build_app(self, router):
+ broker = RabbitBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = RabbitRouter
+
+ def build_app(self, router):
+ broker = RabbitBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_security.py b/tests/asyncapi/rabbit/v2_6_0/test_security.py
new file mode 100644
index 0000000000..6fcf9f753f
--- /dev/null
+++ b/tests/asyncapi/rabbit/v2_6_0/test_security.py
@@ -0,0 +1,118 @@
+import ssl
+
+from faststream.rabbit import RabbitBroker
+from faststream.security import (
+ BaseSecurity,
+ SASLPlaintext,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)
+
+ assert (
+ broker.url == "amqps://guest:guest@localhost:5672/" # pragma: allowlist secret
+ )
+ assert broker._connection_kwargs.get("ssl_context") is ssl_context
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqps",
+ "protocolVersion": "0.9.1",
+ "security": [],
+ "url": "amqps://guest:guest@localhost:5672/", # pragma: allowlist secret
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RabbitBroker("amqp://guest:guest@localhost/", security=security)
+
+ assert (
+ broker.url
+ == "amqps://admin:password@localhost:5671/" # pragma: allowlist secret
+ )
+ assert broker._connection_kwargs.get("ssl_context") is ssl_context
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+ assert (
+ schema
+ == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqps",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "url": "amqps://admin:password@localhost:5671/", # pragma: allowlist secret
+ },
+ },
+ }
+ )
+
+
+def test_plaintext_security_schema_without_ssl() -> None:
+ security = SASLPlaintext(
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)
+
+ assert (
+ broker.url
+ == "amqp://admin:password@localhost:5672/" # pragma: allowlist secret
+ )
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+ assert (
+ schema
+ == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "url": "amqp://admin:password@localhost:5672/", # pragma: allowlist secret
+ },
+ },
+ }
+ )
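
Security coverage is unchanged in substance between the two generations: a SASLPlaintext object rewrites the connection URL credentials and registers a userPassword scheme under components. A condensed sketch of the assertions above (the credentials are test fixtures, not real secrets):

from faststream.rabbit import RabbitBroker
from faststream.security import SASLPlaintext
from faststream.specification.asyncapi import AsyncAPI

security = SASLPlaintext(username="admin", password="password")  # pragma: allowlist secret
broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)

schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
assert schema["components"]["securitySchemes"] == {
    "user-password": {"type": "userPassword"},
}
assert schema["servers"]["development"]["security"] == [{"user-password": []}]
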
diff --git a/tests/a_docs/rabbit/__init__.py b/tests/asyncapi/rabbit/v3_0_0/__init__.py
similarity index 100%
rename from tests/a_docs/rabbit/__init__.py
rename to tests/asyncapi/rabbit/v3_0_0/__init__.py
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_arguments.py b/tests/asyncapi/rabbit/v3_0_0/test_arguments.py
new file mode 100644
index 0000000000..1b4ef1f730
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_arguments.py
@@ -0,0 +1,59 @@
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_factory = RabbitBroker
+
+ def test_subscriber_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.TOPIC),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "is": "queue",
+ "queue": {
+ "autoDelete": True,
+ "durable": False,
+ "exclusive": False,
+ "name": "test",
+ "vhost": "/",
+ },
+ },
+ }
+
+ def test_subscriber_fanout_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.FANOUT),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "queue": {
+ "autoDelete": True,
+ "durable": False,
+ "exclusive": False,
+ "name": "test",
+ "vhost": "/",
+ },
+ "is": "queue",
+ },
+ }
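
Against the 2.6.0 variant of the same test, the 0.3.0 AMQP channel binding drops the exchange object entirely and switches "is" from "routingKey" to "queue". A sketch of the difference, reusing the fixture's queue and exchange:

from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
from faststream.specification.asyncapi import AsyncAPI

broker = RabbitBroker()

@broker.subscriber(
    RabbitQueue("test", auto_delete=True),
    RabbitExchange("test-ex", type=ExchangeType.TOPIC),
)
async def handle(msg) -> None: ...

schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
key = next(iter(schema["channels"]))

# Only the queue is described at channel level under bindingVersion 0.3.0.
assert schema["channels"][key]["bindings"]["amqp"]["is"] == "queue"
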
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_connection.py b/tests/asyncapi/rabbit/v3_0_0/test_connection.py
new file mode 100644
index 0000000000..adf937a705
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_connection.py
@@ -0,0 +1,129 @@
+from faststream.rabbit import RabbitBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ RabbitBroker(
+ "amqps://localhost",
+ port=5673,
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "amqps",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "host": "guest:guest@localhost:5673", # pragma: allowlist secret
+ "pathname": "/",
+ },
+ },
+ }
+
+
+def test_kwargs() -> None:
+ broker = RabbitBroker(
+ "amqp://guest:guest@localhost:5672/?heartbeat=300", # pragma: allowlist secret
+ host="127.0.0.1",
+ )
+
+ assert (
+ broker.url
+ == "amqp://guest:guest@127.0.0.1:5672/?heartbeat=300" # pragma: allowlist secret
+ )
+
+
+def test_custom() -> None:
+ broker = RabbitBroker(
+ "amqps://localhost",
+ specification_url="amqp://guest:guest@127.0.0.1:5672/vh", # pragma: allowlist secret
+ )
+
+ broker.publisher("test")
+ schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {
+ "test:_:Publisher": {
+ "address": "test:_:Publisher",
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "exchange": {"type": "default", "vhost": "/vh"},
+ "is": "routingKey",
+ },
+ },
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "messages": {
+ "Message": {
+ "$ref": "#/components/messages/test:_:Publisher:Message",
+ },
+ },
+ },
+ },
+ "operations": {
+ "test:_:Publisher": {
+ "action": "send",
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "cc": [
+ "test",
+ ],
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "channel": {
+ "$ref": "#/channels/test:_:Publisher",
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/test:_:Publisher/messages/Message",
+ },
+ ],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:_:Publisher:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/test:_:Publisher:Message:Payload",
+ },
+ "title": "test:_:Publisher:Message",
+ },
+ },
+ "schemas": {"test:_:Publisher:Message:Payload": {}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ "host": "guest:guest@127.0.0.1:5672", # pragma: allowlist secret
+ "pathname": "/vh", # pragma: allowlist secret
+ },
+ },
+ }
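
The server object is the most visible 3.0.0 change in this file: 2.6.0 renders a single url string, while 3.0.0 splits it into host and pathname. A side-by-side sketch for one broker, with the exact renderings taken from the two test_custom variants:

from faststream.rabbit import RabbitBroker
from faststream.specification.asyncapi import AsyncAPI

broker = RabbitBroker(
    "amqps://localhost",
    specification_url="amqp://guest:guest@127.0.0.1:5672/vh",  # pragma: allowlist secret
)

v2 = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()["servers"]["development"]
v3 = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()["servers"]["development"]

assert v2["url"] == "amqp://guest:guest@127.0.0.1:5672/vh"  # pragma: allowlist secret
assert v3["host"] == "guest:guest@127.0.0.1:5672"  # pragma: allowlist secret
assert v3["pathname"] == "/vh"
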
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_fastapi.py b/tests/asyncapi/rabbit/v3_0_0/test_fastapi.py
new file mode 100644
index 0000000000..6bbd0d172a
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_fastapi.py
@@ -0,0 +1,42 @@
+from faststream.rabbit.fastapi import RabbitRouter
+from faststream.rabbit.testing import TestRabbitBroker
+from faststream.security import SASLPlaintext
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v3_0_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_factory = staticmethod(lambda: RabbitRouter().broker)
+ router_factory = RabbitRouter
+ broker_wrapper = staticmethod(TestRabbitBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_factory = staticmethod(lambda: RabbitRouter().broker)
+
+ def build_app(self, router):
+ return router
+
+
+def test_fastapi_security_schema() -> None:
+ security = SASLPlaintext(username="user", password="pass", use_ssl=False)
+
+ router = RabbitRouter(security=security)
+
+ schema = AsyncAPI(router.broker, schema_version="3.0.0").to_jsonable()
+
+ assert schema["servers"]["development"] == {
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "host": "user:pass@localhost:5672",
+ "pathname": "/",
+ }
+ assert schema["components"]["securitySchemes"] == {
+ "user-password": {"type": "userPassword"},
+ }
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_naming.py b/tests/asyncapi/rabbit/v3_0_0/test_naming.py
new file mode 100644
index 0000000000..2839a4505a
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_naming.py
@@ -0,0 +1,128 @@
+from faststream.rabbit import RabbitBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class: type[RabbitBroker] = RabbitBroker
+
+ def test_subscriber_with_exchange(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test", "exchange")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["test:exchange:Handle"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "test:exchange:Handle:SubscribeMessage",
+ ]
+
+ def test_publisher_with_exchange(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test", "exchange")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert list(schema["channels"].keys()) == ["test:exchange:Publisher"]
+
+ assert list(schema["components"]["messages"].keys()) == [
+ "test:exchange:Publisher:Message",
+ ]
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "servers": {
+ "development": {
+ "host": "guest:guest@localhost:5672", # pragma: allowlist secret
+ "pathname": "/",
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ },
+ },
+ "channels": {
+ "test:_:Handle": {
+ "address": "test:_:Handle",
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "bindings": {
+ "amqp": {
+ "is": "queue",
+ "bindingVersion": "0.3.0",
+ "queue": {
+ "name": "test",
+ "durable": False,
+ "exclusive": False,
+ "autoDelete": False,
+ "vhost": "/",
+ },
+ },
+ },
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test:_:Handle:SubscribeMessage",
+ },
+ },
+ },
+ },
+ "operations": {
+ "test:_:HandleSubscribe": {
+ "action": "receive",
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "cc": [
+ "test",
+ ],
+ },
+ },
+ "channel": {
+ "$ref": "#/channels/test:_:Handle",
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/test:_:Handle/messages/SubscribeMessage",
+ },
+ ],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:_:Handle:SubscribeMessage": {
+ "title": "test:_:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ }
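
AsyncAPI 3.0.0 also relocates the verbs: a channel now holds only its address, bindings, and message refs, while a top-level operations object carries the action ("receive"/"send") and points back at the channel by $ref. A sketch of the shape the naming test fixes (the handler name drives the keys):

from faststream.rabbit import RabbitBroker
from faststream.specification.asyncapi import AsyncAPI

broker = RabbitBroker()

@broker.subscriber("test")
async def handle() -> None: ...

schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()

# One channel per endpoint, one operation keyed with a Subscribe suffix.
assert "test:_:Handle" in schema["channels"]
op = schema["operations"]["test:_:HandleSubscribe"]
assert op["action"] == "receive"
assert op["channel"] == {"$ref": "#/channels/test:_:Handle"}
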
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_publisher.py b/tests/asyncapi/rabbit/v3_0_0/test_publisher.py
new file mode 100644
index 0000000000..b4826fe7e0
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_publisher.py
@@ -0,0 +1,258 @@
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_factory = RabbitBroker
+
+ def test_just_exchange(self) -> None:
+ broker = self.broker_factory("amqp://guest:guest@localhost:5672/vhost")
+
+ @broker.publisher(exchange="test-ex")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ assert schema["channels"] == {
+ "_:test-ex:Publisher": {
+ "address": "_:test-ex:Publisher",
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "direct",
+ "vhost": "/vhost",
+ },
+ "is": "routingKey",
+ },
+ },
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "messages": {
+ "Message": {
+ "$ref": "#/components/messages/_:test-ex:Publisher:Message",
+ },
+ },
+ },
+ }, schema["channels"]
+
+ assert schema["operations"] == {
+ "_:test-ex:Publisher": {
+ "action": "send",
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "channel": {
+ "$ref": "#/channels/_:test-ex:Publisher",
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/_:test-ex:Publisher/messages/Message",
+ },
+ ],
+ },
+ }
+
+ def test_publisher_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.TOPIC),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "topic",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ }
+
+ def test_useless_queue_bindings(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher(
+ RabbitQueue("test", auto_delete=True),
+ RabbitExchange("test-ex", type=ExchangeType.FANOUT),
+ )
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ assert schema["channels"] == {
+ "_:test-ex:Publisher": {
+ "address": "_:test-ex:Publisher",
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "fanout",
+ "vhost": "/",
+ },
+ "is": "routingKey",
+ },
+ },
+ "messages": {
+ "Message": {
+ "$ref": "#/components/messages/_:test-ex:Publisher:Message",
+ },
+ },
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ },
+ }
+
+ assert schema["operations"] == {
+ "_:test-ex:Publisher": {
+ "action": "send",
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "channel": {"$ref": "#/channels/_:test-ex:Publisher"},
+ "messages": [
+ {"$ref": "#/channels/_:test-ex:Publisher/messages/Message"}
+ ],
+ },
+ }
+
+ def test_reusable_exchange(self) -> None:
+ broker = self.broker_factory("amqp://guest:guest@localhost:5672/vhost")
+
+ @broker.publisher(exchange="test-ex", routing_key="key1")
+ @broker.publisher(exchange="test-ex", routing_key="key2", priority=10)
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+
+ assert schema["channels"] == {
+ "key1:test-ex:Publisher": {
+ "address": "key1:test-ex:Publisher",
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "direct",
+ "vhost": "/vhost",
+ },
+ "is": "routingKey",
+ },
+ },
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "messages": {
+ "Message": {
+ "$ref": "#/components/messages/key1:test-ex:Publisher:Message",
+ },
+ },
+ },
+ "key2:test-ex:Publisher": {
+ "address": "key2:test-ex:Publisher",
+ "bindings": {
+ "amqp": {
+ "bindingVersion": "0.3.0",
+ "exchange": {
+ "autoDelete": False,
+ "durable": False,
+ "name": "test-ex",
+ "type": "direct",
+ "vhost": "/vhost",
+ },
+ "is": "routingKey",
+ },
+ },
+ "servers": [
+ {
+ "$ref": "#/servers/development",
+ },
+ ],
+ "messages": {
+ "Message": {
+ "$ref": "#/components/messages/key2:test-ex:Publisher:Message",
+ },
+ },
+ },
+ }
+
+ assert schema["operations"] == {
+ "key1:test-ex:Publisher": {
+ "action": "send",
+ "channel": {
+ "$ref": "#/channels/key1:test-ex:Publisher",
+ },
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "cc": [
+ "key1",
+ ],
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
+ "messages": [
+ {"$ref": "#/channels/key1:test-ex:Publisher/messages/Message"},
+ ],
+ },
+ "key2:test-ex:Publisher": {
+ "action": "send",
+ "channel": {
+ "$ref": "#/channels/key2:test-ex:Publisher",
+ },
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "cc": [
+ "key2",
+ ],
+ "deliveryMode": 1,
+ "priority": 10,
+ "mandatory": True,
+ },
+ },
+ "messages": [
+ {"$ref": "#/channels/key2:test-ex:Publisher/messages/Message"},
+ ],
+ },
+ }
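
One binding detail worth noting from test_reusable_exchange: under bindingVersion 0.3.0 the routing key is serialized as a cc list rather than the scalar string the 2.6.0 documents use, and each stacked publisher decorator yields its own channel/operation pair. A compressed sketch:

from faststream.rabbit import RabbitBroker
from faststream.specification.asyncapi import AsyncAPI

broker = RabbitBroker()

@broker.publisher(exchange="test-ex", routing_key="key1")
@broker.publisher(exchange="test-ex", routing_key="key2", priority=10)
async def handle(msg) -> None: ...

schema = AsyncAPI(broker, schema_version="3.0.0").to_jsonable()

amqp = schema["operations"]["key1:test-ex:Publisher"]["bindings"]["amqp"]
assert amqp["cc"] == ["key1"]  # a list under 0.3.0; plain "key1" under 2.6.0
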
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_router.py b/tests/asyncapi/rabbit/v3_0_0/test_router.py
new file mode 100644
index 0000000000..e1bb277da6
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_router.py
@@ -0,0 +1,125 @@
+from faststream.rabbit import (
+ RabbitBroker,
+ RabbitPublisher,
+ RabbitQueue,
+ RabbitRoute,
+ RabbitRouter,
+)
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v3_0_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = RabbitBroker
+ router_class = RabbitRouter
+ route_class = RabbitRoute
+ publisher_class = RabbitPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber(RabbitQueue("test", routing_key="key"))
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "guest:guest@localhost:5672",
+ "pathname": "/",
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ },
+ },
+ "channels": {
+ "test_test:_:Handle": {
+ "address": "test_test:_:Handle",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_test:_:Handle:SubscribeMessage",
+ },
+ },
+ "bindings": {
+ "amqp": {
+ "is": "queue",
+ "bindingVersion": "0.3.0",
+ "queue": {
+ "name": "test_test",
+ "durable": False,
+ "exclusive": False,
+ "autoDelete": False,
+ "vhost": "/",
+ },
+ },
+ },
+ },
+ },
+ "operations": {
+ "test_test:_:HandleSubscribe": {
+ "action": "receive",
+ "bindings": {
+ "amqp": {
+ "cc": [
+ "test_key",
+ ],
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ },
+ },
+ "messages": [
+ {
+ "$ref": "#/channels/test_test:_:Handle/messages/SubscribeMessage",
+ },
+ ],
+ "channel": {"$ref": "#/channels/test_test:_:Handle"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:_:Handle:SubscribeMessage": {
+ "title": "test_test:_:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }, schema
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = RabbitRouter
+
+ def build_app(self, router):
+ broker = RabbitBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = RabbitRouter
+
+ def build_app(self, router):
+ broker = RabbitBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_security.py b/tests/asyncapi/rabbit/v3_0_0/test_security.py
new file mode 100644
index 0000000000..c0e0ceeb38
--- /dev/null
+++ b/tests/asyncapi/rabbit/v3_0_0/test_security.py
@@ -0,0 +1,127 @@
+import ssl
+
+from faststream.rabbit import RabbitBroker
+from faststream.security import (
+ BaseSecurity,
+ SASLPlaintext,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)
+
+ assert (
+ broker.url == "amqps://guest:guest@localhost:5672/" # pragma: allowlist secret
+ )
+ assert broker._connection_kwargs.get("ssl_context") is ssl_context
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqps",
+ "protocolVersion": "0.9.1",
+ "security": [],
+ "host": "guest:guest@localhost:5672", # pragma: allowlist secret
+ "pathname": "/",
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RabbitBroker("amqp://guest:guest@localhost/", security=security)
+
+ assert (
+ broker.url
+ == "amqps://admin:password@localhost:5671/" # pragma: allowlist secret
+ )
+ assert broker._connection_kwargs.get("ssl_context") is ssl_context
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqps",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "host": "admin:password@localhost:5671", # pragma: allowlist secret
+ "pathname": "/",
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema_without_ssl() -> None:
+ security = SASLPlaintext(
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RabbitBroker("amqp://guest:guest@localhost:5672/", security=security)
+
+ assert (
+ broker.url
+ == "amqp://admin:password@localhost:5672/" # pragma: allowlist secret
+ )
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqp",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "host": "admin:password@localhost:5672", # pragma: allowlist secret
+ "pathname": "/", # pragma: allowlist secret
+ },
+ },
+ }
diff --git a/tests/asyncapi/redis/test_arguments.py b/tests/asyncapi/redis/test_arguments.py
deleted file mode 100644
index 3f64aba9b6..0000000000
--- a/tests/asyncapi/redis/test_arguments.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.redis import RedisBroker, StreamSub
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-
-
-class TestArguments(ArgumentsTestcase):
- broker_class = RedisBroker
-
- def test_channel_subscriber(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {
- "bindingVersion": "custom",
- "channel": "test",
- "method": "subscribe",
- }
- }
-
- def test_channel_pattern_subscriber(self):
- broker = self.broker_class()
-
- @broker.subscriber("test.{path}")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {
- "bindingVersion": "custom",
- "channel": "test.*",
- "method": "psubscribe",
- }
- }
-
- def test_list_subscriber(self):
- broker = self.broker_class()
-
- @broker.subscriber(list="test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {"bindingVersion": "custom", "channel": "test", "method": "lpop"}
- }
-
- def test_stream_subscriber(self):
- broker = self.broker_class()
-
- @broker.subscriber(stream="test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {"bindingVersion": "custom", "channel": "test", "method": "xread"}
- }
-
- def test_stream_group_subscriber(self):
- broker = self.broker_class()
-
- @broker.subscriber(stream=StreamSub("test", group="group", consumer="consumer"))
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {
- "bindingVersion": "custom",
- "channel": "test",
- "consumer_name": "consumer",
- "group_name": "group",
- "method": "xreadgroup",
- }
- }
diff --git a/tests/asyncapi/redis/test_connection.py b/tests/asyncapi/redis/test_connection.py
deleted file mode 100644
index a5719d4a77..0000000000
--- a/tests/asyncapi/redis/test_connection.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.asyncapi.schema import Tag
-from faststream.redis import RedisBroker
-
-
-def test_base():
- schema = get_app_schema(
- FastStream(
- RedisBroker(
- "redis://localhost:6379",
- protocol="plaintext",
- protocol_version="0.9.0",
- description="Test description",
- tags=(Tag(name="some-tag", description="experimental"),),
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "description": "Test description",
- "protocol": "plaintext",
- "protocolVersion": "0.9.0",
- "tags": [{"description": "experimental", "name": "some-tag"}],
- "url": "redis://localhost:6379",
- }
- },
- }, schema
-
-
-def test_custom():
- schema = get_app_schema(
- FastStream(
- RedisBroker(
- "redis://localhost:6379", asyncapi_url="rediss://127.0.0.1:8000"
- )
- )
- ).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "rediss",
- "protocolVersion": "custom",
- "url": "rediss://127.0.0.1:8000",
- }
- },
- }
diff --git a/tests/asyncapi/redis/test_fastapi.py b/tests/asyncapi/redis/test_fastapi.py
deleted file mode 100644
index 1a5466d4e8..0000000000
--- a/tests/asyncapi/redis/test_fastapi.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from typing import Type
-
-from faststream.redis import TestRedisBroker
-from faststream.redis.fastapi import RedisRouter
-from tests.asyncapi.base.arguments import FastAPICompatible
-from tests.asyncapi.base.fastapi import FastAPITestCase
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestRouterArguments(FastAPITestCase, FastAPICompatible):
- broker_class: Type[RedisRouter] = RedisRouter
- broker_wrapper = staticmethod(TestRedisBroker)
-
- def build_app(self, router):
- return router
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = RedisRouter
-
- def build_app(self, router):
- return router
diff --git a/tests/asyncapi/redis/test_naming.py b/tests/asyncapi/redis/test_naming.py
deleted file mode 100644
index 92bcb5b0f9..0000000000
--- a/tests/asyncapi/redis/test_naming.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import pytest
-
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.redis import RedisBroker
-from tests.asyncapi.base.naming import NamingTestCase
-
-
-class TestNaming(NamingTestCase):
- broker_class = RedisBroker
-
- def test_base(self):
- broker = self.broker_class()
-
- @broker.subscriber("test")
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {
- "test:Handle": {
- "bindings": {
- "redis": {
- "bindingVersion": "custom",
- "channel": "test",
- "method": "subscribe",
- }
- },
- "servers": ["development"],
- "subscribe": {
- "message": {"$ref": "#/components/messages/test:Handle:Message"}
- },
- }
- },
- "components": {
- "messages": {
- "test:Handle:Message": {
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {"$ref": "#/components/schemas/EmptyPayload"},
- "title": "test:Handle:Message",
- }
- },
- "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "redis",
- "protocolVersion": "custom",
- "url": "redis://localhost:6379",
- }
- },
- }, schema
-
- @pytest.mark.parametrize(
- "args",
- ( # noqa: PT007
- pytest.param({"channel": "test"}, id="channel"),
- pytest.param({"list": "test"}, id="list"),
- pytest.param({"stream": "test"}, id="stream"),
- ),
- )
- def test_subscribers_variations(self, args):
- broker = self.broker_class()
-
- @broker.subscriber(**args)
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker))
- assert list(schema.channels.keys()) == ["test:Handle"]
-
- @pytest.mark.parametrize(
- "args",
- ( # noqa: PT007
- pytest.param({"channel": "test"}, id="channel"),
- pytest.param({"list": "test"}, id="list"),
- pytest.param({"stream": "test"}, id="stream"),
- ),
- )
- def test_publisher_variations(self, args):
- broker = self.broker_class()
-
- @broker.publisher(**args)
- async def handle(): ...
-
- schema = get_app_schema(FastStream(broker))
- assert list(schema.channels.keys()) == ["test:Publisher"]
diff --git a/tests/asyncapi/redis/test_publisher.py b/tests/asyncapi/redis/test_publisher.py
deleted file mode 100644
index 8a82bca90d..0000000000
--- a/tests/asyncapi/redis/test_publisher.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from faststream.asyncapi.generate import get_app_schema
-from faststream.redis import RedisBroker
-from tests.asyncapi.base.publisher import PublisherTestcase
-
-
-class TestArguments(PublisherTestcase):
- broker_class = RedisBroker
-
- def test_channel_publisher(self):
- broker = self.broker_class()
-
- @broker.publisher("test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {
- "bindingVersion": "custom",
- "channel": "test",
- "method": "publish",
- }
- }
-
- def test_list_publisher(self):
- broker = self.broker_class()
-
- @broker.publisher(list="test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {"bindingVersion": "custom", "channel": "test", "method": "rpush"}
- }
-
- def test_stream_publisher(self):
- broker = self.broker_class()
-
- @broker.publisher(stream="test")
- async def handle(msg): ...
-
- schema = get_app_schema(self.build_app(broker)).to_jsonable()
- key = tuple(schema["channels"].keys())[0] # noqa: RUF015
-
- assert schema["channels"][key]["bindings"] == {
- "redis": {"bindingVersion": "custom", "channel": "test", "method": "xadd"}
- }
diff --git a/tests/asyncapi/redis/test_router.py b/tests/asyncapi/redis/test_router.py
deleted file mode 100644
index eff7d40003..0000000000
--- a/tests/asyncapi/redis/test_router.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from faststream import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.redis import RedisBroker, RedisPublisher, RedisRoute, RedisRouter
-from tests.asyncapi.base.arguments import ArgumentsTestcase
-from tests.asyncapi.base.publisher import PublisherTestcase
-from tests.asyncapi.base.router import RouterTestcase
-
-
-class TestRouter(RouterTestcase):
- broker_class = RedisBroker
- router_class = RedisRouter
- route_class = RedisRoute
- publisher_class = RedisPublisher
-
- def test_prefix(self):
- broker = self.broker_class()
-
- router = self.router_class(prefix="test_")
-
- @router.subscriber("test")
- async def handle(msg): ...
-
- broker.include_router(router)
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {
- "test_test:Handle": {
- "bindings": {
- "redis": {
- "bindingVersion": "custom",
- "channel": "test_test",
- "method": "subscribe",
- }
- },
- "servers": ["development"],
- "subscribe": {
- "message": {
- "$ref": "#/components/messages/test_test:Handle:Message"
- }
- },
- }
- },
- "components": {
- "messages": {
- "test_test:Handle:Message": {
- "correlationId": {
- "location": "$message.header#/correlation_id"
- },
- "payload": {
- "$ref": "#/components/schemas/Handle:Message:Payload"
- },
- "title": "test_test:Handle:Message",
- }
- },
- "schemas": {
- "Handle:Message:Payload": {"title": "Handle:Message:Payload"}
- },
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "redis",
- "protocolVersion": "custom",
- "url": "redis://localhost:6379",
- }
- },
- }
-
-
-class TestRouterArguments(ArgumentsTestcase):
- broker_class = RedisRouter
-
- def build_app(self, router):
- broker = RedisBroker()
- broker.include_router(router)
- return FastStream(broker)
-
-
-class TestRouterPublisher(PublisherTestcase):
- broker_class = RedisRouter
-
- def build_app(self, router):
- broker = RedisBroker()
- broker.include_router(router)
- return FastStream(broker)
diff --git a/tests/asyncapi/redis/test_security.py b/tests/asyncapi/redis/test_security.py
deleted file mode 100644
index b9ef40b41a..0000000000
--- a/tests/asyncapi/redis/test_security.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import ssl
-
-from faststream.app import FastStream
-from faststream.asyncapi.generate import get_app_schema
-from faststream.redis import RedisBroker
-from faststream.security import (
- BaseSecurity,
- SASLPlaintext,
-)
-
-
-def test_base_security_schema():
- ssl_context = ssl.create_default_context()
- security = BaseSecurity(ssl_context=ssl_context)
-
- broker = RedisBroker("rediss://localhost:6379/", security=security)
-
- assert (
- broker.url == "rediss://localhost:6379/" # pragma: allowlist secret
- ) # pragma: allowlist secret
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "rediss",
- "protocolVersion": "custom",
- "security": [],
- "url": "rediss://localhost:6379/",
- }
- },
- }
-
-
-def test_plaintext_security_schema():
- ssl_context = ssl.create_default_context()
-
- security = SASLPlaintext(
- ssl_context=ssl_context,
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = RedisBroker("redis://localhost:6379/", security=security)
-
- assert (
- broker.url == "redis://localhost:6379/" # pragma: allowlist secret
- ) # pragma: allowlist secret
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {
- "messages": {},
- "schemas": {},
- "securitySchemes": {"user-password": {"type": "userPassword"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "redis",
- "protocolVersion": "custom",
- "security": [{"user-password": []}],
- "url": "redis://localhost:6379/",
- }
- },
- }
-
-
-def test_plaintext_security_schema_without_ssl():
- security = SASLPlaintext(
- username="admin",
- password="password", # pragma: allowlist secret
- )
-
- broker = RedisBroker("redis://localhost:6379/", security=security)
-
- assert (
- broker.url == "redis://localhost:6379/" # pragma: allowlist secret
- ) # pragma: allowlist secret
-
- schema = get_app_schema(FastStream(broker)).to_jsonable()
-
- assert schema == {
- "asyncapi": "2.6.0",
- "channels": {},
- "components": {
- "messages": {},
- "schemas": {},
- "securitySchemes": {"user-password": {"type": "userPassword"}},
- },
- "defaultContentType": "application/json",
- "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
- "servers": {
- "development": {
- "protocol": "redis",
- "protocolVersion": "custom",
- "security": [{"user-password": []}],
- "url": "redis://localhost:6379/",
- }
- },
- }
diff --git a/tests/a_docs/getting_started/cli/redis/__init__.py b/tests/asyncapi/redis/v2_6_0/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/cli/redis/__init__.py
rename to tests/asyncapi/redis/v2_6_0/__init__.py
diff --git a/tests/asyncapi/redis/v2_6_0/test_arguments.py b/tests/asyncapi/redis/v2_6_0/test_arguments.py
new file mode 100644
index 0000000000..403cccad84
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_arguments.py
@@ -0,0 +1,88 @@
+from faststream.redis import RedisBroker, StreamSub
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_class = RedisBroker
+
+ def test_channel_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "method": "subscribe",
+ },
+ }
+
+ def test_channel_pattern_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test.{path}")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
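+        # The {path} placeholder is rendered as the glob "test.*" and consumed via PSUBSCRIBE.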
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test.*",
+ "method": "psubscribe",
+ },
+ }
+
+ def test_list_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(list="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "lpop"},
+ }
+
+ def test_stream_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(stream="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "xread"},
+ }
+
+ def test_stream_group_subscriber(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(stream=StreamSub("test", group="group", consumer="consumer"))
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
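+        # Consumer-group reads expose the group/consumer names alongside the XREADGROUP method.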
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "consumerName": "consumer",
+ "groupName": "group",
+ "method": "xreadgroup",
+ },
+ }
diff --git a/tests/asyncapi/redis/v2_6_0/test_connection.py b/tests/asyncapi/redis/v2_6_0/test_connection.py
new file mode 100644
index 0000000000..194371e767
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_connection.py
@@ -0,0 +1,59 @@
+from faststream.redis import RedisBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ RedisBroker(
+ "redis://localhost:6379",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "url": "redis://localhost:6379",
+ },
+ },
+ }, schema
+
+
+def test_custom() -> None:
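+    # specification_url swaps the documented server address; the positional URL stays the connection target.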
+ schema = AsyncAPI(
+ RedisBroker(
+ "redis://localhost:6379",
+ specification_url="rediss://127.0.0.1:8000",
+ ),
+ schema_version="2.6.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "rediss",
+ "protocolVersion": "custom",
+ "url": "rediss://127.0.0.1:8000",
+ },
+ },
+ }
diff --git a/tests/asyncapi/redis/v2_6_0/test_fastapi.py b/tests/asyncapi/redis/v2_6_0/test_fastapi.py
new file mode 100644
index 0000000000..3ccfff1c8c
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_fastapi.py
@@ -0,0 +1,23 @@
+from faststream.redis import TestRedisBroker
+from faststream.redis.fastapi import RedisRouter
+from tests.asyncapi.base.v2_6_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v2_6_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
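+    # A fresh RedisRouter (and hence a fresh broker) is built per access; staticmethod()
+    # keeps the lambda from being rebound as an instance method on the test class.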
+ broker_class = staticmethod(lambda: RedisRouter().broker)
+ router_class = RedisRouter
+ broker_wrapper = staticmethod(TestRedisBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = staticmethod(lambda: RedisRouter().broker)
+
+ def build_app(self, router):
+ return router
diff --git a/tests/asyncapi/redis/v2_6_0/test_naming.py b/tests/asyncapi/redis/v2_6_0/test_naming.py
new file mode 100644
index 0000000000..e2558bb9a6
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_naming.py
@@ -0,0 +1,94 @@
+import pytest
+
+from faststream.redis import RedisBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = RedisBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {
+ "test:Handle": {
+ "bindings": {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "method": "subscribe",
+ },
+ },
+ "servers": ["development"],
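+                # AsyncAPI 2.x names operations from the client's viewpoint, so this subscriber is listed under "publish".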
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test:Handle:Message"
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:Message": {
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ "title": "test:Handle:Message",
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "url": "redis://localhost:6379",
+ },
+ },
+ }, schema
+
+ @pytest.mark.parametrize(
+ "args",
+ (
+ pytest.param({"channel": "test"}, id="channel"),
+ pytest.param({"list": "test"}, id="list"),
+ pytest.param({"stream": "test"}, id="stream"),
+ ),
+ )
+ def test_subscribers_variations(self, args) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(**args)
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker)
+ assert list(schema.to_jsonable()["channels"].keys()) == ["test:Handle"]
+
+ @pytest.mark.parametrize(
+ "args",
+ (
+ pytest.param({"channel": "test"}, id="channel"),
+ pytest.param({"list": "test"}, id="list"),
+ pytest.param({"stream": "test"}, id="stream"),
+ ),
+ )
+ def test_publisher_variations(self, args) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher(**args)
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker)
+ assert list(schema.to_jsonable()["channels"].keys()) == ["test:Publisher"]
diff --git a/tests/asyncapi/redis/v2_6_0/test_publisher.py b/tests/asyncapi/redis/v2_6_0/test_publisher.py
new file mode 100644
index 0000000000..939f79bd32
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_publisher.py
@@ -0,0 +1,50 @@
+from faststream.redis import RedisBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_class = RedisBroker
+
+ def test_channel_publisher(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "method": "publish",
+ },
+ }
+
+ def test_list_publisher(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher(list="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "rpush"},
+ }
+
+ def test_stream_publisher(self) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher(stream="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="2.6.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "xadd"},
+ }
diff --git a/tests/asyncapi/redis/v2_6_0/test_router.py b/tests/asyncapi/redis/v2_6_0/test_router.py
new file mode 100644
index 0000000000..7d37538dbc
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_router.py
@@ -0,0 +1,89 @@
+from faststream.redis import RedisBroker, RedisPublisher, RedisRoute, RedisRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v2_6_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = RedisBroker
+ router_class = RedisRouter
+ route_class = RedisRoute
+ publisher_class = RedisPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
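+        # The "test_" prefix lands in the channel name and in every derived identifier below.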
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {
+ "test_test:Handle": {
+ "bindings": {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test_test",
+ "method": "subscribe",
+ },
+ },
+ "servers": ["development"],
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/test_test:Handle:Message",
+ },
+ },
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:Message": {
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ "title": "test_test:Handle:Message",
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "url": "redis://localhost:6379",
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = RedisRouter
+
+ def build_app(self, router):
+ broker = RedisBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = RedisRouter
+
+ def build_app(self, router):
+ broker = RedisBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/redis/v2_6_0/test_security.py b/tests/asyncapi/redis/v2_6_0/test_security.py
new file mode 100644
index 0000000000..e03dd85e54
--- /dev/null
+++ b/tests/asyncapi/redis/v2_6_0/test_security.py
@@ -0,0 +1,110 @@
+import ssl
+
+from faststream.redis import RedisBroker
+from faststream.security import (
+ BaseSecurity,
+ SASLPlaintext,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = RedisBroker("rediss://localhost:6379/", security=security)
+
+ assert (
+ broker.url == "rediss://localhost:6379/" # pragma: allowlist secret
+    )
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "rediss",
+ "protocolVersion": "custom",
+ "security": [],
+ "url": "rediss://localhost:6379/",
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RedisBroker("redis://localhost:6379/", security=security)
+
+ assert (
+ broker.url == "redis://localhost:6379/" # pragma: allowlist secret
+    )
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "security": [{"user-password": []}],
+ "url": "redis://localhost:6379/",
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema_without_ssl() -> None:
+ security = SASLPlaintext(
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RedisBroker("redis://localhost:6379/", security=security)
+
+ assert (
+ broker.url == "redis://localhost:6379/" # pragma: allowlist secret
+    )
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "security": [{"user-password": []}],
+ "url": "redis://localhost:6379/",
+ },
+ },
+ }
diff --git a/tests/a_docs/redis/__init__.py b/tests/asyncapi/redis/v3_0_0/__init__.py
similarity index 100%
rename from tests/a_docs/redis/__init__.py
rename to tests/asyncapi/redis/v3_0_0/__init__.py
diff --git a/tests/asyncapi/redis/v3_0_0/test_arguments.py b/tests/asyncapi/redis/v3_0_0/test_arguments.py
new file mode 100644
index 0000000000..0def5e4f41
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_arguments.py
@@ -0,0 +1,86 @@
+from faststream.redis import RedisBroker, StreamSub
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.arguments import ArgumentsTestcase
+
+
+class TestArguments(ArgumentsTestcase):
+ broker_factory = RedisBroker
+
+ def test_channel_subscriber(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "method": "subscribe",
+ },
+ }
+
+ def test_channel_pattern_subscriber(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber("test.{path}")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test.*",
+ "method": "psubscribe",
+ },
+ }
+
+ def test_list_subscriber(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber(list="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "lpop"},
+ }
+
+ def test_stream_subscriber(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber(stream="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "xread"},
+ }
+
+ def test_stream_group_subscriber(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.subscriber(stream=StreamSub("test", group="group", consumer="consumer"))
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "consumerName": "consumer",
+ "groupName": "group",
+ "method": "xreadgroup",
+ },
+ }
diff --git a/tests/asyncapi/redis/v3_0_0/test_connection.py b/tests/asyncapi/redis/v3_0_0/test_connection.py
new file mode 100644
index 0000000000..968e67b464
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_connection.py
@@ -0,0 +1,63 @@
+from faststream.redis import RedisBroker
+from faststream.specification import Tag
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base() -> None:
+ schema = AsyncAPI(
+ RedisBroker(
+ "redis://localhost:6379",
+ protocol="plaintext",
+ protocol_version="0.9.0",
+ description="Test description",
+ tags=(Tag(name="some-tag", description="experimental"),),
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
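+        # AsyncAPI 3.0.0 drops the single server "url" in favour of split host/pathname fields.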
+ "servers": {
+ "development": {
+ "description": "Test description",
+ "protocol": "plaintext",
+ "protocolVersion": "0.9.0",
+ "tags": [{"description": "experimental", "name": "some-tag"}],
+ "host": "localhost:6379",
+ "pathname": "",
+ },
+ },
+ }, schema
+
+
+def test_custom() -> None:
+ schema = AsyncAPI(
+ RedisBroker(
+ "redis://localhost:6379",
+ specification_url="rediss://127.0.0.1:8000",
+ ),
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "rediss",
+ "protocolVersion": "custom",
+ "host": "127.0.0.1:8000",
+ "pathname": "",
+ },
+ },
+ }
diff --git a/tests/asyncapi/redis/v3_0_0/test_fastapi.py b/tests/asyncapi/redis/v3_0_0/test_fastapi.py
new file mode 100644
index 0000000000..fc75b0f092
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_fastapi.py
@@ -0,0 +1,21 @@
+from faststream.redis import TestRedisBroker
+from faststream.redis.fastapi import RedisRouter
+from tests.asyncapi.base.v3_0_0.arguments import FastAPICompatible
+from tests.asyncapi.base.v3_0_0.fastapi import FastAPITestCase
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestRouterArguments(FastAPITestCase, FastAPICompatible):
+ broker_factory = staticmethod(lambda: RedisRouter().broker)
+ router_factory = RedisRouter
+ broker_wrapper = staticmethod(TestRedisBroker)
+
+ def build_app(self, router):
+ return router
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_factory = staticmethod(lambda: RedisRouter().broker)
+
+ def build_app(self, router):
+ return router
diff --git a/tests/asyncapi/redis/v3_0_0/test_naming.py b/tests/asyncapi/redis/v3_0_0/test_naming.py
new file mode 100644
index 0000000000..098e7ed2e9
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_naming.py
@@ -0,0 +1,110 @@
+import pytest
+
+from faststream.redis import RedisBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.naming import NamingTestCase
+
+
+class TestNaming(NamingTestCase):
+ broker_class = RedisBroker
+
+ def test_base(self) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber("test")
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {
+ "test:Handle": {
+ "address": "test:Handle",
+ "bindings": {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "method": "subscribe",
+ },
+ },
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test:Handle:SubscribeMessage",
+ },
+ },
+ },
+ },
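+            # 3.0.0 moves the operation verbs out of channels into a dedicated top-level "operations" map.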
+ "operations": {
+ "test:HandleSubscribe": {
+ "action": "receive",
+ "channel": {
+ "$ref": "#/channels/test:Handle",
+ },
+ "messages": [
+ {"$ref": "#/channels/test:Handle/messages/SubscribeMessage"},
+ ],
+ },
+ },
+ "components": {
+ "messages": {
+ "test:Handle:SubscribeMessage": {
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {"$ref": "#/components/schemas/EmptyPayload"},
+ "title": "test:Handle:SubscribeMessage",
+ },
+ },
+ "schemas": {"EmptyPayload": {"title": "EmptyPayload", "type": "null"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "host": "localhost:6379",
+ "pathname": "",
+ },
+ },
+ }, schema
+
+ @pytest.mark.parametrize(
+ "args",
+ (
+ pytest.param({"channel": "test"}, id="channel"),
+ pytest.param({"list": "test"}, id="list"),
+ pytest.param({"stream": "test"}, id="stream"),
+ ),
+ )
+ def test_subscribers_variations(self, args) -> None:
+ broker = self.broker_class()
+
+ @broker.subscriber(**args)
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker)
+ assert list(schema.to_jsonable()["channels"].keys()) == ["test:Handle"]
+
+ @pytest.mark.parametrize(
+ "args",
+ (
+ pytest.param({"channel": "test"}, id="channel"),
+ pytest.param({"list": "test"}, id="list"),
+ pytest.param({"stream": "test"}, id="stream"),
+ ),
+ )
+ def test_publisher_variations(self, args) -> None:
+ broker = self.broker_class()
+
+ @broker.publisher(**args)
+ async def handle() -> None: ...
+
+ schema = AsyncAPI(broker)
+ assert list(schema.to_jsonable()["channels"].keys()) == ["test:Publisher"]
diff --git a/tests/asyncapi/redis/v3_0_0/test_publisher.py b/tests/asyncapi/redis/v3_0_0/test_publisher.py
new file mode 100644
index 0000000000..ac25a1efa0
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_publisher.py
@@ -0,0 +1,50 @@
+from faststream.redis import RedisBroker
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v3_0_0.publisher import PublisherTestcase
+
+
+class TestArguments(PublisherTestcase):
+ broker_factory = RedisBroker
+
+ def test_channel_publisher(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher("test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {
+ "bindingVersion": "custom",
+ "channel": "test",
+ "method": "publish",
+ },
+ }
+
+ def test_list_publisher(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher(list="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "rpush"},
+ }
+
+ def test_stream_publisher(self) -> None:
+ broker = self.broker_factory()
+
+ @broker.publisher(stream="test")
+ async def handle(msg) -> None: ...
+
+ schema = AsyncAPI(self.build_app(broker), schema_version="3.0.0").to_jsonable()
+ key = tuple(schema["channels"].keys())[0] # noqa: RUF015
+
+ assert schema["channels"][key]["bindings"] == {
+ "redis": {"bindingVersion": "custom", "channel": "test", "method": "xadd"},
+ }
diff --git a/tests/asyncapi/redis/v3_0_0/test_router.py b/tests/asyncapi/redis/v3_0_0/test_router.py
new file mode 100644
index 0000000000..14dc2c351e
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_router.py
@@ -0,0 +1,104 @@
+from faststream.redis import RedisBroker, RedisPublisher, RedisRoute, RedisRouter
+from faststream.specification.asyncapi import AsyncAPI
+from tests.asyncapi.base.v2_6_0.arguments import ArgumentsTestcase
+from tests.asyncapi.base.v2_6_0.publisher import PublisherTestcase
+from tests.asyncapi.base.v3_0_0.router import RouterTestcase
+
+
+class TestRouter(RouterTestcase):
+ broker_class = RedisBroker
+ router_class = RedisRouter
+ route_class = RedisRoute
+ publisher_class = RedisPublisher
+
+ def test_prefix(self) -> None:
+ broker = self.broker_class()
+
+ router = self.router_class(prefix="test_")
+
+ @router.subscriber("test")
+ async def handle(msg) -> None: ...
+
+ broker.include_router(router)
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "info": {"title": "FastStream", "version": "0.1.0", "description": ""},
+ "asyncapi": "3.0.0",
+ "defaultContentType": "application/json",
+ "servers": {
+ "development": {
+ "host": "localhost:6379",
+ "pathname": "",
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ },
+ },
+ "channels": {
+ "test_test:Handle": {
+ "address": "test_test:Handle",
+ "servers": [{"$ref": "#/servers/development"}],
+ "messages": {
+ "SubscribeMessage": {
+ "$ref": "#/components/messages/test_test:Handle:SubscribeMessage",
+ },
+ },
+ "bindings": {
+ "redis": {
+ "channel": "test_test",
+ "method": "subscribe",
+ "bindingVersion": "custom",
+ },
+ },
+ },
+ },
+ "operations": {
+ "test_test:HandleSubscribe": {
+ "action": "receive",
+ "messages": [
+ {
+ "$ref": "#/channels/test_test:Handle/messages/SubscribeMessage",
+ },
+ ],
+ "channel": {"$ref": "#/channels/test_test:Handle"},
+ },
+ },
+ "components": {
+ "messages": {
+ "test_test:Handle:SubscribeMessage": {
+ "title": "test_test:Handle:SubscribeMessage",
+ "correlationId": {
+ "location": "$message.header#/correlation_id",
+ },
+ "payload": {
+ "$ref": "#/components/schemas/Handle:Message:Payload",
+ },
+ },
+ },
+ "schemas": {
+ "Handle:Message:Payload": {"title": "Handle:Message:Payload"},
+ },
+ },
+ }
+
+
+class TestRouterArguments(ArgumentsTestcase):
+ broker_class = RedisRouter
+
+ def build_app(self, router):
+ broker = RedisBroker()
+ broker.include_router(router)
+ return broker
+
+
+class TestRouterPublisher(PublisherTestcase):
+ broker_class = RedisRouter
+
+ def build_app(self, router):
+ broker = RedisBroker()
+ broker.include_router(router)
+ return broker
diff --git a/tests/asyncapi/redis/v3_0_0/test_security.py b/tests/asyncapi/redis/v3_0_0/test_security.py
new file mode 100644
index 0000000000..80ba20324d
--- /dev/null
+++ b/tests/asyncapi/redis/v3_0_0/test_security.py
@@ -0,0 +1,126 @@
+import ssl
+
+from faststream.redis import RedisBroker
+from faststream.security import (
+ BaseSecurity,
+ SASLPlaintext,
+)
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_base_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+ security = BaseSecurity(ssl_context=ssl_context)
+
+ broker = RedisBroker("rediss://localhost:6379/", security=security)
+
+ assert (
+ broker.url == "rediss://localhost:6379/" # pragma: allowlist secret
+    )
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "rediss",
+ "protocolVersion": "custom",
+ "security": [],
+ "host": "localhost:6379",
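+            # The trailing "/" of the broker URL survives as the server pathname.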
+ "pathname": "/",
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema() -> None:
+ ssl_context = ssl.create_default_context()
+
+ security = SASLPlaintext(
+ ssl_context=ssl_context,
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RedisBroker("redis://localhost:6379/", security=security)
+
+ assert (
+ broker.url == "redis://localhost:6379/" # pragma: allowlist secret
+    )
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "security": [{"user-password": []}],
+ "host": "localhost:6379",
+ "pathname": "/",
+ },
+ },
+ }
+
+
+def test_plaintext_security_schema_without_ssl() -> None:
+ security = SASLPlaintext(
+ username="admin",
+ password="password", # pragma: allowlist secret
+ )
+
+ broker = RedisBroker("redis://localhost:6379/", security=security)
+
+ assert (
+ broker.url == "redis://localhost:6379/" # pragma: allowlist secret
+    )
+
+ schema = AsyncAPI(
+ broker,
+ schema_version="3.0.0",
+ ).to_jsonable()
+
+ assert schema == {
+ "asyncapi": "3.0.0",
+ "channels": {},
+ "operations": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "security": [{"user-password": []}],
+ "host": "localhost:6379",
+ "pathname": "/",
+ },
+ },
+ }
diff --git a/tests/brokers/base/basic.py b/tests/brokers/base/basic.py
index e550393052..28f9dbfa78 100644
--- a/tests/brokers/base/basic.py
+++ b/tests/brokers/base/basic.py
@@ -1,13 +1,40 @@
-from typing import Any, Dict, Tuple
+from abc import abstractmethod
+from typing import Any
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.broker.router import BrokerRouter
class BaseTestcaseConfig:
timeout: float = 3.0
+ @abstractmethod
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> BrokerUsecase[Any, Any]:
+ raise NotImplementedError
+
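+    # Override point for wrapping the broker in an in-memory test double
+    # (e.g. TestRedisBroker); the default returns the real broker untouched.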
+ def patch_broker(
+ self,
+ broker: BrokerUsecase,
+ **kwargs: Any,
+ ) -> BrokerUsecase:
+ return broker
+
def get_subscriber_params(
- self, *args: Any, **kwargs: Any
- ) -> Tuple[
- Tuple[Any, ...],
- Dict[str, Any],
+ self,
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[
+ tuple[Any, ...],
+ dict[str, Any],
]:
return args, kwargs
+
+ @abstractmethod
+ def get_router(self, **kwargs: Any) -> BrokerRouter:
+ raise NotImplementedError
diff --git a/tests/brokers/base/connection.py b/tests/brokers/base/connection.py
index 1614b6151b..6d30249c5e 100644
--- a/tests/brokers/base/connection.py
+++ b/tests/brokers/base/connection.py
@@ -1,55 +1,51 @@
-from typing import Type
-
import pytest
-from faststream.broker.core.usecase import BrokerUsecase
+from faststream._internal.broker.broker import BrokerUsecase
class BrokerConnectionTestcase:
- broker: Type[BrokerUsecase]
+ broker: type[BrokerUsecase]
def get_broker_args(self, settings):
return {}
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def ping(self, broker) -> bool:
return await broker.ping(timeout=5.0)
- @pytest.mark.asyncio
- async def test_close_before_start(self, async_mock):
+ @pytest.mark.asyncio()
+ async def test_close_before_start(self) -> None:
br = self.broker()
assert br._connection is None
await br.close()
- br._connection = async_mock
- await br._close()
assert not br.running
- @pytest.mark.asyncio
- async def test_init_connect_by_url(self, settings):
+ @pytest.mark.asyncio()
+ async def test_init_connect_by_url(self, settings) -> None:
kwargs = self.get_broker_args(settings)
broker = self.broker(**kwargs)
await broker.connect()
assert await self.ping(broker)
await broker.close()
- @pytest.mark.asyncio
- async def test_connection_by_url(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connection_by_url(self, settings) -> None:
kwargs = self.get_broker_args(settings)
broker = self.broker()
await broker.connect(**kwargs)
assert await self.ping(broker)
await broker.close()
- @pytest.mark.asyncio
- async def test_connect_by_url_priority(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connect_by_url_priority(self, settings) -> None:
kwargs = self.get_broker_args(settings)
broker = self.broker("wrong_url")
await broker.connect(**kwargs)
assert await self.ping(broker)
await broker.close()
- @pytest.mark.asyncio
- async def test_ping_timeout(self, settings):
+ @pytest.mark.asyncio()
+ async def test_ping_timeout(self, settings) -> None:
kwargs = self.get_broker_args(settings)
broker = self.broker("wrong_url")
await broker.connect(**kwargs)
diff --git a/tests/brokers/base/consume.py b/tests/brokers/base/consume.py
index 0e7b07b698..4fdb0e118d 100644
--- a/tests/brokers/base/consume.py
+++ b/tests/brokers/base/consume.py
@@ -1,6 +1,4 @@
import asyncio
-from abc import abstractmethod
-from typing import Any
from unittest.mock import MagicMock
import anyio
@@ -8,32 +6,24 @@
from pydantic import BaseModel
from faststream import Context, Depends
-from faststream.broker.core.usecase import BrokerUsecase
from faststream.exceptions import StopConsume
from .basic import BaseTestcaseConfig
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class BrokerConsumeTestcase(BaseTestcaseConfig):
- @abstractmethod
- def get_broker(self, broker: BrokerUsecase) -> BrokerUsecase[Any, Any]:
- raise NotImplementedError
-
- def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]:
- return broker
-
async def test_consume(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
consume_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@consume_broker.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -52,7 +42,7 @@ async def test_consume_from_multi(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
consume_broker = self.get_broker()
consume = asyncio.Event()
@@ -63,7 +53,7 @@ async def test_consume_from_multi(
@consume_broker.subscriber(*args, **kwargs)
@consume_broker.subscriber(*args2, **kwargs2)
- def subscriber(m):
+ def subscriber(m) -> None:
mock()
if not consume.is_set():
consume.set()
@@ -90,7 +80,7 @@ async def test_consume_double(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
consume_broker = self.get_broker()
consume = asyncio.Event()
@@ -99,7 +89,7 @@ async def test_consume_double(
args, kwargs = self.get_subscriber_params(queue)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
mock()
if not consume.is_set():
consume.set()
@@ -126,7 +116,7 @@ async def test_different_consume(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
consume_broker = self.get_broker()
consume = asyncio.Event()
@@ -135,7 +125,7 @@ async def test_different_consume(
args, kwargs = self.get_subscriber_params(queue)
@consume_broker.subscriber(*args, **kwargs)
- def handler(m):
+ def handler(m) -> None:
mock.handler()
consume.set()
@@ -143,7 +133,7 @@ def handler(m):
args, kwargs = self.get_subscriber_params(another_topic)
@consume_broker.subscriber(*args, **kwargs)
- def handler2(m):
+ def handler2(m) -> None:
mock.handler2()
consume2.set()
@@ -168,7 +158,7 @@ async def test_consume_with_filter(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
consume_broker = self.get_broker()
consume = asyncio.Event()
@@ -181,12 +171,12 @@ async def test_consume_with_filter(
sub = consume_broker.subscriber(*args, **kwargs)
@sub(filter=lambda m: m.content_type == "application/json")
- async def handler(m):
+ async def handler(m) -> None:
mock.handler(m)
consume.set()
@sub
- async def handler2(m):
+ async def handler2(m) -> None:
mock.handler2(m)
consume2.set()
@@ -210,13 +200,14 @@ async def handler2(m):
async def test_consume_validate_false(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
- consume_broker = self.get_broker()
+ ) -> None:
+ event = asyncio.Event()
- consume_broker._is_apply_types = True
- consume_broker._is_validate = False
+ consume_broker = self.get_broker(
+ apply_types=True,
+ serializer=None,
+ )
class Foo(BaseModel):
x: int
@@ -227,7 +218,9 @@ def dependency() -> str:
args, kwargs = self.get_subscriber_params(queue)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(m: Foo, dep: int = Depends(dependency), broker=Context()):
+ async def handler(
+ m: Foo, dep: int = Depends(dependency), broker=Context()
+ ) -> None:
mock(m, dep, broker)
event.set()
@@ -248,11 +241,12 @@ async def handler(m: Foo, dep: int = Depends(dependency), broker=Context()):
async def test_dynamic_sub(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
- async def subscriber(m):
+ async def subscriber(m) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -273,13 +267,13 @@ async def subscriber(m):
assert event.is_set()
- async def test_get_one_conflicts_with_handler(self, queue):
+ async def test_get_one_conflicts_with_handler(self, queue) -> None:
broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
subscriber = broker.subscriber(*args, **kwargs)
@subscriber
- async def t(): ...
+ async def t() -> None: ...
async with self.patch_broker(broker) as br:
await br.start()
@@ -288,14 +282,13 @@ async def t(): ...
await subscriber.get_one(timeout=1e-24)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class BrokerRealConsumeTestcase(BrokerConsumeTestcase):
async def test_get_one(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -304,10 +297,10 @@ async def test_get_one(
async with self.patch_broker(broker) as br:
await br.start()
- async def consume():
+ async def consume() -> None:
mock(await subscriber.get_one(timeout=self.timeout))
- async def publish():
+ async def publish() -> None:
await anyio.sleep(1e-24)
await br.publish("test_message", queue)
@@ -328,7 +321,7 @@ async def test_get_one_timeout(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
subscriber = broker.subscriber(*args, **kwargs)
@@ -339,13 +332,14 @@ async def test_get_one_timeout(
mock(await subscriber.get_one(timeout=1e-24))
mock.assert_called_once_with(None)
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_stop_consume_exc(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@@ -354,7 +348,7 @@ async def test_stop_consume_exc(
def subscriber(m):
mock()
event.set()
- raise StopConsume()
+ raise StopConsume
async with self.patch_broker(consume_broker) as br:
await br.start()
diff --git a/tests/brokers/base/fastapi.py b/tests/brokers/base/fastapi.py
index 6c18b7e9d5..602a040840 100644
--- a/tests/brokers/base/fastapi.py
+++ b/tests/brokers/base/fastapi.py
@@ -1,6 +1,6 @@
import asyncio
from contextlib import asynccontextmanager
-from typing import Any, Callable, Type, TypeVar
+from typing import Any, TypeVar
from unittest.mock import Mock
import pytest
@@ -8,24 +8,25 @@
from fastapi.exceptions import RequestValidationError
from fastapi.testclient import TestClient
-from faststream import Response, context
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.broker.fastapi.context import Context
-from faststream.broker.fastapi.router import StreamRouter
-from faststream.broker.router import BrokerRouter
-from faststream.types import AnyCallable
+from faststream import Response
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.broker.router import BrokerRouter
+from faststream._internal.fastapi.context import Context
+from faststream._internal.fastapi.router import StreamRouter
from .basic import BaseTestcaseConfig
Broker = TypeVar("Broker", bound=BrokerUsecase)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class FastAPITestcase(BaseTestcaseConfig):
- router_class: Type[StreamRouter[BrokerUsecase]]
- broker_router_class: Type[BrokerRouter[Any]]
+ router_class: type[StreamRouter[BrokerUsecase]]
+ broker_router_class: type[BrokerRouter[Any]]
+
+ async def test_base_real(self, mock: Mock, queue: str) -> None:
+ event = asyncio.Event()
- async def test_base_real(self, mock: Mock, queue: str, event: asyncio.Event):
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@@ -48,7 +49,13 @@ async def hello(msg):
assert event.is_set()
mock.assert_called_with("hi")
- async def test_background(self, mock: Mock, queue: str, event: asyncio.Event):
+ async def test_background(
+ self,
+ mock: Mock,
+ queue: str,
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
def task(msg):
@@ -58,7 +65,7 @@ def task(msg):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def hello(msg, tasks: BackgroundTasks):
+ async def hello(msg, tasks: BackgroundTasks) -> None:
tasks.add_task(task, msg)
async with router.broker:
@@ -74,8 +81,11 @@ async def hello(msg, tasks: BackgroundTasks):
assert event.is_set()
mock.assert_called_with("hi")
- async def test_context(self, mock: Mock, queue: str, event: asyncio.Event):
+ async def test_context(self, mock: Mock, queue: str) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
+ context = router.context
context_key = "message.headers"
@@ -83,14 +93,19 @@ async def test_context(self, mock: Mock, queue: str, event: asyncio.Event):
@router.subscriber(*args, **kwargs)
async def hello(msg=Context(context_key)):
- event.set()
- return mock(msg == context.resolve(context_key))
+ try:
+ mock(msg == context.resolve(context_key) and msg["1"] == "1")
+ finally:
+ event.set()
+ router._setup()
async with router.broker:
await router.broker.start()
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("", queue)),
+ asyncio.create_task(
+ router.broker.publish("", queue, headers={"1": "1"})
+ ),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -99,17 +114,21 @@ async def hello(msg=Context(context_key)):
assert event.is_set()
mock.assert_called_with(True)
- async def test_initial_context(self, queue: str, event: asyncio.Event):
+ async def test_initial_context(self, queue: str) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
+ context = router.context
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def hello(msg: int, data=Context(queue, initial=set)):
+ async def hello(msg: int, data=Context(queue, initial=set)) -> None:
data.add(msg)
if len(data) == 2:
event.set()
+ router._setup()
async with router.broker:
await router.broker.start()
await asyncio.wait(
@@ -125,8 +144,10 @@ async def hello(msg: int, data=Context(queue, initial=set)):
assert context.get(queue) == {1, 2}
context.reset_global(queue)
- async def test_double_real(self, mock: Mock, queue: str, event: asyncio.Event):
+ async def test_double_real(self, mock: Mock, queue: str) -> None:
+ event = asyncio.Event()
event2 = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@@ -136,7 +157,7 @@ async def test_double_real(self, mock: Mock, queue: str, event: asyncio.Event):
@sub1
@router.subscriber(*args2, **kwargs2)
- async def hello(msg: str):
+ async def hello(msg: str) -> None:
if event.is_set():
event2.set()
else:
@@ -163,21 +184,22 @@ async def test_base_publisher_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
@router.publisher(queue + "resp")
- async def m():
+ async def m() -> str:
return "hi"
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@router.subscriber(*args2, **kwargs2)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -196,13 +218,11 @@ async def resp(msg):
mock.assert_called_once_with("hi")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class FastAPILocalTestcase(BaseTestcaseConfig):
- router_class: Type[StreamRouter[BrokerUsecase]]
- broker_test: Callable[[Broker], Broker]
- build_message: AnyCallable
+ router_class: type[StreamRouter[BrokerUsecase]]
- async def test_base(self, queue: str):
+ async def test_base(self, queue: str) -> None:
router = self.router_class()
app = FastAPI()
@@ -211,22 +231,21 @@ async def test_base(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def hello():
+ async def hello() -> str:
return "hi"
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
with TestClient(app) as client:
- assert client.app_state["broker"] is router.broker
+ assert client.app_state["broker"] is br
- r = await router.broker.publish(
+ r = await br.request(
"hi",
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi", r
+ assert await r.decode() == "hi", r
- async def test_request(self, queue: str):
+ async def test_request(self, queue: str) -> None:
"""Local test due request exists in all TestClients."""
router = self.router_class(setup_state=False)
@@ -238,11 +257,11 @@ async def test_request(self, queue: str):
async def hello():
return Response("Hi!", headers={"x-header": "test"})
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
with TestClient(app) as client:
assert not client.app_state.get("broker")
- r = await router.broker.request(
+ r = await br.request(
"hi",
queue,
timeout=0.5,
@@ -250,7 +269,7 @@ async def hello():
assert await r.decode() == "Hi!"
assert r.headers["x-header"] == "test"
- async def test_base_without_state(self, queue: str):
+ async def test_base_without_state(self, queue: str) -> None:
router = self.router_class(setup_state=False)
app = FastAPI()
@@ -258,22 +277,21 @@ async def test_base_without_state(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def hello():
+ async def hello() -> str:
return "hi"
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
with TestClient(app) as client:
assert not client.app_state.get("broker")
- r = await router.broker.publish(
+ r = await br.request(
"hi",
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
- async def test_invalid(self, queue: str):
+ async def test_invalid(self, queue: str) -> None:
router = self.router_class()
app = FastAPI()
@@ -281,16 +299,16 @@ async def test_invalid(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def hello(msg: int): ...
+ async def hello(msg: int) -> None: ...
app.include_router(router)
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
with TestClient(app):
with pytest.raises(RequestValidationError):
- await router.broker.publish("hi", queue)
+ await br.publish("hi", queue)
- async def test_headers(self, queue: str):
+ async def test_headers(self, queue: str) -> None:
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@@ -299,17 +317,16 @@ async def test_headers(self, queue: str):
async def hello(w=Header()):
return w
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
"",
queue,
headers={"w": "hi"},
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
- async def test_depends(self, mock: Mock, queue: str):
+ async def test_depends(self, mock: Mock, queue: str) -> None:
router = self.router_class()
def dep(a):
@@ -322,18 +339,17 @@ def dep(a):
async def hello(a, w=Depends(dep)):
return w
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
{"a": "hi"},
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
mock.assert_called_once_with("hi")
- async def test_yield_depends(self, mock: Mock, queue: str):
+ async def test_yield_depends(self, mock: Mock, queue: str) -> None:
router = self.router_class()
def dep(a):
@@ -349,20 +365,19 @@ async def hello(a, w=Depends(dep)):
assert not mock.close.call_count
return w
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
{"a": "hi"},
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
mock.start.assert_called_once()
mock.close.assert_called_once()
- async def test_router_depends(self, mock: Mock, queue: str):
- def mock_dep():
+ async def test_router_depends(self, mock: Mock, queue: str) -> None:
+ def mock_dep() -> None:
mock()
router = self.router_class(dependencies=(Depends(mock_dep, use_cache=False),))
@@ -373,14 +388,14 @@ def mock_dep():
async def hello(a):
return a
- async with self.broker_test(router.broker):
- r = await router.broker.publish("hi", queue, rpc=True, rpc_timeout=0.5)
- assert r == "hi"
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request("hi", queue, timeout=0.5)
+ assert await r.decode() == "hi", r
mock.assert_called_once()
- async def test_subscriber_depends(self, mock: Mock, queue: str):
- def mock_dep():
+ async def test_subscriber_depends(self, mock: Mock, queue: str) -> None:
+ def mock_dep() -> None:
mock()
router = self.router_class()
@@ -394,40 +409,39 @@ def mock_dep():
async def hello(a):
return a
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
"hi",
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
mock.assert_called_once()
- async def test_hooks(self, mock: Mock):
+ async def test_hooks(self, mock: Mock) -> None:
router = self.router_class()
app = FastAPI()
app.include_router(router)
@router.after_startup
- def test_sync(app):
+ def test_sync(app) -> None:
mock.sync_called()
@router.after_startup
- async def test_async(app):
+ async def test_async(app) -> None:
mock.async_called()
@router.on_broker_shutdown
- def test_shutdown_sync(app):
+ def test_shutdown_sync(app) -> None:
mock.sync_shutdown_called()
@router.on_broker_shutdown
- async def test_shutdown_async(app):
+ async def test_shutdown_async(app) -> None:
mock.async_shutdown_called()
- async with self.broker_test(router.broker), router.lifespan_context(app):
+ async with self.patch_broker(router.broker), router.lifespan_context(app):
pass
mock.sync_called.assert_called_once()
@@ -435,7 +449,7 @@ async def test_shutdown_async(app):
mock.sync_shutdown_called.assert_called_once()
mock.async_shutdown_called.assert_called_once()
- async def test_existed_lifespan_startup(self, mock: Mock):
+ async def test_existed_lifespan_startup(self, mock: Mock) -> None:
@asynccontextmanager
async def lifespan(app):
mock.start()
@@ -447,28 +461,31 @@ async def lifespan(app):
app = FastAPI()
app.include_router(router)
- async with self.broker_test(router.broker), router.lifespan_context(
- app
- ) as context:
+ async with (
+ self.patch_broker(router.broker),
+ router.lifespan_context(
+ app,
+ ) as context,
+ ):
assert context["lifespan"]
mock.start.assert_called_once()
mock.close.assert_called_once()
- async def test_subscriber_mock(self, queue: str):
+ async def test_subscriber_mock(self, queue: str) -> None:
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def m():
+ async def m() -> str:
return "hi"
- async with self.broker_test(router.broker) as rb:
+ async with self.patch_broker(router.broker) as rb:
await rb.publish("hello", queue)
m.mock.assert_called_once_with("hello")
- async def test_publisher_mock(self, queue: str):
+ async def test_publisher_mock(self, queue: str) -> None:
router = self.router_class()
publisher = router.publisher(queue + "resp")
@@ -478,14 +495,14 @@ async def test_publisher_mock(self, queue: str):
@publisher
@sub
- async def m():
+ async def m() -> str:
return "response"
- async with self.broker_test(router.broker) as rb:
+ async with self.patch_broker(router.broker) as rb:
await rb.publish("hello", queue)
publisher.mock.assert_called_with("response")
- async def test_include(self, queue: str):
+ async def test_include(self, queue: str) -> None:
router = self.router_class()
router2 = self.broker_router_class()
@@ -494,69 +511,92 @@ async def test_include(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def hello():
+ async def hello() -> str:
return "hi"
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@router2.subscriber(*args2, **kwargs2)
- async def hello_router2():
+ async def hello_router2() -> str:
return "hi"
router.include_router(router2)
app.include_router(router)
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
with TestClient(app) as client:
- assert client.app_state["broker"] is router.broker
+ assert client.app_state["broker"] is br
- r = await router.broker.publish(
+ r = await br.request(
"hi",
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
- r = await router.broker.publish(
+ r = await br.request(
"hi",
queue + "1",
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
- async def test_dependency_overrides(self, mock: Mock, queue: str):
+ async def test_dependency_overrides(self, mock: Mock, queue: str) -> None:
router = self.router_class()
- router2 = self.router_class()
- def dep1():
+ def dep1() -> None:
mock.not_call()
- pass
+
+ def dep2() -> None:
+ mock()
app = FastAPI()
- app.dependency_overrides[dep1] = lambda: mock()
+ app.dependency_overrides[dep1] = dep2
args, kwargs = self.get_subscriber_params(queue)
- @router2.subscriber(*args, **kwargs)
- async def hello_router2(dep=Depends(dep1)):
+ @router.subscriber(*args, **kwargs)
+ async def hello_router2(dep: None = Depends(dep1)) -> str:
return "hi"
- router.include_router(router2)
app.include_router(router)
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
with TestClient(app) as client:
- assert client.app_state["broker"] is router.broker
+ assert client.app_state["broker"] is br
- r = await router.broker.publish(
+ r = await br.request(
"hi",
queue,
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "hi"
+ assert await r.decode() == "hi", r
mock.assert_called_once()
assert not mock.not_call.called
+
+ async def test_nested_router(self, queue: str) -> None:
+ router = self.router_class()
+ router2 = self.router_class()
+
+ app = FastAPI()
+
+ args, kwargs = self.get_subscriber_params(queue)
+
+ @router2.subscriber(*args, **kwargs)
+ async def hello_router2() -> str:
+ return "hi"
+
+ router.include_router(router2)
+ app.include_router(router)
+
+ async with self.patch_broker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.app_state["broker"] is br
+
+ r = await br.request(
+ "hi",
+ queue,
+ timeout=0.5,
+ )
+ assert r.body == b"hi"
diff --git a/tests/brokers/base/middlewares.py b/tests/brokers/base/middlewares.py
index 94a8cf5afb..9df2f84d19 100644
--- a/tests/brokers/base/middlewares.py
+++ b/tests/brokers/base/middlewares.py
@@ -1,28 +1,20 @@
import asyncio
-from typing import Type
-from unittest.mock import Mock, call
+from unittest.mock import MagicMock, call
import pytest
from faststream import Context
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.broker.middlewares import BaseMiddleware, ExceptionMiddleware
+from faststream._internal.basic_types import DecodedMessage
from faststream.exceptions import SkipMessage
-from faststream.types import DecodedMessage
+from faststream.middlewares import BaseMiddleware, ExceptionMiddleware
+from faststream.response import PublishCommand
from .basic import BaseTestcaseConfig
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class MiddlewaresOrderTestcase(BaseTestcaseConfig):
- broker_class: Type[BrokerUsecase]
-
- def patch_broker(self, broker: BrokerUsecase) -> BrokerUsecase:
- return broker
-
- async def test_broker_middleware_order(
- self, event: asyncio.Event, queue: str, mock: Mock
- ):
+ async def test_broker_middleware_order(self, queue: str, mock: MagicMock):
class InnerMiddleware(BaseMiddleware):
async def __aenter__(self):
mock.enter_inner()
@@ -37,10 +29,10 @@ async def consume_scope(self, call_next, msg):
mock.sub("inner")
return await call_next(msg)
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_inner()
mock.pub("inner")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
class OuterMiddleware(BaseMiddleware):
async def __aenter__(self):
@@ -56,32 +48,22 @@ async def consume_scope(self, call_next, msg):
mock.sub("outer")
return await call_next(msg)
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_outer()
mock.pub("outer")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
- broker = self.broker_class(
- middlewares=[OuterMiddleware, InnerMiddleware],
- )
+ broker = self.get_broker(middlewares=[OuterMiddleware, InnerMiddleware])
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
async def handler(msg):
- event.set()
+ pass
async with self.patch_broker(broker) as br:
- await br.start()
- await asyncio.wait(
- (
- asyncio.create_task(broker.publish("start", queue)),
- asyncio.create_task(event.wait()),
- ),
- timeout=self.timeout,
- )
+ await br.publish(None, queue)
- assert event.is_set()
mock.consume_inner.assert_called_once()
mock.consume_outer.assert_called_once()
mock.publish_inner.assert_called_once()
@@ -91,52 +73,36 @@ async def handler(msg):
mock.exit_inner.assert_called_once()
mock.exit_outer.assert_called_once()
- assert [c.args[0] for c in mock.sub.call_args_list] == [
- "outer",
- "inner",
- ], mock.sub.call_args_list
- assert [c.args[0] for c in mock.pub.call_args_list] == [
- "outer",
- "inner",
- ], mock.pub.call_args_list
- assert [c.args[0] for c in mock.enter.call_args_list] == [
- "outer",
- "inner",
- ], mock.enter.call_args_list
- assert [c.args[0] for c in mock.exit.call_args_list] == [
- "inner",
- "outer",
- ], mock.exit.call_args_list
-
- async def test_publisher_middleware_order(
- self, event: asyncio.Event, queue: str, mock: Mock
- ):
+ assert [c.args[0] for c in mock.sub.call_args_list] == ["outer", "inner"]
+ assert [c.args[0] for c in mock.pub.call_args_list] == ["outer", "inner"]
+ assert [c.args[0] for c in mock.enter.call_args_list] == ["outer", "inner"]
+ assert [c.args[0] for c in mock.exit.call_args_list] == ["inner", "outer"]
+
+ async def test_publisher_middleware_order(self, queue: str, mock: MagicMock):
class InnerMiddleware(BaseMiddleware):
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_inner()
mock("inner")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
class MiddleMiddleware(BaseMiddleware):
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_middle()
mock("middle")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
class OuterMiddleware(BaseMiddleware):
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_outer()
mock("outer")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
- broker = self.broker_class(
- middlewares=[OuterMiddleware],
- )
+ broker = self.get_broker(middlewares=[OuterMiddleware])
publisher = broker.publisher(
queue,
middlewares=[
- MiddleMiddleware(None).publish_scope,
- InnerMiddleware(None).publish_scope,
+ MiddleMiddleware(None, context=None).publish_scope,
+ InnerMiddleware(None, context=None).publish_scope,
],
)
@@ -144,50 +110,41 @@ async def publish_scope(self, call_next, msg, *args, **kwargs):
@broker.subscriber(*args, **kwargs)
async def handler(msg):
- event.set()
+ pass
- async with self.patch_broker(broker) as br:
- await br.start()
+ async with self.patch_broker(broker):
await publisher.publish(None, queue)
mock.publish_inner.assert_called_once()
mock.publish_middle.assert_called_once()
mock.publish_outer.assert_called_once()
- assert [c.args[0] for c in mock.call_args_list] == [
- "outer",
- "middle",
- "inner",
- ], mock.call_args_list
+
+ assert [c.args[0] for c in mock.call_args_list] == ["outer", "middle", "inner"]
async def test_publisher_with_router_middleware_order(
- self,
- event: asyncio.Event,
- queue: str,
- mock: Mock,
+ self, queue: str, mock: MagicMock
):
class InnerMiddleware(BaseMiddleware):
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_inner()
mock("inner")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
class MiddleMiddleware(BaseMiddleware):
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_middle()
mock("middle")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
class OuterMiddleware(BaseMiddleware):
- async def publish_scope(self, call_next, msg, *args, **kwargs):
+ async def publish_scope(self, call_next, cmd):
mock.publish_outer()
mock("outer")
- return await call_next(msg, *args, **kwargs)
+ return await call_next(cmd)
- broker = self.broker_class(
- middlewares=[OuterMiddleware],
- )
- router = self.broker_class(middlewares=[MiddleMiddleware])
- router2 = self.broker_class(middlewares=[InnerMiddleware])
+ broker = self.get_broker(middlewares=[OuterMiddleware])
+ router = self.get_router(middlewares=[MiddleMiddleware])
+ router2 = self.get_router(middlewares=[InnerMiddleware])
publisher = router2.publisher(queue)
@@ -195,84 +152,63 @@ async def publish_scope(self, call_next, msg, *args, **kwargs):
@router2.subscriber(*args, **kwargs)
async def handler(msg):
- event.set()
+ pass
router.include_router(router2)
broker.include_router(router)
- async with self.patch_broker(broker) as br:
- await br.start()
+ async with self.patch_broker(broker):
await publisher.publish(None, queue)
mock.publish_inner.assert_called_once()
mock.publish_middle.assert_called_once()
mock.publish_outer.assert_called_once()
- assert [c.args[0] for c in mock.call_args_list] == [
- "outer",
- "middle",
- "inner",
- ], mock.call_args_list
+ assert [c.args[0] for c in mock.call_args_list] == ["outer", "middle", "inner"]
- async def test_consume_middleware_order(
- self, event: asyncio.Event, queue: str, mock: Mock
- ):
+ async def test_consume_middleware_order(self, queue: str, mock: MagicMock):
class InnerMiddleware(BaseMiddleware):
- async def consume_scope(self, call_next, msg):
+ async def consume_scope(self, call_next, cmd):
mock.consume_inner()
mock("inner")
- return await call_next(msg)
+ return await call_next(cmd)
class MiddleMiddleware(BaseMiddleware):
- async def consume_scope(self, call_next, msg):
+ async def consume_scope(self, call_next, cmd):
mock.consume_middle()
mock("middle")
- return await call_next(msg)
+ return await call_next(cmd)
class OuterMiddleware(BaseMiddleware):
- async def consume_scope(self, call_next, msg):
+ async def consume_scope(self, call_next, cmd):
mock.consume_outer()
mock("outer")
- return await call_next(msg)
+ return await call_next(cmd)
- broker = self.broker_class(middlewares=[OuterMiddleware])
+ broker = self.get_broker(middlewares=[OuterMiddleware])
args, kwargs = self.get_subscriber_params(
queue,
middlewares=[
- MiddleMiddleware(None).consume_scope,
- InnerMiddleware(None).consume_scope,
+ MiddleMiddleware(None, context=None).consume_scope,
+ InnerMiddleware(None, context=None).consume_scope,
],
)
@broker.subscriber(*args, **kwargs)
async def handler(msg):
- event.set()
+ pass
async with self.patch_broker(broker) as br:
- await br.start()
- await asyncio.wait(
- (
- asyncio.create_task(broker.publish("start", queue)),
- asyncio.create_task(event.wait()),
- ),
- timeout=self.timeout,
- )
+ await br.publish(None, queue)
- assert event.is_set()
mock.consume_inner.assert_called_once()
mock.consume_middle.assert_called_once()
mock.consume_outer.assert_called_once()
- assert [c.args[0] for c in mock.call_args_list] == [
- "outer",
- "middle",
- "inner",
- ], mock.call_args_list
+ assert [c.args[0] for c in mock.call_args_list] == ["outer", "middle", "inner"]
- async def test_consume_with_middleware_order(
- self, event: asyncio.Event, queue: str, mock: Mock
- ):
+ async def test_consume_with_middleware_order(self, queue: str, mock: MagicMock):
class InnerMiddleware(BaseMiddleware):
async def consume_scope(self, call_next, cmd):
mock.consume_inner()
@@ -291,61 +227,37 @@ async def consume_scope(self, call_next, cmd):
mock("outer")
return await call_next(cmd)
- broker = self.broker_class(middlewares=[OuterMiddleware])
- router = self.broker_class(middlewares=[MiddleMiddleware])
- router2 = self.broker_class(middlewares=[InnerMiddleware])
+ broker = self.get_broker(middlewares=[OuterMiddleware])
+ router = self.get_router(middlewares=[MiddleMiddleware])
+ router2 = self.get_router(middlewares=[InnerMiddleware])
args, kwargs = self.get_subscriber_params(queue)
@router2.subscriber(*args, **kwargs)
async def handler(msg):
- event.set()
+ pass
router.include_router(router2)
broker.include_router(router)
-
async with self.patch_broker(broker) as br:
- await br.start()
- await asyncio.wait(
- (
- asyncio.create_task(broker.publish("start", queue)),
- asyncio.create_task(event.wait()),
- ),
- timeout=self.timeout,
- )
+ await br.publish(None, queue)
mock.consume_inner.assert_called_once()
mock.consume_middle.assert_called_once()
mock.consume_outer.assert_called_once()
- assert event.is_set()
- assert [c.args[0] for c in mock.call_args_list] == [
- "outer",
- "middle",
- "inner",
- ], mock.call_args_list
+ assert [c.args[0] for c in mock.call_args_list] == ["outer", "middle", "inner"]
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class LocalMiddlewareTestcase(BaseTestcaseConfig):
- broker_class: Type[BrokerUsecase]
-
- @pytest.fixture
- def raw_broker(self):
- return None
-
- def patch_broker(
- self, raw_broker: BrokerUsecase, broker: BrokerUsecase
- ) -> BrokerUsecase:
- return broker
-
async def test_subscriber_middleware(
self,
- event: asyncio.Event,
queue: str,
- mock: Mock,
- raw_broker,
- ):
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
async def mid(call_next, msg):
mock.start(await msg.decode())
result = await call_next(msg)
@@ -353,22 +265,20 @@ async def mid(call_next, msg):
event.set()
return result
- broker = self.broker_class()
+ broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue, middlewares=(mid,))
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> str:
mock.inner(m)
return "end"
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("start", queue)),
+ asyncio.create_task(br.publish("start", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -382,11 +292,11 @@ async def handler(m):
async def test_publisher_middleware(
self,
- event: asyncio.Event,
queue: str,
- mock: Mock,
- raw_broker,
- ):
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
async def mid(call_next, msg, **kwargs):
mock.enter()
result = await call_next(msg, **kwargs)
@@ -395,24 +305,22 @@ async def mid(call_next, msg, **kwargs):
event.set()
return result
- broker = self.broker_class()
+ broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
@broker.publisher(queue + "1", middlewares=(mid,))
@broker.publisher(queue + "2", middlewares=(mid,))
- async def handler(m):
+ async def handler(m) -> str:
mock.inner(m)
return "end"
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("start", queue)),
+ asyncio.create_task(br.publish("start", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -424,8 +332,10 @@ async def handler(m):
assert mock.end.call_count == 2
async def test_local_middleware_not_shared_between_subscribers(
- self, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
event1 = asyncio.Event()
event2 = asyncio.Event()
@@ -435,7 +345,7 @@ async def mid(call_next, msg):
mock.end()
return result
- broker = self.broker_class()
+ broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
args2, kwargs2 = self.get_subscriber_params(
@@ -445,7 +355,7 @@ async def mid(call_next, msg):
@broker.subscriber(*args, **kwargs)
@broker.subscriber(*args2, **kwargs2)
- async def handler(m):
+ async def handler(m) -> str:
if event1.is_set():
event2.set()
else:
@@ -453,10 +363,8 @@ async def handler(m):
mock()
return ""
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
asyncio.create_task(broker.publish("", queue)),
@@ -474,8 +382,10 @@ async def handler(m):
assert mock.call_count == 2
async def test_local_middleware_consume_not_shared_between_filters(
- self, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
event1 = asyncio.Event()
event2 = asyncio.Event()
@@ -485,7 +395,7 @@ async def mid(call_next, msg):
mock.end()
return result
- broker = self.broker_class()
+ broker = self.get_broker()
args, kwargs = self.get_subscriber_params(
queue,
@@ -494,25 +404,23 @@ async def mid(call_next, msg):
sub = broker.subscriber(*args, **kwargs)
@sub(filter=lambda m: m.content_type == "application/json")
- async def handler(m):
+ async def handler(m) -> str:
event2.set()
mock()
return ""
@sub(middlewares=(mid,))
- async def handler2(m):
+ async def handler2(m) -> str:
event1.set()
mock()
return ""
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish({"msg": "hi"}, queue)),
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish({"msg": "hi"}, queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event1.wait()),
asyncio.create_task(event2.wait()),
),
@@ -525,33 +433,33 @@ async def handler2(m):
mock.end.assert_called_once()
assert mock.call_count == 2
- async def test_error_traceback(self, queue: str, mock: Mock, event, raw_broker):
+ async def test_error_traceback(self, queue: str, mock: MagicMock) -> None:
+ event = asyncio.Event()
+
async def mid(call_next, msg):
try:
result = await call_next(msg)
except Exception as e:
mock(isinstance(e, ValueError))
- raise e
+ raise
else:
return result
- broker = self.broker_class()
+ broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue, middlewares=(mid,))
@broker.subscriber(*args, **kwargs)
async def handler2(m):
event.set()
- raise ValueError()
-
- broker = self.patch_broker(raw_broker, broker)
+ raise ValueError
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -561,11 +469,15 @@ async def handler2(m):
mock.assert_called_once_with(True)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class MiddlewareTestcase(LocalMiddlewareTestcase):
async def test_global_middleware(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
class mid(BaseMiddleware): # noqa: N801
async def on_receive(self):
mock.start(self.msg)
@@ -575,22 +487,22 @@ async def after_processed(self, exc_type, exc_val, exc_tb):
mock.end()
return await super().after_processed(exc_type, exc_val, exc_tb)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(
+ middlewares=(mid,),
+ )
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> str:
event.set()
return ""
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -602,11 +514,11 @@ async def handler(m):
async def test_add_global_middleware(
self,
- event: asyncio.Event,
queue: str,
- mock: Mock,
- raw_broker,
- ):
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
class mid(BaseMiddleware): # noqa: N801
async def on_receive(self):
mock.start(self.msg)
@@ -616,13 +528,13 @@ async def after_processed(self, exc_type, exc_val, exc_tb):
mock.end()
return await super().after_processed(exc_type, exc_val, exc_tb)
- broker = self.broker_class()
+ broker = self.get_broker()
# already registered subscriber
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> str:
event.set()
return ""
@@ -635,18 +547,16 @@ async def handler(m):
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@broker.subscriber(*args2, **kwargs2)
- async def handler2(m):
+ async def handler2(m) -> str:
event2.set()
return ""
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
- asyncio.create_task(broker.publish("", f"{queue}1")),
+ asyncio.create_task(br.publish("", queue)),
+ asyncio.create_task(br.publish("", f"{queue}1")),
asyncio.create_task(event.wait()),
asyncio.create_task(event2.wait()),
),
@@ -657,14 +567,19 @@ async def handler2(m):
assert mock.start.call_count == 2
assert mock.end.call_count == 2
- async def test_patch_publish(self, queue: str, mock: Mock, event, raw_broker):
+ async def test_patch_publish(
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
class Mid(BaseMiddleware):
- async def on_publish(self, msg: str, *args, **kwargs) -> str:
- return msg * 2
+ async def on_publish(self, msg: PublishCommand) -> PublishCommand:
+ msg.body *= 2
+ return msg
- broker = self.broker_class(
- middlewares=(Mid,),
- )
+ broker = self.get_broker(middlewares=(Mid,))
args, kwargs = self.get_subscriber_params(queue)
@@ -675,20 +590,16 @@ async def handler(m):
args2, kwargs2 = self.get_subscriber_params(queue + "r")
@broker.subscriber(*args2, **kwargs2)
- async def handler_resp(m):
+ async def handler_resp(m) -> None:
mock(m)
event.set()
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(
- broker.publish("r", queue, reply_to=queue + "r")
- ),
+ asyncio.create_task(br.publish("r", queue, reply_to=queue + "r")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -699,24 +610,23 @@ async def handler_resp(m):
async def test_global_publisher_middleware(
self,
- event: asyncio.Event,
queue: str,
- mock: Mock,
- raw_broker,
- ):
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
class Mid(BaseMiddleware):
- async def on_publish(self, msg: str, *args, **kwargs) -> str:
- data = msg * 2
- assert args or kwargs
- mock.enter(data)
- return data
+ async def on_publish(self, msg: PublishCommand) -> PublishCommand:
+ msg.body *= 2
+ mock.enter(msg.body)
+ return msg
- async def after_publish(self, *args, **kwargs):
+ async def after_publish(self, *args, **kwargs) -> None:
mock.end()
if mock.end.call_count > 2:
event.set()
- broker = self.broker_class(middlewares=(Mid,))
+ broker = self.get_broker(middlewares=(Mid,))
args, kwargs = self.get_subscriber_params(queue)
@@ -727,13 +637,11 @@ async def handler(m):
mock.inner(m)
return m
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("1", queue)),
+ asyncio.create_task(br.publish("1", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -746,29 +654,22 @@ async def handler(m):
assert mock.end.call_count == 3
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class ExceptionMiddlewareTestcase(BaseTestcaseConfig):
- broker_class: Type[BrokerUsecase]
-
- @pytest.fixture
- def raw_broker(self):
- return None
-
- def patch_broker(
- self, raw_broker: BrokerUsecase, broker: BrokerUsecase
- ) -> BrokerUsecase:
- return broker
-
async def test_exception_middleware_default_msg(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ValueError, publish=True)
- async def value_error_handler(exc):
+ async def value_error_handler(exc) -> str:
return "value"
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(apply_types=True, middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@@ -780,17 +681,15 @@ async def subscriber1(m):
args, kwargs = self.get_subscriber_params(queue + "1")
@broker.subscriber(*args, **kwargs)
- async def subscriber2(msg=Context("message")):
+ async def subscriber2(msg=Context("message")) -> None:
mock(await msg.decode())
event.set()
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -801,18 +700,20 @@ async def subscriber2(msg=Context("message")):
mock.assert_called_once_with("value")
async def test_exception_middleware_skip_msg(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ValueError, publish=True)
async def value_error_handler(exc):
event.set()
- raise SkipMessage()
+ raise SkipMessage
- broker = self.broker_class(
- middlewares=(mid,),
- )
+ broker = self.get_broker(middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
@@ -823,16 +724,14 @@ async def subscriber1(m):
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@broker.subscriber(*args2, **kwargs2)
- async def subscriber2(msg=Context("message")):
+ async def subscriber2(msg=Context("message")) -> None:
mock(await msg.decode())
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -842,17 +741,19 @@ async def subscriber2(msg=Context("message")):
assert mock.call_count == 0
async def test_exception_middleware_do_not_catch_skip_msg(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(Exception)
- async def value_error_handler(exc):
+ async def value_error_handler(exc) -> None:
mock()
- broker = self.broker_class(
- middlewares=(mid,),
- )
+ broker = self.get_broker(middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
@@ -860,13 +761,11 @@ async def subscriber(m):
event.set()
raise SkipMessage
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -877,8 +776,12 @@ async def subscriber(m):
assert mock.call_count == 0
async def test_exception_middleware_reraise(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ValueError, publish=True)
@@ -886,9 +789,7 @@ async def value_error_handler(exc):
event.set()
raise exc
- broker = self.broker_class(
- middlewares=(mid,),
- )
+ broker = self.get_broker(middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
@@ -899,16 +800,14 @@ async def subscriber1(m):
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@broker.subscriber(*args2, **kwargs2)
- async def subscriber2(msg=Context("message")):
+ async def subscriber2(msg=Context("message")) -> None:
mock(await msg.decode())
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -918,21 +817,23 @@ async def subscriber2(msg=Context("message")):
assert mock.call_count == 0
async def test_exception_middleware_different_handler(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ZeroDivisionError, publish=True)
- async def zero_error_handler(exc):
+ async def zero_error_handler(exc) -> str:
return "zero"
@mid.add_handler(ValueError, publish=True)
- async def value_error_handler(exc):
+ async def value_error_handler(exc) -> str:
return "value"
- broker = self.broker_class(
- middlewares=(mid,),
- )
+ broker = self.get_broker(apply_types=True, middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
publisher = broker.publisher(queue + "2")
@@ -952,19 +853,17 @@ async def subscriber2(m):
args3, kwargs3 = self.get_subscriber_params(queue + "2")
@broker.subscriber(*args3, **kwargs3)
- async def subscriber3(msg=Context("message")):
+ async def subscriber3(msg=Context("message")) -> None:
mock(await msg.decode())
if mock.call_count > 1:
event.set()
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
- asyncio.create_task(broker.publish("", queue + "1")),
+ asyncio.create_task(br.publish("", queue)),
+ asyncio.create_task(br.publish("", queue + "1")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -974,22 +873,22 @@ async def subscriber3(msg=Context("message")):
assert mock.call_count == 2
mock.assert_has_calls([call("zero"), call("value")], any_order=True)
- async def test_exception_middleware_init_handler_same(self):
+ async def test_exception_middleware_init_handler_same(self) -> None:
mid1 = ExceptionMiddleware()
@mid1.add_handler(ValueError)
- async def value_error_handler(exc):
+ async def value_error_handler(exc) -> str:
return "value"
mid2 = ExceptionMiddleware(handlers={ValueError: value_error_handler})
assert [x[0] for x in mid1._handlers] == [x[0] for x in mid2._handlers]
- async def test_exception_middleware_init_publish_handler_same(self):
+ async def test_exception_middleware_init_publish_handler_same(self) -> None:
mid1 = ExceptionMiddleware()
@mid1.add_handler(ValueError, publish=True)
- async def value_error_handler(exc):
+ async def value_error_handler(exc) -> str:
return "value"
mid2 = ExceptionMiddleware(publish_handlers={ValueError: value_error_handler})
@@ -999,8 +898,12 @@ async def value_error_handler(exc):
]
async def test_exception_middleware_decoder_error(
- self, event: asyncio.Event, queue: str, mock: Mock, raw_broker
- ):
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
async def decoder(
msg,
original_decoder,
@@ -1010,13 +913,10 @@ async def decoder(
mid = ExceptionMiddleware()
@mid.add_handler(ValueError)
- async def value_error_handler(exc):
+ async def value_error_handler(exc) -> None:
event.set()
- broker = self.broker_class(
- middlewares=(mid,),
- decoder=decoder,
- )
+ broker = self.get_broker(middlewares=(mid,), decoder=decoder)
args, kwargs = self.get_subscriber_params(queue)
@@ -1024,13 +924,11 @@ async def value_error_handler(exc):
async def subscriber1(m):
raise ZeroDivisionError
- broker = self.patch_broker(raw_broker, broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish("", queue)),
+ asyncio.create_task(br.publish("", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
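
The middleware testcases above codify the new hook contract: `publish_scope` receives a single `PublishCommand` instead of `(msg, *args, **kwargs)`, and standalone middleware instances are constructed with an explicit `context` argument. A hedged sketch of a middleware written against the new signatures (the class name and print statements are illustrative only):

from faststream.middlewares import BaseMiddleware
from faststream.response import PublishCommand


class AuditMiddleware(BaseMiddleware):
    async def consume_scope(self, call_next, msg):
        # wraps every consumed message
        print("consume:", msg)
        return await call_next(msg)

    async def publish_scope(self, call_next, cmd: PublishCommand):
        # the outgoing body now travels on the command object
        print("publish:", cmd.body)
        return await call_next(cmd)


# registered broker-wide, as in the tests: get_broker(middlewares=[AuditMiddleware])
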
diff --git a/tests/brokers/base/parser.py b/tests/brokers/base/parser.py
index c4a47683c3..859c508c53 100644
--- a/tests/brokers/base/parser.py
+++ b/tests/brokers/base/parser.py
@@ -1,35 +1,21 @@
import asyncio
-from typing import Type
from unittest.mock import Mock
import pytest
-from faststream.broker.core.usecase import BrokerUsecase
-
from .basic import BaseTestcaseConfig
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class LocalCustomParserTestcase(BaseTestcaseConfig):
- broker_class: Type[BrokerUsecase]
-
- @pytest.fixture
- def raw_broker(self):
- return None
-
- def patch_broker(
- self, raw_broker: BrokerUsecase, broker: BrokerUsecase
- ) -> BrokerUsecase:
- return broker
-
async def test_local_parser(
self,
mock: Mock,
queue: str,
- raw_broker,
- event: asyncio.Event,
- ):
- broker = self.broker_class()
+ ) -> None:
+ event = asyncio.Event()
+
+ broker = self.get_broker()
async def custom_parser(msg, original):
msg = await original(msg)
@@ -39,16 +25,15 @@ async def custom_parser(msg, original):
args, kwargs = self.get_subscriber_params(queue, parser=custom_parser)
@broker.subscriber(*args, **kwargs)
- async def handle(m):
+ async def handle(m) -> None:
event.set()
- broker = self.patch_broker(raw_broker, broker)
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish(b"hello", queue)),
+ asyncio.create_task(br.publish(b"hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -61,10 +46,10 @@ async def test_local_sync_decoder(
self,
mock: Mock,
queue: str,
- raw_broker,
- event: asyncio.Event,
- ):
- broker = self.broker_class()
+ ) -> None:
+ event = asyncio.Event()
+
+ broker = self.get_broker()
def custom_decoder(msg):
mock(msg.body)
@@ -73,16 +58,15 @@ def custom_decoder(msg):
args, kwargs = self.get_subscriber_params(queue, decoder=custom_decoder)
@broker.subscriber(*args, **kwargs)
- async def handle(m):
+ async def handle(m) -> None:
event.set()
- broker = self.patch_broker(raw_broker, broker)
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish(b"hello", queue)),
+ asyncio.create_task(br.publish(b"hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -95,28 +79,27 @@ async def test_global_sync_decoder(
self,
mock: Mock,
queue: str,
- raw_broker,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
def custom_decoder(msg):
mock(msg.body)
return msg
- broker = self.broker_class(decoder=custom_decoder)
+ broker = self.get_broker(decoder=custom_decoder)
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handle(m):
+ async def handle(m) -> None:
event.set()
- broker = self.patch_broker(raw_broker, broker)
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish(b"hello", queue)),
+ asyncio.create_task(br.publish(b"hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -127,13 +110,13 @@ async def handle(m):
async def test_local_parser_no_share_between_subscribers(
self,
- event: asyncio.Event,
mock: Mock,
queue: str,
- raw_broker,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
event2 = asyncio.Event()
- broker = self.broker_class()
+ broker = self.get_broker()
async def custom_parser(msg, original):
msg = await original(msg)
@@ -145,20 +128,19 @@ async def custom_parser(msg, original):
@broker.subscriber(*args, **kwargs)
@broker.subscriber(*args2, **kwargs2)
- async def handle(m):
+ async def handle(m) -> None:
if event.is_set():
event2.set()
else:
event.set()
- broker = self.patch_broker(raw_broker, broker)
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish(b"hello", queue)),
- asyncio.create_task(broker.publish(b"hello", queue + "1")),
+ asyncio.create_task(br.publish(b"hello", queue)),
+ asyncio.create_task(br.publish(b"hello", queue + "1")),
asyncio.create_task(event.wait()),
asyncio.create_task(event2.wait()),
),
@@ -173,17 +155,16 @@ async def test_local_parser_no_share_between_handlers(
self,
mock: Mock,
queue: str,
- raw_broker,
- event: asyncio.Event,
- ):
- broker = self.broker_class()
+ ) -> None:
+ event = asyncio.Event()
- args, kwargs = self.get_subscriber_params(
- queue, filter=lambda m: m.content_type == "application/json"
- )
+ broker = self.get_broker()
- @broker.subscriber(*args, **kwargs)
- async def handle(m):
+ args, kwargs = self.get_subscriber_params(queue)
+ sub = broker.subscriber(*args, **kwargs)
+
+ @sub(filter=lambda m: m.content_type == "application/json")
+ async def handle(m) -> None:
event.set()
event2 = asyncio.Event()
@@ -193,20 +174,17 @@ async def custom_parser(msg, original):
mock(msg.body)
return msg
- args2, kwargs2 = self.get_subscriber_params(queue, parser=custom_parser)
-
- @broker.subscriber(*args2, **kwargs2)
- async def handle2(m):
+ @sub(parser=custom_parser)
+ async def handle2(m) -> None:
event2.set()
- broker = self.patch_broker(raw_broker, broker)
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish({"msg": "hello"}, queue)),
- asyncio.create_task(broker.publish(b"hello", queue)),
+ asyncio.create_task(br.publish({"msg": "hello"}, queue)),
+ asyncio.create_task(br.publish(b"hello", queue)),
asyncio.create_task(event.wait()),
asyncio.create_task(event2.wait()),
),
@@ -223,29 +201,28 @@ async def test_global_parser(
self,
mock: Mock,
queue: str,
- raw_broker,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
async def custom_parser(msg, original):
msg = await original(msg)
mock(msg.body)
return msg
- broker = self.broker_class(parser=custom_parser)
+ broker = self.get_broker(parser=custom_parser)
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handle(m):
+ async def handle(m) -> None:
event.set()
- broker = self.patch_broker(raw_broker, broker)
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(broker.publish(b"hello", queue)),
+ asyncio.create_task(br.publish(b"hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
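
These parser testcases hinge on `parser=`/`decoder=` staying scoped to the subscriber they are passed to. The hook shapes, sketched with an assumed NATS broker and queue name:

from faststream.nats import NatsBroker

broker = NatsBroker()


async def custom_parser(msg, original_parser):
    parsed = await original_parser(msg)  # reuse the built-in parsing
    print("raw body:", parsed.body)
    return parsed


def custom_decoder(msg):
    # sync decoders are accepted as well (cf. test_local_sync_decoder)
    return msg.body


@broker.subscriber("demo-queue", parser=custom_parser, decoder=custom_decoder)
async def handle(m) -> None:
    print("decoded:", m)
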
diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py
index c94b9ca6bb..feac1efac4 100644
--- a/tests/brokers/base/publish.py
+++ b/tests/brokers/base/publish.py
@@ -1,8 +1,7 @@
import asyncio
-from abc import abstractmethod
from dataclasses import asdict, dataclass
-from datetime import datetime
-from typing import Any, Dict, List, Tuple
+from datetime import datetime, timezone
+from typing import Any
from unittest.mock import Mock
import anyio
@@ -10,8 +9,7 @@
from pydantic import BaseModel
from faststream import BaseMiddleware, Context, Response
-from faststream._compat import dump_json, model_to_json
-from faststream.broker.core.usecase import BrokerUsecase
+from faststream._internal._compat import dump_json, model_to_json
from .basic import BaseTestcaseConfig
@@ -25,21 +23,14 @@ class SimpleDataclass:
r: str
-now = datetime.now()
+now = datetime.now(timezone.utc)
class BrokerPublishTestcase(BaseTestcaseConfig):
- @abstractmethod
- def get_broker(self, apply_types: bool = False) -> BrokerUsecase[Any, Any]:
- raise NotImplementedError
-
- def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]:
- return broker
-
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
@pytest.mark.parametrize(
("message", "message_type", "expected_message"),
- ( # noqa: PT007
+ (
pytest.param(
"hello",
str,
@@ -78,13 +69,13 @@ def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, An
),
pytest.param(
{"m": 1},
- Dict[str, int],
+ dict[str, int],
{"m": 1},
id="dict->dict",
),
pytest.param(
[1, 2, 3],
- List[int],
+ list[int],
[1, 2, 3],
id="list->list",
),
@@ -150,15 +141,16 @@ async def test_serialize(
message,
message_type,
expected_message,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def handler(m: message_type):
+ async def handler(m: message_type) -> None:
event.set()
mock(m)
@@ -176,13 +168,14 @@ async def handler(m: message_type):
assert event.is_set()
mock.assert_called_with(expected_message)
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -195,7 +188,7 @@ async def m():
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@pub_broker.subscriber(*args2, **kwargs2)
- async def m_next(msg=Context("message")):
+ async def m_next(msg=Context("message")) -> None:
event.set()
mock(
body=msg.body,
@@ -220,19 +213,20 @@ async def m_next(msg=Context("message")):
headers="1",
)
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_unwrap_dict(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def m(a: int, b: int):
+ async def m(a: int, b: int) -> None:
event.set()
mock({"a": a, "b": b})
@@ -251,22 +245,23 @@ async def m(a: int, b: int):
{
"a": 1,
"b": 1,
- }
+ },
)
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_unwrap_list(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def m(a: int, b: int, *args: Tuple[int, ...]):
+ async def m(a: int, b: int, *args: tuple[int, ...]) -> None:
event.set()
mock({"a": a, "b": b, "args": args})
@@ -283,26 +278,27 @@ async def m(a: int, b: int, *args: Tuple[int, ...]):
assert event.is_set()
mock.assert_called_with({"a": 1, "b": 1, "args": (2, 3)})
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_base_publisher(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
@pub_broker.publisher(queue + "resp")
- async def m():
+ async def m() -> str:
return ""
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@pub_broker.subscriber(*args2, **kwargs2)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -319,13 +315,14 @@ async def resp(msg):
assert event.is_set()
mock.assert_called_once_with("")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_publisher_object(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
publisher = pub_broker.publisher(queue + "resp")
@@ -334,13 +331,13 @@ async def test_publisher_object(
@publisher
@pub_broker.subscriber(*args, **kwargs)
- async def m():
+ async def m() -> str:
return ""
args, kwargs = self.get_subscriber_params(queue + "resp")
@pub_broker.subscriber(*args, **kwargs)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -357,13 +354,14 @@ async def resp(msg):
assert event.is_set()
mock.assert_called_once_with("")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_publish_manual(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
publisher = pub_broker.publisher(queue + "resp")
@@ -371,13 +369,13 @@ async def test_publish_manual(
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def m():
+ async def m() -> None:
await publisher.publish("")
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@pub_broker.subscriber(*args2, **kwargs2)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -394,12 +392,12 @@ async def resp(msg):
assert event.is_set()
mock.assert_called_once_with("")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_multiple_publishers(
self,
queue: str,
mock: Mock,
- ):
+ ) -> None:
pub_broker = self.get_broker(apply_types=True)
event = anyio.Event()
@@ -410,20 +408,20 @@ async def test_multiple_publishers(
@pub_broker.publisher(queue + "resp2")
@pub_broker.subscriber(*args, **kwargs)
@pub_broker.publisher(queue + "resp")
- async def m():
+ async def m() -> str:
return ""
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@pub_broker.subscriber(*args2, **kwargs2)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock.resp1(msg)
args3, kwargs3 = self.get_subscriber_params(queue + "resp2")
@pub_broker.subscriber(*args3, **kwargs3)
- async def resp2(msg):
+ async def resp2(msg) -> None:
event2.set()
mock.resp2(msg)
@@ -443,12 +441,12 @@ async def resp2(msg):
mock.resp1.assert_called_once_with("")
mock.resp2.assert_called_once_with("")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_reusable_publishers(
self,
queue: str,
mock: Mock,
- ):
+ ) -> None:
pub_broker = self.get_broker(apply_types=True)
consume = anyio.Event()
@@ -460,20 +458,20 @@ async def test_reusable_publishers(
@pub
@pub_broker.subscriber(*args, **kwargs)
- async def m():
+ async def m() -> str:
return ""
args2, kwargs2 = self.get_subscriber_params(queue + "2")
@pub
@pub_broker.subscriber(*args2, **kwargs2)
- async def m2():
+ async def m2() -> str:
return ""
args3, kwargs3 = self.get_subscriber_params(queue + "resp")
@pub_broker.subscriber(*args3, **kwargs3)
- async def resp():
+ async def resp() -> None:
if not consume.is_set():
consume.set()
else:
@@ -496,19 +494,20 @@ async def resp():
assert consume.is_set()
assert mock.call_count == 2
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_reply_to(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue + "reply")
@pub_broker.subscriber(*args, **kwargs)
- async def reply_handler(m):
+ async def reply_handler(m) -> None:
event.set()
mock(m)
@@ -524,7 +523,7 @@ async def handler(m):
await asyncio.wait(
(
asyncio.create_task(
- br.publish("Hello!", queue, reply_to=queue + "reply")
+ br.publish("Hello!", queue, reply_to=queue + "reply"),
),
asyncio.create_task(event.wait()),
),
@@ -534,13 +533,14 @@ async def handler(m):
assert event.is_set()
mock.assert_called_with("Hello!")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_no_reply(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
class Mid(BaseMiddleware):
async def after_processed(self, *args: Any, **kwargs: Any):
event.set()
@@ -553,7 +553,7 @@ async def after_processed(self, *args: Any, **kwargs: Any):
args, kwargs = self.get_subscriber_params(queue + "reply")
@pub_broker.subscriber(*args, **kwargs)
- async def reply_handler(m):
+ async def reply_handler(m) -> None:
mock(m)
args2, kwargs2 = self.get_subscriber_params(queue, no_reply=True)
@@ -568,7 +568,7 @@ async def handler(m):
await asyncio.wait(
(
asyncio.create_task(
- br.publish("Hello!", queue, reply_to=queue + "reply")
+ br.publish("Hello!", queue, reply_to=queue + "reply"),
),
asyncio.create_task(event.wait()),
),
@@ -578,19 +578,20 @@ async def handler(m):
assert event.is_set()
assert not mock.called
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_publisher_after_start(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
event.set()
mock(m)
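
The table-driven `test_serialize` above publishes each payload once and asserts the annotated type arrives intact. A condensed sketch of one such round-trip (NATS classes assumed purely for illustration):

import asyncio

from pydantic import BaseModel

from faststream.nats import NatsBroker, TestNatsBroker

broker = NatsBroker(apply_types=True)


class SimpleModel(BaseModel):
    r: str


@broker.subscriber("demo-queue")
async def handler(m: SimpleModel) -> None:
    assert m.r == "hello!"  # dict payload validated into the model


async def main() -> None:
    async with TestNatsBroker(broker) as br:
        await br.publish({"r": "hello!"}, "demo-queue")


asyncio.run(main())
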
diff --git a/tests/brokers/base/publish_command.py b/tests/brokers/base/publish_command.py
new file mode 100644
index 0000000000..8624ec6c7d
--- /dev/null
+++ b/tests/brokers/base/publish_command.py
@@ -0,0 +1,73 @@
+from typing import Any
+
+import pytest
+
+from faststream import Response
+from faststream.response import ensure_response
+from faststream.response.response import (
+ BatchPublishCommand,
+ PublishCommand,
+)
+
+
+class BasePublishCommandTestcase:
+ publish_command_cls: type[PublishCommand]
+
+    def test_simple_response(self) -> None:
+ response = ensure_response(1)
+ cmd = self.publish_command_cls.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+
+ def test_base_response_class(self) -> None:
+ response = ensure_response(Response(body=1, headers={"1": 1}))
+ cmd = self.publish_command_cls.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {"1": 1}
+
+
+class BatchPublishCommandTestcase(BasePublishCommandTestcase):
+ publish_command_cls: type[BatchPublishCommand]
+
+ @pytest.mark.parametrize(
+ ("data", "expected_body"),
+ (
+ pytest.param(None, (), id="None Response"),
+ pytest.param((), (), id="Empty Sequence"),
+ pytest.param("123", ("123",), id="String Response"),
+ pytest.param("", ("",), id="Empty String Response"),
+ pytest.param(b"", (b"",), id="Empty Bytes Response"),
+ pytest.param([1, 2, 3], (1, 2, 3), id="Sequence Data"),
+            pytest.param(
+                [0, 1, 2],
+                (0, 1, 2),
+                id="Sequence Data with falsy first element",
+            ),
+ ),
+ )
+ def test_batch_response(self, data: Any, expected_body: Any) -> None:
+ response = ensure_response(data)
+ cmd = self.publish_command_cls.from_cmd(
+ response.as_publish_command(),
+ batch=True,
+ )
+ assert cmd.batch_bodies == expected_body
+
+ def test_batch_bodies_setter(self) -> None:
+ response = ensure_response(None)
+        cmd = self.publish_command_cls.from_cmd(
+            response.as_publish_command(),
+            batch=True,
+        )
+ cmd.batch_bodies = (1, 2, 3)
+
+ assert cmd.batch_bodies == (1, 2, 3)
+ assert cmd.body == 1
+ assert cmd.extra_bodies == (2, 3)
+
+ def test_batch_bodies_empty_setter(self) -> None:
+ response = ensure_response((1, 2, 3))
+ cmd = self.publish_command_cls.from_cmd(
+ response.as_publish_command(),
+ batch=True,
+ )
+ cmd.batch_bodies = ()
+
+ assert cmd.batch_bodies == ()
+ assert cmd.body is None
+ assert cmd.extra_bodies == ()
diff --git a/tests/brokers/base/requests.py b/tests/brokers/base/requests.py
index 78dcdcb58b..16414f47d3 100644
--- a/tests/brokers/base/requests.py
+++ b/tests/brokers/base/requests.py
@@ -1,3 +1,5 @@
+import asyncio
+
import anyio
import pytest
@@ -8,42 +10,36 @@ class RequestsTestcase(BaseTestcaseConfig):
def get_middleware(self, **kwargs):
raise NotImplementedError
- def get_broker(self, **kwargs):
- raise NotImplementedError
-
def get_router(self, **kwargs):
raise NotImplementedError
- def patch_broker(self, broker, **kwargs):
- return broker
-
- async def test_request_timeout(self, queue: str):
+ async def test_request_timeout(self, queue: str) -> None:
broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(msg):
- await anyio.sleep(1.0)
+ async def handler(msg) -> str:
+ await anyio.sleep(0.01)
return "Response"
async with self.patch_broker(broker):
await broker.start()
- with pytest.raises(TimeoutError):
+ with pytest.raises((TimeoutError, asyncio.TimeoutError)):
await broker.request(
None,
queue,
timeout=1e-24,
)
- async def test_broker_base_request(self, queue: str):
+ async def test_broker_base_request(self, queue: str) -> None:
broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> str:
return "Response"
async with self.patch_broker(broker):
@@ -59,7 +55,7 @@ async def handler(msg):
assert await response.decode() == "Response"
assert response.correlation_id == "1", response.correlation_id
- async def test_publisher_base_request(self, queue: str):
+ async def test_publisher_base_request(self, queue: str) -> None:
broker = self.get_broker()
publisher = broker.publisher(queue)
@@ -67,7 +63,7 @@ async def test_publisher_base_request(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> str:
return "Response"
async with self.patch_broker(broker):
@@ -82,7 +78,7 @@ async def handler(msg):
assert await response.decode() == "Response"
assert response.correlation_id == "1", response.correlation_id
- async def test_router_publisher_request(self, queue: str):
+ async def test_router_publisher_request(self, queue: str) -> None:
router = self.get_router()
publisher = router.publisher(queue)
@@ -90,7 +86,7 @@ async def test_router_publisher_request(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> str:
return "Response"
broker = self.get_broker()
@@ -108,7 +104,7 @@ async def handler(msg):
assert await response.decode() == "Response"
assert response.correlation_id == "1", response.correlation_id
- async def test_broker_request_respect_middleware(self, queue: str):
+ async def test_broker_request_respect_middleware(self, queue: str) -> None:
broker = self.get_broker(middlewares=(self.get_middleware(),))
args, kwargs = self.get_subscriber_params(queue)
@@ -128,7 +124,9 @@ async def handler(msg):
assert await response.decode() == "x" * 2 * 2 * 2 * 2
- async def test_broker_publisher_request_respect_middleware(self, queue: str):
+ async def test_broker_publisher_request_respect_middleware(
+ self, queue: str
+ ) -> None:
broker = self.get_broker(middlewares=(self.get_middleware(),))
publisher = broker.publisher(queue)
@@ -149,7 +147,9 @@ async def handler(msg):
assert await response.decode() == "x" * 2 * 2 * 2 * 2
- async def test_router_publisher_request_respect_middleware(self, queue: str):
+ async def test_router_publisher_request_respect_middleware(
+ self, queue: str
+ ) -> None:
router = self.get_router(middlewares=(self.get_middleware(),))
publisher = router.publisher(queue)
diff --git a/tests/brokers/base/router.py b/tests/brokers/base/router.py
index 02d60cd578..a29c8e63ec 100644
--- a/tests/brokers/base/router.py
+++ b/tests/brokers/base/router.py
@@ -1,59 +1,48 @@
import asyncio
-from typing import Type
from unittest.mock import Mock
import pytest
-from faststream import BaseMiddleware, Depends
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.broker.router import ArgsContainer, BrokerRouter, SubscriberRoute
-from faststream.types import AnyCallable
+from faststream import Depends
+from faststream._internal.broker.router import (
+ ArgsContainer,
+ BrokerRouter,
+ SubscriberRoute,
+)
from tests.brokers.base.middlewares import LocalMiddlewareTestcase
from tests.brokers.base.parser import LocalCustomParserTestcase
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class RouterTestcase(
LocalMiddlewareTestcase,
LocalCustomParserTestcase,
):
- build_message: AnyCallable
- route_class: Type[SubscriberRoute]
- publisher_class: Type[ArgsContainer]
-
- def patch_broker(self, br: BrokerUsecase, router: BrokerRouter) -> BrokerUsecase:
- br.include_router(router)
- return br
-
- @pytest.fixture
- def pub_broker(self, broker):
- return broker
-
- @pytest.fixture
- def raw_broker(self, pub_broker):
- return pub_broker
+ route_class: type[SubscriberRoute]
+ publisher_class: type[ArgsContainer]
async def test_empty_prefix(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", queue)),
+ asyncio.create_task(br.publish("hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -64,26 +53,27 @@ def subscriber(m):
async def test_not_empty_prefix(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
router.prefix = "test_"
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")),
+ asyncio.create_task(br.publish("hello", f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -94,24 +84,25 @@ def subscriber(m):
async def test_include_with_prefix(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
pub_broker.include_router(router, prefix="test_")
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")),
+ asyncio.create_task(br.publish("hello", f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -122,31 +113,32 @@ def subscriber(m):
async def test_empty_prefix_publisher(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
@router.publisher(queue + "resp")
- def subscriber(m):
+ def subscriber(m) -> str:
return "hi"
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@router.subscriber(*args2, **kwargs2)
- def response(m):
+ def response(m) -> None:
event.set()
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", queue)),
+ asyncio.create_task(br.publish("hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -157,33 +149,34 @@ def response(m):
async def test_not_empty_prefix_publisher(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
router.prefix = "test_"
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
@router.publisher(queue + "resp")
- def subscriber(m):
+ def subscriber(m) -> str:
return "hi"
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@router.subscriber(*args2, **kwargs2)
- def response(m):
+ def response(m) -> None:
event.set()
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")),
+ asyncio.create_task(br.publish("hello", f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -194,10 +187,12 @@ def response(m):
async def test_manual_publisher(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
router.prefix = "test_"
p = router.publisher(queue + "resp")
@@ -205,23 +200,22 @@ async def test_manual_publisher(
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- async def subscriber(m):
+ async def subscriber(m) -> None:
await p.publish("resp")
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@router.subscriber(*args2, **kwargs2)
- def response(m):
+ def response(m) -> None:
event.set()
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")),
+ asyncio.create_task(br.publish("hello", f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -231,29 +225,30 @@ def response(m):
async def test_delayed_handlers(
self,
- event: asyncio.Event,
router: BrokerRouter,
queue: str,
- pub_broker: BrokerUsecase,
- ):
- def response(m):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
+ def response(m) -> None:
event.set()
args, kwargs = self.get_subscriber_params(queue)
- r = type(router)(
+ router = type(router)(
prefix="test_",
handlers=(self.route_class(response, *args, **kwargs),),
)
- pub_broker.include_router(r)
-
- async with pub_broker:
- await pub_broker.start()
+ pub_broker.include_router(router)
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")),
+ asyncio.create_task(br.publish("hello", f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -263,12 +258,14 @@ def response(m):
async def test_delayed_publishers(
self,
- event: asyncio.Event,
router: BrokerRouter,
queue: str,
- pub_broker: BrokerUsecase,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
def response(m):
return m
@@ -291,16 +288,16 @@ def response(m):
args, kwargs = self.get_subscriber_params(f"test_{queue}1")
@pub_broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", f"test_{queue}")),
+ asyncio.create_task(br.publish("hello", f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -313,18 +310,20 @@ async def handler(msg):
async def test_nested_routers_sub(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
core_router = type(router)(prefix="test1_")
router.prefix = "test2_"
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> str:
event.set()
mock(m)
return "hi"
@@ -332,14 +331,12 @@ def subscriber(m):
core_router.include_routers(router)
pub_broker.include_routers(core_router)
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(
- pub_broker.publish("hello", f"test1_test2_{queue}")
- ),
+ asyncio.create_task(br.publish("hello", f"test1_test2_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -351,10 +348,12 @@ def subscriber(m):
async def test_nested_routers_pub(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
core_router = type(router)(prefix="test1_")
router.prefix = "test2_"
@@ -362,28 +361,26 @@ async def test_nested_routers_pub(
@router.subscriber(*args, **kwargs)
@router.publisher(queue + "resp")
- def subscriber(m):
+ def subscriber(m) -> str:
return "hi"
args2, kwargs2 = self.get_subscriber_params(
- "test1_" + "test2_" + queue + "resp"
+ "test1_" + "test2_" + queue + "resp",
)
@pub_broker.subscriber(*args2, **kwargs2)
- def response(m):
+ def response(m) -> None:
event.set()
core_router.include_routers(router)
- pub_broker.include_routers(core_router)
+ pub_broker.include_router(core_router)
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(
- pub_broker.publish("hello", f"test1_test2_{queue}")
- ),
+ asyncio.create_task(br.publish("hello", f"test1_test2_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -394,31 +391,34 @@ def response(m):
async def test_router_dependencies(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- ):
+ ) -> None:
+ pub_broker = self.get_broker()
+
router = type(router)(dependencies=(Depends(lambda: 1),))
router2 = type(router)(dependencies=(Depends(lambda: 2),))
args, kwargs = self.get_subscriber_params(
- queue, dependencies=(Depends(lambda: 3),)
+ queue,
+ dependencies=(Depends(lambda: 3),),
)
@router2.subscriber(*args, **kwargs)
- def subscriber(): ...
+ def subscriber() -> None: ...
router.include_router(router2)
pub_broker.include_routers(router)
- sub = next(iter(pub_broker._subscribers.values()))
+ sub = next(iter(pub_broker._subscribers))
assert len((*sub._broker_dependencies, *sub.calls[0].dependencies)) == 3
async def test_router_include_with_dependencies(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- ):
+ ) -> None:
+ pub_broker = self.get_broker()
+
router2 = type(router)()
args, kwargs = self.get_subscriber_params(
@@ -427,71 +427,83 @@ async def test_router_include_with_dependencies(
)
@router2.subscriber(*args, **kwargs)
- def subscriber(): ...
+ def subscriber() -> None: ...
router.include_router(router2, dependencies=(Depends(lambda: 2),))
pub_broker.include_router(router, dependencies=(Depends(lambda: 1),))
- sub = next(iter(pub_broker._subscribers.values()))
+ sub = next(iter(pub_broker._subscribers))
dependencies = (*sub._broker_dependencies, *sub.calls[0].dependencies)
assert len(dependencies) == 3, dependencies
async def test_router_middlewares(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- ):
- router = type(router)(middlewares=(BaseMiddleware,))
- router2 = type(router)(middlewares=(BaseMiddleware,))
+ ) -> None:
+ pub_broker = self.get_broker()
+
+ router = type(router)(middlewares=(1,))
+ router2 = type(router)(middlewares=(2,))
args, kwargs = self.get_subscriber_params(queue, middlewares=(3,))
@router2.subscriber(*args, **kwargs)
@router2.publisher(queue, middlewares=(3,))
- def subscriber(): ...
+ def subscriber() -> None: ...
router.include_router(router2)
pub_broker.include_routers(router)
- sub = next(iter(pub_broker._subscribers.values()))
- publisher = next(iter(pub_broker._publishers.values()))
+ sub = next(iter(pub_broker._subscribers))
+ publisher = next(iter(pub_broker._publishers))
- assert len((*sub._broker_middlewares, *sub.calls[0].item_middlewares)) == 3
- assert len((*publisher._broker_middlewares, *publisher._middlewares)) == 3
+ subscriber_middlewares = (
+ *sub._broker_middlewares,
+ *sub.calls[0].item_middlewares,
+ )
+ assert subscriber_middlewares == (1, 2, 3)
+
+ publisher_middlewares = (*publisher._broker_middlewares, *publisher.middlewares)
+ assert publisher_middlewares == (1, 2, 3)
async def test_router_include_with_middlewares(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- ):
+ ) -> None:
+ pub_broker = self.get_broker()
+
router2 = type(router)()
args, kwargs = self.get_subscriber_params(queue, middlewares=(3,))
@router2.subscriber(*args, **kwargs)
@router2.publisher(queue, middlewares=(3,))
- def subscriber(): ...
+ def subscriber() -> None: ...
- router.include_router(router2, middlewares=(BaseMiddleware,))
- pub_broker.include_router(router, middlewares=(BaseMiddleware,))
+ router.include_router(router2, middlewares=(2,))
+ pub_broker.include_router(router, middlewares=(1,))
- sub = next(iter(pub_broker._subscribers.values()))
- publisher = next(iter(pub_broker._publishers.values()))
+ sub = next(iter(pub_broker._subscribers))
+ publisher = next(iter(pub_broker._publishers))
sub_middlewares = (*sub._broker_middlewares, *sub.calls[0].item_middlewares)
- assert len(sub_middlewares) == 3, sub_middlewares
- assert len((*publisher._broker_middlewares, *publisher._middlewares)) == 3
+ assert sub_middlewares == (1, 2, 3), sub_middlewares
+
+ publisher_middlewares = (*publisher._broker_middlewares, *publisher.middlewares)
+ assert publisher_middlewares == (1, 2, 3)
async def test_router_parser(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
async def parser(msg, original):
mock.parser()
return await original(msg)
@@ -508,17 +520,16 @@ async def decoder(msg, original):
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- def subscriber(s):
+ def subscriber(s) -> None:
event.set()
- pub_broker.include_routers(router)
-
- async with pub_broker:
- await pub_broker.start()
+ pub_broker.include_router(router)
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", queue)),
+ asyncio.create_task(br.publish("hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -531,11 +542,13 @@ def subscriber(s):
async def test_router_parser_override(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
async def global_parser(msg, original): # pragma: no cover
mock()
return await original(msg)
@@ -560,17 +573,16 @@ async def decoder(msg, original):
args, kwargs = self.get_subscriber_params(queue, parser=parser, decoder=decoder)
@router.subscriber(*args, **kwargs)
- def subscriber(s):
+ def subscriber(s) -> None:
event.set()
- pub_broker.include_routers(router)
-
- async with pub_broker:
- await pub_broker.start()
+ pub_broker.include_router(router)
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", queue)),
+ asyncio.create_task(br.publish("hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -581,38 +593,63 @@ def subscriber(s):
mock.parser.assert_called_once()
mock.decoder.assert_called_once()
+ async def test_router_in_init(
+ self,
+ router: BrokerRouter,
+ queue: str,
+ ) -> None:
+ event = asyncio.Event()
-@pytest.mark.asyncio
-class RouterLocalTestcase(RouterTestcase):
- @pytest.fixture
- def pub_broker(self, test_broker):
- return test_broker
+ args, kwargs = self.get_subscriber_params(queue)
+
+ @router.subscriber(*args, **kwargs)
+ def subscriber(m) -> None:
+ event.set()
+
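+ # Routers can be attached at construction time via the `routers` kwarg
+ # instead of a later include_router() call.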
+ pub_broker = self.get_broker(routers=[router])
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
+
+ await asyncio.wait(
+ (
+ asyncio.create_task(br.publish("hello", queue)),
+ asyncio.create_task(event.wait()),
+ ),
+ timeout=self.timeout,
+ )
+
+ assert event.is_set()
+
+
+@pytest.mark.asyncio()
+class RouterLocalTestcase(RouterTestcase):
async def test_publisher_mock(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
pub = router.publisher(queue + "resp")
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
@pub
- def subscriber(m):
+ def subscriber(m) -> str:
event.set()
return "hi"
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", queue)),
+ asyncio.create_task(br.publish("hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -624,25 +661,26 @@ def subscriber(m):
async def test_subscriber_mock(
self,
router: BrokerRouter,
- pub_broker: BrokerUsecase,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
args, kwargs = self.get_subscriber_params(queue)
@router.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> str:
event.set()
return "hi"
pub_broker.include_router(router)
-
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(pub_broker.publish("hello", queue)),
+ asyncio.create_task(br.publish("hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -652,18 +690,22 @@ def subscriber(m):
subscriber.mock.assert_called_with("hello")
async def test_manual_publisher_mock(
- self, router: BrokerRouter, queue: str, pub_broker: BrokerUsecase
- ):
+ self,
+ router: BrokerRouter,
+ queue: str,
+ ) -> None:
+ pub_broker = self.get_broker()
+
publisher = router.publisher(queue + "resp")
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def m(m):
+ async def m(m) -> None:
await publisher.publish("response")
pub_broker.include_router(router)
- async with pub_broker:
- await pub_broker.start()
- await pub_broker.publish("hello", queue)
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
+ await br.publish("hello", queue)
publisher.mock.assert_called_with("response")
diff --git a/tests/brokers/base/rpc.py b/tests/brokers/base/rpc.py
deleted file mode 100644
index dcdd8e85e0..0000000000
--- a/tests/brokers/base/rpc.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import asyncio
-from abc import abstractstaticmethod
-from typing import Any
-from unittest.mock import MagicMock
-
-import anyio
-import pytest
-
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.utils.functions import timeout_scope
-
-from .basic import BaseTestcaseConfig
-
-
-class BrokerRPCTestcase(BaseTestcaseConfig):
- @abstractstaticmethod
- def get_broker(self, apply_types: bool = False) -> BrokerUsecase[Any, Any]:
- raise NotImplementedError
-
- def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]:
- return broker
-
- @pytest.mark.asyncio
- async def test_rpc(self, queue: str):
- rpc_broker = self.get_broker()
-
- args, kwargs = self.get_subscriber_params(queue)
-
- @rpc_broker.subscriber(*args, **kwargs)
- async def m(m):
- return "Hi!"
-
- async with self.patch_broker(rpc_broker) as br:
- await br.start()
- r = await br.publish("hello", queue, rpc_timeout=3, rpc=True)
-
- assert r == "Hi!"
-
- @pytest.mark.asyncio
- async def test_rpc_timeout_raises(self, queue: str):
- rpc_broker = self.get_broker()
-
- args, kwargs = self.get_subscriber_params(queue)
-
- @rpc_broker.subscriber(*args, **kwargs)
- async def m(m): # pragma: no cover
- await anyio.sleep(1)
-
- async with self.patch_broker(rpc_broker) as br:
- await br.start()
-
- with pytest.raises(TimeoutError): # pragma: no branch
- await br.publish(
- "hello",
- queue,
- rpc=True,
- rpc_timeout=0,
- raise_timeout=True,
- )
-
- @pytest.mark.asyncio
- async def test_rpc_timeout_none(self, queue: str):
- rpc_broker = self.get_broker()
-
- args, kwargs = self.get_subscriber_params(queue)
-
- @rpc_broker.subscriber(*args, **kwargs)
- async def m(m): # pragma: no cover
- await anyio.sleep(1)
-
- async with self.patch_broker(rpc_broker) as br:
- await br.start()
-
- r = await br.publish(
- "hello",
- queue,
- rpc=True,
- rpc_timeout=0,
- )
-
- assert r is None
-
- @pytest.mark.asyncio
- async def test_rpc_with_reply(
- self,
- queue: str,
- mock: MagicMock,
- event: asyncio.Event,
- ):
- rpc_broker = self.get_broker()
-
- reply_queue = queue + "1"
-
- args, kwargs = self.get_subscriber_params(reply_queue)
-
- @rpc_broker.subscriber(*args, **kwargs)
- async def response_hanler(m: str):
- mock(m)
- event.set()
-
- args2, kwargs2 = self.get_subscriber_params(queue)
-
- @rpc_broker.subscriber(*args2, **kwargs2)
- async def m(m):
- return "1"
-
- async with self.patch_broker(rpc_broker) as br:
- await br.start()
-
- await br.publish("hello", queue, reply_to=reply_queue)
-
- with timeout_scope(3, True):
- await event.wait()
-
- mock.assert_called_with("1")
-
-
-class ReplyAndConsumeForbidden:
- @pytest.mark.asyncio
- async def test_rpc_with_reply_and_callback(self):
- rpc_broker = self.get_broker()
-
- async with rpc_broker:
- with pytest.raises(ValueError): # noqa: PT011
- await rpc_broker.publish(
- "hello",
- "some",
- reply_to="some",
- rpc=True,
- rpc_timeout=0,
- )
diff --git a/tests/brokers/base/testclient.py b/tests/brokers/base/testclient.py
index 4cc45da756..2076561581 100644
--- a/tests/brokers/base/testclient.py
+++ b/tests/brokers/base/testclient.py
@@ -5,42 +5,50 @@
import anyio
import pytest
-from faststream.testing.broker import TestBroker
-from faststream.types import AnyCallable
-from tests.brokers.base.consume import BrokerConsumeTestcase
-from tests.brokers.base.publish import BrokerPublishTestcase
-from tests.brokers.base.rpc import BrokerRPCTestcase
+from .consume import BrokerConsumeTestcase
+from .publish import BrokerPublishTestcase
-class BrokerTestclientTestcase(
- BrokerPublishTestcase,
- BrokerConsumeTestcase,
- BrokerRPCTestcase,
-):
- build_message: AnyCallable
- test_class: TestBroker
-
+class BrokerTestclientTestcase(BrokerPublishTestcase, BrokerConsumeTestcase):
@abstractmethod
def get_fake_producer_class(self) -> type:
raise NotImplementedError
- @pytest.mark.asyncio
- async def test_subscriber_mock(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_correct_clean_fake_subscribers(self) -> None:
+ broker = self.get_broker()
+
+ @broker.subscriber("test")
+ async def handler1(msg) -> None: ...
+
+ broker.publisher("test2")
+ broker.publisher("test")
+
+ assert len(broker._subscribers) == 1
+
+ test_client = self.patch_broker(broker)
+ async with test_client as br:
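+ # One extra (fake) subscriber is expected inside the test client; it
+ # presumably backs the "test2" publisher, since "test" already has a handler.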
+ assert len(br._subscribers) == 2
+
+ assert len(broker._subscribers) == 1
+
+ @pytest.mark.asyncio()
+ async def test_subscriber_mock(self, queue: str) -> None:
test_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@test_broker.subscriber(*args, **kwargs)
- async def m(msg):
+ async def m(msg) -> None:
pass
- async with self.test_class(test_broker):
- await test_broker.start()
- await test_broker.publish("hello", queue)
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
+ await br.publish("hello", queue)
m.mock.assert_called_once_with("hello")
- @pytest.mark.asyncio
- async def test_publisher_mock(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_publisher_mock(self, queue: str) -> None:
test_broker = self.get_broker()
publisher = test_broker.publisher(queue + "resp")
@@ -49,16 +57,16 @@ async def test_publisher_mock(self, queue: str):
@publisher
@test_broker.subscriber(*args, **kwargs)
- async def m(msg):
+ async def m(msg) -> str:
return "response"
- async with self.test_class(test_broker):
- await test_broker.start()
- await test_broker.publish("hello", queue)
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
+ await br.publish("hello", queue)
publisher.mock.assert_called_with("response")
- @pytest.mark.asyncio
- async def test_publisher_with_subscriber__mock(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_publisher_with_subscriber__mock(self, queue: str) -> None:
test_broker = self.get_broker()
publisher = test_broker.publisher(queue + "resp")
@@ -67,25 +75,25 @@ async def test_publisher_with_subscriber__mock(self, queue: str):
@publisher
@test_broker.subscriber(*args, **kwargs)
- async def m(msg):
+ async def m(msg) -> str:
return "response"
args2, kwargs2 = self.get_subscriber_params(queue + "resp")
@test_broker.subscriber(*args2, **kwargs2)
- async def handler_response(msg): ...
+ async def handler_response(msg) -> None: ...
- async with self.test_class(test_broker):
- await test_broker.start()
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
- assert len(test_broker._subscribers) == 2
+ assert len(br._subscribers) == 2
- await test_broker.publish("hello", queue)
+ await br.publish("hello", queue)
publisher.mock.assert_called_with("response")
handler_response.mock.assert_called_once_with("response")
- @pytest.mark.asyncio
- async def test_manual_publisher_mock(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_manual_publisher_mock(self, queue: str) -> None:
test_broker = self.get_broker()
publisher = test_broker.publisher(queue + "resp")
@@ -93,52 +101,72 @@ async def test_manual_publisher_mock(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@test_broker.subscriber(*args, **kwargs)
- async def m(msg):
+ async def m(msg) -> None:
await publisher.publish("response")
- async with self.test_class(test_broker):
- await test_broker.start()
- await test_broker.publish("hello", queue)
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
+ await br.publish("hello", queue)
publisher.mock.assert_called_with("response")
- @pytest.mark.asyncio
- async def test_exception_raises(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_exception_raises(self, queue: str) -> None:
test_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@test_broker.subscriber(*args, **kwargs)
async def m(msg): # pragma: no cover
- raise ValueError()
+ raise ValueError
- async with self.test_class(test_broker):
- await test_broker.start()
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
with pytest.raises(ValueError): # noqa: PT011
- await test_broker.publish("hello", queue)
+ await br.publish("hello", queue)
- async def test_broker_gets_patched_attrs_within_cm(self):
+ @pytest.mark.asyncio()
+ async def test_parser_exception_raises(self, queue: str) -> None:
+ test_broker = self.get_broker()
+
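+ # A parser that always fails: the test client should re-raise the error
+ # from publish() instead of hiding it in the consume loop.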
+ def parser(msg):
+ raise ValueError
+
+ args, kwargs = self.get_subscriber_params(queue, parser=parser)
+
+ @test_broker.subscriber(*args, **kwargs)
+ async def m(msg): # pragma: no cover
+ pass
+
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
+
+ with pytest.raises(ValueError): # noqa: PT011
+ await br.publish("hello", queue)
+
+ async def test_broker_gets_patched_attrs_within_cm(self, fake_producer_cls) -> None:
test_broker = self.get_broker()
- fake_producer_class = self.get_fake_producer_class()
await test_broker.start()
- async with self.test_class(test_broker) as br:
+ old_producer = test_broker._producer
+
+ async with self.patch_broker(test_broker) as br:
assert isinstance(br.start, Mock)
assert isinstance(br._connect, Mock)
assert isinstance(br.close, Mock)
- assert isinstance(br._producer, fake_producer_class)
+ assert isinstance(br._producer, fake_producer_cls)
assert not isinstance(br.start, Mock)
assert not isinstance(br._connect, Mock)
assert not isinstance(br.close, Mock)
assert br._connection is not None
- assert not isinstance(br._producer, fake_producer_class)
+ assert br._producer == old_producer
- async def test_broker_with_real_doesnt_get_patched(self):
+ async def test_broker_with_real_doesnt_get_patched(self) -> None:
test_broker = self.get_broker()
await test_broker.start()
- async with self.test_class(test_broker, with_real=True) as br:
+ async with self.patch_broker(test_broker, with_real=True) as br:
assert not isinstance(br.start, Mock)
assert not isinstance(br._connect, Mock)
assert not isinstance(br.close, Mock)
@@ -146,8 +174,9 @@ async def test_broker_with_real_doesnt_get_patched(self):
assert br._producer is not None
async def test_broker_with_real_patches_publishers_and_subscribers(
- self, queue: str
- ):
+ self,
+ queue: str,
+ ) -> None:
test_broker = self.get_broker()
publisher = test_broker.publisher(f"{queue}1")
@@ -155,12 +184,12 @@ async def test_broker_with_real_patches_publishers_and_subscribers(
args, kwargs = self.get_subscriber_params(queue)
@test_broker.subscriber(*args, **kwargs)
- async def m(msg):
+ async def m(msg) -> None:
await publisher.publish(f"response: {msg}")
await test_broker.start()
- async with self.test_class(test_broker, with_real=True) as br:
+ async with self.patch_broker(test_broker, with_real=True) as br:
await br.publish("hello", queue)
await m.wait_call(self.timeout)
diff --git a/tests/brokers/confluent/basic.py b/tests/brokers/confluent/basic.py
index 6fffc1c976..4b9e626695 100644
--- a/tests/brokers/confluent/basic.py
+++ b/tests/brokers/confluent/basic.py
@@ -1,17 +1,24 @@
-from typing import Any, Dict, Tuple
+from typing import Any
-from faststream.confluent import TopicPartition
-from tests.brokers.base.basic import BaseTestcaseConfig as _Base
+from faststream.confluent import (
+ KafkaBroker,
+ KafkaRouter,
+ TestKafkaBroker,
+ TopicPartition,
+)
+from tests.brokers.base.basic import BaseTestcaseConfig
-class ConfluentTestcaseConfig(_Base):
+class ConfluentTestcaseConfig(BaseTestcaseConfig):
timeout: float = 10.0
def get_subscriber_params(
- self, *topics: Any, **kwargs: Any
- ) -> Tuple[
- Tuple[Any, ...],
- Dict[str, Any],
+ self,
+ *topics: Any,
+ **kwargs: Any,
+ ) -> tuple[
+ tuple[Any, ...],
+ dict[str, Any],
]:
if len(topics) == 1:
partitions = [TopicPartition(topics[0], partition=0, offset=0)]
@@ -25,3 +32,21 @@ def get_subscriber_params(
"partitions": partitions,
**kwargs,
}
+
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> KafkaBroker:
+ return KafkaBroker(apply_types=apply_types, **kwargs)
+
+ def patch_broker(self, broker: KafkaBroker, **kwargs: Any) -> KafkaBroker:
+ return broker
+
+ def get_router(self, **kwargs: Any) -> KafkaRouter:
+ return KafkaRouter(**kwargs)
+
+
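+# In-memory variant: patch_broker wraps the broker in TestKafkaBroker so the
+# shared test cases run against the test client instead of a real Kafka.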
+class ConfluentMemoryTestcaseConfig(ConfluentTestcaseConfig):
+ def patch_broker(self, broker: KafkaBroker, **kwargs: Any) -> KafkaBroker:
+ return TestKafkaBroker(broker, **kwargs)
diff --git a/tests/brokers/confluent/conftest.py b/tests/brokers/confluent/conftest.py
index 291ca36f09..e6983b6dda 100644
--- a/tests/brokers/confluent/conftest.py
+++ b/tests/brokers/confluent/conftest.py
@@ -1,9 +1,8 @@
from dataclasses import dataclass
import pytest
-import pytest_asyncio
-from faststream.confluent import KafkaBroker, KafkaRouter, TestKafkaBroker
+from faststream.confluent import KafkaRouter
@dataclass
@@ -18,27 +17,6 @@ def settings():
return Settings()
-@pytest.fixture
+@pytest.fixture()
def router():
return KafkaRouter()
-
-
-@pytest_asyncio.fixture()
-async def broker(settings):
- broker = KafkaBroker(settings.url, apply_types=False)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def full_broker(settings):
- broker = KafkaBroker(settings.url)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def test_broker():
- broker = KafkaBroker()
- async with TestKafkaBroker(broker) as br:
- yield br
diff --git a/tests/brokers/confluent/test_connect.py b/tests/brokers/confluent/test_connect.py
index 0861c8f9d5..5a5ee7dc6b 100644
--- a/tests/brokers/confluent/test_connect.py
+++ b/tests/brokers/confluent/test_connect.py
@@ -4,7 +4,7 @@
from tests.brokers.base.connection import BrokerConnectionTestcase
-def test_correct_config():
+def test_correct_config() -> None:
broker = KafkaBroker(
config={
"compression.codec": config.CompressionCodec.none,
@@ -18,7 +18,7 @@ def test_correct_config():
"builtin.features": config.BuiltinFeatures.gzip,
"debug": config.Debug.broker,
"group.protocol": config.GroupProtocol.classic,
- }
+ },
)
assert broker.config.as_config_dict() == {
@@ -36,7 +36,7 @@ def test_correct_config():
}
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestConnection(BrokerConnectionTestcase):
broker = KafkaBroker
diff --git a/tests/brokers/confluent/test_consume.py b/tests/brokers/confluent/test_consume.py
index 7dff51fbcb..806a37a62a 100644
--- a/tests/brokers/confluent/test_consume.py
+++ b/tests/brokers/confluent/test_consume.py
@@ -3,7 +3,7 @@
import pytest
-from faststream.confluent import KafkaBroker
+from faststream import AckPolicy
from faststream.confluent.annotations import KafkaMessage
from faststream.confluent.client import AsyncConfluentConsumer
from faststream.exceptions import AckMessage
@@ -13,15 +13,12 @@
from .basic import ConfluentTestcaseConfig
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestConsume(ConfluentTestcaseConfig, BrokerRealConsumeTestcase):
"""A class to represent a test Kafka broker."""
- def get_broker(self, apply_types: bool = False):
- return KafkaBroker(apply_types=apply_types)
-
- @pytest.mark.asyncio
- async def test_consume_batch(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_consume_batch(self, queue: str) -> None:
consume_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=1)
@@ -29,7 +26,7 @@ async def test_consume_batch(self, queue: str):
args, kwargs = self.get_subscriber_params(queue, batch=True)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(consume_broker) as br:
@@ -44,25 +41,26 @@ async def handler(msg):
assert [{1, "hi"}] == [set(r.result()) for r in result]
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_batch_headers(
self,
- mock,
- event: asyncio.Event,
+ mock: MagicMock,
queue: str,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue, batch=True)
@consume_broker.subscriber(*args, **kwargs)
- def subscriber(m, msg: KafkaMessage):
+ def subscriber(m, msg: KafkaMessage) -> None:
check = all(
(
msg.headers,
[msg.headers] == msg.batch_headers,
msg.headers.get("custom") == "1",
- )
+ ),
)
mock(check)
event.set()
@@ -81,21 +79,24 @@ def subscriber(m, msg: KafkaMessage):
assert event.is_set()
mock.assert_called_once_with(True)
- @pytest.mark.asyncio
- @pytest.mark.slow
- async def test_consume_ack(
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
+ async def test_consume_auto_ack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
- queue, group_id="test", auto_commit=False
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg: KafkaMessage):
+ async def handler(msg: KafkaMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -112,7 +113,7 @@ async def handler(msg: KafkaMessage):
br.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -122,21 +123,24 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_consume_ack_manual(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
- queue, group_id="test", auto_commit=False
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg: KafkaMessage):
+ async def handler(msg: KafkaMessage) -> None:
await msg.ack()
event.set()
@@ -159,23 +163,26 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_consume_ack_raise(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
- queue, group_id="test", auto_commit=False
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
@consume_broker.subscriber(*args, **kwargs)
async def handler(msg: KafkaMessage):
event.set()
- raise AckMessage()
+ raise AckMessage
async with self.patch_broker(consume_broker) as br:
await br.start()
@@ -196,21 +203,24 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_nack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
- queue, group_id="test", auto_commit=False
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg: KafkaMessage):
+ async def handler(msg: KafkaMessage) -> None:
await msg.nack()
event.set()
@@ -233,19 +243,22 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- args, kwargs = self.get_subscriber_params(queue, group_id="test", no_ack=True)
+ args, kwargs = self.get_subscriber_params(
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.DO_NOTHING,
+ )
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg: KafkaMessage):
+ async def handler(msg: KafkaMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -262,7 +275,7 @@ async def handler(msg: KafkaMessage):
br.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -272,21 +285,24 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_consume_with_no_auto_commit(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
- queue, auto_commit=False, group_id="test"
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
@consume_broker.subscriber(*args, **kwargs)
- async def subscriber_no_auto_commit(msg: KafkaMessage):
+ async def subscriber_no_auto_commit(msg: KafkaMessage) -> None:
await msg.nack()
event.set()
@@ -294,11 +310,13 @@ async def subscriber_no_auto_commit(msg: KafkaMessage):
event2 = asyncio.Event()
args, kwargs = self.get_subscriber_params(
- queue, auto_commit=True, group_id="test"
+ queue,
+ group_id="test",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
@broker2.subscriber(*args, **kwargs)
- async def subscriber_with_auto_commit(m):
+ async def subscriber_with_auto_commit(m) -> None:
event2.set()
async with self.patch_broker(consume_broker) as br:
@@ -323,9 +341,9 @@ async def subscriber_with_auto_commit(m):
assert event.is_set()
assert event2.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
- async def test_concurrent_consume(self, queue: str, mock: MagicMock):
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
+ async def test_concurrent_consume(self, queue: str, mock: MagicMock) -> None:
event = asyncio.Event()
event2 = asyncio.Event()
diff --git a/tests/brokers/confluent/test_fastapi.py b/tests/brokers/confluent/test_fastapi.py
index 0de5bb7311..1059e4d433 100644
--- a/tests/brokers/confluent/test_fastapi.py
+++ b/tests/brokers/confluent/test_fastapi.py
@@ -1,18 +1,16 @@
import asyncio
-from typing import List
from unittest.mock import Mock
import pytest
from faststream.confluent import KafkaRouter
from faststream.confluent.fastapi import KafkaRouter as StreamRouter
-from faststream.confluent.testing import TestKafkaBroker, build_message
from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase
-from .basic import ConfluentTestcaseConfig
+from .basic import ConfluentMemoryTestcaseConfig, ConfluentTestcaseConfig
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestConfluentRouter(ConfluentTestcaseConfig, FastAPITestcase):
router_class = StreamRouter
broker_router_class = KafkaRouter
@@ -21,22 +19,23 @@ async def test_batch_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue, batch=True)
@router.subscriber(*args, **kwargs)
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
- async with router.broker:
- await router.broker.start()
+ async with self.patch_broker(router.broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("hi", queue)),
+ asyncio.create_task(br.publish("hi", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -46,31 +45,30 @@ async def hello(msg: List[str]):
mock.assert_called_with(["hi"])
-class TestRouterLocal(ConfluentTestcaseConfig, FastAPILocalTestcase):
+class TestRouterLocal(ConfluentMemoryTestcaseConfig, FastAPILocalTestcase):
router_class = StreamRouter
broker_router_class = KafkaRouter
- broker_test = staticmethod(TestKafkaBroker)
- build_message = staticmethod(build_message)
async def test_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue, batch=True)
@router.subscriber(*args, **kwargs)
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
- async with TestKafkaBroker(router.broker):
+ async with self.patch_broker(router.broker) as br:
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("hi", queue)),
+ asyncio.create_task(br.publish("hi", queue)),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
diff --git a/tests/brokers/confluent/test_logger.py b/tests/brokers/confluent/test_logger.py
index 3c2e3a132a..ce1c8f145f 100644
--- a/tests/brokers/confluent/test_logger.py
+++ b/tests/brokers/confluent/test_logger.py
@@ -1,56 +1,28 @@
-import asyncio
import logging
-from typing import Any
import pytest
-from faststream.broker.core.usecase import BrokerUsecase
-from faststream.confluent import KafkaBroker
-
from .basic import ConfluentTestcaseConfig
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestLogger(ConfluentTestcaseConfig):
"""A class to represent a test Kafka broker."""
- def get_broker(self, apply_types: bool = False):
- return KafkaBroker(apply_types=apply_types)
-
- def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]:
- return broker
-
- @pytest.mark.asyncio
- async def test_custom_logger(
- self,
- queue: str,
- event: asyncio.Event,
- ):
+ @pytest.mark.asyncio()
+ async def test_custom_logger(self, queue: str) -> None:
test_logger = logging.getLogger("test_logger")
- consume_broker = KafkaBroker(logger=test_logger)
+ broker = self.get_broker(logger=test_logger)
args, kwargs = self.get_subscriber_params(queue)
- @consume_broker.subscriber(*args, **kwargs)
- def subscriber(m):
- event.set()
-
- async with self.patch_broker(consume_broker) as br:
- await br.start()
-
- for sub in br._subscribers.values():
- consumer_logger = sub.consumer.logger
- assert consumer_logger == test_logger
+ @broker.subscriber(*args, **kwargs)
+ def subscriber(m) -> None: ...
- producer_logger = br._producer._producer.logger
- assert producer_logger == test_logger
+ await broker.start()
- await asyncio.wait(
- (
- asyncio.create_task(br.publish("hello", queue)),
- asyncio.create_task(event.wait()),
- ),
- timeout=10,
- )
+ for sub in broker._subscribers:
+ consumer_logger = sub.consumer.logger_state.logger.logger
+ assert consumer_logger == test_logger
- assert event.is_set()
+ await broker.close()
diff --git a/tests/brokers/confluent/test_middlewares.py b/tests/brokers/confluent/test_middlewares.py
index 17e203879c..b81c9a4325 100644
--- a/tests/brokers/confluent/test_middlewares.py
+++ b/tests/brokers/confluent/test_middlewares.py
@@ -1,27 +1,23 @@
import pytest
-from faststream.confluent import KafkaBroker, TestKafkaBroker
from tests.brokers.base.middlewares import (
ExceptionMiddlewareTestcase,
MiddlewareTestcase,
MiddlewaresOrderTestcase,
)
-from .basic import ConfluentTestcaseConfig
+from .basic import ConfluentMemoryTestcaseConfig, ConfluentTestcaseConfig
-@pytest.mark.confluent
-class TestMiddlewares(ConfluentTestcaseConfig, MiddlewareTestcase):
- broker_class = KafkaBroker
-
+class TestMiddlewaresOrder(ConfluentMemoryTestcaseConfig, MiddlewaresOrderTestcase):
+ pass
-@pytest.mark.confluent
-class TestExceptionMiddlewares(ConfluentTestcaseConfig, ExceptionMiddlewareTestcase):
- broker_class = KafkaBroker
+@pytest.mark.confluent()
+class TestMiddlewares(ConfluentTestcaseConfig, MiddlewareTestcase):
+ pass
-class TestMiddlewaresOrder(MiddlewaresOrderTestcase):
- broker_class = KafkaBroker
- def patch_broker(self, broker: KafkaBroker) -> TestKafkaBroker:
- return TestKafkaBroker(broker)
+@pytest.mark.confluent()
+class TestExceptionMiddlewares(ConfluentTestcaseConfig, ExceptionMiddlewareTestcase):
+ pass
diff --git a/tests/brokers/confluent/test_misconfigure.py b/tests/brokers/confluent/test_misconfigure.py
index bcc115fee0..0f4ab6e180 100644
--- a/tests/brokers/confluent/test_misconfigure.py
+++ b/tests/brokers/confluent/test_misconfigure.py
@@ -1,11 +1,84 @@
import pytest
-from faststream.confluent import KafkaBroker
+from faststream import AckPolicy
+from faststream.confluent import KafkaBroker, TopicPartition
+from faststream.confluent.subscriber.specified import (
+ SpecificationConcurrentDefaultSubscriber,
+)
from faststream.exceptions import SetupError
def test_max_workers_with_manual(queue: str) -> None:
broker = KafkaBroker()
- with pytest.raises(SetupError):
+ with pytest.warns(DeprecationWarning):
+ sub = broker.subscriber(queue, max_workers=3, auto_commit=True)
+ assert isinstance(sub, SpecificationConcurrentDefaultSubscriber)
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
broker.subscriber(queue, max_workers=3, auto_commit=False)
+
+
+def test_max_workers_with_ack_policy(queue: str) -> None:
+ broker = KafkaBroker()
+
+ sub = broker.subscriber(queue, max_workers=3, ack_policy=AckPolicy.ACK_FIRST)
+ assert isinstance(sub, SpecificationConcurrentDefaultSubscriber)
+
+ with pytest.raises(SetupError):
+ broker.subscriber(queue, max_workers=3, ack_policy=AckPolicy.REJECT_ON_ERROR)
+
+
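+# auto_commit/no_ack appear to be deprecated aliases for AckPolicy: passing
+# either should emit a DeprecationWarning rather than fail outright.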
+def test_deprecated_options(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", auto_commit=False)
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, auto_commit=True)
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", no_ack=False)
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", no_ack=True)
+
+
+def test_deprecated_conflicts_actual(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, auto_commit=False, ack_policy=AckPolicy.ACK)
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, no_ack=False, ack_policy=AckPolicy.ACK)
+
+
+def test_manual_ack_policy_without_group(queue: str) -> None:
+ broker = KafkaBroker()
+
+ broker.subscriber(queue, group_id="test", ack_policy=AckPolicy.DO_NOTHING)
+
+ with pytest.raises(SetupError):
+ broker.subscriber(queue, ack_policy=AckPolicy.DO_NOTHING)
+
+
+def test_manual_commit_without_group(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", auto_commit=False)
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, auto_commit=False)
+
+
+def test_wrong_destination(queue: str) -> None:
+ broker = KafkaBroker()
+
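+ # A destination is required, and a topic cannot be combined with
+ # explicit partitions; both misuses should raise SetupError.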
+ with pytest.raises(SetupError):
+ broker.subscriber()
+
+ with pytest.raises(SetupError):
+ broker.subscriber(queue, partitions=[TopicPartition(queue, 1)])
diff --git a/tests/brokers/confluent/test_parser.py b/tests/brokers/confluent/test_parser.py
index 36a407e100..65aa2bff15 100644
--- a/tests/brokers/confluent/test_parser.py
+++ b/tests/brokers/confluent/test_parser.py
@@ -1,11 +1,10 @@
import pytest
-from faststream.confluent import KafkaBroker
from tests.brokers.base.parser import CustomParserTestcase
from .basic import ConfluentTestcaseConfig
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestCustomParser(ConfluentTestcaseConfig, CustomParserTestcase):
- broker_class = KafkaBroker
+ pass
diff --git a/tests/brokers/confluent/test_publish.py b/tests/brokers/confluent/test_publish.py
index c337953397..7ed7a76ac8 100644
--- a/tests/brokers/confluent/test_publish.py
+++ b/tests/brokers/confluent/test_publish.py
@@ -4,19 +4,16 @@
import pytest
from faststream import Context
-from faststream.confluent import KafkaBroker, KafkaResponse
+from faststream.confluent import KafkaResponse
from tests.brokers.base.publish import BrokerPublishTestcase
from .basic import ConfluentTestcaseConfig
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestPublish(ConfluentTestcaseConfig, BrokerPublishTestcase):
- def get_broker(self, apply_types: bool = False):
- return KafkaBroker(apply_types=apply_types)
-
- @pytest.mark.asyncio
- async def test_publish_batch(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_publish_batch(self, queue: str) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@@ -24,7 +21,7 @@ async def test_publish_batch(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(pub_broker) as br:
@@ -42,8 +39,8 @@ async def handler(msg):
assert {1, "hi"} == {r.result() for r in result}
- @pytest.mark.asyncio
- async def test_batch_publisher_manual(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_batch_publisher_manual(self, queue: str) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@@ -51,7 +48,7 @@ async def test_batch_publisher_manual(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
publisher = pub_broker.publisher(queue, batch=True)
@@ -71,8 +68,8 @@ async def handler(msg):
assert {1, "hi"} == {r.result() for r in result}
- @pytest.mark.asyncio
- async def test_batch_publisher_decorator(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_batch_publisher_decorator(self, queue: str) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@@ -80,7 +77,7 @@ async def test_batch_publisher_decorator(self, queue: str):
args, kwargs = self.get_subscriber_params(queue)
@pub_broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@@ -105,13 +102,14 @@ async def pub(m):
assert {1, "hi"} == {r.result() for r in result}
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -124,7 +122,7 @@ async def handle():
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@pub_broker.subscriber(*args2, **kwargs2)
- async def handle_next(msg=Context("message")):
+ async def handle_next(msg=Context("message")) -> None:
mock(body=msg.body)
event.set()
diff --git a/tests/brokers/confluent/test_publish_command.py b/tests/brokers/confluent/test_publish_command.py
new file mode 100644
index 0000000000..cb53928c71
--- /dev/null
+++ b/tests/brokers/confluent/test_publish_command.py
@@ -0,0 +1,14 @@
+from faststream.confluent.response import KafkaPublishCommand, KafkaResponse
+from faststream.response import ensure_response
+from tests.brokers.base.publish_command import BatchPublishCommandTestcase
+
+
+class TestPublishCommand(BatchPublishCommandTestcase):
+ publish_command_cls = KafkaPublishCommand
+
+ def test_kafka_response_class(self) -> None:
+ response = ensure_response(KafkaResponse(body=1, headers={"1": 1}, key=b"1"))
+ cmd = self.publish_command_cls.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {"1": 1}
+ assert cmd.key == b"1"
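
`ensure_response` is what lets a handler return either a plain value or a `KafkaResponse`; a minimal illustration of the behaviour this test relies on (behaviour and the `Response` import path inferred, not shown in this patch):

# Assumed behaviour of faststream.response.ensure_response; illustration only.
from faststream.response import Response, ensure_response

wrapped = ensure_response("plain body")  # a bare value gets wrapped
assert isinstance(wrapped, Response)

resp = Response(body=1)
assert ensure_response(resp) is resp  # an existing Response passes through
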
diff --git a/tests/brokers/confluent/test_requests.py b/tests/brokers/confluent/test_requests.py
index 39f4677113..fac1b09331 100644
--- a/tests/brokers/confluent/test_requests.py
+++ b/tests/brokers/confluent/test_requests.py
@@ -1,31 +1,23 @@
+from typing import Any
+
import pytest
from faststream import BaseMiddleware
-from faststream.confluent import KafkaBroker, KafkaRouter, TestKafkaBroker
from tests.brokers.base.requests import RequestsTestcase
-from .basic import ConfluentTestcaseConfig
+from .basic import ConfluentMemoryTestcaseConfig
class Mid(BaseMiddleware):
async def on_receive(self) -> None:
- self.msg._raw_msg = self.msg._raw_msg * 2
+ self.msg._raw_msg *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body = msg._decoded_body * 2
+ msg.body *= 2
return await call_next(msg)
-@pytest.mark.asyncio
-class TestRequestTestClient(ConfluentTestcaseConfig, RequestsTestcase):
- def get_middleware(self, **kwargs):
+@pytest.mark.asyncio()
+class TestRequestTestClient(ConfluentMemoryTestcaseConfig, RequestsTestcase):
+ def get_middleware(self, **kwargs: Any):
return Mid
-
- def get_broker(self, **kwargs):
- return KafkaBroker(**kwargs)
-
- def get_router(self, **kwargs):
- return KafkaRouter(**kwargs)
-
- def patch_broker(self, broker, **kwargs):
- return TestKafkaBroker(broker, **kwargs)
diff --git a/tests/brokers/confluent/test_router.py b/tests/brokers/confluent/test_router.py
index 746857d9de..c26198d8d4 100644
--- a/tests/brokers/confluent/test_router.py
+++ b/tests/brokers/confluent/test_router.py
@@ -1,19 +1,20 @@
import pytest
-from faststream.confluent import KafkaPublisher, KafkaRoute, KafkaRouter
+from faststream.confluent import (
+ KafkaPublisher,
+ KafkaRoute,
+)
from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase
-from .basic import ConfluentTestcaseConfig
+from .basic import ConfluentMemoryTestcaseConfig, ConfluentTestcaseConfig
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestRouter(ConfluentTestcaseConfig, RouterTestcase):
- broker_class = KafkaRouter
route_class = KafkaRoute
publisher_class = KafkaPublisher
-class TestRouterLocal(ConfluentTestcaseConfig, RouterLocalTestcase):
- broker_class = KafkaRouter
+class TestRouterLocal(ConfluentMemoryTestcaseConfig, RouterLocalTestcase):
route_class = KafkaRoute
publisher_class = KafkaPublisher
diff --git a/tests/brokers/confluent/test_security.py b/tests/brokers/confluent/test_security.py
index 489c95fbe2..20db04ff90 100644
--- a/tests/brokers/confluent/test_security.py
+++ b/tests/brokers/confluent/test_security.py
@@ -1,5 +1,4 @@
from contextlib import contextmanager
-from typing import Tuple
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
@@ -8,7 +7,7 @@
@contextmanager
-def patch_aio_consumer_and_producer() -> Tuple[MagicMock, MagicMock]:
+def patch_aio_consumer_and_producer() -> tuple[MagicMock, MagicMock]:
try:
producer = MagicMock(return_value=AsyncMock())
@@ -21,9 +20,9 @@ def patch_aio_consumer_and_producer() -> Tuple[MagicMock, MagicMock]:
pass
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_base_security_pass_ssl_context():
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_base_security_pass_ssl_context() -> None:
import ssl
from faststream.confluent import KafkaBroker
@@ -34,9 +33,13 @@ async def test_base_security_pass_ssl_context():
basic_broker = KafkaBroker("localhost:9092", security=security)
- with patch_aio_consumer_and_producer(), pytest.raises(
- SetupError, match="not supported"
- ) as e:
+ with (
+ patch_aio_consumer_and_producer(),
+ pytest.raises(
+ SetupError,
+ match="not supported",
+ ) as e,
+ ):
async with basic_broker:
pass
diff --git a/tests/brokers/confluent/test_test_client.py b/tests/brokers/confluent/test_test_client.py
index 82e9aefe91..01fc161746 100644
--- a/tests/brokers/confluent/test_test_client.py
+++ b/tests/brokers/confluent/test_test_client.py
@@ -3,70 +3,59 @@
import pytest
-from faststream import BaseMiddleware
-from faststream.confluent import KafkaBroker, TestKafkaBroker
+from faststream import AckPolicy, BaseMiddleware
from faststream.confluent.annotations import KafkaMessage
from faststream.confluent.message import FAKE_CONSUMER
from faststream.confluent.testing import FakeProducer
from tests.brokers.base.testclient import BrokerTestclientTestcase
from tests.tools import spy_decorator
-from .basic import ConfluentTestcaseConfig
+from .basic import ConfluentMemoryTestcaseConfig
-@pytest.mark.asyncio
-class TestTestclient(ConfluentTestcaseConfig, BrokerTestclientTestcase):
- """A class to represent a test Kafka broker."""
-
- test_class = TestKafkaBroker
-
- def get_broker(self, apply_types: bool = False):
- return KafkaBroker(apply_types=apply_types)
-
- def patch_broker(self, broker: KafkaBroker) -> TestKafkaBroker:
- return TestKafkaBroker(broker)
-
- def get_fake_producer_class(self) -> type:
- return FakeProducer
-
+@pytest.mark.asyncio()
+class TestTestclient(ConfluentMemoryTestcaseConfig, BrokerTestclientTestcase):
async def test_message_nack_seek(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
@broker.subscriber(
queue,
group_id=f"{queue}-consume",
- auto_commit=False,
auto_offset_reset="earliest",
+ ack_policy=AckPolicy.REJECT_ON_ERROR,
)
- async def m(msg: KafkaMessage):
+ async def m(msg: KafkaMessage) -> None:
await msg.nack()
async with self.patch_broker(broker) as br:
with patch.object(
- FAKE_CONSUMER, "seek", spy_decorator(FAKE_CONSUMER.seek)
+ FAKE_CONSUMER,
+ "seek",
+ spy_decorator(FAKE_CONSUMER.seek),
) as mocked:
await br.publish("hello", queue)
m.mock.assert_called_once_with("hello")
mocked.mock.assert_called_once()
- @pytest.mark.confluent
+ @pytest.mark.confluent()
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
- async with TestKafkaBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -80,11 +69,11 @@ def subscriber(m):
async def test_batch_pub_by_default_pub(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(queue, batch=True)
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -94,11 +83,11 @@ async def m(msg):
async def test_batch_pub_by_pub_batch(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(queue, batch=True)
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -108,7 +97,7 @@ async def m(msg):
async def test_batch_publisher_mock(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
publisher = broker.publisher(queue + "1", batch=True)
@@ -123,7 +112,7 @@ async def m(msg):
m.mock.assert_called_once_with("hello")
publisher.mock.assert_called_once_with([1, 2, 3])
- async def test_respect_middleware(self, queue):
+ async def test_respect_middleware(self, queue) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -131,22 +120,22 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = KafkaBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(msg) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(msg) -> None: ...
- async with TestKafkaBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
assert len(routes) == 2
- @pytest.mark.confluent
- async def test_real_respect_middleware(self, queue):
+ @pytest.mark.confluent()
+ async def test_real_respect_middleware(self, queue) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -154,19 +143,19 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = KafkaBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def h1(): ...
+ async def h1(msg) -> None: ...
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@broker.subscriber(*args2, **kwargs2)
- async def h2(): ...
+ async def h2(msg) -> None: ...
- async with TestKafkaBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
await h1.wait_call(10)
@@ -177,81 +166,90 @@ async def h2(): ...
async def test_multiple_subscribers_different_groups(
self,
queue: str,
- test_broker: KafkaBroker,
- ):
- @test_broker.subscriber(queue, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
- @test_broker.subscriber(queue, group_id="group2")
- async def subscriber2(): ...
+ @broker.subscriber(queue, group_id="group1")
+ async def subscriber1(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ @broker.subscriber(queue, group_id="group2")
+ async def subscriber2(msg) -> None: ...
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 1
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
+
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 1
async def test_multiple_subscribers_same_group(
self,
queue: str,
- test_broker: KafkaBroker,
- ):
- @test_broker.subscriber(queue, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
- @test_broker.subscriber(queue, group_id="group1")
- async def subscriber2(): ...
+ @broker.subscriber(queue, group_id="group1")
+ async def subscriber1(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ @broker.subscriber(queue, group_id="group1")
+ async def subscriber2(msg) -> None: ...
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 0
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
+
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 0
async def test_multiple_batch_subscriber_with_different_group(
self,
- test_broker: KafkaBroker,
queue: str,
- ):
- @test_broker.subscriber(queue, batch=True, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
- @test_broker.subscriber(queue, batch=True, group_id="group2")
- async def subscriber2(): ...
+ @broker.subscriber(queue, batch=True, group_id="group1")
+ async def subscriber1(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ @broker.subscriber(queue, batch=True, group_id="group2")
+ async def subscriber2(msg) -> None: ...
+
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 1
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 1
async def test_multiple_batch_subscriber_with_same_group(
self,
- test_broker: KafkaBroker,
queue: str,
- ):
- @test_broker.subscriber(queue, batch=True, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
+
+ @broker.subscriber(queue, batch=True, group_id="group1")
+ async def subscriber1(msg) -> None: ...
- @test_broker.subscriber(queue, batch=True, group_id="group1")
- async def subscriber2(): ...
+ @broker.subscriber(queue, batch=True, group_id="group1")
+ async def subscriber2(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 0
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 0
- @pytest.mark.confluent
- async def test_broker_gets_patched_attrs_within_cm(self):
- await super().test_broker_gets_patched_attrs_within_cm()
+ @pytest.mark.confluent()
+ async def test_broker_gets_patched_attrs_within_cm(self) -> None:
+ await super().test_broker_gets_patched_attrs_within_cm(FakeProducer)
- @pytest.mark.confluent
- async def test_broker_with_real_doesnt_get_patched(self):
+ @pytest.mark.confluent()
+ async def test_broker_with_real_doesnt_get_patched(self) -> None:
await super().test_broker_with_real_doesnt_get_patched()
- @pytest.mark.confluent
+ @pytest.mark.confluent()
async def test_broker_with_real_patches_publishers_and_subscribers(
- self, queue: str
- ):
+ self,
+ queue: str,
+ ) -> None:
await super().test_broker_with_real_patches_publishers_and_subscribers(queue)
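
`test_broker_gets_patched_attrs_within_cm` now receives the broker-specific fake producer class as an argument instead of asking subclasses for a `get_fake_producer_class()` hook; the base test presumably does something along these lines (signature and body assumed):

# Assumed shape of the base test in tests/brokers/base/testclient.py;
# a sketch, not the actual implementation.
async def test_broker_gets_patched_attrs_within_cm(self, fake_producer_cls: type) -> None:
    broker = self.get_broker()
    async with self.patch_broker(broker) as br:
        # Inside the test context manager the producer is the in-memory fake.
        assert isinstance(br._producer, fake_producer_cls)
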
diff --git a/tests/brokers/confluent/test_test_reentrancy.py b/tests/brokers/confluent/test_test_reentrancy.py
index 6ed219e2ee..bdf35f58e7 100644
--- a/tests/brokers/confluent/test_test_reentrancy.py
+++ b/tests/brokers/confluent/test_test_reentrancy.py
@@ -15,20 +15,22 @@
@to_output_data
@broker.subscriber(
- partitions=[TopicPartition(first_topic_name, 0)], auto_offset_reset="earliest"
+ partitions=[TopicPartition(first_topic_name, 0)],
+ auto_offset_reset="earliest",
)
async def on_input_data(msg: int):
return msg + 1
@broker.subscriber(
- partitions=[TopicPartition(out_topic_name, 0)], auto_offset_reset="earliest"
+ partitions=[TopicPartition(out_topic_name, 0)],
+ auto_offset_reset="earliest",
)
-async def on_output_data(msg: int):
+async def on_output_data(msg: int) -> None:
pass
-async def _test_with_broker(with_real: bool):
+async def _test_with_broker(with_real: bool) -> None:
async with TestKafkaBroker(broker, with_real=with_real) as tester:
await tester.publish(1, first_topic_name)
@@ -39,22 +41,22 @@ async def _test_with_broker(with_real: bool):
on_output_data.mock.assert_called_once_with(2)
-@pytest.mark.asyncio
-async def test_with_fake_broker():
+@pytest.mark.asyncio()
+async def test_with_fake_broker() -> None:
await _test_with_broker(False)
await _test_with_broker(False)
-@pytest.mark.asyncio
-@pytest.mark.confluent
-async def test_with_real_broker():
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_with_real_broker() -> None:
await _test_with_broker(True)
await _test_with_broker(True)
-async def _test_with_temp_subscriber():
+async def _test_with_temp_subscriber() -> None:
@broker.subscriber("output_data", auto_offset_reset="earliest")
- async def on_output_data(msg: int):
+ async def on_output_data(msg: int) -> None:
pass
async with TestKafkaBroker(broker) as tester:
@@ -67,13 +69,13 @@ async def on_output_data(msg: int):
on_output_data.mock.assert_called_once_with(2)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@pytest.mark.skip(
reason=(
"Failed due `on_output_data` subscriber creates inside test and doesn't removed after "
"https://github.com/airtai/faststream/issues/556"
- )
+ ),
)
-async def test_with_temp_subscriber():
+async def test_with_temp_subscriber() -> None:
await _test_with_temp_subscriber()
await _test_with_temp_subscriber()
diff --git a/tests/brokers/kafka/basic.py b/tests/brokers/kafka/basic.py
new file mode 100644
index 0000000000..39c095a637
--- /dev/null
+++ b/tests/brokers/kafka/basic.py
@@ -0,0 +1,24 @@
+from typing import Any
+
+from faststream.kafka import KafkaBroker, KafkaRouter, TestKafkaBroker
+from tests.brokers.base.basic import BaseTestcaseConfig
+
+
+class KafkaTestcaseConfig(BaseTestcaseConfig):
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> KafkaBroker:
+ return KafkaBroker(apply_types=apply_types, **kwargs)
+
+ def patch_broker(self, broker: KafkaBroker, **kwargs: Any) -> KafkaBroker:
+ return broker
+
+ def get_router(self, **kwargs: Any) -> KafkaRouter:
+ return KafkaRouter(**kwargs)
+
+
+class KafkaMemoryTestcaseConfig(KafkaTestcaseConfig):
+ def patch_broker(self, broker: KafkaBroker, **kwargs: Any) -> KafkaBroker:
+ return TestKafkaBroker(broker, **kwargs)
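
These mixins are composed with the shared testcase bases via multiple inheritance, exactly as the rewritten test classes below do; a minimal usage sketch (class and test names invented):

# Usage sketch of the config mixins above; names invented for illustration.
import pytest

from tests.brokers.base.publish import BrokerPublishTestcase
from tests.brokers.kafka.basic import KafkaMemoryTestcaseConfig


class TestExample(KafkaMemoryTestcaseConfig, BrokerPublishTestcase):
    @pytest.mark.asyncio()
    async def test_roundtrip(self, queue: str) -> None:
        broker = self.get_broker()  # plain KafkaBroker

        @broker.subscriber(queue)
        async def handler(msg) -> None: ...

        # patch_broker wraps it in TestKafkaBroker, so no real Kafka is needed.
        async with self.patch_broker(broker) as br:
            await br.publish("hello", queue)
            handler.mock.assert_called_once_with("hello")
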
diff --git a/tests/brokers/kafka/conftest.py b/tests/brokers/kafka/conftest.py
index c20d0f9f28..4936da6428 100644
--- a/tests/brokers/kafka/conftest.py
+++ b/tests/brokers/kafka/conftest.py
@@ -1,9 +1,8 @@
from dataclasses import dataclass
import pytest
-import pytest_asyncio
-from faststream.kafka import KafkaBroker, KafkaRouter, TestKafkaBroker
+from faststream.kafka import KafkaRouter
@dataclass
@@ -16,27 +15,6 @@ def settings():
return Settings()
-@pytest.fixture
+@pytest.fixture()
def router():
return KafkaRouter()
-
-
-@pytest_asyncio.fixture()
-async def broker(settings):
- broker = KafkaBroker(settings.url, apply_types=False)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def full_broker(settings):
- broker = KafkaBroker(settings.url)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def test_broker():
- broker = KafkaBroker()
- async with TestKafkaBroker(broker) as br:
- yield br
diff --git a/tests/brokers/kafka/test_connect.py b/tests/brokers/kafka/test_connect.py
index 8feb6dd4e7..40cfe9f8c4 100644
--- a/tests/brokers/kafka/test_connect.py
+++ b/tests/brokers/kafka/test_connect.py
@@ -4,7 +4,7 @@
from tests.brokers.base.connection import BrokerConnectionTestcase
-@pytest.mark.kafka
+@pytest.mark.kafka()
class TestConnection(BrokerConnectionTestcase):
broker = KafkaBroker
diff --git a/tests/brokers/kafka/test_consume.py b/tests/brokers/kafka/test_consume.py
index ed7fc52f7c..d9160ddca0 100644
--- a/tests/brokers/kafka/test_consume.py
+++ b/tests/brokers/kafka/test_consume.py
@@ -3,41 +3,43 @@
import pytest
from aiokafka import AIOKafkaConsumer
+from aiokafka.structs import RecordMetadata
+from faststream import AckPolicy
from faststream.exceptions import AckMessage
-from faststream.kafka import KafkaBroker, TopicPartition
+from faststream.kafka import TopicPartition
from faststream.kafka.annotations import KafkaMessage
from tests.brokers.base.consume import BrokerRealConsumeTestcase
from tests.tools import spy_decorator
+from .basic import KafkaTestcaseConfig
-@pytest.mark.kafka
-class TestConsume(BrokerRealConsumeTestcase):
- def get_broker(self, apply_types: bool = False):
- return KafkaBroker(apply_types=apply_types)
- @pytest.mark.asyncio
+@pytest.mark.kafka()
+class TestConsume(KafkaTestcaseConfig, BrokerRealConsumeTestcase):
+ @pytest.mark.asyncio()
async def test_consume_by_pattern(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(queue)
- async def handler(msg):
+ async def handler(msg) -> None:
event.set()
pattern_event = asyncio.Event()
@consume_broker.subscriber(pattern=f"{queue[:-1]}*")
- async def pattern_handler(msg):
+ async def pattern_handler(msg) -> None:
pattern_event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
- await br.publish(1, topic=queue)
+ result = await br.publish(1, topic=queue)
await asyncio.wait(
(
@@ -47,18 +49,19 @@ async def pattern_handler(msg):
),
timeout=3,
)
+ assert isinstance(result, RecordMetadata), result
assert event.is_set()
assert pattern_event.is_set()
- @pytest.mark.asyncio
- async def test_consume_batch(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_consume_batch(self, queue: str) -> None:
consume_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=1)
@consume_broker.subscriber(queue, batch=True)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(consume_broker) as br:
@@ -73,23 +76,24 @@ async def handler(msg):
assert [{1, "hi"}] == [set(r.result()) for r in result]
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_batch_headers(
self,
mock,
- event: asyncio.Event,
queue: str,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, batch=True)
- def subscriber(m, msg: KafkaMessage):
+ def subscriber(m, msg: KafkaMessage) -> None:
check = all(
(
msg.headers,
[msg.headers] == msg.batch_headers,
msg.headers.get("custom") == "1",
- )
+ ),
)
mock(check)
event.set()
@@ -108,24 +112,29 @@ def subscriber(m, msg: KafkaMessage):
assert event.is_set()
mock.assert_called_once_with(True)
- @pytest.mark.asyncio
- @pytest.mark.slow
- async def test_consume_ack(
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
+ async def test_consume_auto_ack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, group_id="test", auto_commit=False)
- async def handler(msg: KafkaMessage):
+ @consume_broker.subscriber(
+ queue, group_id="test", ack_policy=AckPolicy.REJECT_ON_ERROR
+ )
+ async def handler(msg: KafkaMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit)
+ AIOKafkaConsumer,
+ "commit",
+ spy_decorator(AIOKafkaConsumer.commit),
) as m:
await asyncio.wait(
(
@@ -133,7 +142,7 @@ async def handler(msg: KafkaMessage):
consume_broker.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -143,18 +152,19 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_manual_partition_consume(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
tp1 = TopicPartition(queue, partition=0)
@consume_broker.subscriber(partitions=[tp1])
- async def handler_tp1(msg):
+ async def handler_tp1(msg) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -170,17 +180,20 @@ async def handler_tp1(msg):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_consume_ack_manual(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, group_id="test", auto_commit=False)
- async def handler(msg: KafkaMessage):
+ @consume_broker.subscriber(
+ queue, group_id="test", ack_policy=AckPolicy.REJECT_ON_ERROR
+ )
+ async def handler(msg: KafkaMessage) -> None:
await msg.ack()
event.set()
@@ -188,7 +201,9 @@ async def handler(msg: KafkaMessage):
await br.start()
with patch.object(
- AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit)
+ AIOKafkaConsumer,
+ "commit",
+ spy_decorator(AIOKafkaConsumer.commit),
) as m:
await asyncio.wait(
(
@@ -196,7 +211,7 @@ async def handler(msg: KafkaMessage):
br.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -206,25 +221,30 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
- async def test_consume_ack_raise(
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
+ async def test_consume_ack_by_raise(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, group_id="test", auto_commit=False)
+ @consume_broker.subscriber(
+ queue, group_id="test", ack_policy=AckPolicy.REJECT_ON_ERROR
+ )
async def handler(msg: KafkaMessage):
event.set()
- raise AckMessage()
+ raise AckMessage
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit)
+ AIOKafkaConsumer,
+ "commit",
+ spy_decorator(AIOKafkaConsumer.commit),
) as m:
await asyncio.wait(
(
@@ -232,7 +252,7 @@ async def handler(msg: KafkaMessage):
br.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -242,17 +262,20 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
- async def test_nack(
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
+ async def test_manual_nack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, group_id="test", auto_commit=False)
- async def handler(msg: KafkaMessage):
+ @consume_broker.subscriber(
+ queue, group_id="test", ack_policy=AckPolicy.REJECT_ON_ERROR
+ )
+ async def handler(msg: KafkaMessage) -> None:
await msg.nack()
event.set()
@@ -260,7 +283,9 @@ async def handler(msg: KafkaMessage):
await br.start()
with patch.object(
- AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit)
+ AIOKafkaConsumer,
+ "commit",
+ spy_decorator(AIOKafkaConsumer.commit),
) as m:
await asyncio.wait(
(
@@ -268,7 +293,7 @@ async def handler(msg: KafkaMessage):
br.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -278,24 +303,29 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
async def test_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, group_id="test", no_ack=True)
- async def handler(msg: KafkaMessage):
+ @consume_broker.subscriber(
+ queue, group_id="test", ack_policy=AckPolicy.DO_NOTHING
+ )
+ async def handler(msg: KafkaMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit)
+ AIOKafkaConsumer,
+ "commit",
+ spy_decorator(AIOKafkaConsumer.commit),
) as m:
await asyncio.wait(
(
@@ -303,7 +333,7 @@ async def handler(msg: KafkaMessage):
br.publish(
"hello",
queue,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -313,9 +343,9 @@ async def handler(msg: KafkaMessage):
assert event.is_set()
- @pytest.mark.asyncio
- @pytest.mark.slow
- async def test_concurrent_consume(self, queue: str, mock: MagicMock):
+ @pytest.mark.asyncio()
+ @pytest.mark.slow()
+ async def test_concurrent_consume(self, queue: str, mock: MagicMock) -> None:
event = asyncio.Event()
event2 = asyncio.Event()
@@ -324,7 +354,7 @@ async def test_concurrent_consume(self, queue: str, mock: MagicMock):
args, kwargs = self.get_subscriber_params(queue, max_workers=2)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> None:
mock()
if event.is_set():
event2.set()
@@ -352,7 +382,7 @@ async def handler(msg):
assert event2.is_set()
assert mock.call_count == 2, mock.call_count
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_without_value(
self,
mock: MagicMock,
@@ -371,7 +401,9 @@ async def handler(msg):
await asyncio.wait(
(
- asyncio.create_task(br._producer._producer.send(queue, key=b"")),
+ asyncio.create_task(
+ br._producer._producer.producer.send(queue, key=b"")
+ ),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -379,7 +411,7 @@ async def handler(msg):
mock.assert_called_once_with(b"")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_batch_without_value(
self,
mock: MagicMock,
@@ -398,7 +430,9 @@ async def handler(msg):
await asyncio.wait(
(
- asyncio.create_task(br._producer._producer.send(queue, key=b"")),
+ asyncio.create_task(
+ br._producer._producer.producer.send(queue, key=b"")
+ ),
asyncio.create_task(event.wait()),
),
timeout=3,
diff --git a/tests/brokers/kafka/test_fastapi.py b/tests/brokers/kafka/test_fastapi.py
index 509466bc65..3da7a8bf51 100644
--- a/tests/brokers/kafka/test_fastapi.py
+++ b/tests/brokers/kafka/test_fastapi.py
@@ -1,16 +1,16 @@
import asyncio
-from typing import List
from unittest.mock import Mock
import pytest
from faststream.kafka import KafkaRouter
from faststream.kafka.fastapi import KafkaRouter as StreamRouter
-from faststream.kafka.testing import TestKafkaBroker, build_message
from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase
+from .basic import KafkaMemoryTestcaseConfig
-@pytest.mark.kafka
+
+@pytest.mark.kafka()
class TestKafkaRouter(FastAPITestcase):
router_class = StreamRouter
broker_router_class = KafkaRouter
@@ -19,12 +19,13 @@ async def test_batch_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(queue, batch=True)
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
@@ -42,29 +43,28 @@ async def hello(msg: List[str]):
mock.assert_called_with(["hi"])
-class TestRouterLocal(FastAPILocalTestcase):
+class TestRouterLocal(KafkaMemoryTestcaseConfig, FastAPILocalTestcase):
router_class = StreamRouter
broker_router_class = KafkaRouter
- broker_test = staticmethod(TestKafkaBroker)
- build_message = staticmethod(build_message)
async def test_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(queue, batch=True)
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
- async with TestKafkaBroker(router.broker):
+ async with self.patch_broker(router.broker) as br:
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("hi", queue)),
+ asyncio.create_task(br.publish("hi", queue)),
asyncio.create_task(event.wait()),
),
timeout=3,
diff --git a/tests/brokers/kafka/test_middlewares.py b/tests/brokers/kafka/test_middlewares.py
index 13f7e79349..58d5bf7d31 100644
--- a/tests/brokers/kafka/test_middlewares.py
+++ b/tests/brokers/kafka/test_middlewares.py
@@ -1,25 +1,23 @@
import pytest
-from faststream.kafka import KafkaBroker, TestKafkaBroker
from tests.brokers.base.middlewares import (
ExceptionMiddlewareTestcase,
MiddlewareTestcase,
MiddlewaresOrderTestcase,
)
+from .basic import KafkaMemoryTestcaseConfig, KafkaTestcaseConfig
-@pytest.mark.kafka
-class TestMiddlewares(MiddlewareTestcase):
- broker_class = KafkaBroker
+class TestMiddlewaresOrder(KafkaMemoryTestcaseConfig, MiddlewaresOrderTestcase):
+ pass
-@pytest.mark.kafka
-class TestExceptionMiddlewares(ExceptionMiddlewareTestcase):
- broker_class = KafkaBroker
+@pytest.mark.kafka()
+class TestMiddlewares(KafkaTestcaseConfig, MiddlewareTestcase):
+ pass
-class TestMiddlewaresOrder(MiddlewaresOrderTestcase):
- broker_class = KafkaBroker
- def patch_broker(self, broker: KafkaBroker) -> TestKafkaBroker:
- return TestKafkaBroker(broker)
+@pytest.mark.kafka()
+class TestExceptionMiddlewares(KafkaTestcaseConfig, ExceptionMiddlewareTestcase):
+ pass
diff --git a/tests/brokers/kafka/test_misconfigure.py b/tests/brokers/kafka/test_misconfigure.py
index 771c45426f..79bd8bdef8 100644
--- a/tests/brokers/kafka/test_misconfigure.py
+++ b/tests/brokers/kafka/test_misconfigure.py
@@ -1,11 +1,90 @@
import pytest
+from faststream import AckPolicy
from faststream.exceptions import SetupError
-from faststream.kafka import KafkaBroker
+from faststream.kafka import KafkaBroker, TopicPartition
+from faststream.kafka.subscriber.specified import (
+ SpecificationConcurrentDefaultSubscriber,
+)
-def test_max_workers_with_manual(queue: str) -> None:
+def test_deprecated_options(queue: str) -> None:
broker = KafkaBroker()
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", auto_commit=False)
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, auto_commit=True)
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, no_ack=False)
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", no_ack=True)
+
+
+def test_deprecated_conflicts_actual(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, auto_commit=False, ack_policy=AckPolicy.ACK)
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, no_ack=False, ack_policy=AckPolicy.ACK)
+
+
+def test_manual_ack_policy_without_group(queue: str) -> None:
+ broker = KafkaBroker()
+
+ broker.subscriber(queue, group_id="test", ack_policy=AckPolicy.DO_NOTHING)
+
with pytest.raises(SetupError):
+ broker.subscriber(queue, ack_policy=AckPolicy.DO_NOTHING)
+
+
+def test_manual_commit_without_group(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, group_id="test", auto_commit=False)
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
+ broker.subscriber(queue, auto_commit=False)
+
+
+def test_max_workers_with_manual(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.warns(DeprecationWarning):
+ sub = broker.subscriber(queue, max_workers=3, auto_commit=True)
+ assert isinstance(sub, SpecificationConcurrentDefaultSubscriber)
+
+ with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
broker.subscriber(queue, max_workers=3, auto_commit=False)
+
+
+def test_max_workers_with_ack_policy(queue: str) -> None:
+ broker = KafkaBroker()
+
+ sub = broker.subscriber(queue, max_workers=3, ack_policy=AckPolicy.ACK_FIRST)
+ assert isinstance(sub, SpecificationConcurrentDefaultSubscriber)
+
+ with pytest.raises(SetupError):
+ broker.subscriber(queue, max_workers=3, ack_policy=AckPolicy.REJECT_ON_ERROR)
+
+
+def test_wrong_destination(queue: str) -> None:
+ broker = KafkaBroker()
+
+ with pytest.raises(SetupError):
+ broker.subscriber()
+
+ with pytest.raises(SetupError):
+ broker.subscriber(queue, partitions=[TopicPartition(queue, 1)])
+
+ with pytest.raises(SetupError):
+ broker.subscriber(partitions=[TopicPartition(queue, 1)], pattern=".*")
+
+ with pytest.raises(SetupError):
+ broker.subscriber(queue, pattern=".*")
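
Read together, these checks pin down the migration from the deprecated `auto_commit` / `no_ack` flags to `AckPolicy`; the mapping they imply (my reading of the tests above, not an authoritative table):

# Mapping implied by the tests above; illustration only.
from faststream import AckPolicy
from faststream.kafka import KafkaBroker

broker = KafkaBroker()

# old: auto_commit=True (default)        -> ack_policy=AckPolicy.ACK_FIRST
# old: auto_commit=False (manual commit) -> ack_policy=AckPolicy.REJECT_ON_ERROR
# old: no_ack=True (never ack)           -> ack_policy=AckPolicy.DO_NOTHING
# DO_NOTHING (like manual commit before) still requires an explicit group_id,
# and mixing a deprecated flag with ack_policy raises SetupError.
broker.subscriber("in-topic", group_id="test", ack_policy=AckPolicy.DO_NOTHING)
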
diff --git a/tests/brokers/kafka/test_parser.py b/tests/brokers/kafka/test_parser.py
index 1725c15e6c..0e229bbd37 100644
--- a/tests/brokers/kafka/test_parser.py
+++ b/tests/brokers/kafka/test_parser.py
@@ -1,9 +1,10 @@
import pytest
-from faststream.kafka import KafkaBroker
from tests.brokers.base.parser import CustomParserTestcase
+from .basic import KafkaTestcaseConfig
-@pytest.mark.kafka
-class TestCustomParser(CustomParserTestcase):
- broker_class = KafkaBroker
+
+@pytest.mark.kafka()
+class TestCustomParser(KafkaTestcaseConfig, CustomParserTestcase):
+ pass
diff --git a/tests/brokers/kafka/test_publish.py b/tests/brokers/kafka/test_publish.py
index ac2c866362..1662cb6e3a 100644
--- a/tests/brokers/kafka/test_publish.py
+++ b/tests/brokers/kafka/test_publish.py
@@ -1,35 +1,33 @@
import asyncio
-from typing import Any
from unittest.mock import Mock
import pytest
+from aiokafka.structs import RecordMetadata
from faststream import Context
-from faststream.kafka import KafkaBroker, KafkaResponse
+from faststream.kafka import KafkaResponse
from faststream.kafka.exceptions import BatchBufferOverflowException
from tests.brokers.base.publish import BrokerPublishTestcase
+from .basic import KafkaTestcaseConfig
-@pytest.mark.kafka
-class TestPublish(BrokerPublishTestcase):
- def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
- return KafkaBroker(apply_types=apply_types, **kwargs)
- @pytest.mark.asyncio
- async def test_publish_batch(self, queue: str):
+@pytest.mark.kafka()
+class TestPublish(KafkaTestcaseConfig, BrokerPublishTestcase):
+ @pytest.mark.asyncio()
+ async def test_publish_batch(self, queue: str) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@pub_broker.subscriber(queue)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(pub_broker) as br:
await br.start()
- await br.publish_batch(1, "hi", topic=queue)
-
+ record_metadata = await br.publish_batch(1, "hi", topic=queue)
result, _ = await asyncio.wait(
(
asyncio.create_task(msgs_queue.get()),
@@ -37,17 +35,18 @@ async def handler(msg):
),
timeout=3,
)
+ assert isinstance(record_metadata, RecordMetadata)
assert {1, "hi"} == {r.result() for r in result}
- @pytest.mark.asyncio
- async def test_batch_publisher_manual(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_batch_publisher_manual(self, queue: str) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@pub_broker.subscriber(queue)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
publisher = pub_broker.publisher(queue, batch=True)
@@ -67,14 +66,14 @@ async def handler(msg):
assert {1, "hi"} == {r.result() for r in result}
- @pytest.mark.asyncio
- async def test_batch_publisher_decorator(self, queue: str):
+ @pytest.mark.asyncio()
+ async def test_batch_publisher_decorator(self, queue: str) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@pub_broker.subscriber(queue)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
@pub_broker.publisher(queue, batch=True)
@@ -85,7 +84,7 @@ async def pub(m):
async with self.patch_broker(pub_broker) as br:
await br.start()
- await br.publish("", queue + "1")
+ record_metadata = await br.publish("", queue + "1")
result, _ = await asyncio.wait(
(
@@ -94,16 +93,18 @@ async def pub(m):
),
timeout=3,
)
+ assert isinstance(record_metadata, RecordMetadata)
assert {1, "hi"} == {r.result() for r in result}
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@@ -112,7 +113,7 @@ async def handle():
return KafkaResponse(1, key=b"1")
@pub_broker.subscriber(queue + "1")
- async def handle_next(msg=Context("message")):
+ async def handle_next(msg=Context("message")) -> None:
mock(
body=msg.body,
key=msg.raw_message.key,
@@ -136,7 +137,29 @@ async def handle_next(msg=Context("message")):
key=b"1",
)
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
+ async def test_return_future(
+ self,
+ queue: str,
+ mock: Mock,
+ ) -> None:
+ pub_broker = self.get_broker()
+
+ @pub_broker.subscriber(queue)
+ async def handler(m) -> None:
+ pass
+
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
+
+ batch_record_metadata_future = await br.publish_batch(
+ 1, "hi", topic=queue, no_confirm=True
+ )
+ record_metadata_future = await br.publish("", topic=queue, no_confirm=True)
+ assert isinstance(batch_record_metadata_future, asyncio.Future)
+ assert isinstance(record_metadata_future, asyncio.Future)
+
+ @pytest.mark.asyncio()
async def test_raise_buffer_overflow_exception(
self, queue: str, mock: Mock
) -> None:
diff --git a/tests/brokers/kafka/test_publish_command.py b/tests/brokers/kafka/test_publish_command.py
new file mode 100644
index 0000000000..fa7f9d81d3
--- /dev/null
+++ b/tests/brokers/kafka/test_publish_command.py
@@ -0,0 +1,14 @@
+from faststream.kafka.response import KafkaPublishCommand, KafkaResponse
+from faststream.response import ensure_response
+from tests.brokers.base.publish_command import BatchPublishCommandTestcase
+
+
+class TestPublishCommand(BatchPublishCommandTestcase):
+ publish_command_cls = KafkaPublishCommand
+
+ def test_kafka_response_class(self) -> None:
+ response = ensure_response(KafkaResponse(body=1, headers={"1": 1}, key=b"1"))
+ cmd = self.publish_command_cls.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {"1": 1}
+ assert cmd.key == b"1"
diff --git a/tests/brokers/kafka/test_requests.py b/tests/brokers/kafka/test_requests.py
index a518b2fa43..c3d7a4cb4d 100644
--- a/tests/brokers/kafka/test_requests.py
+++ b/tests/brokers/kafka/test_requests.py
@@ -1,29 +1,23 @@
+from typing import Any
+
import pytest
from faststream import BaseMiddleware
-from faststream.kafka import KafkaBroker, KafkaRouter, TestKafkaBroker
from tests.brokers.base.requests import RequestsTestcase
+from .basic import KafkaMemoryTestcaseConfig
+
class Mid(BaseMiddleware):
async def on_receive(self) -> None:
- self.msg.value = self.msg.value * 2
+ self.msg.value *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body = msg._decoded_body * 2
+ msg.body *= 2
return await call_next(msg)
-@pytest.mark.asyncio
-class TestRequestTestClient(RequestsTestcase):
- def get_middleware(self, **kwargs):
+@pytest.mark.asyncio()
+class TestRequestTestClient(KafkaMemoryTestcaseConfig, RequestsTestcase):
+ def get_middleware(self, **kwargs: Any):
return Mid
-
- def get_broker(self, **kwargs):
- return KafkaBroker(**kwargs)
-
- def get_router(self, **kwargs):
- return KafkaRouter(**kwargs)
-
- def patch_broker(self, broker, **kwargs):
- return TestKafkaBroker(broker, **kwargs)
diff --git a/tests/brokers/kafka/test_router.py b/tests/brokers/kafka/test_router.py
index 17ef78d942..e9b27f5a01 100644
--- a/tests/brokers/kafka/test_router.py
+++ b/tests/brokers/kafka/test_router.py
@@ -1,17 +1,20 @@
import pytest
-from faststream.kafka import KafkaPublisher, KafkaRoute, KafkaRouter
+from faststream.kafka import (
+ KafkaPublisher,
+ KafkaRoute,
+)
from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase
+from .basic import KafkaMemoryTestcaseConfig, KafkaTestcaseConfig
-@pytest.mark.kafka
-class TestRouter(RouterTestcase):
- broker_class = KafkaRouter
+
+@pytest.mark.kafka()
+class TestRouter(KafkaTestcaseConfig, RouterTestcase):
route_class = KafkaRoute
publisher_class = KafkaPublisher
-class TestRouterLocal(RouterLocalTestcase):
- broker_class = KafkaRouter
+class TestRouterLocal(KafkaMemoryTestcaseConfig, RouterLocalTestcase):
route_class = KafkaRoute
publisher_class = KafkaPublisher
diff --git a/tests/brokers/kafka/test_stuff.py b/tests/brokers/kafka/test_stuff.py
deleted file mode 100644
index 83c5c61d86..0000000000
--- a/tests/brokers/kafka/test_stuff.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import pytest
-
-from faststream.kafka import KafkaBroker
-
-
-def test_wrong_subscriber():
- broker = KafkaBroker()
-
- with pytest.raises(ValueError): # noqa: PT011
- broker.subscriber("test", auto_commit=False)(lambda: None)
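
The deleted check is not lost: `test_misconfigure.py` above covers the same misuse, tightened from a bare `ValueError` to `SetupError` plus a `DeprecationWarning` for the legacy flag:

# Equivalent of the deleted test, as now expressed in test_misconfigure.py.
import pytest

from faststream.exceptions import SetupError
from faststream.kafka import KafkaBroker

broker = KafkaBroker()

with pytest.raises(SetupError), pytest.warns(DeprecationWarning):
    broker.subscriber("test", auto_commit=False)  # manual commit without group_id
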
diff --git a/tests/brokers/kafka/test_test_client.py b/tests/brokers/kafka/test_test_client.py
index 27fa522a11..6f28a38353 100644
--- a/tests/brokers/kafka/test_test_client.py
+++ b/tests/brokers/kafka/test_test_client.py
@@ -3,36 +3,27 @@
import pytest
-from faststream import BaseMiddleware
-from faststream.kafka import KafkaBroker, TestKafkaBroker, TopicPartition
+from faststream import AckPolicy, BaseMiddleware
+from faststream.kafka import TopicPartition
from faststream.kafka.annotations import KafkaMessage
from faststream.kafka.message import FAKE_CONSUMER
from faststream.kafka.testing import FakeProducer
from tests.brokers.base.testclient import BrokerTestclientTestcase
from tests.tools import spy_decorator
+from .basic import KafkaMemoryTestcaseConfig
-@pytest.mark.asyncio
-class TestTestclient(BrokerTestclientTestcase):
- test_class = TestKafkaBroker
-
- def get_broker(self, apply_types: bool = False):
- return KafkaBroker(apply_types=apply_types)
-
- def patch_broker(self, broker: KafkaBroker) -> TestKafkaBroker:
- return TestKafkaBroker(broker)
-
- def get_fake_producer_class(self) -> type:
- return FakeProducer
+@pytest.mark.asyncio()
+class TestTestclient(KafkaMemoryTestcaseConfig, BrokerTestclientTestcase):
async def test_partition_match(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(partitions=[TopicPartition(queue, 1)])
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -43,11 +34,11 @@ async def m(msg):
async def test_partition_match_exect(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(partitions=[TopicPartition(queue, 1)])
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -58,15 +49,15 @@ async def m(msg):
async def test_partition_missmatch(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(partitions=[TopicPartition(queue, 1)])
- async def m(msg):
+ async def m(msg) -> None:
pass
@broker.subscriber(queue)
- async def m2(msg):
+ async def m2(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -78,33 +69,36 @@ async def m2(msg):
async def test_message_nack_seek(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
- @broker.subscriber(queue)
- async def m(msg: KafkaMessage):
+ @broker.subscriber(queue, group_id=f"{queue}1", ack_policy=AckPolicy.DO_NOTHING)
+ async def m(msg: KafkaMessage) -> None:
await msg.nack()
async with self.patch_broker(broker) as br:
with patch.object(
- FAKE_CONSUMER, "seek", spy_decorator(FAKE_CONSUMER.seek)
+ FAKE_CONSUMER,
+ "seek",
+ spy_decorator(FAKE_CONSUMER.seek),
) as mocked:
await br.publish("hello", queue)
mocked.mock.assert_called_once()
- @pytest.mark.kafka
+ @pytest.mark.kafka()
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
- async with TestKafkaBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -118,35 +112,35 @@ def subscriber(m):
async def test_batch_pub_by_default_pub(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(queue, batch=True)
- async def m(msg):
+ async def m(msg) -> None:
pass
- async with TestKafkaBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", queue)
m.mock.assert_called_once_with(["hello"])
async def test_batch_pub_by_pub_batch(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(queue, batch=True)
- async def m(msg):
+ async def m(msg) -> None:
pass
- async with TestKafkaBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish_batch("hello", topic=queue)
m.mock.assert_called_once_with(["hello"])
async def test_batch_publisher_mock(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
publisher = broker.publisher(queue + "1", batch=True)
@@ -156,12 +150,12 @@ async def test_batch_publisher_mock(
async def m(msg):
return 1, 2, 3
- async with TestKafkaBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", queue)
m.mock.assert_called_once_with("hello")
publisher.mock.assert_called_once_with([1, 2, 3])
- async def test_respect_middleware(self, queue):
+ async def test_respect_middleware(self, queue) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -169,22 +163,22 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = KafkaBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(msg) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(msg) -> None: ...
- async with TestKafkaBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
assert len(routes) == 2
- @pytest.mark.kafka
- async def test_real_respect_middleware(self, queue):
+ @pytest.mark.kafka()
+ async def test_real_respect_middleware(self, queue) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -192,15 +186,15 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = KafkaBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(msg) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(msg) -> None: ...
- async with TestKafkaBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
await h1.wait_call(3)
@@ -211,81 +205,90 @@ async def h2(): ...
async def test_multiple_subscribers_different_groups(
self,
queue: str,
- test_broker: KafkaBroker,
- ):
+ ) -> None:
+ test_broker = self.get_broker()
+
@test_broker.subscriber(queue, group_id="group1")
- async def subscriber1(): ...
+ async def subscriber1(msg) -> None: ...
@test_broker.subscriber(queue, group_id="group2")
- async def subscriber2(): ...
+ async def subscriber2(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
+ await br.publish("", queue)
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 1
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 1
async def test_multiple_subscribers_same_group(
self,
queue: str,
- test_broker: KafkaBroker,
- ):
- @test_broker.subscriber(queue, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
- @test_broker.subscriber(queue, group_id="group1")
- async def subscriber2(): ...
+ @broker.subscriber(queue, group_id="group1")
+ async def subscriber1(msg) -> None: ...
+
+ @broker.subscriber(queue, group_id="group1")
+ async def subscriber2(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 0
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 0
async def test_multiple_batch_subscriber_with_different_group(
self,
queue: str,
- test_broker: KafkaBroker,
- ):
- @test_broker.subscriber(queue, batch=True, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
+
+ @broker.subscriber(queue, batch=True, group_id="group1")
+ async def subscriber1(msg) -> None: ...
- @test_broker.subscriber(queue, batch=True, group_id="group2")
- async def subscriber2(): ...
+ @broker.subscriber(queue, batch=True, group_id="group2")
+ async def subscriber2(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 1
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 1
async def test_multiple_batch_subscriber_with_same_group(
self,
queue: str,
- test_broker: KafkaBroker,
- ):
- @test_broker.subscriber(queue, batch=True, group_id="group1")
- async def subscriber1(): ...
+ ) -> None:
+ broker = self.get_broker()
+
+ @broker.subscriber(queue, batch=True, group_id="group1")
+ async def subscriber1(msg) -> None: ...
- @test_broker.subscriber(queue, batch=True, group_id="group1")
- async def subscriber2(): ...
+ @broker.subscriber(queue, batch=True, group_id="group1")
+ async def subscriber2(msg) -> None: ...
- await test_broker.start()
- await test_broker.publish("", queue)
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish("", queue)
- assert subscriber1.mock.call_count == 1
- assert subscriber2.mock.call_count == 0
+ assert subscriber1.mock.call_count == 1
+ assert subscriber2.mock.call_count == 0
- @pytest.mark.kafka
- async def test_broker_gets_patched_attrs_within_cm(self):
- await super().test_broker_gets_patched_attrs_within_cm()
+ @pytest.mark.kafka()
+ async def test_broker_gets_patched_attrs_within_cm(self) -> None:
+ await super().test_broker_gets_patched_attrs_within_cm(FakeProducer)
- @pytest.mark.kafka
- async def test_broker_with_real_doesnt_get_patched(self):
+ @pytest.mark.kafka()
+ async def test_broker_with_real_doesnt_get_patched(self) -> None:
await super().test_broker_with_real_doesnt_get_patched()
- @pytest.mark.kafka
+ @pytest.mark.kafka()
async def test_broker_with_real_patches_publishers_and_subscribers(
- self, queue: str
- ):
+ self,
+ queue: str,
+ ) -> None:
await super().test_broker_with_real_patches_publishers_and_subscribers(queue)
diff --git a/tests/brokers/kafka/test_test_reentrancy.py b/tests/brokers/kafka/test_test_reentrancy.py
index 5e15ecd171..eaa60c8ea0 100644
--- a/tests/brokers/kafka/test_test_reentrancy.py
+++ b/tests/brokers/kafka/test_test_reentrancy.py
@@ -15,11 +15,11 @@ async def on_input_data(msg: int):
@broker.subscriber("output_data")
-async def on_output_data(msg: int):
+async def on_output_data(msg: int) -> None:
pass
-async def _test_with_broker(with_real: bool):
+async def _test_with_broker(with_real: bool) -> None:
async with TestKafkaBroker(broker, with_real=with_real) as tester:
await tester.publish(1, "input_data")
@@ -30,22 +30,22 @@ async def _test_with_broker(with_real: bool):
on_output_data.mock.assert_called_once_with(2)
-@pytest.mark.asyncio
-async def test_with_fake_broker():
+@pytest.mark.asyncio()
+async def test_with_fake_broker() -> None:
await _test_with_broker(False)
await _test_with_broker(False)
-@pytest.mark.asyncio
-@pytest.mark.kafka
-async def test_with_real_broker():
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
+async def test_with_real_broker() -> None:
await _test_with_broker(True)
await _test_with_broker(True)
-async def _test_with_temp_subscriber():
+async def _test_with_temp_subscriber() -> None:
@broker.subscriber("output_data")
- async def on_output_data(msg: int):
+ async def on_output_data(msg: int) -> None:
pass
async with TestKafkaBroker(broker) as tester:
@@ -58,13 +58,13 @@ async def on_output_data(msg: int):
on_output_data.mock.assert_called_once_with(2)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@pytest.mark.skip(
reason=(
"Failed due `on_output_data` subscriber creates inside test and doesn't removed after "
"https://github.com/airtai/faststream/issues/556"
- )
+ ),
)
-async def test_with_temp_subscriber():
+async def test_with_temp_subscriber() -> None:
await _test_with_temp_subscriber()
await _test_with_temp_subscriber()
diff --git a/tests/brokers/nats/basic.py b/tests/brokers/nats/basic.py
new file mode 100644
index 0000000000..bc73b67da6
--- /dev/null
+++ b/tests/brokers/nats/basic.py
@@ -0,0 +1,24 @@
+from typing import Any
+
+from faststream.nats import NatsBroker, NatsRouter, TestNatsBroker
+from tests.brokers.base.basic import BaseTestcaseConfig
+
+
+class NatsTestcaseConfig(BaseTestcaseConfig):
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> NatsBroker:
+ return NatsBroker(apply_types=apply_types, **kwargs)
+
+ def patch_broker(self, broker: NatsBroker, **kwargs: Any) -> NatsBroker:
+ return broker
+
+ def get_router(self, **kwargs: Any) -> NatsRouter:
+ return NatsRouter(**kwargs)
+
+
+class NatsMemoryTestcaseConfig(NatsTestcaseConfig):
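+    """Wrap brokers in TestNatsBroker so tests run in-memory."""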
+ def patch_broker(self, broker: NatsBroker, **kwargs: Any) -> NatsBroker:
+ return TestNatsBroker(broker, **kwargs)
diff --git a/tests/brokers/nats/conftest.py b/tests/brokers/nats/conftest.py
index 253cd709f2..5a4abb86da 100644
--- a/tests/brokers/nats/conftest.py
+++ b/tests/brokers/nats/conftest.py
@@ -1,14 +1,8 @@
from dataclasses import dataclass
import pytest
-import pytest_asyncio
-from faststream.nats import (
- JStream,
- NatsBroker,
- NatsRouter,
- TestNatsBroker,
-)
+from faststream.nats import JStream, NatsRouter
@dataclass
@@ -21,32 +15,11 @@ def settings():
return Settings()
-@pytest.fixture
+@pytest.fixture()
def stream(queue):
return JStream(queue)
-@pytest.fixture
+@pytest.fixture()
def router():
return NatsRouter()
-
-
-@pytest_asyncio.fixture()
-async def broker(settings):
- broker = NatsBroker([settings.url], apply_types=False)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def full_broker(settings):
- broker = NatsBroker([settings.url])
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def test_broker():
- broker = NatsBroker()
- async with TestNatsBroker(broker) as br:
- yield br
diff --git a/tests/brokers/nats/test_connect.py b/tests/brokers/nats/test_connect.py
index f46e6480a9..dc18feed5c 100644
--- a/tests/brokers/nats/test_connect.py
+++ b/tests/brokers/nats/test_connect.py
@@ -4,7 +4,7 @@
from tests.brokers.base.connection import BrokerConnectionTestcase
-@pytest.mark.nats
+@pytest.mark.nats()
class TestConnection(BrokerConnectionTestcase):
broker = NatsBroker
diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py
index eececb8b18..f81bd7671f 100644
--- a/tests/brokers/nats/test_consume.py
+++ b/tests/brokers/nats/test_consume.py
@@ -1,58 +1,106 @@
import asyncio
-from unittest.mock import Mock, patch
+from unittest.mock import MagicMock, patch
import pytest
from nats.aio.msg import Msg
+from faststream import AckPolicy
from faststream.exceptions import AckMessage
-from faststream.nats import ConsumerConfig, JStream, NatsBroker, PullSub
+from faststream.nats import ConsumerConfig, JStream, PubAck, PullSub
from faststream.nats.annotations import NatsMessage
+from faststream.nats.message import NatsMessage as StreamMessage
from tests.brokers.base.consume import BrokerRealConsumeTestcase
from tests.tools import spy_decorator
+from .basic import NatsTestcaseConfig
-@pytest.mark.nats
-class TestConsume(BrokerRealConsumeTestcase):
- def get_broker(self, apply_types: bool = False) -> NatsBroker:
- return NatsBroker(apply_types=apply_types)
+
+@pytest.mark.nats()
+class TestConsume(NatsTestcaseConfig, BrokerRealConsumeTestcase):
+ async def test_concurrent_subscriber(
+ self,
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+ event2 = asyncio.Event()
+
+ broker = self.get_broker()
+
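+        # max_workers=2 lets two handler calls run concurrently; the sleep below keeps each busy.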
+ args, kwargs = self.get_subscriber_params(queue, max_workers=2)
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler(msg):
+ mock()
+
+ if event.is_set():
+ event2.set()
+ else:
+ event.set()
+
+ await asyncio.sleep(1.0)
+
+ async with self.patch_broker(broker) as br:
+ await br.start()
+
+ for i in range(5):
+ await br.publish(i, queue)
+
+ await asyncio.wait(
+ (
+ asyncio.create_task(event.wait()),
+ asyncio.create_task(event2.wait()),
+ ),
+ timeout=3,
+ )
+
+ assert event.is_set()
+ assert event2.is_set()
+ assert mock.call_count == 2, mock.call_count
async def test_consume_js(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
- @consume_broker.subscriber(queue, stream=stream)
- def subscriber(m):
+ args, kwargs = self.get_subscriber_params(queue, stream=stream)
+
+ @consume_broker.subscriber(*args, **kwargs)
+ def subscriber(m) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
+
+ result = await br.publish("hello", queue, stream=stream.name)
+
await asyncio.wait(
- (
- asyncio.create_task(br.publish("hello", queue, stream=stream.name)),
- asyncio.create_task(event.wait()),
- ),
+ (asyncio.create_task(event.wait()),),
timeout=3,
)
+ assert isinstance(result, PubAck), result
assert event.is_set()
async def test_consume_with_filter(
self,
- queue,
- mock: Mock,
- event: asyncio.Event,
- ):
+ queue: str,
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
config=ConsumerConfig(filter_subjects=[f"{queue}.a"]),
stream=JStream(queue, subjects=[f"{queue}.*"]),
)
- def subscriber(m):
+ def subscriber(m) -> None:
mock(m)
event.set()
@@ -73,9 +120,10 @@ async def test_consume_pull(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -83,7 +131,7 @@ async def test_consume_pull(
stream=stream,
pull_sub=PullSub(1),
)
- def subscriber(m):
+ def subscriber(m) -> None:
mock(m)
event.set()
@@ -105,9 +153,10 @@ async def test_consume_batch(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -115,7 +164,7 @@ async def test_consume_batch(
stream=stream,
pull_sub=PullSub(1, batch=True),
)
- def subscriber(m):
+ def subscriber(m) -> None:
mock(m)
event.set()
@@ -133,22 +182,32 @@ def subscriber(m):
assert event.is_set()
mock.assert_called_once_with([b"hello"])
- async def test_consume_ack(
+ async def test_core_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
- stream: JStream,
- ):
+ mock: MagicMock,
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, stream=stream)
- async def handler(msg: NatsMessage):
+ args, kwargs = self.get_subscriber_params(
+ queue, ack_policy=AckPolicy.DO_NOTHING
+ )
+
+ @consume_broker.subscriber(*args, **kwargs)
+ async def handler(msg: NatsMessage) -> None:
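+            # Record whether NATS had already acked the raw message when the handler ran.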
+ mock(msg.raw_message._ackd)
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
- with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m:
+            # Check that the Core subscriber doesn't acknowledge the message automatically
+ with patch.object(
+ StreamMessage, "ack", spy_decorator(StreamMessage.ack)
+ ) as m:
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -156,46 +214,50 @@ async def handler(msg: NatsMessage):
),
timeout=3,
)
- m.mock.assert_called_once()
+ assert not m.mock.called
assert event.is_set()
+ mock.assert_called_once_with(False)
- async def test_core_consume_no_ack(
+ async def test_consume_ack(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, no_ack=True)
- async def handler(msg: NatsMessage):
- if not msg.raw_message._ackd:
- event.set()
+ @consume_broker.subscriber(queue, stream=stream)
+ async def handler(msg: NatsMessage) -> None:
+ event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
- await asyncio.wait(
- (
- asyncio.create_task(br.publish("hello", queue)),
- asyncio.create_task(event.wait()),
- ),
- timeout=3,
- )
+ with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m:
+ await asyncio.wait(
+ (
+ asyncio.create_task(br.publish("hello", queue)),
+ asyncio.create_task(event.wait()),
+ ),
+ timeout=3,
+ )
+ m.mock.assert_called_once()
assert event.is_set()
async def test_consume_ack_manual(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
- async def handler(msg: NatsMessage):
+ async def handler(msg: NatsMessage) -> None:
await msg.ack()
event.set()
@@ -245,15 +307,16 @@ async def handler(msg: NatsMessage):
async def test_consume_ack_raise(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
async def handler(msg: NatsMessage):
event.set()
- raise AckMessage()
+ raise AckMessage
async with self.patch_broker(consume_broker) as br:
await br.start()
@@ -273,13 +336,14 @@ async def handler(msg: NatsMessage):
async def test_nack(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
- async def handler(msg: NatsMessage):
+ async def handler(msg: NatsMessage) -> None:
await msg.nack()
event.set()
@@ -301,12 +365,16 @@ async def handler(msg: NatsMessage):
async def test_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
- ):
+        stream: JStream,
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, no_ack=True)
- async def handler(msg: NatsMessage):
+ @consume_broker.subscriber(
+ queue, stream=stream, ack_policy=AckPolicy.DO_NOTHING
+ )
+ async def handler(msg: NatsMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -328,9 +396,10 @@ async def test_consume_batch_headers(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
@@ -338,13 +407,13 @@ async def test_consume_batch_headers(
stream=stream,
pull_sub=PullSub(1, batch=True),
)
- def subscriber(m, msg: NatsMessage):
+ def subscriber(m, msg: NatsMessage) -> None:
check = all(
(
msg.headers,
[msg.headers] == msg.batch_headers,
msg.headers.get("custom") == "1",
- )
+ ),
)
mock(check)
event.set()
@@ -362,17 +431,18 @@ def subscriber(m, msg: NatsMessage):
assert event.is_set()
mock.assert_called_once_with(True)
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_kv(
self,
queue: str,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, kv_watch=queue + "1")
- async def handler(m):
+ async def handler(m) -> None:
mock(m)
event.set()
@@ -386,7 +456,7 @@ async def handler(m):
bucket.put(
queue,
b"world",
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -396,17 +466,18 @@ async def handler(m):
assert event.is_set()
mock.assert_called_with(b"world")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_os(
self,
queue: str,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, obj_watch=True)
- async def handler(filename: str):
+ async def handler(filename: str) -> None:
event.set()
mock(filename)
@@ -420,7 +491,7 @@ async def handler(filename: str):
bucket.put(
"hello",
b"world",
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -433,9 +504,8 @@ async def handler(filename: str):
async def test_get_one_js(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(queue, stream=stream)
@@ -444,11 +514,11 @@ async def test_get_one_js(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=5)
- async def publish():
+ async def publish() -> None:
await br.publish("test_message", queue, stream=stream.name)
await asyncio.wait(
@@ -467,7 +537,7 @@ async def test_get_one_timeout_js(
queue: str,
stream: JStream,
mock,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(queue, stream=stream)
@@ -480,9 +550,8 @@ async def test_get_one_timeout_js(
async def test_get_one_pull(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(
queue,
@@ -495,11 +564,11 @@ async def test_get_one_pull(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=5)
- async def publish():
+ async def publish() -> None:
await br.publish("test_message", queue)
await asyncio.wait(
@@ -516,10 +585,9 @@ async def publish():
async def test_get_one_pull_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- mock: Mock,
- ):
+ mock: MagicMock,
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(
queue,
@@ -536,9 +604,8 @@ async def test_get_one_pull_timeout(
async def test_get_one_batch(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(
queue,
@@ -551,11 +618,11 @@ async def test_get_one_batch(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=5)
- async def publish():
+ async def publish() -> None:
await br.publish("test_message", queue)
await asyncio.wait(
@@ -572,10 +639,9 @@ async def publish():
async def test_get_one_batch_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- mock: Mock,
- ):
+ mock: MagicMock,
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(
queue,
@@ -592,9 +658,8 @@ async def test_get_one_batch_timeout(
async def test_get_one_with_filter(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(
config=ConsumerConfig(filter_subjects=[f"{queue}.a"]),
@@ -606,11 +671,11 @@ async def test_get_one_with_filter(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=5)
- async def publish():
+ async def publish() -> None:
await br.publish("test_message", f"{queue}.a")
await asyncio.wait(
@@ -627,9 +692,8 @@ async def publish():
async def test_get_one_kv(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(queue, kv_watch=queue + "1")
@@ -639,11 +703,11 @@ async def test_get_one_kv(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=5)
- async def publish():
+ async def publish() -> None:
await bucket.put(queue, b"test_message")
await asyncio.wait(
@@ -660,10 +724,9 @@ async def publish():
async def test_get_one_kv_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- mock: Mock,
- ):
+ mock: MagicMock,
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(queue, kv_watch=queue + "1")
@@ -676,9 +739,8 @@ async def test_get_one_kv_timeout(
async def test_get_one_os(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(queue, obj_watch=True)
@@ -688,12 +750,12 @@ async def test_get_one_os(
new_object_id = None
- async def consume():
+ async def consume() -> None:
nonlocal new_object_id
new_object_event = await subscriber.get_one(timeout=5)
new_object_id = await new_object_event.decode()
- async def publish():
+ async def publish() -> None:
await bucket.put(queue, b"test_message")
await asyncio.wait(
@@ -710,10 +772,9 @@ async def publish():
async def test_get_one_os_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
- mock: Mock,
- ):
+ mock: MagicMock,
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(queue, obj_watch=True)
diff --git a/tests/brokers/nats/test_fastapi.py b/tests/brokers/nats/test_fastapi.py
index 518fdcd637..15fbe319f7 100644
--- a/tests/brokers/nats/test_fastapi.py
+++ b/tests/brokers/nats/test_fastapi.py
@@ -1,30 +1,31 @@
import asyncio
-from typing import List
from unittest.mock import MagicMock
import pytest
from faststream.nats import JStream, NatsRouter, PullSub
from faststream.nats.fastapi import NatsRouter as StreamRouter
-from faststream.nats.testing import TestNatsBroker, build_message
from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase
+from .basic import NatsMemoryTestcaseConfig, NatsTestcaseConfig
-@pytest.mark.nats
-class TestRouter(FastAPITestcase):
+
+@pytest.mark.nats()
+class TestRouter(NatsTestcaseConfig, FastAPITestcase):
router_class = StreamRouter
broker_router_class = NatsRouter
async def test_path(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
- @router.subscriber("in.{name}")
- def subscriber(msg: str, name: str):
+ @router.subscriber(queue + ".{name}")
+ def subscriber(msg: str, name: str) -> None:
mock(msg=msg, name=name)
event.set()
@@ -32,7 +33,9 @@ def subscriber(msg: str, name: str):
await router.broker.start()
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("hello", "in.john")),
+ asyncio.create_task(
+ router.broker.publish("hello", f"{queue}.john"),
+ ),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -45,9 +48,10 @@ async def test_consume_batch(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(
@@ -55,7 +59,7 @@ async def test_consume_batch(
stream=stream,
pull_sub=PullSub(1, batch=True),
)
- def subscriber(m: List[str]):
+ def subscriber(m: list[str]) -> None:
mock(m)
event.set()
@@ -73,19 +77,18 @@ def subscriber(m: List[str]):
mock.assert_called_once_with(["hello"])
-class TestRouterLocal(FastAPILocalTestcase):
+class TestRouterLocal(NatsMemoryTestcaseConfig, FastAPILocalTestcase):
router_class = StreamRouter
broker_router_class = NatsRouter
- broker_test = staticmethod(TestNatsBroker)
- build_message = staticmethod(build_message)
async def test_consume_batch(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(
@@ -93,14 +96,14 @@ async def test_consume_batch(
stream=stream,
pull_sub=PullSub(1, batch=True),
)
- def subscriber(m: List[str]):
+ def subscriber(m: list[str]) -> None:
mock(m)
event.set()
- async with self.broker_test(router.broker):
+ async with self.patch_broker(router.broker) as br:
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish(b"hello", queue)),
+ asyncio.create_task(br.publish(b"hello", queue)),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -109,18 +112,17 @@ def subscriber(m: List[str]):
assert event.is_set()
mock.assert_called_once_with(["hello"])
- async def test_path(self, queue: str):
+ async def test_path(self, queue: str) -> None:
router = self.router_class()
@router.subscriber(queue + ".{name}")
async def hello(name):
return name
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
"hi",
f"{queue}.john",
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "john"
+ assert await r.decode() == "john"
diff --git a/tests/brokers/nats/test_kv_declarer_cache.py b/tests/brokers/nats/test_kv_declarer_cache.py
index 8ecc9fcce6..3c3c60e8d1 100644
--- a/tests/brokers/nats/test_kv_declarer_cache.py
+++ b/tests/brokers/nats/test_kv_declarer_cache.py
@@ -7,9 +7,9 @@
from tests.tools import spy_decorator
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_kv_storage_cache():
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_kv_storage_cache() -> None:
broker = NatsBroker()
await broker.connect()
with patch.object(
diff --git a/tests/brokers/nats/test_middlewares.py b/tests/brokers/nats/test_middlewares.py
index d9dbca9224..c726d7e231 100644
--- a/tests/brokers/nats/test_middlewares.py
+++ b/tests/brokers/nats/test_middlewares.py
@@ -1,25 +1,23 @@
import pytest
-from faststream.nats import NatsBroker, TestNatsBroker
from tests.brokers.base.middlewares import (
ExceptionMiddlewareTestcase,
MiddlewareTestcase,
MiddlewaresOrderTestcase,
)
+from .basic import NatsMemoryTestcaseConfig, NatsTestcaseConfig
-@pytest.mark.nats
-class TestMiddlewares(MiddlewareTestcase):
- broker_class = NatsBroker
+class TestMiddlewaresOrder(NatsMemoryTestcaseConfig, MiddlewaresOrderTestcase):
+ pass
-@pytest.mark.nats
-class TestExceptionMiddlewares(ExceptionMiddlewareTestcase):
- broker_class = NatsBroker
+@pytest.mark.nats()
+class TestMiddlewares(NatsTestcaseConfig, MiddlewareTestcase):
+ pass
-class TestMiddlewaresOrder(MiddlewaresOrderTestcase):
- broker_class = NatsBroker
- def patch_broker(self, broker: NatsBroker) -> TestNatsBroker:
- return TestNatsBroker(broker)
+@pytest.mark.nats()
+class TestExceptionMiddlewares(NatsTestcaseConfig, ExceptionMiddlewareTestcase):
+ pass
diff --git a/tests/brokers/nats/test_new_inbox.py b/tests/brokers/nats/test_new_inbox.py
index ce6db83ee0..b608a1f6ef 100644
--- a/tests/brokers/nats/test_new_inbox.py
+++ b/tests/brokers/nats/test_new_inbox.py
@@ -7,9 +7,9 @@
from tests.tools import spy_decorator
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_new_inbox():
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_new_inbox() -> None:
with patch.object(
NatsClient,
"new_inbox",
diff --git a/tests/brokers/nats/test_os_declarer_cache.py b/tests/brokers/nats/test_os_declarer_cache.py
index 0f68542c8e..ee195cafae 100644
--- a/tests/brokers/nats/test_os_declarer_cache.py
+++ b/tests/brokers/nats/test_os_declarer_cache.py
@@ -7,9 +7,9 @@
from tests.tools import spy_decorator
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_object_storage_cache():
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_object_storage_cache() -> None:
broker = NatsBroker()
await broker.connect()
diff --git a/tests/brokers/nats/test_parser.py b/tests/brokers/nats/test_parser.py
index a50b4d4d18..635cbccb65 100644
--- a/tests/brokers/nats/test_parser.py
+++ b/tests/brokers/nats/test_parser.py
@@ -1,9 +1,10 @@
import pytest
-from faststream.nats import NatsBroker
from tests.brokers.base.parser import CustomParserTestcase
+from .basic import NatsTestcaseConfig
-@pytest.mark.nats
-class TestCustomParser(CustomParserTestcase):
- broker_class = NatsBroker
+
+@pytest.mark.nats()
+class TestCustomParser(NatsTestcaseConfig, CustomParserTestcase):
+ pass
diff --git a/tests/brokers/nats/test_publish.py b/tests/brokers/nats/test_publish.py
index 1fb8b799d6..cee41ef6a4 100644
--- a/tests/brokers/nats/test_publish.py
+++ b/tests/brokers/nats/test_publish.py
@@ -4,24 +4,24 @@
import pytest
from faststream import Context
-from faststream.nats import NatsBroker, NatsResponse
+from faststream.nats import NatsResponse
from tests.brokers.base.publish import BrokerPublishTestcase
+from .basic import NatsTestcaseConfig
-@pytest.mark.nats
-class TestPublish(BrokerPublishTestcase):
- """Test publish method of NATS broker."""
- def get_broker(self, apply_types: bool = False) -> NatsBroker:
- return NatsBroker(apply_types=apply_types)
+@pytest.mark.nats()
+class TestPublish(NatsTestcaseConfig, BrokerPublishTestcase):
+ """Test publish method of NATS broker."""
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@@ -30,7 +30,7 @@ async def handle():
return NatsResponse(1, correlation_id="1")
@pub_broker.subscriber(queue + "1")
- async def handle_next(msg=Context("message")):
+ async def handle_next(msg=Context("message")) -> None:
mock(
body=msg.body,
correlation_id=msg.correlation_id,
@@ -54,12 +54,11 @@ async def handle_next(msg=Context("message")):
correlation_id="1",
)
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response_for_rpc(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@@ -70,8 +69,8 @@ async def handle():
await br.start()
response = await asyncio.wait_for(
- br.publish("", queue, rpc=True),
+ br.request("", queue),
timeout=3,
)
- assert response == "Hi!", response
+ assert await response.decode() == "Hi!", response
diff --git a/tests/brokers/nats/test_requests.py b/tests/brokers/nats/test_requests.py
index 19f9c2cb15..f440a83c6d 100644
--- a/tests/brokers/nats/test_requests.py
+++ b/tests/brokers/nats/test_requests.py
@@ -1,31 +1,27 @@
import pytest
from faststream import BaseMiddleware
-from faststream.nats import NatsBroker, NatsRouter, TestNatsBroker
from tests.brokers.base.requests import RequestsTestcase
+from .basic import NatsMemoryTestcaseConfig, NatsTestcaseConfig
+
class Mid(BaseMiddleware):
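+    """Mutates message payloads so the request tests can detect that the middleware ran."""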
async def on_receive(self) -> None:
- self.msg.data = self.msg.data * 2
+ self.msg.data *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body = msg._decoded_body * 2
+ msg.body *= 2
return await call_next(msg)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class NatsRequestsTestcase(RequestsTestcase):
def get_middleware(self, **kwargs):
return Mid
- def get_broker(self, **kwargs):
- return NatsBroker(**kwargs)
-
- def get_router(self, **kwargs):
- return NatsRouter(**kwargs)
-
- async def test_broker_stream_request(self, queue: str):
+ async def test_broker_stream_request(self, queue: str) -> None:
broker = self.get_broker()
stream_name = f"{queue}st"
@@ -33,7 +28,7 @@ async def test_broker_stream_request(self, queue: str):
args, kwargs = self.get_subscriber_params(queue, stream=stream_name)
@broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> str:
return "Response"
async with self.patch_broker(broker):
@@ -50,7 +45,7 @@ async def handler(msg):
assert await response.decode() == "Response"
assert response.correlation_id == "1"
- async def test_publisher_stream_request(self, queue: str):
+ async def test_publisher_stream_request(self, queue: str) -> None:
broker = self.get_broker()
stream_name = f"{queue}st"
@@ -59,7 +54,7 @@ async def test_publisher_stream_request(self, queue: str):
args, kwargs = self.get_subscriber_params(queue, stream=stream_name)
@broker.subscriber(*args, **kwargs)
- async def handler(msg):
+ async def handler(msg) -> str:
return "Response"
async with self.patch_broker(broker):
@@ -75,11 +70,10 @@ async def handler(msg):
assert response.correlation_id == "1"
-@pytest.mark.nats
-class TestRealRequests(NatsRequestsTestcase):
+@pytest.mark.nats()
+class TestRealRequests(NatsTestcaseConfig, NatsRequestsTestcase):
pass
-class TestRequestTestClient(NatsRequestsTestcase):
- def patch_broker(self, broker, **kwargs):
- return TestNatsBroker(broker, **kwargs)
+class TestRequestTestClient(NatsMemoryTestcaseConfig, NatsRequestsTestcase):
+ pass
diff --git a/tests/brokers/nats/test_router.py b/tests/brokers/nats/test_router.py
index 24b3fd772e..4bd846abd0 100644
--- a/tests/brokers/nats/test_router.py
+++ b/tests/brokers/nats/test_router.py
@@ -3,13 +3,19 @@
import pytest
from faststream import Path
-from faststream.nats import JStream, NatsBroker, NatsPublisher, NatsRoute, NatsRouter
+from faststream.nats import (
+ JStream,
+ NatsPublisher,
+ NatsRoute,
+ NatsRouter,
+)
from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase
+from .basic import NatsMemoryTestcaseConfig, NatsTestcaseConfig
-@pytest.mark.nats
-class TestRouter(RouterTestcase):
- broker_class = NatsRouter
+
+@pytest.mark.nats()
+class TestRouter(NatsTestcaseConfig, RouterTestcase):
route_class = NatsRoute
publisher_class = NatsPublisher
@@ -18,26 +24,22 @@ async def test_router_path(
event,
mock,
router: NatsRouter,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
@router.subscriber("in.{name}.{id}")
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
- pub_broker._is_apply_types = True
pub_broker.include_router(router)
await pub_broker.start()
- await pub_broker.publish(
- "",
- "in.john.2",
- rpc=True,
- )
+ await pub_broker.request("", "in.john.2")
assert event.is_set()
mock.assert_called_once_with(name="john", id=2)
@@ -47,21 +49,21 @@ async def test_path_as_first_with_prefix(
event,
mock,
router: NatsRouter,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
router.prefix = "root."
@router.subscriber("{name}.nested")
- async def h(name: str = Path()):
+ async def h(name: str = Path()) -> None:
event.set()
mock(name=name)
- pub_broker._is_apply_types = True
pub_broker.include_router(router)
await pub_broker.start()
- await pub_broker.publish("", "root.john.nested", rpc=True)
+ await pub_broker.request("", "root.john.nested")
assert event.is_set()
mock.assert_called_once_with(name="john")
@@ -71,28 +73,24 @@ async def test_router_path_with_prefix(
event,
mock,
router: NatsRouter,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
router.prefix = "test."
@router.subscriber("in.{name}.{id}")
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
- pub_broker._is_apply_types = True
pub_broker.include_router(router)
await pub_broker.start()
- await pub_broker.publish(
- "",
- "test.in.john.2",
- rpc=True,
- )
+ await pub_broker.request("", "test.in.john.2")
assert event.is_set()
mock.assert_called_once_with(name="john", id=2)
@@ -102,43 +100,42 @@ async def test_router_delay_handler_path(
event,
mock,
router: NatsRouter,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
r = type(router)(handlers=(self.route_class(h, subject="in.{name}.{id}"),))
- pub_broker._is_apply_types = True
pub_broker.include_router(r)
await pub_broker.start()
- await pub_broker.publish(
- "",
- "in.john.2",
- rpc=True,
- )
+ await pub_broker.request("", "in.john.2")
assert event.is_set()
mock.assert_called_once_with(name="john", id=2)
async def test_delayed_handlers_with_queue(
self,
- event,
router: NatsRouter,
queue: str,
- pub_broker,
- ):
- def response(m):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
+ def response(m) -> None:
event.set()
r = type(router)(
- prefix="test.", handlers=(self.route_class(response, subject=queue),)
+ prefix="test.",
+ handlers=(self.route_class(response, subject=queue),),
)
pub_broker.include_router(r)
@@ -156,24 +153,24 @@ def response(m):
assert event.is_set()
-class TestRouterLocal(RouterLocalTestcase):
- broker_class = NatsRouter
+class TestRouterLocal(NatsMemoryTestcaseConfig, RouterLocalTestcase):
route_class = NatsRoute
publisher_class = NatsPublisher
async def test_include_stream(
self,
router: NatsRouter,
- pub_broker: NatsBroker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker()
+
@router.subscriber("test", stream="stream")
- async def handler(): ...
+ async def handler() -> None: ...
pub_broker.include_router(router)
assert next(iter(pub_broker._stream_builder.objects.keys())) == "stream"
- async def test_include_stream_with_subjects(self):
+ async def test_include_stream_with_subjects(self) -> None:
stream = JStream("test-stream")
sub_router = NatsRouter(prefix="client.")
@@ -187,7 +184,7 @@ async def test_include_stream_with_subjects(self):
router.include_router(sub_router)
- broker = NatsBroker()
+ broker = self.get_broker()
broker.include_router(router)
assert set(stream.subjects) == {
diff --git a/tests/brokers/nats/test_rpc.py b/tests/brokers/nats/test_rpc.py
deleted file mode 100644
index d863008fb4..0000000000
--- a/tests/brokers/nats/test_rpc.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import pytest
-
-from faststream.nats import JStream, NatsBroker
-from tests.brokers.base.rpc import BrokerRPCTestcase, ReplyAndConsumeForbidden
-
-
-@pytest.mark.nats
-class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden):
- def get_broker(self, apply_types: bool = False) -> NatsBroker:
- return NatsBroker(apply_types=apply_types)
-
- @pytest.mark.asyncio
- async def test_rpc_js(self, queue: str, stream: JStream):
- rpc_broker = self.get_broker()
-
- @rpc_broker.subscriber(queue, stream=stream)
- async def m(m): # pragma: no cover
- return "1"
-
- async with rpc_broker:
- await rpc_broker.start()
-
- r = await rpc_broker.publish(
- "hello", queue, rpc_timeout=3, stream=stream.name, rpc=True
- )
- assert r == "1"
diff --git a/tests/brokers/nats/test_test_client.py b/tests/brokers/nats/test_test_client.py
index b8f7f8d5b2..1c008ed9b2 100644
--- a/tests/brokers/nats/test_test_client.py
+++ b/tests/brokers/nats/test_test_client.py
@@ -3,77 +3,61 @@
import pytest
from faststream import BaseMiddleware
-from faststream.exceptions import SetupError
-from faststream.nats import ConsumerConfig, JStream, NatsBroker, PullSub, TestNatsBroker
+from faststream.nats import (
+ ConsumerConfig,
+ JStream,
+ PullSub,
+)
from faststream.nats.testing import FakeProducer
from tests.brokers.base.testclient import BrokerTestclientTestcase
+from .basic import NatsMemoryTestcaseConfig
-@pytest.mark.asyncio
-class TestTestclient(BrokerTestclientTestcase):
- test_class = TestNatsBroker
- def get_broker(self, apply_types: bool = False) -> NatsBroker:
- return NatsBroker(apply_types=apply_types)
-
- def patch_broker(self, broker: NatsBroker) -> TestNatsBroker:
- return TestNatsBroker(broker)
-
- def get_fake_producer_class(self) -> type:
- return FakeProducer
-
- @pytest.mark.asyncio
+@pytest.mark.asyncio()
+class TestTestclient(NatsMemoryTestcaseConfig, BrokerTestclientTestcase):
+ @pytest.mark.asyncio()
async def test_stream_publish(
self,
queue: str,
- ):
- pub_broker = NatsBroker(apply_types=False)
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=False)
@pub_broker.subscriber(queue, stream="test")
- async def m(msg): ...
+ async def m(msg) -> None: ...
- async with TestNatsBroker(pub_broker) as br:
+ async with self.patch_broker(pub_broker) as br:
await br.publish("Hi!", queue, stream="test")
m.mock.assert_called_once_with("Hi!")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_wrong_stream_publish(
self,
queue: str,
- ):
- pub_broker = NatsBroker(apply_types=False)
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=False)
@pub_broker.subscriber(queue)
- async def m(msg): ...
+ async def m(msg) -> None: ...
- async with TestNatsBroker(pub_broker) as br:
+ async with self.patch_broker(pub_broker) as br:
await br.publish("Hi!", queue, stream="test")
assert not m.mock.called
- @pytest.mark.asyncio
- async def test_rpc_conflicts_reply(self, queue):
- async with TestNatsBroker(NatsBroker()) as br:
- with pytest.raises(SetupError):
- await br.publish(
- "",
- queue,
- rpc=True,
- reply_to="response",
- )
-
- @pytest.mark.nats
+ @pytest.mark.nats()
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
- async with TestNatsBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -84,18 +68,18 @@ def subscriber(m):
assert event.is_set()
- @pytest.mark.nats
+ @pytest.mark.nats()
async def test_inbox_prefix_with_real(
self,
queue: str,
- ):
- broker = NatsBroker(inbox_prefix="test")
+ ) -> None:
+ broker = self.get_broker(inbox_prefix="test")
- async with TestNatsBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
assert br._connection._inbox_prefix == b"test"
assert "test" in str(br._connection.new_inbox())
- async def test_respect_middleware(self, queue):
+ async def test_respect_middleware(self, queue) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -103,22 +87,22 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = NatsBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(m) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(m) -> None: ...
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
assert len(routes) == 2
- @pytest.mark.nats
- async def test_real_respect_middleware(self, queue):
+ @pytest.mark.nats()
+ async def test_real_respect_middleware(self, queue) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -126,15 +110,15 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = NatsBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(m) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(m) -> None: ...
- async with TestNatsBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
await h1.wait_call(3)
@@ -146,14 +130,14 @@ async def test_js_subscriber_mock(
self,
queue: str,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(queue, stream=stream)
- async def m(msg):
+ async def m(msg) -> None:
pass
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", queue, stream=stream.name)
m.mock.assert_called_once_with("hello")
@@ -161,47 +145,47 @@ async def test_js_publisher_mock(
self,
queue: str,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker()
publisher = broker.publisher(queue + "resp")
@publisher
@broker.subscriber(queue, stream=stream)
- async def m(msg):
+ async def m(msg) -> str:
return "response"
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", queue, stream=stream.name)
publisher.mock.assert_called_with("response")
- async def test_any_subject_routing(self):
+ async def test_any_subject_routing(self) -> None:
broker = self.get_broker()
@broker.subscriber("test.*.subj.*")
- def subscriber(msg): ...
+ def subscriber(msg) -> None: ...
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", "test.a.subj.b")
subscriber.mock.assert_called_once_with("hello")
- async def test_ending_subject_routing(self):
+ async def test_ending_subject_routing(self) -> None:
broker = self.get_broker()
@broker.subscriber("test.>")
- def subscriber(msg): ...
+ def subscriber(msg) -> None: ...
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", "test.a.subj.b")
subscriber.mock.assert_called_once_with("hello")
- async def test_mixed_subject_routing(self):
+ async def test_mixed_subject_routing(self) -> None:
broker = self.get_broker()
@broker.subscriber("*.*.subj.>")
- def subscriber(msg): ...
+ def subscriber(msg) -> None: ...
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", "test.a.subj.b.c")
subscriber.mock.assert_called_once_with("hello")
@@ -209,13 +193,13 @@ async def test_consume_pull(
self,
queue: str,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(queue, stream=stream, pull_sub=PullSub(1))
- def subscriber(m): ...
+ def subscriber(m) -> None: ...
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", queue)
subscriber.mock.assert_called_once_with("hello")
@@ -223,7 +207,7 @@ async def test_consume_batch(
self,
queue: str,
stream: JStream,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(
@@ -231,41 +215,42 @@ async def test_consume_batch(
stream=stream,
pull_sub=PullSub(1, batch=True),
)
- def subscriber(m):
+ def subscriber(m) -> None:
pass
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("hello", queue)
subscriber.mock.assert_called_once_with(["hello"])
async def test_consume_with_filter(
self,
queue,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(
config=ConsumerConfig(filter_subjects=[f"{queue}.a"]),
stream=JStream(queue, subjects=[f"{queue}.*"]),
)
- def subscriber(m):
+ def subscriber(m) -> None:
pass
- async with TestNatsBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish(1, f"{queue}.b")
await br.publish(2, f"{queue}.a")
subscriber.mock.assert_called_once_with(2)
- @pytest.mark.nats
- async def test_broker_gets_patched_attrs_within_cm(self):
- await super().test_broker_gets_patched_attrs_within_cm()
+ @pytest.mark.nats()
+ async def test_broker_gets_patched_attrs_within_cm(self) -> None:
+ await super().test_broker_gets_patched_attrs_within_cm(FakeProducer)
- @pytest.mark.nats
- async def test_broker_with_real_doesnt_get_patched(self):
+ @pytest.mark.nats()
+ async def test_broker_with_real_doesnt_get_patched(self) -> None:
await super().test_broker_with_real_doesnt_get_patched()
- @pytest.mark.nats
+ @pytest.mark.nats()
async def test_broker_with_real_patches_publishers_and_subscribers(
- self, queue: str
- ):
+ self,
+ queue: str,
+ ) -> None:
await super().test_broker_with_real_patches_publishers_and_subscribers(queue)
diff --git a/tests/brokers/rabbit/basic.py b/tests/brokers/rabbit/basic.py
new file mode 100644
index 0000000000..6a451530c5
--- /dev/null
+++ b/tests/brokers/rabbit/basic.py
@@ -0,0 +1,26 @@
+from typing import Any
+
+from faststream.rabbit import RabbitBroker, RabbitRouter, TestRabbitBroker
+from tests.brokers.base.basic import BaseTestcaseConfig
+
+
+class RabbitTestcaseConfig(BaseTestcaseConfig):
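+    """Build real RabbitMQ brokers for tests; patch_broker is a no-op here."""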
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> RabbitBroker:
+ return RabbitBroker(apply_types=apply_types, **kwargs)
+
+ def patch_broker(self, broker: RabbitBroker, **kwargs: Any) -> RabbitBroker:
+ return broker
+
+ def get_router(self, **kwargs: Any) -> RabbitRouter:
+ return RabbitRouter(**kwargs)
+
+
+class RabbitMemoryTestcaseConfig(RabbitTestcaseConfig):
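+    """Wrap brokers in TestRabbitBroker so tests run in-memory."""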
+ def patch_broker(self, broker: RabbitBroker, **kwargs: Any) -> RabbitBroker:
+ return TestRabbitBroker(broker, **kwargs)
diff --git a/tests/brokers/rabbit/conftest.py b/tests/brokers/rabbit/conftest.py
index 00ff9bcde3..4ba2496e89 100644
--- a/tests/brokers/rabbit/conftest.py
+++ b/tests/brokers/rabbit/conftest.py
@@ -1,13 +1,10 @@
from dataclasses import dataclass
import pytest
-import pytest_asyncio
from faststream.rabbit import (
- RabbitBroker,
RabbitExchange,
RabbitRouter,
- TestRabbitBroker,
)
@@ -23,7 +20,7 @@ class Settings:
queue = "test_queue"
-@pytest.fixture
+@pytest.fixture()
def exchange(queue):
return RabbitExchange(name=queue)
@@ -33,27 +30,6 @@ def settings():
return Settings()
-@pytest.fixture
+@pytest.fixture()
def router():
return RabbitRouter()
-
-
-@pytest_asyncio.fixture()
-async def broker(settings):
- broker = RabbitBroker(settings.url, apply_types=False)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def full_broker(settings):
- broker = RabbitBroker(settings.url)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def test_broker():
- broker = RabbitBroker()
- async with TestRabbitBroker(broker) as br:
- yield br
diff --git a/tests/brokers/rabbit/core/test_call_manual.py b/tests/brokers/rabbit/core/test_call_manual.py
index 756113534f..07bf62c3c6 100644
--- a/tests/brokers/rabbit/core/test_call_manual.py
+++ b/tests/brokers/rabbit/core/test_call_manual.py
@@ -8,8 +8,8 @@ def just_broker(request):
return request.param
-@pytest.mark.asyncio # run it async to create anyio.Event
-async def test_sync(just_broker: RabbitBroker):
+@pytest.mark.asyncio() # run it async to create anyio.Event
+async def test_sync(just_broker: RabbitBroker) -> None:
@just_broker.subscriber("test")
def func(a: int) -> str:
return "pong"
@@ -17,8 +17,8 @@ def func(a: int) -> str:
assert func(1) == "pong"
-@pytest.mark.asyncio # run it async to create anyio.Event
-async def test_sync_publisher(just_broker: RabbitBroker):
+@pytest.mark.asyncio() # run it async to create anyio.Event
+async def test_sync_publisher(just_broker: RabbitBroker) -> None:
@just_broker.publisher("test")
def func(a: int) -> str:
return "pong"
@@ -26,8 +26,8 @@ def func(a: int) -> str:
assert func(1) == "pong"
-@pytest.mark.asyncio # run it async to create anyio.Event
-async def test_sync_multi(just_broker: RabbitBroker):
+@pytest.mark.asyncio() # run it async to create anyio.Event
+async def test_sync_multi(just_broker: RabbitBroker) -> None:
@just_broker.publisher("test")
@just_broker.subscriber("test")
@just_broker.publisher("test")
@@ -37,8 +37,8 @@ def func(a: int) -> str:
assert func(1) == "pong"
-@pytest.mark.asyncio
-async def test_async(just_broker: RabbitBroker):
+@pytest.mark.asyncio()
+async def test_async(just_broker: RabbitBroker) -> None:
@just_broker.subscriber("test")
async def func(a: int) -> str:
return "pong"
@@ -46,8 +46,8 @@ async def func(a: int) -> str:
assert await func(1) == "pong"
-@pytest.mark.asyncio
-async def test_async_publisher(just_broker: RabbitBroker):
+@pytest.mark.asyncio()
+async def test_async_publisher(just_broker: RabbitBroker) -> None:
@just_broker.publisher("test")
async def func(a: int) -> str:
return "pong"
@@ -55,8 +55,8 @@ async def func(a: int) -> str:
assert await func(1) == "pong"
-@pytest.mark.asyncio
-async def test_async_multi(just_broker: RabbitBroker):
+@pytest.mark.asyncio()
+async def test_async_multi(just_broker: RabbitBroker) -> None:
@just_broker.publisher("test")
@just_broker.subscriber("test")
@just_broker.publisher("test")
diff --git a/tests/brokers/rabbit/core/test_depends.py b/tests/brokers/rabbit/core/test_depends.py
index 7b4f0e72bd..7b50cef000 100644
--- a/tests/brokers/rabbit/core/test_depends.py
+++ b/tests/brokers/rabbit/core/test_depends.py
@@ -1,17 +1,16 @@
import aio_pika
import pytest
+from faststream import Depends
from faststream.rabbit import RabbitBroker
from faststream.rabbit.annotations import RabbitMessage
-from faststream.utils import Depends
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_broker_depends(
- queue,
- full_broker: RabbitBroker,
-):
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_broker_depends(queue: str) -> None:
+ full_broker = RabbitBroker(apply_types=True)
+
def sync_depends(message: RabbitMessage):
return message
@@ -25,7 +24,7 @@ async def h(
message: RabbitMessage,
k1=Depends(sync_depends),
k2=Depends(async_depends),
- ):
+ ) -> None:
nonlocal check_message
check_message = (
isinstance(message.raw_message, aio_pika.IncomingMessage)
@@ -35,34 +34,35 @@ async def h(
await full_broker.start()
- await full_broker.publish(queue=queue, rpc=True)
+ await full_broker.request(queue=queue)
assert check_message is True
-@pytest.mark.asyncio
-@pytest.mark.rabbit
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
async def test_different_consumers_has_different_messages(
context,
- full_broker: RabbitBroker,
-):
+) -> None:
+ full_broker = RabbitBroker(apply_types=True)
+
message1 = None
@full_broker.subscriber("test_different_consume_1")
- async def consumer1(message: RabbitMessage):
+ async def consumer1(message: RabbitMessage) -> None:
nonlocal message1
message1 = message
message2 = None
@full_broker.subscriber("test_different_consume_2")
- async def consumer2(message: RabbitMessage):
+ async def consumer2(message: RabbitMessage) -> None:
nonlocal message2
message2 = message
await full_broker.start()
- await full_broker.publish(queue="test_different_consume_1", rpc=True)
- await full_broker.publish(queue="test_different_consume_2", rpc=True)
+ await full_broker.request(queue="test_different_consume_1")
+ await full_broker.request(queue="test_different_consume_2")
assert isinstance(message1.raw_message, aio_pika.IncomingMessage)
assert isinstance(message2.raw_message, aio_pika.IncomingMessage)
diff --git a/tests/brokers/rabbit/specific/test_declare.py b/tests/brokers/rabbit/specific/test_declare.py
index 5025f83ca3..874fb403cc 100644
--- a/tests/brokers/rabbit/specific/test_declare.py
+++ b/tests/brokers/rabbit/specific/test_declare.py
@@ -4,9 +4,10 @@
from faststream.rabbit.helpers.declarer import RabbitDeclarer
-@pytest.mark.asyncio
-async def test_declare_queue(async_mock, queue: str):
- declarer = RabbitDeclarer(async_mock)
+@pytest.mark.asyncio()
+async def test_declare_queue(async_mock, queue: str) -> None:
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
q1 = await declarer.declare_queue(RabbitQueue(queue))
q2 = await declarer.declare_queue(RabbitQueue(queue))
@@ -15,12 +16,13 @@ async def test_declare_queue(async_mock, queue: str):
async_mock.declare_queue.assert_awaited_once()
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
async def test_declare_exchange(
async_mock,
queue: str,
-):
- declarer = RabbitDeclarer(async_mock)
+) -> None:
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
ex1 = await declarer.declare_exchange(RabbitExchange(queue))
ex2 = await declarer.declare_exchange(RabbitExchange(queue))
@@ -29,12 +31,13 @@ async def test_declare_exchange(
async_mock.declare_exchange.assert_awaited_once()
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
async def test_declare_nested_exchange_cash_nested(
async_mock,
queue: str,
-):
- declarer = RabbitDeclarer(async_mock)
+) -> None:
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
exchange = RabbitExchange(queue)
@@ -45,19 +48,20 @@ async def test_declare_nested_exchange_cash_nested(
assert async_mock.declare_exchange.await_count == 2
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
async def test_publisher_declare(
async_mock,
queue: str,
-):
- declarer = RabbitDeclarer(async_mock)
+) -> None:
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
broker = RabbitBroker()
broker._connection = async_mock
broker.declarer = declarer
@broker.publisher(queue, queue)
- async def f(): ...
+ async def f() -> None: ...
await broker.start()
diff --git a/tests/brokers/rabbit/specific/test_init.py b/tests/brokers/rabbit/specific/test_init.py
index 0b92c79167..e61959c700 100644
--- a/tests/brokers/rabbit/specific/test_init.py
+++ b/tests/brokers/rabbit/specific/test_init.py
@@ -3,9 +3,9 @@
from faststream.rabbit import RabbitBroker
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_set_max():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_set_max() -> None:
broker = RabbitBroker(logger=None, max_consumers=10)
await broker.start()
assert broker._channel._prefetch_count == 10
diff --git a/tests/brokers/rabbit/specific/test_nested_exchange.py b/tests/brokers/rabbit/specific/test_nested_exchange.py
index d0fbe5a031..821091cea3 100644
--- a/tests/brokers/rabbit/specific/test_nested_exchange.py
+++ b/tests/brokers/rabbit/specific/test_nested_exchange.py
@@ -2,24 +2,29 @@
import pytest
-from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_bind_to(queue: RabbitQueue, broker: RabbitBroker):
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_bind_to(queue: str) -> None:
+ broker = RabbitBroker(apply_types=False)
+
consume = Event()
async with broker:
meta_parent = RabbitExchange("meta", type=ExchangeType.FANOUT)
parent_exch = RabbitExchange(
- "main", type=ExchangeType.FANOUT, bind_to=meta_parent
+ "main",
+ type=ExchangeType.FANOUT,
+ bind_to=meta_parent,
)
@broker.subscriber(
- queue, exchange=RabbitExchange("nested", bind_to=parent_exch)
+ queue,
+ exchange=RabbitExchange("nested", bind_to=parent_exch),
)
- async def handler(m):
+ async def handler(m) -> None:
consume.set()
await broker.start()
diff --git a/tests/brokers/rabbit/test_connect.py b/tests/brokers/rabbit/test_connect.py
index 61934ec043..858fed251b 100644
--- a/tests/brokers/rabbit/test_connect.py
+++ b/tests/brokers/rabbit/test_connect.py
@@ -1,5 +1,3 @@
-from typing import Type
-
import pytest
from faststream.rabbit import RabbitBroker
@@ -7,15 +5,17 @@
from tests.brokers.base.connection import BrokerConnectionTestcase
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
class TestConnection(BrokerConnectionTestcase):
- broker: Type[RabbitBroker] = RabbitBroker
+ broker: type[RabbitBroker] = RabbitBroker
def get_broker_args(self, settings):
return {"url": settings.url}
- @pytest.mark.asyncio
- async def test_connect_handover_config_to_init(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connect_handover_config_to_init(
+ self, settings: dict[str, str]
+ ) -> None:
broker = self.broker(
host=settings.host,
port=settings.port,
@@ -27,8 +27,10 @@ async def test_connect_handover_config_to_init(self, settings):
assert await broker.connect()
await broker.close()
- @pytest.mark.asyncio
- async def test_connect_handover_config_to_connect(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connect_handover_config_to_connect(
+ self, settings: dict[str, str]
+ ) -> None:
broker = self.broker()
assert await broker.connect(
host=settings.host,
@@ -40,8 +42,10 @@ async def test_connect_handover_config_to_connect(self, settings):
)
await broker.close()
- @pytest.mark.asyncio
- async def test_connect_handover_config_to_connect_override_init(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connect_handover_config_to_connect_override_init(
+ self, settings: dict[str, str]
+ ) -> None:
broker = self.broker("fake-url") # will be ignored
assert await broker.connect(url=settings.url)
await broker.close()
diff --git a/tests/brokers/rabbit/test_consume.py b/tests/brokers/rabbit/test_consume.py
index cd2550429c..c89e429a70 100644
--- a/tests/brokers/rabbit/test_consume.py
+++ b/tests/brokers/rabbit/test_consume.py
@@ -3,61 +3,61 @@
import pytest
from aio_pika import IncomingMessage, Message
+from aiormq.abc import ConfirmationFrameType
+from faststream import AckPolicy
from faststream.exceptions import AckMessage, NackMessage, RejectMessage, SkipMessage
-from faststream.rabbit import RabbitBroker, RabbitExchange, RabbitQueue
+from faststream.rabbit import RabbitExchange, RabbitQueue
from faststream.rabbit.annotations import RabbitMessage
from tests.brokers.base.consume import BrokerRealConsumeTestcase
from tests.tools import spy_decorator
+from .basic import RabbitTestcaseConfig
-@pytest.mark.rabbit
-class TestConsume(BrokerRealConsumeTestcase):
- def get_broker(self, apply_types: bool = False) -> RabbitBroker:
- return RabbitBroker(apply_types=apply_types)
- @pytest.mark.asyncio
+@pytest.mark.rabbit()
+class TestConsume(RabbitTestcaseConfig, BrokerRealConsumeTestcase):
+ @pytest.mark.asyncio()
async def test_consume_from_exchange(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- def h(m):
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ def h(m) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
+
+ result = await br.publish("hello", queue=queue, exchange=exchange)
await asyncio.wait(
- (
- asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
- ),
- asyncio.create_task(event.wait()),
- ),
+ (asyncio.create_task(event.wait()),),
timeout=3,
)
+ assert isinstance(result, ConfirmationFrameType), result
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_with_get_old(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
queue=RabbitQueue(name=queue, passive=True),
exchange=RabbitExchange(name=exchange.name, passive=True),
- retry=True,
)
- def h(m):
+ def h(m) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -73,7 +73,7 @@ def h(m):
Message(b"hello"),
queue=queue,
exchange=exchange.name,
- )
+ ),
),
asyncio.create_task(event.wait()),
),
@@ -82,29 +82,32 @@ def h(m):
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage):
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)
+ IncomingMessage,
+ "ack",
+ spy_decorator(IncomingMessage.ack),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -114,17 +117,18 @@ async def handler(msg: RabbitMessage):
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_manual_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage):
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage) -> None:
await msg.ack()
event.set()
@@ -132,12 +136,14 @@ async def handler(msg: RabbitMessage):
await br.start()
with patch.object(
- IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)
+ IncomingMessage,
+ "ack",
+ spy_decorator(IncomingMessage.ack),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -146,19 +152,20 @@ async def handler(msg: RabbitMessage):
m.mock.assert_called_once()
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_exception_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage):
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage) -> None:
try:
- raise AckMessage()
+ raise AckMessage
finally:
event.set()
@@ -166,12 +173,14 @@ async def handler(msg: RabbitMessage):
await br.start()
with patch.object(
- IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)
+ IncomingMessage,
+ "ack",
+ spy_decorator(IncomingMessage.ack),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -180,31 +189,34 @@ async def handler(msg: RabbitMessage):
m.mock.assert_called_once()
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_manual_nack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage):
await msg.nack()
event.set()
- raise ValueError()
+ raise ValueError
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- IncomingMessage, "nack", spy_decorator(IncomingMessage.nack)
+ IncomingMessage,
+ "nack",
+ spy_decorator(IncomingMessage.nack),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -213,19 +225,20 @@ async def handler(msg: RabbitMessage):
m.mock.assert_called_once()
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_exception_nack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage):
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage) -> None:
try:
- raise NackMessage()
+ raise NackMessage
finally:
event.set()
@@ -233,12 +246,14 @@ async def handler(msg: RabbitMessage):
await br.start()
with patch.object(
- IncomingMessage, "nack", spy_decorator(IncomingMessage.nack)
+ IncomingMessage,
+ "nack",
+ spy_decorator(IncomingMessage.nack),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -247,31 +262,34 @@ async def handler(msg: RabbitMessage):
m.mock.assert_called_once()
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_manual_reject(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage):
await msg.reject()
event.set()
- raise ValueError()
+ raise ValueError
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- IncomingMessage, "reject", spy_decorator(IncomingMessage.reject)
+ IncomingMessage,
+ "reject",
+ spy_decorator(IncomingMessage.reject),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -280,19 +298,20 @@ async def handler(msg: RabbitMessage):
m.mock.assert_called_once()
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_exception_reject(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage):
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage) -> None:
try:
- raise RejectMessage()
+ raise RejectMessage
finally:
event.set()
@@ -300,12 +319,14 @@ async def handler(msg: RabbitMessage):
await br.start()
with patch.object(
- IncomingMessage, "reject", spy_decorator(IncomingMessage.reject)
+ IncomingMessage,
+ "reject",
+ spy_decorator(IncomingMessage.reject),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -314,31 +335,42 @@ async def handler(msg: RabbitMessage):
m.mock.assert_called_once()
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_skip_message(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue)
- async def handler(msg: RabbitMessage):
+ async def handler(msg: RabbitMessage) -> None:
try:
- raise SkipMessage()
+ raise SkipMessage
finally:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
- with patch.object(
- IncomingMessage, "reject", spy_decorator(IncomingMessage.reject)
- ) as m, patch.object(
- IncomingMessage, "reject", spy_decorator(IncomingMessage.reject)
- ) as m1, patch.object(
- IncomingMessage, "reject", spy_decorator(IncomingMessage.reject)
- ) as m2:
+ with (
+ patch.object(
+ IncomingMessage,
+ "reject",
+ spy_decorator(IncomingMessage.reject),
+ ) as m,
+ patch.object(
+ IncomingMessage,
+ "reject",
+ spy_decorator(IncomingMessage.reject),
+ ) as m1,
+ patch.object(
+ IncomingMessage,
+ "reject",
+ spy_decorator(IncomingMessage.reject),
+ ) as m2,
+ ):
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -352,29 +384,36 @@ async def handler(msg: RabbitMessage):
assert event.is_set()
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_consume_no_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, exchange=exchange, retry=1, no_ack=True)
- async def handler(msg: RabbitMessage):
+ @consume_broker.subscriber(
+ queue,
+ exchange=exchange,
+ ack_policy=AckPolicy.DO_NOTHING,
+ )
+ async def handler(msg: RabbitMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
with patch.object(
- IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)
+ IncomingMessage,
+ "ack",
+ spy_decorator(IncomingMessage.ack),
) as m:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(event.wait()),
),
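The recurring change in this file swaps the removed `retry=`/`no_ack=` subscriber flags for the `AckPolicy` enum imported at the top of the hunk. A minimal sketch of the migration, assuming only the subscriber signature visible in the new test code (the queue name is illustrative):

    from faststream import AckPolicy
    from faststream.rabbit import RabbitBroker

    broker = RabbitBroker()

    # Old style (removed by this diff):
    # @broker.subscriber(queue, exchange=exchange, retry=1, no_ack=True)

    # New style: the acknowledgement strategy is an explicit policy value;
    # DO_NOTHING means FastStream never acks/nacks on the handler's behalf.
    @broker.subscriber("some-queue", ack_policy=AckPolicy.DO_NOTHING)
    async def handler(msg) -> None: ...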
diff --git a/tests/brokers/rabbit/test_fastapi.py b/tests/brokers/rabbit/test_fastapi.py
index 18fff70dc6..6ffa26392a 100644
--- a/tests/brokers/rabbit/test_fastapi.py
+++ b/tests/brokers/rabbit/test_fastapi.py
@@ -5,22 +5,24 @@
from faststream.rabbit import ExchangeType, RabbitExchange, RabbitQueue, RabbitRouter
from faststream.rabbit.fastapi import RabbitRouter as StreamRouter
-from faststream.rabbit.testing import TestRabbitBroker, build_message
from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase
+from .basic import RabbitMemoryTestcaseConfig
-@pytest.mark.rabbit
+
+@pytest.mark.rabbit()
class TestRouter(FastAPITestcase):
router_class = StreamRouter
broker_router_class = RabbitRouter
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_path(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(
@@ -33,7 +35,7 @@ async def test_path(
type=ExchangeType.TOPIC,
),
)
- def subscriber(msg: str, name: str):
+ def subscriber(msg: str, name: str) -> None:
mock(msg=msg, name=name)
event.set()
@@ -42,7 +44,7 @@ def subscriber(msg: str, name: str):
await asyncio.wait(
(
asyncio.create_task(
- router.broker.publish("hello", "in.john", queue + "1")
+ router.broker.publish("hello", "in.john", queue + "1"),
),
asyncio.create_task(event.wait()),
),
@@ -53,14 +55,12 @@ def subscriber(msg: str, name: str):
mock.assert_called_once_with(msg="hello", name="john")
-@pytest.mark.asyncio
-class TestRouterLocal(FastAPILocalTestcase):
+@pytest.mark.asyncio()
+class TestRouterLocal(RabbitMemoryTestcaseConfig, FastAPILocalTestcase):
router_class = StreamRouter
broker_router_class = RabbitRouter
- broker_test = staticmethod(TestRabbitBroker)
- build_message = staticmethod(build_message)
- async def test_path(self):
+ async def test_path(self) -> None:
router = self.router_class()
@router.subscriber(
@@ -76,12 +76,11 @@ async def test_path(self):
async def hello(name):
return name
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
"hi",
"in.john",
"test",
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "john"
+ assert await r.decode() == "john"
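The hunk above also shows the RPC migration applied throughout this PR: `publish(..., rpc=True, rpc_timeout=...)` becomes a dedicated `request()` call that returns a response message rather than a pre-decoded body. A sketch of the two call shapes, using only the API visible in these tests:

    from faststream.rabbit import RabbitBroker

    async def rpc_roundtrip(broker: RabbitBroker, queue: str):
        # Old style (removed): the decoded body came back directly.
        # result = await broker.publish("hi", queue, rpc=True, rpc_timeout=0.5)

        # New style: request() returns a message object; decode it explicitly.
        response = await broker.request("hi", queue, timeout=0.5)
        return await response.decode()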
diff --git a/tests/brokers/rabbit/test_middlewares.py b/tests/brokers/rabbit/test_middlewares.py
index 050d98d173..f56c836e8d 100644
--- a/tests/brokers/rabbit/test_middlewares.py
+++ b/tests/brokers/rabbit/test_middlewares.py
@@ -1,25 +1,23 @@
import pytest
-from faststream.rabbit import RabbitBroker, TestRabbitBroker
from tests.brokers.base.middlewares import (
ExceptionMiddlewareTestcase,
MiddlewareTestcase,
MiddlewaresOrderTestcase,
)
+from .basic import RabbitMemoryTestcaseConfig, RabbitTestcaseConfig
-@pytest.mark.rabbit
-class TestMiddlewares(MiddlewareTestcase):
- broker_class = RabbitBroker
+class TestMiddlewaresOrder(RabbitMemoryTestcaseConfig, MiddlewaresOrderTestcase):
+ pass
-@pytest.mark.rabbit
-class TestExceptionMiddlewares(ExceptionMiddlewareTestcase):
- broker_class = RabbitBroker
+@pytest.mark.rabbit()
+class TestMiddlewares(RabbitTestcaseConfig, MiddlewareTestcase):
+ pass
-class TestMiddlewaresOrder(MiddlewaresOrderTestcase):
- broker_class = RabbitBroker
- def patch_broker(self, broker: RabbitBroker) -> TestRabbitBroker:
- return TestRabbitBroker(broker)
+@pytest.mark.rabbit()
+class TestExceptionMiddlewares(RabbitTestcaseConfig, ExceptionMiddlewareTestcase):
+ pass
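Each rewritten module now pairs a shared behaviour testcase with a broker-specific config mixin from the new `.basic` module. The Rabbit variant is not shown in this part of the diff; the following sketch of its likely shape mirrors the `tests/brokers/redis/basic.py` file added later in this diff:

    from typing import Any

    from faststream.rabbit import RabbitBroker, RabbitRouter, TestRabbitBroker
    from tests.brokers.base.basic import BaseTestcaseConfig


    class RabbitTestcaseConfig(BaseTestcaseConfig):
        def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
            return RabbitBroker(apply_types=apply_types, **kwargs)

        def patch_broker(self, broker: RabbitBroker, **kwargs: Any) -> RabbitBroker:
            # real-broker tests run against the broker unchanged
            return broker

        def get_router(self, **kwargs: Any) -> RabbitRouter:
            return RabbitRouter(**kwargs)


    class RabbitMemoryTestcaseConfig(RabbitTestcaseConfig):
        def patch_broker(self, broker: RabbitBroker, **kwargs: Any) -> RabbitBroker:
            # in-memory variant: wrap the broker in the test client
            return TestRabbitBroker(broker, **kwargs)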
diff --git a/tests/brokers/rabbit/test_parser.py b/tests/brokers/rabbit/test_parser.py
index 038f43ac93..2ecaf2967c 100644
--- a/tests/brokers/rabbit/test_parser.py
+++ b/tests/brokers/rabbit/test_parser.py
@@ -1,9 +1,10 @@
import pytest
-from faststream.rabbit import RabbitBroker
from tests.brokers.base.parser import CustomParserTestcase
+from .basic import RabbitTestcaseConfig
-@pytest.mark.rabbit
-class TestCustomParser(CustomParserTestcase):
- broker_class = RabbitBroker
+
+@pytest.mark.rabbit()
+class TestCustomParser(RabbitTestcaseConfig, CustomParserTestcase):
+ pass
diff --git a/tests/brokers/rabbit/test_publish.py b/tests/brokers/rabbit/test_publish.py
index f747bb6b29..a8aa2508f1 100644
--- a/tests/brokers/rabbit/test_publish.py
+++ b/tests/brokers/rabbit/test_publish.py
@@ -1,41 +1,43 @@
import asyncio
+from typing import TYPE_CHECKING
from unittest.mock import Mock, patch
import pytest
from faststream import Context
-from faststream.rabbit import RabbitBroker, RabbitResponse, ReplyConfig
+from faststream.rabbit import RabbitResponse
from faststream.rabbit.publisher.producer import AioPikaFastProducer
from tests.brokers.base.publish import BrokerPublishTestcase
from tests.tools import spy_decorator
+from .basic import RabbitTestcaseConfig
-@pytest.mark.rabbit
-class TestPublish(BrokerPublishTestcase):
- def get_broker(self, apply_types: bool = False) -> RabbitBroker:
- return RabbitBroker(apply_types=apply_types)
+if TYPE_CHECKING:
+ from faststream.rabbit.response import RabbitPublishCommand
- @pytest.mark.asyncio
+
+@pytest.mark.rabbit()
+class TestPublish(RabbitTestcaseConfig, BrokerPublishTestcase):
+ @pytest.mark.asyncio()
async def test_reply_config(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
reply_queue = queue + "reply"
@pub_broker.subscriber(reply_queue)
- async def reply_handler(m):
+ async def reply_handler(m) -> None:
event.set()
mock(m)
- with pytest.warns(DeprecationWarning):
-
- @pub_broker.subscriber(queue, reply_config=ReplyConfig(persist=True))
- async def handler(m):
- return m
+ @pub_broker.subscriber(queue)
+ async def handler(m):
+ return RabbitResponse(m, persist=True)
async with self.patch_broker(pub_broker) as br:
with patch.object(
@@ -48,38 +50,37 @@ async def handler(m):
await asyncio.wait(
(
asyncio.create_task(
- br.publish("Hello!", queue, reply_to=reply_queue)
+ br.publish("Hello!", queue, reply_to=reply_queue),
),
asyncio.create_task(event.wait()),
),
timeout=3,
)
- assert m.mock.call_args.kwargs.get("persist")
- assert m.mock.call_args.kwargs.get("immediate") is False
+ cmd: RabbitPublishCommand = m.mock.call_args[0][1]
+ assert cmd.message_options["persist"]
+ assert not cmd.publish_options["immediate"]
assert event.is_set()
mock.assert_called_with("Hello!")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@pub_broker.publisher(queue + "1")
async def handle():
- return RabbitResponse(
- 1,
- persist=True,
- )
+ return RabbitResponse(1, persist=True)
@pub_broker.subscriber(queue + "1")
- async def handle_next(msg=Context("message")):
+ async def handle_next(msg=Context("message")) -> None:
mock(body=msg.body)
event.set()
@@ -101,16 +102,16 @@ async def handle_next(msg=Context("message")):
assert event.is_set()
- assert m.mock.call_args.kwargs.get("persist")
+ cmd: RabbitPublishCommand = m.mock.call_args[0][1]
+ assert cmd.message_options["persist"]
mock.assert_called_once_with(body=b"1")
- @pytest.mark.asyncio
+ @pytest.mark.asyncio()
async def test_response_for_rpc(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@@ -121,8 +122,8 @@ async def handle():
await br.start()
response = await asyncio.wait_for(
- br.publish("", queue, rpc=True),
+ br.request("", queue),
timeout=3,
)
- assert response == "Hi!", response
+ assert await response.decode() == "Hi!", response
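The `test_reply_config` rewrite above captures the `ReplyConfig` deprecation: per-subscriber reply options move onto the `RabbitResponse` returned by the handler. A condensed sketch using only names present in this hunk (the queue name is illustrative):

    from faststream.rabbit import RabbitBroker, RabbitResponse

    broker = RabbitBroker()

    # Old style (removed; it raised a DeprecationWarning):
    # @broker.subscriber(queue, reply_config=ReplyConfig(persist=True))
    # async def handler(m):
    #     return m

    # New style: publish options such as persist ride on the response itself.
    @broker.subscriber("in-queue")
    async def handler(m):
        return RabbitResponse(m, persist=True)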
diff --git a/tests/brokers/rabbit/test_requests.py b/tests/brokers/rabbit/test_requests.py
index c0927eabc8..8eb64a075a 100644
--- a/tests/brokers/rabbit/test_requests.py
+++ b/tests/brokers/rabbit/test_requests.py
@@ -1,38 +1,32 @@
import pytest
from faststream import BaseMiddleware
-from faststream.rabbit import RabbitBroker, RabbitRouter, TestRabbitBroker
from tests.brokers.base.requests import RequestsTestcase
+from .basic import RabbitMemoryTestcaseConfig, RabbitTestcaseConfig
+
class Mid(BaseMiddleware):
async def on_receive(self) -> None:
self.msg._Message__lock = False
- self.msg.body = self.msg.body * 2
+ self.msg.body *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body = msg._decoded_body * 2
+ msg.body *= 2
return await call_next(msg)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class RabbitRequestsTestcase(RequestsTestcase):
def get_middleware(self, **kwargs):
return Mid
- def get_broker(self, **kwargs):
- return RabbitBroker(**kwargs)
-
- def get_router(self, **kwargs):
- return RabbitRouter(**kwargs)
-
-@pytest.mark.rabbit
-class TestRealRequests(RabbitRequestsTestcase):
+@pytest.mark.rabbit()
+class TestRealRequests(RabbitTestcaseConfig, RabbitRequestsTestcase):
pass
-@pytest.mark.asyncio
-class TestRequestTestClient(RabbitRequestsTestcase):
- def patch_broker(self, broker, **kwargs):
- return TestRabbitBroker(broker, **kwargs)
+@pytest.mark.asyncio()
+class TestRequestTestClient(RabbitMemoryTestcaseConfig, RabbitRequestsTestcase):
+ pass
diff --git a/tests/brokers/rabbit/test_router.py b/tests/brokers/rabbit/test_router.py
index 6cbd2557a4..436dc4fef5 100644
--- a/tests/brokers/rabbit/test_router.py
+++ b/tests/brokers/rabbit/test_router.py
@@ -1,11 +1,11 @@
import asyncio
+from unittest.mock import MagicMock
import pytest
from faststream import Path
from faststream.rabbit import (
ExchangeType,
- RabbitBroker,
RabbitExchange,
RabbitPublisher,
RabbitQueue,
@@ -14,21 +14,21 @@
)
from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase
+from .basic import RabbitMemoryTestcaseConfig, RabbitTestcaseConfig
-@pytest.mark.rabbit
-class TestRouter(RouterTestcase):
- broker_class = RabbitRouter
+
+@pytest.mark.rabbit()
+class TestRouter(RabbitTestcaseConfig, RouterTestcase):
route_class = RabbitRoute
publisher_class = RabbitPublisher
async def test_router_path(
self,
- queue,
- event,
- mock,
- router,
- pub_broker,
- ):
+ queue: str,
+ event: asyncio.Event,
+ mock: MagicMock,
+ router: RabbitRouter,
+ ) -> None:
@router.subscriber(
RabbitQueue(
queue,
@@ -42,20 +42,19 @@ async def test_router_path(
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
- pub_broker._is_apply_types = True
+ pub_broker = self.get_broker(apply_types=True)
pub_broker.include_router(router)
await pub_broker.start()
- await pub_broker.publish(
+ await pub_broker.request(
"",
"in.john.2",
queue + "1",
- rpc=True,
)
assert event.is_set()
@@ -63,16 +62,15 @@ async def h(
async def test_router_delay_handler_path(
self,
- queue,
- event,
- mock,
- router,
- pub_broker,
- ):
+ queue: str,
+ event: asyncio.Event,
+ mock: MagicMock,
+ router: RabbitRouter,
+ ) -> None:
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
@@ -89,19 +87,18 @@ async def h(
type=ExchangeType.TOPIC,
),
),
- )
+ ),
)
- pub_broker._is_apply_types = True
+ pub_broker = self.get_broker(apply_types=True)
pub_broker.include_router(r)
await pub_broker.start()
- await pub_broker.publish(
+ await pub_broker.request(
"",
"in.john.2",
queue + "1",
- rpc=True,
)
assert event.is_set()
@@ -110,16 +107,18 @@ async def h(
async def test_queue_obj(
self,
router: RabbitRouter,
- broker: RabbitBroker,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ broker = self.get_broker()
+
router.prefix = "test/"
r_queue = RabbitQueue(queue)
@router.subscriber(r_queue)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
broker.include_router(router)
@@ -130,7 +129,7 @@ def subscriber(m):
await asyncio.wait(
(
asyncio.create_task(
- broker.publish("hello", f"test/{r_queue.name}")
+ broker.publish("hello", f"test/{r_queue.name}"),
),
asyncio.create_task(event.wait()),
),
@@ -142,17 +141,19 @@ def subscriber(m):
async def test_queue_obj_with_routing_key(
self,
router: RabbitRouter,
- broker: RabbitBroker,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
+ broker = self.get_broker()
+
router.prefix = "test/"
r_queue = RabbitQueue("useless", routing_key=f"{queue}1")
exchange = RabbitExchange(f"{queue}exch")
@router.subscriber(r_queue, exchange=exchange)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
broker.include_router(router)
@@ -163,7 +164,7 @@ def subscriber(m):
await asyncio.wait(
(
asyncio.create_task(
- broker.publish("hello", f"test/{queue}1", exchange=exchange)
+ broker.publish("hello", f"test/{queue}1", exchange=exchange),
),
asyncio.create_task(event.wait()),
),
@@ -174,20 +175,22 @@ def subscriber(m):
async def test_delayed_handlers_with_queue(
self,
- event: asyncio.Event,
router: RabbitRouter,
queue: str,
- pub_broker: RabbitBroker,
- ):
- def response(m):
+ ) -> None:
+ event = asyncio.Event()
+
+ def response(m) -> None:
event.set()
r_queue = RabbitQueue(queue)
r = type(router)(
- prefix="test/", handlers=(self.route_class(response, queue=r_queue),)
+ prefix="test/",
+ handlers=(self.route_class(response, queue=r_queue),),
)
+ pub_broker = self.get_broker()
pub_broker.include_router(r)
async with pub_broker:
@@ -196,7 +199,7 @@ def response(m):
await asyncio.wait(
(
asyncio.create_task(
- pub_broker.publish("hello", f"test/{r_queue.name}")
+ pub_broker.publish("hello", f"test/{r_queue.name}"),
),
asyncio.create_task(event.wait()),
),
@@ -206,7 +209,6 @@ def response(m):
assert event.is_set()
-class TestRouterLocal(RouterLocalTestcase):
- broker_class = RabbitRouter
+class TestRouterLocal(RabbitMemoryTestcaseConfig, RouterLocalTestcase):
route_class = RabbitRoute
publisher_class = RabbitPublisher
diff --git a/tests/brokers/rabbit/test_rpc.py b/tests/brokers/rabbit/test_rpc.py
deleted file mode 100644
index 68e6d12812..0000000000
--- a/tests/brokers/rabbit/test_rpc.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import pytest
-
-from faststream.rabbit import RabbitBroker
-from tests.brokers.base.rpc import BrokerRPCTestcase, ReplyAndConsumeForbidden
-
-
-@pytest.mark.rabbit
-class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden):
- def get_broker(self, apply_types: bool = False) -> RabbitBroker:
- return RabbitBroker(apply_types=apply_types)
diff --git a/tests/brokers/rabbit/test_schemas.py b/tests/brokers/rabbit/test_schemas.py
index da8e4914cb..2224fb976a 100644
--- a/tests/brokers/rabbit/test_schemas.py
+++ b/tests/brokers/rabbit/test_schemas.py
@@ -1,25 +1,21 @@
from faststream.rabbit import RabbitQueue
-def test_same_queue():
+def test_same_queue() -> None:
assert (
- len(
- {
- RabbitQueue("test"): 0,
- RabbitQueue("test"): 1,
- }
- )
+ len({
+ RabbitQueue("test"): 0,
+ RabbitQueue("test"): 1,
+ })
== 1
)
-def test_different_queue_routing_key():
+def test_different_queue_routing_key() -> None:
assert (
- len(
- {
- RabbitQueue("test", routing_key="binding-1"): 0,
- RabbitQueue("test", routing_key="binding-2"): 1,
- }
- )
+ len({
+ RabbitQueue("test", routing_key="binding-1"): 0,
+ RabbitQueue("test", routing_key="binding-2"): 1,
+ })
== 1
)
diff --git a/tests/brokers/rabbit/test_test_client.py b/tests/brokers/rabbit/test_test_client.py
index 1d92edf2fa..1d14978dd5 100644
--- a/tests/brokers/rabbit/test_test_client.py
+++ b/tests/brokers/rabbit/test_test_client.py
@@ -1,59 +1,39 @@
import asyncio
+from typing import Any
import pytest
from faststream import BaseMiddleware
-from faststream.exceptions import SetupError
+from faststream.exceptions import SubscriberNotFound
from faststream.rabbit import (
ExchangeType,
RabbitBroker,
RabbitExchange,
RabbitQueue,
- TestRabbitBroker,
)
from faststream.rabbit.annotations import RabbitMessage
-from faststream.rabbit.testing import FakeProducer, apply_pattern
+from faststream.rabbit.testing import FakeProducer, _is_handler_matches, apply_pattern
from tests.brokers.base.testclient import BrokerTestclientTestcase
+from .basic import RabbitMemoryTestcaseConfig
-@pytest.mark.asyncio
-class TestTestclient(BrokerTestclientTestcase):
- test_class = TestRabbitBroker
- def get_broker(self, apply_types: bool = False) -> RabbitBroker:
- return RabbitBroker(apply_types=apply_types)
-
- def patch_broker(self, broker: RabbitBroker) -> RabbitBroker:
- return TestRabbitBroker(broker)
-
- def get_fake_producer_class(self) -> type:
- return FakeProducer
-
- async def test_rpc_conflicts_reply(self, queue):
- broker = self.get_broker()
-
- async with TestRabbitBroker(broker) as br:
- with pytest.raises(SetupError):
- await br.publish(
- "",
- queue,
- rpc=True,
- reply_to="response",
- )
-
- @pytest.mark.rabbit
+@pytest.mark.asyncio()
+class TestTestclient(RabbitMemoryTestcaseConfig, BrokerTestclientTestcase):
+ @pytest.mark.rabbit()
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
- async with TestRabbitBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -64,179 +44,55 @@ def subscriber(m):
assert event.is_set()
- async def test_respect_routing_key(self):
- broker = self.get_broker()
-
- publisher = broker.publisher(
- exchange=RabbitExchange("test", type=ExchangeType.TOPIC), routing_key="up"
- )
-
- async with TestRabbitBroker(broker):
- await publisher.publish("Hi!")
-
- publisher.mock.assert_called_once_with("Hi!")
-
- async def test_direct(
- self,
- queue: str,
- ):
- broker = self.get_broker()
-
- @broker.subscriber(queue)
- async def handler(m):
- return 1
-
- @broker.subscriber(queue + "1", exchange="test")
- async def handler2(m):
- return 2
-
- async with TestRabbitBroker(broker) as br:
- await br.start()
- assert await br.publish("", queue, rpc=True) == 1
- assert await br.publish("", queue + "1", exchange="test", rpc=True) == 2
- assert None is await br.publish("", exchange="test2", rpc=True)
-
- async def test_fanout(
+ async def test_direct_not_found(
self,
queue: str,
- mock,
- ):
+ ) -> None:
broker = self.get_broker()
- exch = RabbitExchange("test", type=ExchangeType.FANOUT)
-
- @broker.subscriber(queue, exchange=exch)
- async def handler(m):
- mock()
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("", exchange=exch, rpc=True)
-
- assert None is await br.publish("", exchange="test2", rpc=True)
-
- assert mock.call_count == 1
-
- async def test_any_topic_routing(self):
- broker = self.get_broker()
-
- exch = RabbitExchange("test", type=ExchangeType.TOPIC)
-
- @broker.subscriber(
- RabbitQueue("test", routing_key="test.*.subj.*"),
- exchange=exch,
- )
- def subscriber(msg): ...
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("hello", "test.a.subj.b", exchange=exch)
- subscriber.mock.assert_called_once_with("hello")
-
- async def test_ending_topic_routing(self):
- broker = self.get_broker()
-
- exch = RabbitExchange("test", type=ExchangeType.TOPIC)
-
- @broker.subscriber(
- RabbitQueue("test", routing_key="test.#"),
- exchange=exch,
- )
- def subscriber(msg): ...
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("hello", "test.a.subj.b", exchange=exch)
- subscriber.mock.assert_called_once_with("hello")
-
- async def test_mixed_topic_routing(self):
- broker = self.get_broker()
-
- exch = RabbitExchange("test", type=ExchangeType.TOPIC)
-
- @broker.subscriber(
- RabbitQueue("test", routing_key="*.*.subj.#"),
- exchange=exch,
- )
- def subscriber(msg): ...
-
- async with TestRabbitBroker(broker) as br:
- await br.publish("hello", "test.a.subj.b.c", exchange=exch)
- subscriber.mock.assert_called_once_with("hello")
-
- async def test_header(self):
- broker = self.get_broker()
-
- q1 = RabbitQueue(
- "test-queue-2",
- bind_arguments={"key": 2, "key2": 2, "x-match": "any"},
- )
- q2 = RabbitQueue(
- "test-queue-3",
- bind_arguments={"key": 2, "key2": 2, "x-match": "all"},
- )
- q3 = RabbitQueue(
- "test-queue-4",
- bind_arguments={},
- )
- exch = RabbitExchange("exchange", type=ExchangeType.HEADERS)
-
- @broker.subscriber(q2, exch)
- async def handler2(msg):
- return 2
-
- @broker.subscriber(q1, exch)
- async def handler(msg):
- return 1
-
- @broker.subscriber(q3, exch)
- async def handler3(msg):
- return 3
-
- async with TestRabbitBroker(broker) as br:
- assert (
- await br.publish(exchange=exch, rpc=True, headers={"key": 2, "key2": 2})
- == 2
- )
- assert await br.publish(exchange=exch, rpc=True, headers={"key": 2}) == 1
- assert await br.publish(exchange=exch, rpc=True, headers={}) == 3
+ async with self.patch_broker(broker) as br:
+ with pytest.raises(SubscriberNotFound):
+ await br.request("", "")
async def test_consume_manual_ack(
self,
queue: str,
exchange: RabbitExchange,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
consume = asyncio.Event()
consume2 = asyncio.Event()
consume3 = asyncio.Event()
- @broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage):
+ @broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage) -> None:
await msg.raw_message.ack()
consume.set()
- @broker.subscriber(queue=queue + "1", exchange=exchange, retry=1)
- async def handler2(msg: RabbitMessage):
+ @broker.subscriber(queue=queue + "1", exchange=exchange)
+ async def handler2(msg: RabbitMessage) -> None:
await msg.raw_message.nack()
consume2.set()
- raise ValueError()
+ raise ValueError
- @broker.subscriber(queue=queue + "2", exchange=exchange, retry=1)
- async def handler3(msg: RabbitMessage):
+ @broker.subscriber(queue=queue + "2", exchange=exchange)
+ async def handler3(msg: RabbitMessage) -> None:
await msg.raw_message.reject()
consume3.set()
- raise ValueError()
+ raise ValueError
- async with TestRabbitBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await asyncio.wait(
(
asyncio.create_task(
- br.publish("hello", queue=queue, exchange=exchange)
+ br.publish("hello", queue=queue, exchange=exchange),
),
asyncio.create_task(
- br.publish("hello", queue=queue + "1", exchange=exchange)
+ br.publish("hello", queue=queue + "1", exchange=exchange),
),
asyncio.create_task(
- br.publish("hello", queue=queue + "2", exchange=exchange)
+ br.publish("hello", queue=queue + "2", exchange=exchange),
),
asyncio.create_task(consume.wait()),
asyncio.create_task(consume2.wait()),
@@ -249,7 +105,7 @@ async def handler3(msg: RabbitMessage):
assert consume2.is_set()
assert consume3.is_set()
- async def test_respect_middleware(self, queue):
+ async def test_respect_middleware(self, queue: str) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -257,22 +113,22 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = RabbitBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(msg): ...
+ async def h1(msg) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(msg): ...
+ async def h2(msg) -> None: ...
- async with TestRabbitBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
assert len(routes) == 2
- @pytest.mark.rabbit
- async def test_real_respect_middleware(self, queue):
+ @pytest.mark.rabbit()
+ async def test_real_respect_middleware(self, queue: str) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -280,15 +136,15 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = RabbitBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(msg): ...
+ async def h1(msg) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(msg): ...
+ async def h2(msg) -> None: ...
- async with TestRabbitBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
await h1.wait_call(3)
@@ -296,24 +152,25 @@ async def h2(msg): ...
assert len(routes) == 2
- @pytest.mark.rabbit
- async def test_broker_gets_patched_attrs_within_cm(self):
- await super().test_broker_gets_patched_attrs_within_cm()
+ @pytest.mark.rabbit()
+ async def test_broker_gets_patched_attrs_within_cm(self) -> None:
+ await super().test_broker_gets_patched_attrs_within_cm(FakeProducer)
- @pytest.mark.rabbit
- async def test_broker_with_real_doesnt_get_patched(self):
+ @pytest.mark.rabbit()
+ async def test_broker_with_real_doesnt_get_patched(self) -> None:
await super().test_broker_with_real_doesnt_get_patched()
- @pytest.mark.rabbit
+ @pytest.mark.rabbit()
async def test_broker_with_real_patches_publishers_and_subscribers(
- self, queue: str
- ):
+ self,
+ queue: str,
+ ) -> None:
await super().test_broker_with_real_patches_publishers_and_subscribers(queue)
@pytest.mark.parametrize(
("pattern", "current", "result"),
- [
+ (
pytest.param("#", "1.2.3", True, id="#"),
pytest.param("*", "1", True, id="*"),
pytest.param("*", "1.2", False, id="* - broken"),
@@ -324,14 +181,156 @@ async def test_broker_with_real_patches_publishers_and_subscribers(
pytest.param("#.test.*.*", "1.2.test.1.2", True, id="#.test.*."),
pytest.param("#.test.*.*.*", "1.2.test.1.2", False, id="#.test.*.*.* - broken"),
pytest.param(
- "#.test.*.test.#", "1.2.test.1.test.1.2", True, id="#.test.*.test.#"
+ "#.test.*.test.#",
+ "1.2.test.1.test.1.2",
+ True,
+ id="#.test.*.test.#",
),
pytest.param("#.*.test", "1.2.2.test", True, id="#.*.test"),
pytest.param("#.2.*.test", "1.2.2.test", True, id="#.2.*.test"),
pytest.param("#.*.*.test", "1.2.2.test", True, id="#.*.*.test"),
pytest.param("*.*.*.test", "1.2.test", False, id="*.*.*.test - broken"),
pytest.param("#.*.*.test", "1.2.test", False, id="#.*.*.test - broken"),
- ],
+ ),
)
-def test(pattern: str, current: str, result: bool):
+def test(pattern: str, current: str, result: bool) -> None:
assert apply_pattern(pattern, current) == result
+
+
+exch_direct = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.DIRECT)
+exch_fanout = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.FANOUT)
+exch_topic = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.TOPIC)
+exch_headers = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.HEADERS)
+regular_queue = RabbitQueue("test-regular-queue", auto_delete=True)
+
+routing_key_queue = RabbitQueue(
+ "test-routing-key-queue", auto_delete=True, routing_key="*.info"
+)
+one_key_queue = RabbitQueue(
+ "test-one-key-queue", auto_delete=True, bind_arguments={"key": 1}
+)
+any_keys_queue = RabbitQueue(
+ "test-any-keys-queue",
+ auto_delete=True,
+ bind_arguments={"key": 2, "key2": 2, "x-match": "any"},
+)
+all_keys_queue = RabbitQueue(
+ "test-all-keys-queue",
+ auto_delete=True,
+ bind_arguments={"key": 2, "key2": 2, "x-match": "all"},
+)
+
+broker = RabbitBroker()
+
+
+@pytest.mark.parametrize(
+ (
+ "queue",
+ "exchange",
+ "routing_key",
+ "headers",
+ "expected_result",
+ ),
+ (
+ pytest.param(
+            regular_queue,
+ exch_direct,
+            regular_queue.routing,
+ {},
+ True,
+ id="direct match",
+ ),
+ pytest.param(
+            regular_queue,
+ exch_direct,
+ "wrong key",
+ {},
+ False,
+ id="direct mismatch",
+ ),
+ pytest.param(
+            regular_queue,
+ exch_fanout,
+ "",
+ {},
+ True,
+ id="fanout match",
+ ),
+ pytest.param(
+ routing_key_queue,
+ exch_topic,
+ "log.info",
+ {},
+ True,
+ id="topic match",
+ ),
+ pytest.param(
+ routing_key_queue,
+ exch_topic,
+ "log.wrong",
+ {},
+ False,
+ id="topic mismatch",
+ ),
+ pytest.param(
+ one_key_queue,
+ exch_headers,
+ "",
+ {"key": 1},
+ True,
+ id="one header match",
+ ),
+ pytest.param(
+ one_key_queue,
+ exch_headers,
+ "",
+ {"key": "wrong"},
+ False,
+ id="one header mismatch",
+ ),
+ pytest.param(
+ any_keys_queue,
+ exch_headers,
+ "",
+ {"key2": 2},
+ True,
+ id="any headers match",
+ ),
+ pytest.param(
+ any_keys_queue,
+ exch_headers,
+ "",
+ {"key2": "wrong"},
+ False,
+ id="any headers mismatch",
+ ),
+ pytest.param(
+ all_keys_queue,
+ exch_headers,
+ "",
+ {"key": 2, "key2": 2},
+ True,
+ id="all headers match",
+ ),
+ pytest.param(
+ all_keys_queue,
+ exch_headers,
+ "",
+ {"key": "wrong", "key2": 2},
+ False,
+ id="all headers mismatch",
+ ),
+ ),
+)
+def test_in_memory_routing(
+    queue: RabbitQueue,
+ exchange: RabbitExchange,
+ routing_key: str,
+ headers: dict[str, Any],
+ expected_result: bool,
+) -> None:
+ subscriber = broker.subscriber(queue, exchange)
+ assert (
+ _is_handler_matches(subscriber, routing_key, headers, exchange)
+ is expected_result
+ )
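For orientation, the helpers exercised above follow AMQP topic semantics: `*` matches exactly one dot-separated word, while `#` matches any number of words. A tiny usage sketch whose expectations come straight from the parametrized cases:

    from faststream.rabbit.testing import apply_pattern

    assert apply_pattern("#", "1.2.3")              # '#' spans several words
    assert apply_pattern("*", "1")                  # '*' is exactly one word
    assert not apply_pattern("*", "1.2")            # ...so two words don't match
    assert apply_pattern("#.*.test", "1.2.2.test")  # mixed pattern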
diff --git a/tests/brokers/rabbit/test_test_reentrancy.py b/tests/brokers/rabbit/test_test_reentrancy.py
index 5cc6bd3e77..89f5310d6e 100644
--- a/tests/brokers/rabbit/test_test_reentrancy.py
+++ b/tests/brokers/rabbit/test_test_reentrancy.py
@@ -15,11 +15,11 @@ async def on_input_data(msg: int):
@broker.subscriber("output_data")
-async def on_output_data(msg: int):
+async def on_output_data(msg: int) -> None:
pass
-async def _test_with_broker(with_real: bool):
+async def _test_with_broker(with_real: bool) -> None:
async with TestRabbitBroker(broker, with_real=with_real) as tester:
await tester.publish(1, "input_data")
@@ -30,22 +30,22 @@ async def _test_with_broker(with_real: bool):
on_output_data.mock.assert_called_once_with(2)
-@pytest.mark.asyncio
-async def test_with_fake_broker():
+@pytest.mark.asyncio()
+async def test_with_fake_broker() -> None:
await _test_with_broker(False)
await _test_with_broker(False)
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_with_real_broker():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_with_real_broker() -> None:
await _test_with_broker(True)
await _test_with_broker(True)
-async def _test_with_temp_subscriber():
+async def _test_with_temp_subscriber() -> None:
@broker.subscriber("output_data")
- async def on_output_data(msg: int):
+ async def on_output_data(msg: int) -> None:
pass
async with TestRabbitBroker(broker) as tester:
@@ -58,13 +58,13 @@ async def on_output_data(msg: int):
on_output_data.mock.assert_called_once_with(2)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@pytest.mark.skip(
reason=(
"Failed due `on_output_data` subscriber creates inside test and doesn't removed after "
"https://github.com/airtai/faststream/issues/556"
- )
+ ),
)
-async def test_with_temp_subscriber():
+async def test_with_temp_subscriber() -> None:
await _test_with_temp_subscriber()
await _test_with_temp_subscriber()
diff --git a/tests/brokers/rabbit/test_url_builder.py b/tests/brokers/rabbit/test_url_builder.py
index 0185207145..113708f7a0 100644
--- a/tests/brokers/rabbit/test_url_builder.py
+++ b/tests/brokers/rabbit/test_url_builder.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict
+from typing import Any
import pytest
from yarl import URL
@@ -8,7 +8,7 @@
@pytest.mark.parametrize(
("url_kwargs", "expected_url"),
- [
+ (
pytest.param(
{},
URL("amqp://guest:guest@localhost:5672/"), # pragma: allowlist secret
@@ -31,8 +31,8 @@
),
id="exotic virtualhost",
),
- ],
+ ),
)
-def test_unpack_args(url_kwargs: Dict[str, Any], expected_url: URL) -> None:
+def test_unpack_args(url_kwargs: dict[str, Any], expected_url: URL) -> None:
url = build_url(**url_kwargs)
assert url == expected_url
diff --git a/tests/brokers/redis/basic.py b/tests/brokers/redis/basic.py
new file mode 100644
index 0000000000..11f424017c
--- /dev/null
+++ b/tests/brokers/redis/basic.py
@@ -0,0 +1,24 @@
+from typing import Any
+
+from faststream.redis import RedisBroker, RedisRouter, TestRedisBroker
+from tests.brokers.base.basic import BaseTestcaseConfig
+
+
+class RedisTestcaseConfig(BaseTestcaseConfig):
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> RedisBroker:
+ return RedisBroker(apply_types=apply_types, **kwargs)
+
+ def patch_broker(self, broker: RedisBroker, **kwargs: Any) -> RedisBroker:
+ return broker
+
+ def get_router(self, **kwargs: Any) -> RedisRouter:
+ return RedisRouter(**kwargs)
+
+
+class RedisMemoryTestcaseConfig(RedisTestcaseConfig):
+ def patch_broker(self, broker: RedisBroker, **kwargs: Any) -> RedisBroker:
+ return TestRedisBroker(broker, **kwargs)
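With this module in place, a test class picks the real or in-memory broker simply by choosing a mixin; `patch_broker` is the only difference. A hypothetical usage sketch (class and channel names are illustrative):

    import pytest

    from tests.brokers.redis.basic import RedisMemoryTestcaseConfig


    @pytest.mark.asyncio()
    class TestInMemoryExample(RedisMemoryTestcaseConfig):
        async def test_roundtrip(self) -> None:
            broker = self.get_broker()

            @broker.subscriber("some-channel")
            async def handler(msg) -> None: ...

            # patch_broker wraps the broker in TestRedisBroker here
            async with self.patch_broker(broker) as br:
                await br.publish("hi", "some-channel")
                handler.mock.assert_called_once_with("hi")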
diff --git a/tests/brokers/redis/conftest.py b/tests/brokers/redis/conftest.py
index 2066975eb8..ad80aea162 100644
--- a/tests/brokers/redis/conftest.py
+++ b/tests/brokers/redis/conftest.py
@@ -1,13 +1,8 @@
from dataclasses import dataclass
import pytest
-import pytest_asyncio
-from faststream.redis import (
- RedisBroker,
- RedisRouter,
- TestRedisBroker,
-)
+from faststream.redis import RedisRouter
@dataclass
@@ -22,27 +17,6 @@ def settings():
return Settings()
-@pytest.fixture
+@pytest.fixture()
def router():
return RedisRouter()
-
-
-@pytest_asyncio.fixture()
-async def broker(settings):
- broker = RedisBroker(settings.url, apply_types=False)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def full_broker(settings):
- broker = RedisBroker(settings.url)
- async with broker:
- yield broker
-
-
-@pytest_asyncio.fixture()
-async def test_broker():
- broker = RedisBroker()
- async with TestRedisBroker(broker) as br:
- yield br
diff --git a/tests/brokers/redis/test_connect.py b/tests/brokers/redis/test_connect.py
index 297487d629..685b1c7a87 100644
--- a/tests/brokers/redis/test_connect.py
+++ b/tests/brokers/redis/test_connect.py
@@ -4,7 +4,7 @@
from tests.brokers.base.connection import BrokerConnectionTestcase
-@pytest.mark.redis
+@pytest.mark.redis()
class TestConnection(BrokerConnectionTestcase):
broker = RedisBroker
@@ -15,8 +15,8 @@ def get_broker_args(self, settings):
"port": settings.port,
}
- @pytest.mark.asyncio
- async def test_init_connect_by_raw_data(self, settings):
+ @pytest.mark.asyncio()
+ async def test_init_connect_by_raw_data(self, settings) -> None:
async with RedisBroker(
"redis://localhost:6378", # will be ignored
host=settings.host,
@@ -24,8 +24,8 @@ async def test_init_connect_by_raw_data(self, settings):
) as broker:
assert await self.ping(broker)
- @pytest.mark.asyncio
- async def test_connect_merge_kwargs_with_priority(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connect_merge_kwargs_with_priority(self, settings) -> None:
broker = self.broker(host="fake-host", port=6377) # kwargs will be ignored
await broker.connect(
host=settings.host,
@@ -34,8 +34,8 @@ async def test_connect_merge_kwargs_with_priority(self, settings):
assert await self.ping(broker)
await broker.close()
- @pytest.mark.asyncio
- async def test_connect_merge_args_and_kwargs_native(self, settings):
+ @pytest.mark.asyncio()
+ async def test_connect_merge_args_and_kwargs_native(self, settings) -> None:
broker = self.broker("fake-url") # will be ignored
await broker.connect(url=settings.url)
assert await self.ping(broker)
diff --git a/tests/brokers/redis/test_consume.py b/tests/brokers/redis/test_consume.py
index 467254a62f..78efae46c2 100644
--- a/tests/brokers/redis/test_consume.py
+++ b/tests/brokers/redis/test_consume.py
@@ -1,56 +1,55 @@
import asyncio
-from typing import List
from unittest.mock import MagicMock, patch
import pytest
from redis.asyncio import Redis
-from faststream.redis import ListSub, PubSub, RedisBroker, RedisMessage, StreamSub
+from faststream.redis import ListSub, PubSub, RedisMessage, StreamSub
from tests.brokers.base.consume import BrokerRealConsumeTestcase
from tests.tools import spy_decorator
+from .basic import RedisTestcaseConfig
-@pytest.mark.redis
-@pytest.mark.asyncio
-class TestConsume(BrokerRealConsumeTestcase):
- def get_broker(self, apply_types: bool = False):
- return RedisBroker(apply_types=apply_types)
+@pytest.mark.redis()
+@pytest.mark.asyncio()
+class TestConsume(RedisTestcaseConfig, BrokerRealConsumeTestcase):
async def test_consume_native(
self,
- event: asyncio.Event,
mock: MagicMock,
queue: str,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(queue)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
+ result = await br._connection.publish(queue, "hello")
await asyncio.wait(
- (
- asyncio.create_task(br._connection.publish(queue, "hello")),
- asyncio.create_task(event.wait()),
- ),
+ (asyncio.create_task(event.wait()),),
timeout=3,
)
+ assert result == 1, result
mock.assert_called_once_with(b"hello")
async def test_pattern_with_path(
self,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber("test.{name}")
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -69,13 +68,14 @@ async def handler(msg):
async def test_pattern_without_path(
self,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(PubSub("test.*", pattern=True))
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -92,26 +92,54 @@ async def handler(msg):
mock.assert_called_once_with("hello")
+    async def test_concurrent_consume_channel(
+        self,
+        queue: str,
+        mock: MagicMock,
+    ) -> None:
+ event = asyncio.Event()
+ event2 = asyncio.Event()
-@pytest.mark.redis
-@pytest.mark.asyncio
-class TestConsumeList:
- def get_broker(self, apply_types: bool = False):
- return RedisBroker(apply_types=apply_types)
+ consume_broker = self.get_broker()
- def patch_broker(self, broker):
- return broker
+ @consume_broker.subscriber(channel=PubSub(queue), max_workers=2)
+ async def handler(msg):
+ mock()
+ if event.is_set():
+ event2.set()
+ else:
+ event.set()
+ await asyncio.sleep(0.1)
+
+ async with self.patch_broker(consume_broker) as br:
+ await br.start()
+
+ for i in range(5):
+ await br.publish(i, queue)
+
+ await asyncio.wait(
+ (
+ asyncio.create_task(event.wait()),
+ asyncio.create_task(event2.wait()),
+ ),
+ timeout=3,
+ )
+
+ assert event.is_set()
+ assert event2.is_set()
+ assert mock.call_count == 2, mock.call_count
+
+@pytest.mark.redis()
+@pytest.mark.asyncio()
+class TestConsumeList(RedisTestcaseConfig):
async def test_consume_list(
self,
- event: asyncio.Event,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(list=queue)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -130,14 +158,15 @@ async def handler(msg):
async def test_consume_list_native(
self,
- event: asyncio.Event,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(list=queue)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -154,19 +183,20 @@ async def handler(msg):
mock.assert_called_once_with(b"hello")
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_list_batch_with_one(
self,
queue: str,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
- list=ListSub(queue, batch=True, polling_interval=0.01)
+ list=ListSub(queue, batch=True, polling_interval=0.01),
)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -183,26 +213,27 @@ async def handler(msg):
assert event.is_set()
mock.assert_called_once_with(["hi"])
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_list_batch_headers(
self,
queue: str,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
- list=ListSub(queue, batch=True, polling_interval=0.01)
+ list=ListSub(queue, batch=True, polling_interval=0.01),
)
- def subscriber(m, msg: RedisMessage):
+ def subscriber(m, msg: RedisMessage) -> None:
check = all(
(
msg.headers,
msg.headers["correlation_id"]
== msg.batch_headers[0]["correlation_id"],
msg.headers.get("custom") == "1",
- )
+ ),
)
mock(check)
event.set()
@@ -212,7 +243,7 @@ def subscriber(m, msg: RedisMessage):
await asyncio.wait(
(
asyncio.create_task(
- br.publish("", list=queue, headers={"custom": "1"})
+ br.publish("", list=queue, headers={"custom": "1"}),
),
asyncio.create_task(event.wait()),
),
@@ -222,19 +253,19 @@ def subscriber(m, msg: RedisMessage):
assert event.is_set()
mock.assert_called_once_with(True)
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_list_batch(
self,
queue: str,
- ):
+ ) -> None:
consume_broker = self.get_broker(apply_types=True)
msgs_queue = asyncio.Queue(maxsize=1)
@consume_broker.subscriber(
- list=ListSub(queue, batch=True, polling_interval=0.01)
+ list=ListSub(queue, batch=True, polling_interval=0.01),
)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(consume_broker) as br:
@@ -249,11 +280,11 @@ async def handler(msg):
assert [{1, "hi"}] == [set(r.result()) for r in result]
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_list_batch_complex(
self,
queue: str,
- ):
+ ) -> None:
consume_broker = self.get_broker(apply_types=True)
from pydantic import BaseModel
@@ -267,9 +298,9 @@ def __hash__(self):
msgs_queue = asyncio.Queue(maxsize=1)
@consume_broker.subscriber(
- list=ListSub(queue, batch=True, polling_interval=0.01)
+ list=ListSub(queue, batch=True, polling_interval=0.01),
)
- async def handler(msg: List[Data]):
+ async def handler(msg: list[Data]) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(consume_broker) as br:
@@ -284,19 +315,19 @@ async def handler(msg: List[Data]):
assert [{Data(m="hi"), Data(m="again")}] == [set(r.result()) for r in result]
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_list_batch_native(
self,
queue: str,
- ):
+ ) -> None:
consume_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=1)
@consume_broker.subscriber(
- list=ListSub(queue, batch=True, polling_interval=0.01)
+ list=ListSub(queue, batch=True, polling_interval=0.01),
)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(consume_broker) as br:
@@ -314,8 +345,7 @@ async def handler(msg):
async def test_get_one(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(list=queue)
@@ -324,11 +354,11 @@ async def test_get_one(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=5)
- async def publish():
+ async def publish() -> None:
await br.publish("test_message", list=queue)
await asyncio.wait(
@@ -346,7 +376,7 @@ async def test_get_one_timeout(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(list=queue)
@@ -356,27 +386,55 @@ async def test_get_one_timeout(
mock(await subscriber.get_one(timeout=1e-24))
mock.assert_called_once_with(None)
+    async def test_concurrent_consume_list(
+        self,
+        queue: str,
+        mock: MagicMock,
+    ) -> None:
+ event = asyncio.Event()
+ event2 = asyncio.Event()
+
+ consume_broker = self.get_broker()
-@pytest.mark.redis
-@pytest.mark.asyncio
-class TestConsumeStream:
- def get_broker(self, apply_types: bool = False):
- return RedisBroker(apply_types=apply_types)
+ @consume_broker.subscriber(list=ListSub(queue), max_workers=2)
+ async def handler(msg):
+ mock()
+ if event.is_set():
+ event2.set()
+ else:
+ event.set()
+ await asyncio.sleep(0.1)
+
+ async with self.patch_broker(consume_broker) as br:
+ await br.start()
- def patch_broker(self, broker):
- return broker
+ for i in range(5):
+ await br.publish(i, list=queue)
- @pytest.mark.slow
+ await asyncio.wait(
+ (
+ asyncio.create_task(event.wait()),
+ asyncio.create_task(event2.wait()),
+ ),
+ timeout=3,
+ )
+
+ assert event.is_set()
+ assert event2.is_set()
+ assert mock.call_count == 2, mock.call_count
+
+
+@pytest.mark.redis()
+@pytest.mark.asyncio()
+class TestConsumeStream(RedisTestcaseConfig):
+ @pytest.mark.slow()
async def test_consume_stream(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(stream=StreamSub(queue, polling_interval=10))
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -393,17 +451,18 @@ async def handler(msg):
mock.assert_called_once_with("hello")
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream_native(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(stream=StreamSub(queue, polling_interval=10))
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -413,7 +472,7 @@ async def handler(msg):
await asyncio.wait(
(
asyncio.create_task(
- br._connection.xadd(queue, {"message": "hello"})
+ br._connection.xadd(queue, {"message": "hello"}),
),
asyncio.create_task(event.wait()),
),
@@ -422,19 +481,20 @@ async def handler(msg):
mock.assert_called_once_with({"message": "hello"})
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream_batch(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
- stream=StreamSub(queue, polling_interval=10, batch=True)
+ stream=StreamSub(queue, polling_interval=10, batch=True),
)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -451,26 +511,27 @@ async def handler(msg):
mock.assert_called_once_with(["hello"])
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream_batch_headers(
self,
queue: str,
- event: asyncio.Event,
mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
- stream=StreamSub(queue, polling_interval=10, batch=True)
+ stream=StreamSub(queue, polling_interval=10, batch=True),
)
- def subscriber(m, msg: RedisMessage):
+ def subscriber(m, msg: RedisMessage) -> None:
check = all(
(
msg.headers,
msg.headers["correlation_id"]
== msg.batch_headers[0]["correlation_id"],
msg.headers.get("custom") == "1",
- )
+ ),
)
mock(check)
event.set()
@@ -480,7 +541,7 @@ def subscriber(m, msg: RedisMessage):
await asyncio.wait(
(
asyncio.create_task(
- br.publish("", stream=queue, headers={"custom": "1"})
+ br.publish("", stream=queue, headers={"custom": "1"}),
),
asyncio.create_task(event.wait()),
),
@@ -490,11 +551,11 @@ def subscriber(m, msg: RedisMessage):
assert event.is_set()
mock.assert_called_once_with(True)
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream_batch_complex(
self,
queue,
- ):
+ ) -> None:
consume_broker = self.get_broker(apply_types=True)
from pydantic import BaseModel
@@ -505,9 +566,9 @@ class Data(BaseModel):
msgs_queue = asyncio.Queue(maxsize=1)
@consume_broker.subscriber(
- stream=StreamSub(queue, polling_interval=10, batch=True)
+ stream=StreamSub(queue, polling_interval=10, batch=True),
)
- async def handler(msg: List[Data]):
+ async def handler(msg: list[Data]) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(consume_broker) as br:
@@ -522,19 +583,20 @@ async def handler(msg: List[Data]):
assert next(iter(result)).result() == [Data(m="hi")]
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream_batch_native(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
- stream=StreamSub(queue, polling_interval=10, batch=True)
+ stream=StreamSub(queue, polling_interval=10, batch=True),
)
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -544,7 +606,7 @@ async def handler(msg):
await asyncio.wait(
(
asyncio.create_task(
- br._connection.xadd(queue, {"message": "hello"})
+ br._connection.xadd(queue, {"message": "hello"}),
),
asyncio.create_task(event.wait()),
),
@@ -556,40 +618,41 @@ async def handler(msg):
async def test_consume_group(
self,
queue: str,
- ):
+ ) -> None:
consume_broker = self.get_broker()
@consume_broker.subscriber(
- stream=StreamSub(queue, group="group", consumer=queue)
+ stream=StreamSub(queue, group="group", consumer=queue),
)
- async def handler(msg: RedisMessage): ...
+ async def handler(msg: RedisMessage) -> None: ...
- assert next(iter(consume_broker._subscribers.values())).last_id == "$"
+ assert next(iter(consume_broker._subscribers)).last_id == "$"
async def test_consume_group_with_last_id(
self,
queue: str,
- ):
+ ) -> None:
consume_broker = self.get_broker()
@consume_broker.subscriber(
- stream=StreamSub(queue, group="group", consumer=queue, last_id="0")
+ stream=StreamSub(queue, group="group", consumer=queue, last_id="0"),
)
- async def handler(msg: RedisMessage): ...
+ async def handler(msg: RedisMessage) -> None: ...
- assert next(iter(consume_broker._subscribers.values())).last_id == "0"
+ assert next(iter(consume_broker._subscribers)).last_id == "0"
async def test_consume_nack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
- stream=StreamSub(queue, group="group", consumer=queue)
+ stream=StreamSub(queue, group="group", consumer=queue),
)
- async def handler(msg: RedisMessage):
+ async def handler(msg: RedisMessage) -> None:
event.set()
await msg.nack()
@@ -612,14 +675,15 @@ async def handler(msg: RedisMessage):
async def test_consume_ack(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
- stream=StreamSub(queue, group="group", consumer=queue)
+ stream=StreamSub(queue, group="group", consumer=queue),
)
- async def handler(msg: RedisMessage):
+ async def handler(msg: RedisMessage) -> None:
event.set()
async with self.patch_broker(consume_broker) as br:
@@ -641,7 +705,7 @@ async def handler(msg: RedisMessage):
async def test_get_one(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(stream=queue)
@@ -650,11 +714,11 @@ async def test_get_one(
message = None
- async def consume():
+ async def consume() -> None:
nonlocal message
message = await subscriber.get_one(timeout=3)
- async def publish():
+ async def publish() -> None:
await asyncio.sleep(0.1)
await br.publish("test_message", stream=queue)
@@ -673,7 +737,7 @@ async def test_get_one_timeout(
self,
queue: str,
mock: MagicMock,
- ):
+ ) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(stream=queue)
@@ -682,3 +746,36 @@ async def test_get_one_timeout(
mock(await subscriber.get_one(timeout=1e-24))
mock.assert_called_once_with(None)
+
+ async def test_concurrent_consume_stream(self, queue: str, mock: MagicMock) -> None:
+ event = asyncio.Event()
+ event2 = asyncio.Event()
+
+ consume_broker = self.get_broker()
+
+ @consume_broker.subscriber(stream=StreamSub(queue), max_workers=2)
+ async def handler(msg) -> None:
+ mock()
+ if event.is_set():
+ event2.set()
+ else:
+ event.set()
+ await asyncio.sleep(0.1)
+
+ async with self.patch_broker(consume_broker) as br:
+ await br.start()
+
+ for i in range(5):
+ await br.publish(i, stream=queue)
+
+ await asyncio.wait(
+ (
+ asyncio.create_task(event.wait()),
+ asyncio.create_task(event2.wait()),
+ ),
+ timeout=3,
+ )
+
+ assert event.is_set()
+ assert event2.is_set()
+ assert mock.call_count == 2, mock.call_count
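
Editor's note: the new test_concurrent_consume_stream case above exercises the max_workers option on stream subscribers. As a minimal sketch of the same pattern outside the test harness (the Redis URL and stream name are placeholders, not values from this PR):

import asyncio

from faststream import FastStream
from faststream.redis import RedisBroker, StreamSub

broker = RedisBroker("redis://localhost:6379")
app = FastStream(broker)

# max_workers=2 lets two copies of the handler run concurrently, so one
# slow handler invocation does not serialize the whole stream.
@broker.subscriber(stream=StreamSub("demo-stream"), max_workers=2)
async def handler(msg: str) -> None:
    await asyncio.sleep(0.1)  # simulate slow processing
    print("processed", msg)

# run with: faststream run module_name:app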
diff --git a/tests/brokers/redis/test_fastapi.py b/tests/brokers/redis/test_fastapi.py
index 56ef879e7c..31c1ff0f70 100644
--- a/tests/brokers/redis/test_fastapi.py
+++ b/tests/brokers/redis/test_fastapi.py
@@ -1,16 +1,16 @@
import asyncio
-from typing import List
from unittest.mock import Mock
import pytest
from faststream.redis import ListSub, RedisRouter, StreamSub
from faststream.redis.fastapi import RedisRouter as StreamRouter
-from faststream.redis.testing import TestRedisBroker, build_message
from tests.brokers.base.fastapi import FastAPILocalTestcase, FastAPITestcase
+from .basic import RedisMemoryTestcaseConfig
-@pytest.mark.redis
+
+@pytest.mark.redis()
class TestRouter(FastAPITestcase):
router_class = StreamRouter
broker_router_class = RedisRouter
@@ -18,13 +18,14 @@ class TestRouter(FastAPITestcase):
async def test_path(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber("in.{name}")
- def subscriber(msg: str, name: str):
+ def subscriber(msg: str, name: str) -> None:
mock(msg=msg, name=name)
event.set()
@@ -41,9 +42,10 @@ def subscriber(msg: str, name: str):
assert event.is_set()
mock.assert_called_once_with(msg="hello", name="john")
- async def test_connection_params(self, settings):
+ async def test_connection_params(self, settings) -> None:
broker = self.router_class(
- host="fake-host", port=6377
+ host="fake-host",
+ port=6377,
).broker # kwargs will be ignored
await broker.connect(
host=settings.host,
@@ -56,12 +58,13 @@ async def test_batch_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(list=ListSub(queue, batch=True, max_records=1))
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
@@ -78,17 +81,18 @@ async def hello(msg: List[str]):
assert event.is_set()
mock.assert_called_with(["hi"])
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream(
self,
- event: asyncio.Event,
mock: Mock,
queue,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(stream=StreamSub(queue, polling_interval=10))
- async def handler(msg):
+ async def handler(msg) -> None:
mock(msg)
event.set()
@@ -106,17 +110,18 @@ async def handler(msg):
mock.assert_called_once_with("hello")
- @pytest.mark.slow
+ @pytest.mark.slow()
async def test_consume_stream_batch(
self,
- event: asyncio.Event,
mock: Mock,
queue,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True))
- async def handler(msg: List[str]):
+ async def handler(msg: list[str]) -> None:
mock(msg)
event.set()
@@ -135,29 +140,28 @@ async def handler(msg: List[str]):
mock.assert_called_once_with(["hello"])
-class TestRouterLocal(FastAPILocalTestcase):
+class TestRouterLocal(RedisMemoryTestcaseConfig, FastAPILocalTestcase):
router_class = StreamRouter
broker_router_class = RedisRouter
- broker_test = staticmethod(TestRedisBroker)
- build_message = staticmethod(build_message)
async def test_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(list=ListSub(queue, batch=True, max_records=1))
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
- async with TestRedisBroker(router.broker):
+ async with self.patch_broker(router.broker) as br:
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("hi", list=queue)),
+ asyncio.create_task(br.publish("hi", list=queue)),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -170,19 +174,20 @@ async def test_stream_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(stream=StreamSub(queue, batch=True))
- async def hello(msg: List[str]):
+ async def hello(msg: list[str]):
event.set()
return mock(msg)
- async with TestRedisBroker(router.broker):
+ async with self.patch_broker(router.broker) as br:
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("hi", stream=queue)),
+ asyncio.create_task(br.publish("hi", stream=queue)),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -191,18 +196,17 @@ async def hello(msg: List[str]):
assert event.is_set()
mock.assert_called_with(["hi"])
- async def test_path(self, queue: str):
+ async def test_path(self, queue: str) -> None:
router = self.router_class()
@router.subscriber(queue + ".{name}")
async def hello(name):
return name
- async with self.broker_test(router.broker):
- r = await router.broker.publish(
+ async with self.patch_broker(router.broker) as br:
+ r = await br.request(
"hi",
f"{queue}.john",
- rpc=True,
- rpc_timeout=0.5,
+ timeout=0.5,
)
- assert r == "john"
+ assert await r.decode() == "john"
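
Editor's note: these hunks migrate RPC-style calls from publish(..., rpc=True) to the request() API: the call now returns a response message, and the payload the old API returned directly is obtained via decode(). A minimal sketch of the new call shape (channel name and URL are placeholders):

import asyncio

from faststream.redis import RedisBroker

broker = RedisBroker("redis://localhost:6379")

@broker.subscriber("in-demo")
async def handle(msg: str) -> str:
    return msg  # the return value becomes the RPC reply

async def main() -> None:
    await broker.start()
    # request() replaces publish(..., rpc=True); decode() yields the payload
    # that the old rpc=True call used to return directly.
    response = await broker.request("hi", "in-demo", timeout=3)
    assert await response.decode() == "hi"
    await broker.close()

asyncio.run(main())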
diff --git a/tests/brokers/redis/test_middlewares.py b/tests/brokers/redis/test_middlewares.py
index 2c11d0db0c..c75e0914cd 100644
--- a/tests/brokers/redis/test_middlewares.py
+++ b/tests/brokers/redis/test_middlewares.py
@@ -1,25 +1,23 @@
import pytest
-from faststream.redis import RedisBroker, TestRedisBroker
from tests.brokers.base.middlewares import (
ExceptionMiddlewareTestcase,
MiddlewareTestcase,
MiddlewaresOrderTestcase,
)
+from .basic import RedisMemoryTestcaseConfig, RedisTestcaseConfig
-@pytest.mark.redis
-class TestMiddlewares(MiddlewareTestcase):
- broker_class = RedisBroker
+class TestMiddlewaresOrder(RedisMemoryTestcaseConfig, MiddlewaresOrderTestcase):
+ pass
-@pytest.mark.redis
-class TestExceptionMiddlewares(ExceptionMiddlewareTestcase):
- broker_class = RedisBroker
+@pytest.mark.redis()
+class TestMiddlewares(RedisTestcaseConfig, MiddlewareTestcase):
+ pass
-class TestMiddlewaresOrder(MiddlewaresOrderTestcase):
- broker_class = RedisBroker
- def patch_broker(self, broker: RedisBroker) -> TestRedisBroker:
- return TestRedisBroker(broker)
+@pytest.mark.redis()
+class TestExceptionMiddlewares(RedisTestcaseConfig, ExceptionMiddlewareTestcase):
+ pass
diff --git a/tests/brokers/redis/test_parser.py b/tests/brokers/redis/test_parser.py
index c40306adc2..cf16275b65 100644
--- a/tests/brokers/redis/test_parser.py
+++ b/tests/brokers/redis/test_parser.py
@@ -1,9 +1,10 @@
import pytest
-from faststream.redis import RedisBroker
from tests.brokers.base.parser import CustomParserTestcase
+from .basic import RedisTestcaseConfig
-@pytest.mark.redis
-class TestCustomParser(CustomParserTestcase):
- broker_class = RedisBroker
+
+@pytest.mark.redis()
+class TestCustomParser(RedisTestcaseConfig, CustomParserTestcase):
+ pass
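
Editor's note: this file, like the other Redis test modules in this PR, swaps per-class broker_class/patch_broker definitions for the shared RedisTestcaseConfig and RedisMemoryTestcaseConfig mixins. tests/brokers/redis/basic.py itself is not shown in this diff, so the following is only a hypothetical sketch of the shape such mixins could take, reconstructed from the boilerplate they replace:

# Hypothetical sketch -- the real tests/brokers/redis/basic.py is not part
# of this hunk, so these names and signatures are assumptions.
from faststream.redis import RedisBroker, TestRedisBroker

class RedisTestcaseConfig:
    """Run inherited testcases against a real Redis broker."""

    def get_broker(self, apply_types: bool = False, **kwargs):
        return RedisBroker(apply_types=apply_types, **kwargs)

    def patch_broker(self, broker, **kwargs):
        return broker  # real broker: nothing to patch

class RedisMemoryTestcaseConfig(RedisTestcaseConfig):
    """Run the same testcases against the in-memory test client."""

    def patch_broker(self, broker, **kwargs):
        return TestRedisBroker(broker, **kwargs)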
diff --git a/tests/brokers/redis/test_publish.py b/tests/brokers/redis/test_publish.py
index 7e4430ea93..ba4d445d78 100644
--- a/tests/brokers/redis/test_publish.py
+++ b/tests/brokers/redis/test_publish.py
@@ -5,32 +5,32 @@
from redis.asyncio import Redis
from faststream import Context
-from faststream.redis import ListSub, RedisBroker, RedisResponse, StreamSub
+from faststream.redis import ListSub, RedisResponse, StreamSub
from tests.brokers.base.publish import BrokerPublishTestcase
from tests.tools import spy_decorator
+from .basic import RedisTestcaseConfig
-@pytest.mark.redis
-@pytest.mark.asyncio
-class TestPublish(BrokerPublishTestcase):
- def get_broker(self, apply_types: bool = False):
- return RedisBroker(apply_types=apply_types)
+@pytest.mark.redis()
+@pytest.mark.asyncio()
+class TestPublish(RedisTestcaseConfig, BrokerPublishTestcase):
async def test_list_publisher(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
@pub_broker.subscriber(list=queue)
@pub_broker.publisher(list=queue + "resp")
- async def m(msg):
+ async def m(msg) -> str:
return ""
@pub_broker.subscriber(list=queue + "resp")
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -51,13 +51,13 @@ async def resp(msg):
async def test_list_publish_batch(
self,
queue: str,
- ):
+ ) -> None:
pub_broker = self.get_broker()
msgs_queue = asyncio.Queue(maxsize=2)
@pub_broker.subscriber(list=queue)
- async def handler(msg):
+ async def handler(msg) -> None:
await msgs_queue.put(msg)
async with self.patch_broker(pub_broker) as br:
@@ -78,9 +78,10 @@ async def handler(msg):
async def test_batch_list_publisher(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
batch_list = ListSub(queue + "resp", batch=True)
@@ -91,7 +92,7 @@ async def m(msg):
return 1, 2, 3
@pub_broker.subscriber(list=batch_list)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -112,9 +113,10 @@ async def resp(msg):
async def test_publisher_with_maxlen(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
stream = StreamSub(queue + "resp", maxlen=1)
@@ -125,7 +127,7 @@ async def handler(msg):
return msg
@pub_broker.subscriber(stream=stream)
- async def resp(msg):
+ async def resp(msg) -> None:
event.set()
mock(msg)
@@ -149,9 +151,10 @@ async def resp(msg):
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(list=queue)
@@ -160,7 +163,7 @@ async def m():
return RedisResponse(1, correlation_id="1")
@pub_broker.subscriber(list=queue + "resp")
- async def resp(msg=Context("message")):
+ async def resp(msg=Context("message")) -> None:
mock(
body=msg.body,
correlation_id=msg.correlation_id,
@@ -184,12 +187,10 @@ async def resp(msg=Context("message")):
correlation_id="1",
)
- @pytest.mark.asyncio
async def test_response_for_rpc(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@@ -200,8 +201,8 @@ async def handle():
await br.start()
response = await asyncio.wait_for(
- br.publish("", queue, rpc=True),
+ br.request("", queue),
timeout=3,
)
- assert response == "Hi!", response
+ assert await response.decode() == "Hi!", response
diff --git a/tests/brokers/redis/test_publish_command.py b/tests/brokers/redis/test_publish_command.py
new file mode 100644
index 0000000000..d956ffe972
--- /dev/null
+++ b/tests/brokers/redis/test_publish_command.py
@@ -0,0 +1,14 @@
+from faststream.redis.response import RedisPublishCommand, RedisResponse
+from faststream.response import ensure_response
+from tests.brokers.base.publish_command import BatchPublishCommandTestcase
+
+
+class TestPublishCommand(BatchPublishCommandTestcase):
+ publish_command_cls = RedisPublishCommand
+
+ def test_redis_response_class(self) -> None:
+ response = ensure_response(RedisResponse(body=1, headers={"1": 1}, maxlen=1))
+ cmd = self.publish_command_cls.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {"1": 1}
+ assert cmd.maxlen == 1
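
Editor's note: the new module above round-trips a RedisResponse through ensure_response() and RedisPublishCommand.from_cmd(). For context, ensure_response() normalizes whatever a handler returns, as the generic response tests later in this diff also rely on; a compact illustration (import paths taken from those tests):

from faststream.response import ensure_response
from faststream.response.response import Response

# Raw payloads are wrapped into a Response; Response instances pass through.
wrapped = ensure_response("payload")
assert wrapped.body == "payload"
assert wrapped.headers == {}

passed = ensure_response(Response("payload", headers={"k": "v"}))
assert passed.headers == {"k": "v"}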
diff --git a/tests/brokers/redis/test_requests.py b/tests/brokers/redis/test_requests.py
index e13fe06e92..03c1441fa7 100644
--- a/tests/brokers/redis/test_requests.py
+++ b/tests/brokers/redis/test_requests.py
@@ -3,9 +3,10 @@
import pytest
from faststream import BaseMiddleware
-from faststream.redis import RedisBroker, RedisRouter, TestRedisBroker
from tests.brokers.base.requests import RequestsTestcase
+from .basic import RedisMemoryTestcaseConfig, RedisTestcaseConfig
+
class Mid(BaseMiddleware):
async def on_receive(self) -> None:
@@ -14,27 +15,20 @@ async def on_receive(self) -> None:
self.msg["data"] = json.dumps(data)
async def consume_scope(self, call_next, msg):
- msg._decoded_body = msg._decoded_body * 2
+ msg.body *= 2
return await call_next(msg)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class RedisRequestsTestcase(RequestsTestcase):
def get_middleware(self, **kwargs):
return Mid
- def get_broker(self, **kwargs):
- return RedisBroker(**kwargs)
-
- def get_router(self, **kwargs):
- return RedisRouter(**kwargs)
-
-@pytest.mark.redis
-class TestRealRequests(RedisRequestsTestcase):
+@pytest.mark.redis()
+class TestRealRequests(RedisTestcaseConfig, RedisRequestsTestcase):
pass
-class TestRequestTestClient(RedisRequestsTestcase):
- def patch_broker(self, broker, **kwargs):
- return TestRedisBroker(broker, **kwargs)
+class TestRequestTestClient(RedisMemoryTestcaseConfig, RedisRequestsTestcase):
+ pass
diff --git a/tests/brokers/redis/test_router.py b/tests/brokers/redis/test_router.py
index 33eda512c9..7a56d30edf 100644
--- a/tests/brokers/redis/test_router.py
+++ b/tests/brokers/redis/test_router.py
@@ -3,19 +3,23 @@
import pytest
from faststream import Path
-from faststream.redis import RedisBroker, RedisPublisher, RedisRoute, RedisRouter
+from faststream.redis import (
+ RedisPublisher,
+ RedisRoute,
+ RedisRouter,
+)
from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase
+from .basic import RedisMemoryTestcaseConfig, RedisTestcaseConfig
-@pytest.mark.redis
-class TestRouter(RouterTestcase):
- broker_class = RedisRouter
+
+@pytest.mark.redis()
+class TestRouter(RedisTestcaseConfig, RouterTestcase):
route_class = RedisRoute
publisher_class = RedisPublisher
-class TestRouterLocal(RouterLocalTestcase):
- broker_class = RedisRouter
+class TestRouterLocal(RedisMemoryTestcaseConfig, RouterLocalTestcase):
route_class = RedisRoute
publisher_class = RedisPublisher
@@ -24,112 +28,103 @@ async def test_router_path(
event,
mock,
router,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
@router.subscriber("in.{name}.{id}")
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
- pub_broker._is_apply_types = True
pub_broker.include_router(router)
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
- await pub_broker.publish(
- "",
- "in.john.2",
- rpc=True,
- )
+ await br.request("", "in.john.2")
- assert event.is_set()
- mock.assert_called_once_with(name="john", id=2)
+ assert event.is_set()
+ mock.assert_called_once_with(name="john", id=2)
async def test_router_path_with_prefix(
self,
event,
mock,
router,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
router.prefix = "test."
@router.subscriber("in.{name}.{id}")
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
- pub_broker._is_apply_types = True
pub_broker.include_router(router)
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
- await pub_broker.publish(
- "",
- "test.in.john.2",
- rpc=True,
- )
+ await br.request("", "test.in.john.2")
- assert event.is_set()
- mock.assert_called_once_with(name="john", id=2)
+ assert event.is_set()
+ mock.assert_called_once_with(name="john", id=2)
async def test_router_delay_handler_path(
self,
event,
mock,
router,
- pub_broker,
- ):
+ ) -> None:
+ pub_broker = self.get_broker(apply_types=True)
+
async def h(
name: str = Path(),
id: int = Path("id"),
- ):
+ ) -> None:
event.set()
mock(name=name, id=id)
r = type(router)(handlers=(self.route_class(h, channel="in.{name}.{id}"),))
- pub_broker._is_apply_types = True
pub_broker.include_router(r)
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
- await pub_broker.publish(
- "",
- "in.john.2",
- rpc=True,
- )
+ await br.request("", "in.john.2")
- assert event.is_set()
- mock.assert_called_once_with(name="john", id=2)
+ assert event.is_set()
+ mock.assert_called_once_with(name="john", id=2)
async def test_delayed_channel_handlers(
self,
- event: asyncio.Event,
queue: str,
- pub_broker: RedisBroker,
- ):
- def response(m):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
+ def response(m) -> None:
event.set()
r = RedisRouter(prefix="test_", handlers=(RedisRoute(response, channel=queue),))
pub_broker.include_router(r)
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(
- pub_broker.publish("hello", channel=f"test_{queue}")
- ),
+ asyncio.create_task(br.publish("hello", channel=f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -139,25 +134,25 @@ def response(m):
async def test_delayed_list_handlers(
self,
- event: asyncio.Event,
queue: str,
- pub_broker: RedisBroker,
- ):
- def response(m):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
+ def response(m) -> None:
event.set()
r = RedisRouter(prefix="test_", handlers=(RedisRoute(response, list=queue),))
pub_broker.include_router(r)
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(
- pub_broker.publish("hello", list=f"test_{queue}")
- ),
+ asyncio.create_task(br.publish("hello", list=f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=3,
@@ -167,25 +162,25 @@ def response(m):
async def test_delayed_stream_handlers(
self,
- event: asyncio.Event,
queue: str,
- pub_broker: RedisBroker,
- ):
- def response(m):
+ ) -> None:
+ event = asyncio.Event()
+
+ pub_broker = self.get_broker()
+
+ def response(m) -> None:
event.set()
r = RedisRouter(prefix="test_", handlers=(RedisRoute(response, stream=queue),))
pub_broker.include_router(r)
- async with pub_broker:
- await pub_broker.start()
+ async with self.patch_broker(pub_broker) as br:
+ await br.start()
await asyncio.wait(
(
- asyncio.create_task(
- pub_broker.publish("hello", stream=f"test_{queue}")
- ),
+ asyncio.create_task(br.publish("hello", stream=f"test_{queue}")),
asyncio.create_task(event.wait()),
),
timeout=3,
diff --git a/tests/brokers/redis/test_rpc.py b/tests/brokers/redis/test_rpc.py
deleted file mode 100644
index 6895c7c998..0000000000
--- a/tests/brokers/redis/test_rpc.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import pytest
-
-from faststream.redis import RedisBroker
-from tests.brokers.base.rpc import BrokerRPCTestcase, ReplyAndConsumeForbidden
-
-
-@pytest.mark.redis
-class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden):
- def get_broker(self, apply_types: bool = False):
- return RedisBroker(apply_types=apply_types)
-
- @pytest.mark.asyncio
- async def test_list_rpc(self, queue: str):
- rpc_broker = self.get_broker()
-
- @rpc_broker.subscriber(list=queue)
- async def m(m): # pragma: no cover
- return "1"
-
- async with self.patch_broker(rpc_broker) as br:
- await br.start()
-
- r = await br.publish("hello", list=queue, rpc_timeout=3, rpc=True)
-
- assert r == "1"
diff --git a/tests/brokers/redis/test_schemas.py b/tests/brokers/redis/test_schemas.py
index 8c0fea9186..fa85d64f92 100644
--- a/tests/brokers/redis/test_schemas.py
+++ b/tests/brokers/redis/test_schemas.py
@@ -3,7 +3,7 @@
from faststream.redis import StreamSub
-def test_stream_group():
+def test_stream_group() -> None:
with pytest.raises(ValueError): # noqa: PT011
StreamSub("test", group="group")
diff --git a/tests/brokers/redis/test_test_client.py b/tests/brokers/redis/test_test_client.py
index 1203c9448a..da352a0c7b 100644
--- a/tests/brokers/redis/test_test_client.py
+++ b/tests/brokers/redis/test_test_client.py
@@ -3,48 +3,29 @@
import pytest
from faststream import BaseMiddleware
-from faststream.exceptions import SetupError
-from faststream.redis import ListSub, RedisBroker, StreamSub, TestRedisBroker
+from faststream.redis import ListSub, StreamSub
from faststream.redis.testing import FakeProducer
from tests.brokers.base.testclient import BrokerTestclientTestcase
+from .basic import RedisMemoryTestcaseConfig
-@pytest.mark.asyncio
-class TestTestclient(BrokerTestclientTestcase):
- test_class = TestRedisBroker
- def get_broker(self, apply_types: bool = False) -> RedisBroker:
- return RedisBroker(apply_types=apply_types)
-
- def patch_broker(self, broker: RedisBroker) -> TestRedisBroker:
- return TestRedisBroker(broker)
-
- def get_fake_producer_class(self) -> type:
- return FakeProducer
-
- async def test_rpc_conflicts_reply(self, queue):
- async with TestRedisBroker(RedisBroker()) as br:
- with pytest.raises(SetupError):
- await br.publish(
- "",
- queue,
- rpc=True,
- reply_to="response",
- )
-
- @pytest.mark.redis
+@pytest.mark.asyncio()
+class TestTestclient(RedisMemoryTestcaseConfig, BrokerTestclientTestcase):
+ @pytest.mark.redis()
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
- def subscriber(m):
+ def subscriber(m) -> None:
event.set()
- async with TestRedisBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue)),
@@ -55,7 +36,7 @@ def subscriber(m):
assert event.is_set()
- async def test_respect_middleware(self, queue):
+ async def test_respect_middleware(self, queue: str) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -63,22 +44,22 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = RedisBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(m) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(m) -> None: ...
- async with TestRedisBroker(broker) as br:
+ async with self.patch_broker(broker) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
assert len(routes) == 2
- @pytest.mark.redis
- async def test_real_respect_middleware(self, queue):
+ @pytest.mark.redis()
+ async def test_real_respect_middleware(self, queue: str) -> None:
routes = []
class Middleware(BaseMiddleware):
@@ -86,15 +67,15 @@ async def on_receive(self) -> None:
routes.append(None)
return await super().on_receive()
- broker = RedisBroker(middlewares=(Middleware,))
+ broker = self.get_broker(middlewares=(Middleware,))
@broker.subscriber(queue)
- async def h1(): ...
+ async def h1(m) -> None: ...
@broker.subscriber(queue + "1")
- async def h2(): ...
+ async def h2(m) -> None: ...
- async with TestRedisBroker(broker, with_real=True) as br:
+ async with self.patch_broker(broker, with_real=True) as br:
await br.publish("", queue)
await br.publish("", queue + "1")
await h1.wait_call(3)
@@ -102,7 +83,7 @@ async def h2(): ...
assert len(routes) == 2
- async def test_pub_sub_pattern(self):
+ async def test_pub_sub_pattern(self) -> None:
broker = self.get_broker()
@broker.subscriber("test.{name}")
@@ -110,13 +91,13 @@ async def handler(msg):
return msg
async with self.patch_broker(broker) as br:
- assert await br.publish(1, "test.name.useless", rpc=True) == 1
+ assert await (await br.request(1, "test.name.useless")).decode() == 1
handler.mock.assert_called_once_with(1)
async def test_list(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(list=queue)
@@ -124,17 +105,17 @@ async def handler(msg):
return msg
async with self.patch_broker(broker) as br:
- assert await br.publish(1, list=queue, rpc=True) == 1
+ assert await (await br.request(1, list=queue)).decode() == 1
handler.mock.assert_called_once_with(1)
async def test_batch_pub_by_default_pub(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(list=ListSub(queue, batch=True))
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -144,11 +125,11 @@ async def m(msg):
async def test_batch_pub_by_pub_batch(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(list=ListSub(queue, batch=True))
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -158,7 +139,7 @@ async def m(msg):
async def test_batch_publisher_mock(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
batch_list = ListSub(queue + "1", batch=True)
@@ -177,7 +158,7 @@ async def m(msg):
async def test_stream(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(stream=queue)
@@ -185,17 +166,17 @@ async def handler(msg):
return msg
async with self.patch_broker(broker) as br:
- assert await br.publish(1, stream=queue, rpc=True) == 1
+ assert await (await br.request(1, stream=queue)).decode() == 1
handler.mock.assert_called_once_with(1)
async def test_stream_batch_pub_by_default_pub(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
@broker.subscriber(stream=StreamSub(queue, batch=True))
- async def m(msg):
+ async def m(msg) -> None:
pass
async with self.patch_broker(broker) as br:
@@ -205,7 +186,7 @@ async def m(msg):
async def test_stream_publisher(
self,
queue: str,
- ):
+ ) -> None:
broker = self.get_broker()
batch_stream = StreamSub(queue + "1")
@@ -221,26 +202,24 @@ async def m(msg):
m.mock.assert_called_once_with("hello")
publisher.mock.assert_called_once_with([1, 2, 3])
- async def test_publish_to_none(
- self,
- queue: str,
- ):
+ async def test_publish_to_none(self) -> None:
broker = self.get_broker()
async with self.patch_broker(broker) as br:
with pytest.raises(ValueError): # noqa: PT011
await br.publish("hello")
- @pytest.mark.redis
- async def test_broker_gets_patched_attrs_within_cm(self):
- await super().test_broker_gets_patched_attrs_within_cm()
+ @pytest.mark.redis()
+ async def test_broker_gets_patched_attrs_within_cm(self) -> None:
+ await super().test_broker_gets_patched_attrs_within_cm(FakeProducer)
- @pytest.mark.redis
- async def test_broker_with_real_doesnt_get_patched(self):
+ @pytest.mark.redis()
+ async def test_broker_with_real_doesnt_get_patched(self) -> None:
await super().test_broker_with_real_doesnt_get_patched()
- @pytest.mark.redis
+ @pytest.mark.redis()
async def test_broker_with_real_patches_publishers_and_subscribers(
- self, queue: str
- ):
+ self,
+ queue: str,
+ ) -> None:
await super().test_broker_with_real_patches_publishers_and_subscribers(queue)
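
Editor's note: the with_real=True branches above run the test client against a live server while keeping the handler mocks attached. A minimal sketch of that pattern in isolation (URL and channel are placeholders; a reachable Redis is assumed):

import asyncio

from faststream.redis import RedisBroker, TestRedisBroker

broker = RedisBroker("redis://localhost:6379")

@broker.subscriber("demo")
async def handler(msg: str) -> None: ...

async def main() -> None:
    # with_real=True keeps the real connection, but handler.mock and
    # wait_call() still work, so assertions read like in-memory tests.
    async with TestRedisBroker(broker, with_real=True) as br:
        await br.publish("hello", "demo")
        await handler.wait_call(3)
        handler.mock.assert_called_once_with("hello")

asyncio.run(main())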
diff --git a/tests/brokers/test_pushback.py b/tests/brokers/test_pushback.py
deleted file mode 100644
index ac56078cb0..0000000000
--- a/tests/brokers/test_pushback.py
+++ /dev/null
@@ -1,122 +0,0 @@
-from unittest.mock import AsyncMock
-
-import pytest
-
-from faststream.broker.acknowledgement_watcher import (
- CounterWatcher,
- EndlessWatcher,
- WatcherContext,
-)
-from faststream.exceptions import NackMessage, SkipMessage
-
-
-@pytest.fixture
-def message():
- return AsyncMock(message_id=1, committed=None)
-
-
-@pytest.mark.asyncio
-async def test_push_back_correct(async_mock: AsyncMock, message):
- watcher = CounterWatcher(3)
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- await async_mock()
-
- async_mock.assert_awaited_once()
- message.ack.assert_awaited_once()
- assert not watcher.memory.get(message.message_id)
-
-
-@pytest.mark.asyncio
-async def test_push_back_endless_correct(async_mock: AsyncMock, message):
- watcher = EndlessWatcher()
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- await async_mock()
-
- async_mock.assert_awaited_once()
- message.ack.assert_awaited_once()
-
-
-@pytest.mark.asyncio
-async def test_push_back_watcher(async_mock: AsyncMock, message):
- watcher = CounterWatcher(3)
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async_mock.side_effect = ValueError("Ooops!")
-
- while not message.reject.called:
- with pytest.raises(ValueError): # noqa: PT011
- async with context:
- await async_mock()
-
- assert not message.ack.await_count
- assert message.nack.await_count == 3
- message.reject.assert_awaited_once()
-
-
-@pytest.mark.asyncio
-async def test_push_endless_back_watcher(async_mock: AsyncMock, message):
- watcher = EndlessWatcher()
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async_mock.side_effect = ValueError("Ooops!")
-
- while message.nack.await_count < 10:
- with pytest.raises(ValueError): # noqa: PT011
- async with context:
- await async_mock()
-
- assert not message.ack.called
- assert not message.reject.called
- assert message.nack.await_count == 10
-
-
-@pytest.mark.asyncio
-async def test_ignore_skip(async_mock: AsyncMock, message):
- watcher = CounterWatcher(3)
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- raise SkipMessage()
-
- assert not message.nack.called
- assert not message.reject.called
- assert not message.ack.called
-
-
-@pytest.mark.asyncio
-async def test_additional_params_with_handler_exception(async_mock: AsyncMock, message):
- watcher = EndlessWatcher()
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- raise NackMessage(delay=5)
-
- message.nack.assert_called_with(delay=5)
diff --git a/tests/brokers/test_response.py b/tests/brokers/test_response.py
index 785e0a21cf..a8b669cc52 100644
--- a/tests/brokers/test_response.py
+++ b/tests/brokers/test_response.py
@@ -1,25 +1,26 @@
-from faststream.broker.response import Response, ensure_response
+from faststream.response import ensure_response
+from faststream.response.response import Response
-def test_raw_data():
+def test_raw_data() -> None:
resp = ensure_response(1)
assert resp.body == 1
assert resp.headers == {}
-def test_response_with_response_instance():
+def test_response_with_response_instance() -> None:
resp = ensure_response(Response(1, headers={"some": 1}))
assert resp.body == 1
assert resp.headers == {"some": 1}
-def test_headers_override():
- resp = Response(1, headers={"some": 1})
- resp.add_headers({"some": 2})
- assert resp.headers == {"some": 2}
+def test_add_headers_not_overrides() -> None:
+ publish_cmd = Response(1, headers={1: 1, 2: 2}).as_publish_command()
+ publish_cmd.add_headers({1: "ignored", 3: 3}, override=False)
+ assert publish_cmd.headers == {1: 1, 2: 2, 3: 3}
-def test_headers_with_default():
- resp = Response(1, headers={"some": 1})
- resp.add_headers({"some": 2}, override=False)
- assert resp.headers == {"some": 1}
+def test_add_headers_overrides() -> None:
+ publish_cmd = Response(1, headers={1: "ignored", 2: 2}).as_publish_command()
+ publish_cmd.add_headers({1: 1, 3: 3}, override=True)
+ assert publish_cmd.headers == {1: 1, 2: 2, 3: 3}
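
Editor's note: the two rewritten tests pin down the merge semantics of add_headers() on a publish command: with override=False existing keys win, with override=True the incoming mapping wins. Restating both cases on one command object, using the same API as the tests above:

from faststream.response.response import Response

cmd = Response(1, headers={"a": 1}).as_publish_command()

cmd.add_headers({"a": "ignored", "b": 2}, override=False)
assert cmd.headers == {"a": 1, "b": 2}  # existing keys are kept

cmd.add_headers({"a": 3}, override=True)
assert cmd.headers == {"a": 3, "b": 2}  # incoming keys replace existing ones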
diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py
index 80d2bee8e4..21900fdd09 100644
--- a/tests/cli/conftest.py
+++ b/tests/cli/conftest.py
@@ -3,7 +3,7 @@
from faststream import FastStream
-@pytest.fixture
+@pytest.fixture()
def broker():
# separate import from e2e tests
from faststream.rabbit import RabbitBroker
@@ -11,16 +11,11 @@ def broker():
return RabbitBroker()
-@pytest.fixture
+@pytest.fixture()
def app_without_logger(broker):
- return FastStream(broker, None)
+ return FastStream(broker, logger=None)
-@pytest.fixture
-def app_without_broker():
- return FastStream()
-
-
-@pytest.fixture
+@pytest.fixture()
def app(broker):
return FastStream(broker)
diff --git a/tests/cli/rabbit/test_app.py b/tests/cli/rabbit/test_app.py
index 68fc2517e7..c21e26d996 100644
--- a/tests/cli/rabbit/test_app.py
+++ b/tests/cli/rabbit/test_app.py
@@ -8,45 +8,31 @@
import pytest
from faststream import FastStream, TestApp
-from faststream._compat import IS_WINDOWS
-from faststream.log import logger
+from faststream._internal._compat import IS_WINDOWS
+from faststream._internal.log import logger
-def test_init(app: FastStream, broker):
+def test_init(app: FastStream, broker) -> None:
assert app.broker is broker
assert app.logger is logger
-def test_init_without_broker(app_without_broker: FastStream):
- assert app_without_broker.broker is None
-
-
-def test_init_without_logger(app_without_logger: FastStream):
- assert app_without_logger.logger is None
-
-
-def test_set_broker(broker, app_without_broker: FastStream):
- assert app_without_broker.broker is None
- app_without_broker.set_broker(broker)
- assert app_without_broker.broker is broker
-
-
-def test_log(app: FastStream, app_without_logger: FastStream):
+def test_log(app: FastStream, app_without_logger: FastStream) -> None:
app._log(logging.INFO, "test")
app_without_logger._log(logging.INFO, "test")
-@pytest.mark.asyncio
-async def test_on_startup_calls(async_mock: AsyncMock, mock: Mock):
- def call1():
+@pytest.mark.asyncio()
+async def test_on_startup_calls(async_mock: AsyncMock, mock: Mock) -> None:
+ def call1() -> None:
mock.call_start1()
assert not async_mock.call_start2.called
- async def call2():
+ async def call2() -> None:
await async_mock.call_start2()
assert mock.call_start1.call_count == 1
- test_app = FastStream(on_startup=[call1, call2])
+ test_app = FastStream(AsyncMock(), on_startup=[call1, call2])
await test_app.start()
@@ -54,36 +40,41 @@ async def call2():
async_mock.call_start2.assert_called_once()
-@pytest.mark.asyncio
-async def test_startup_calls_lifespans(mock: Mock, app_without_broker: FastStream):
- def call1():
+@pytest.mark.asyncio()
+async def test_startup_calls_lifespans(
+ mock: Mock,
+ app: FastStream,
+ async_mock: AsyncMock,
+) -> None:
+ def call1() -> None:
mock.call_start1()
assert not mock.call_start2.called
- def call2():
+ def call2() -> None:
mock.call_start2()
assert mock.call_start1.call_count == 1
- app_without_broker.on_startup(call1)
- app_without_broker.on_startup(call2)
+ app.on_startup(call1)
+ app.on_startup(call2)
- await app_without_broker.start()
+ with patch.object(app.broker, "start", async_mock):
+ await app.start()
mock.call_start1.assert_called_once()
mock.call_start2.assert_called_once()
-@pytest.mark.asyncio
-async def test_on_shutdown_calls(async_mock: AsyncMock, mock: Mock):
- def call1():
+@pytest.mark.asyncio()
+async def test_on_shutdown_calls(async_mock: AsyncMock, mock: Mock) -> None:
+ def call1() -> None:
mock.call_stop1()
assert not async_mock.call_stop2.called
- async def call2():
+ async def call2() -> None:
await async_mock.call_stop2()
assert mock.call_stop1.call_count == 1
- test_app = FastStream(on_shutdown=[call1, call2])
+ test_app = FastStream(AsyncMock(), on_shutdown=[call1, call2])
await test_app.stop()
@@ -91,36 +82,38 @@ async def call2():
async_mock.call_stop2.assert_called_once()
-@pytest.mark.asyncio
-async def test_shutdown_calls_lifespans(mock: Mock, app_without_broker: FastStream):
- def call1():
+@pytest.mark.asyncio()
+async def test_shutdown_calls_lifespans(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
+
+ def call1() -> None:
mock.call_stop1()
assert not mock.call_stop2.called
- def call2():
+ def call2() -> None:
mock.call_stop2()
assert mock.call_stop1.call_count == 1
- app_without_broker.on_shutdown(call1)
- app_without_broker.on_shutdown(call2)
+ app.on_shutdown(call1)
+ app.on_shutdown(call2)
- await app_without_broker.stop()
+ await app.stop()
mock.call_stop1.assert_called_once()
mock.call_stop2.assert_called_once()
-@pytest.mark.asyncio
-async def test_after_startup_calls(async_mock: AsyncMock, mock: Mock, broker):
- def call1():
+@pytest.mark.asyncio()
+async def test_after_startup_calls(async_mock: AsyncMock, mock: Mock, broker) -> None:
+ def call1() -> None:
mock.after_startup1()
assert not async_mock.after_startup2.called
- async def call2():
+ async def call2() -> None:
await async_mock.after_startup2()
assert mock.after_startup1.call_count == 1
- test_app = FastStream(broker=broker, after_startup=[call1, call2])
+ test_app = FastStream(broker, after_startup=[call1, call2])
with patch.object(test_app.broker, "start", async_mock.broker_start):
await test_app.start()
@@ -129,20 +122,30 @@ async def call2():
async_mock.after_startup2.assert_called_once()
-@pytest.mark.asyncio
-async def test_startup_lifespan_before_broker_started(async_mock, app: FastStream):
+@pytest.mark.asyncio()
+async def test_startup_lifespan_before_broker_started(
+ async_mock: AsyncMock,
+ app: FastStream,
+) -> None:
@app.on_startup
- async def call():
+ async def call() -> None:
await async_mock.before()
assert not async_mock.broker_start.called
@app.after_startup
- async def call_after():
+ async def call_after() -> None:
await async_mock.after()
async_mock.before.assert_awaited_once()
async_mock.broker_start.assert_called_once()
- with patch.object(app.broker, "start", async_mock.broker_start):
+ with (
+ patch.object(app.broker, "start", async_mock.broker_start),
+ patch.object(
+ app.broker,
+ "connect",
+ async_mock.broker_connect,
+ ),
+ ):
await app.start()
async_mock.broker_start.assert_called_once()
@@ -150,36 +153,45 @@ async def call_after():
async_mock.before.assert_awaited_once()
-@pytest.mark.asyncio
-async def test_after_shutdown_calls(async_mock: AsyncMock, mock: Mock, broker):
- def call1():
+@pytest.mark.asyncio()
+async def test_after_shutdown_calls(async_mock: AsyncMock, mock: Mock, broker) -> None:
+ def call1() -> None:
mock.after_shutdown1()
assert not async_mock.after_shutdown2.called
- async def call2():
+ async def call2() -> None:
await async_mock.after_shutdown2()
assert mock.after_shutdown1.call_count == 1
- test_app = FastStream(broker=broker, after_shutdown=[call1, call2])
+ test_app = FastStream(broker, after_shutdown=[call1, call2])
- with patch.object(test_app.broker, "start", async_mock.broker_start):
+ with (
+ patch.object(test_app.broker, "start", async_mock.broker_start),
+ patch.object(
+ test_app.broker,
+ "connect",
+ async_mock.broker_connect,
+ ),
+ ):
await test_app.stop()
mock.after_shutdown1.assert_called_once()
async_mock.after_shutdown2.assert_called_once()
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
async def test_shutdown_lifespan_after_broker_stopped(
- mock, async_mock, app: FastStream
-):
+ mock,
+ async_mock: AsyncMock,
+ app: FastStream,
+) -> None:
@app.after_shutdown
- async def call():
+ async def call() -> None:
await async_mock.after()
async_mock.broker_stop.assert_called_once()
@app.on_shutdown
- async def call_before():
+ async def call_before() -> None:
await async_mock.before()
assert not async_mock.broker_stop.called
@@ -191,12 +203,13 @@ async def call_before():
async_mock.before.assert_awaited_once()
-@pytest.mark.asyncio
-async def test_running(async_mock, app: FastStream):
+@pytest.mark.asyncio()
+async def test_running(async_mock: AsyncMock, app: FastStream) -> None:
app.exit()
- with patch.object(app.broker, "start", async_mock.broker_run), patch.object(
- app.broker, "close", async_mock.broker_stopped
+ with (
+ patch.object(app.broker, "start", async_mock.broker_run),
+ patch.object(app.broker, "close", async_mock.broker_stopped),
):
await app.run()
@@ -204,44 +217,45 @@ async def test_running(async_mock, app: FastStream):
async_mock.broker_stopped.assert_called_once()
-@pytest.mark.asyncio
-async def test_exception_group(async_mock: AsyncMock, app: FastStream):
+@pytest.mark.asyncio()
+async def test_exception_group(async_mock: AsyncMock, app: FastStream) -> None:
async_mock.side_effect = ValueError("Ooops!")
@app.on_startup
- async def f():
+ async def f() -> None:
await async_mock()
with pytest.raises(ValueError, match="Ooops!"):
await app.run()
-@pytest.mark.asyncio
-async def test_running_lifespan_contextmanager(async_mock, mock: Mock, app: FastStream):
+@pytest.mark.asyncio()
+async def test_running_lifespan_contextmanager(
+ async_mock: AsyncMock,
+ mock: Mock,
+ app: FastStream,
+) -> None:
@asynccontextmanager
async def lifespan(env: str):
mock.on(env)
yield
mock.off()
- app = FastStream(app.broker, lifespan=lifespan)
+ app = FastStream(async_mock, lifespan=lifespan)
app.exit()
- with patch.object(app.broker, "start", async_mock.broker_run), patch.object(
- app.broker, "close", async_mock.broker_stopped
- ):
- await app.run(run_extra_options={"env": "test"})
+ await app.run(run_extra_options={"env": "test"})
- async_mock.broker_run.assert_called_once()
- async_mock.broker_stopped.assert_called_once()
+ async_mock.start.assert_called_once()
+ async_mock.close.assert_called_once()
mock.on.assert_called_once_with("test")
mock.off.assert_called_once()
-@pytest.mark.asyncio
-async def test_test_app(mock: Mock):
- app = FastStream()
+@pytest.mark.asyncio()
+async def test_test_app(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
@@ -253,23 +267,23 @@ async def test_test_app(mock: Mock):
mock.off.assert_called_once()
-@pytest.mark.asyncio
-async def test_test_app_with_excp(mock: Mock):
- app = FastStream()
+@pytest.mark.asyncio()
+async def test_test_app_with_excp(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
with pytest.raises(ValueError): # noqa: PT011
async with TestApp(app):
- raise ValueError()
+ raise ValueError
mock.on.assert_called_once()
mock.off.assert_called_once()
-def test_sync_test_app(mock: Mock):
- app = FastStream()
+def test_sync_test_app(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
@@ -281,21 +295,21 @@ def test_sync_test_app(mock: Mock):
mock.off.assert_called_once()
-def test_sync_test_app_with_excp(mock: Mock):
- app = FastStream()
+def test_sync_test_app_with_excp(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
with pytest.raises(ValueError), TestApp(app): # noqa: PT011
- raise ValueError()
+ raise ValueError
mock.on.assert_called_once()
mock.off.assert_called_once()
-@pytest.mark.asyncio
-async def test_lifespan_contextmanager(async_mock: AsyncMock, app: FastStream):
+@pytest.mark.asyncio()
+async def test_lifespan_contextmanager(async_mock: AsyncMock, app: FastStream) -> None:
@asynccontextmanager
async def lifespan(env: str):
await async_mock.on(env)
@@ -304,8 +318,9 @@ async def lifespan(env: str):
app = FastStream(app.broker, lifespan=lifespan)
- with patch.object(app.broker, "start", async_mock.broker_run), patch.object(
- app.broker, "close", async_mock.broker_stopped
+ with (
+ patch.object(app.broker, "start", async_mock.broker_run),
+ patch.object(app.broker, "close", async_mock.broker_stopped),
):
async with TestApp(app, {"env": "test"}):
pass
@@ -316,7 +331,7 @@ async def lifespan(env: str):
async_mock.broker_stopped.assert_called_once()
-def test_sync_lifespan_contextmanager(async_mock: AsyncMock, app: FastStream):
+def test_sync_lifespan_contextmanager(async_mock: AsyncMock, app: FastStream) -> None:
@asynccontextmanager
async def lifespan(env: str):
await async_mock.on(env)
@@ -325,9 +340,18 @@ async def lifespan(env: str):
app = FastStream(app.broker, lifespan=lifespan)
- with patch.object(app.broker, "start", async_mock.broker_run), patch.object(
- app.broker, "close", async_mock.broker_stopped
- ), TestApp(app, {"env": "test"}):
+ with (
+ patch.object(app.broker, "start", async_mock.broker_run),
+ patch.object(
+ app.broker,
+ "close",
+ async_mock.broker_stopped,
+ ),
+ TestApp(
+ app,
+ {"env": "test"},
+ ),
+ ):
pass
async_mock.on.assert_awaited_once_with("test")
@@ -336,11 +360,12 @@ async def lifespan(env: str):
async_mock.broker_stopped.assert_called_once()
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@pytest.mark.skipif(IS_WINDOWS, reason="does not run on windows")
-async def test_stop_with_sigint(async_mock, app: FastStream):
- with patch.object(app.broker, "start", async_mock.broker_run_sigint), patch.object(
- app.broker, "close", async_mock.broker_stopped_sigint
+async def test_stop_with_sigint(async_mock, app: FastStream) -> None:
+ with (
+ patch.object(app.broker, "start", async_mock.broker_run_sigint),
+ patch.object(app.broker, "close", async_mock.broker_stopped_sigint),
):
async with anyio.create_task_group() as tg:
tg.start_soon(app.run)
@@ -350,11 +375,12 @@ async def test_stop_with_sigint(async_mock, app: FastStream):
async_mock.broker_stopped_sigint.assert_called_once()
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@pytest.mark.skipif(IS_WINDOWS, reason="does not run on windows")
-async def test_stop_with_sigterm(async_mock, app: FastStream):
- with patch.object(app.broker, "start", async_mock.broker_run_sigterm), patch.object(
- app.broker, "close", async_mock.broker_stopped_sigterm
+async def test_stop_with_sigterm(async_mock, app: FastStream) -> None:
+ with (
+ patch.object(app.broker, "start", async_mock.broker_run_sigterm),
+ patch.object(app.broker, "close", async_mock.broker_stopped_sigterm),
):
async with anyio.create_task_group() as tg:
tg.start_soon(app.run)
@@ -364,9 +390,9 @@ async def test_stop_with_sigterm(async_mock, app: FastStream):
async_mock.broker_stopped_sigterm.assert_called_once()
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@pytest.mark.skipif(IS_WINDOWS, reason="does not run on windows")
-async def test_run_asgi(async_mock: AsyncMock, app: FastStream):
+async def test_run_asgi(async_mock: AsyncMock, app: FastStream) -> None:
asgi_routes = [("/", lambda scope, receive, send: None)]
asgi_app = app.as_asgi(asgi_routes=asgi_routes)
assert asgi_app.broker is app.broker
@@ -378,15 +404,17 @@ async def test_run_asgi(async_mock: AsyncMock, app: FastStream):
assert asgi_app._after_shutdown_calling is app._after_shutdown_calling
assert asgi_app.routes == asgi_routes
- with patch.object(app.broker, "start", async_mock.broker_run), patch.object(
- app.broker, "close", async_mock.broker_stopped
+ with (
+ patch.object(app.broker, "start", async_mock.broker_run),
+ patch.object(app.broker, "close", async_mock.broker_stopped),
):
async with anyio.create_task_group() as tg:
tg.start_soon(app.run)
tg.start_soon(_kill, signal.SIGINT)
async_mock.broker_run.assert_called_once()
+ async_mock.broker_stopped.assert_called_once()
-async def _kill(sig):
+async def _kill(sig) -> None:
os.kill(os.getpid(), sig)
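
Editor's note: many hunks in this file convert multi-manager with statements, previously wrapped inside call parentheses, to the parenthesized form, which requires a modern Python parser. The pattern in isolation (Broker and the mock attribute names here are stand-ins, not project API):

from unittest.mock import AsyncMock, patch

class Broker:
    async def start(self) -> None: ...
    async def close(self) -> None: ...

broker = Broker()
mocks = AsyncMock()

# One parenthesized `with` groups several managers without line
# continuations; patches are undone in reverse order on exit.
with (
    patch.object(broker, "start", mocks.broker_run),
    patch.object(broker, "close", mocks.broker_stopped),
):
    pass  # code under test would run against the patched broker here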
diff --git a/tests/cli/rabbit/test_logs.py b/tests/cli/rabbit/test_logs.py
index 79e140da99..8a4168ffca 100644
--- a/tests/cli/rabbit/test_logs.py
+++ b/tests/cli/rabbit/test_logs.py
@@ -3,58 +3,42 @@
import pytest
from faststream import FastStream
-from faststream.cli.utils.logs import LogLevels, get_log_level, set_log_level
+from faststream._internal.cli.utils.logs import get_log_level, set_log_level
from faststream.rabbit import RabbitBroker
-@pytest.mark.parametrize(
- "level",
- ( # noqa: PT007
- pytest.param(logging.ERROR, id=str(logging.ERROR)),
- *(pytest.param(level, id=level) for level in LogLevels.__members__),
- *(
- pytest.param(level, id=str(level))
- for level in LogLevels.__members__.values()
- ),
- ),
-)
-def test_set_level(level, app: FastStream):
- level = get_log_level(level)
- set_log_level(level, app)
- assert app.logger.level is app.broker.logger.level is level
+def test_set_level(app: FastStream) -> None:
+ set_log_level(logging.ERROR, app)
+ broker_state = app.broker._state.get()
+ broker_state._setup_logger_state()
+ broker_logger = broker_state.logger_state.logger.logger
+ assert app.logger.level == broker_logger.level == logging.ERROR
+
+
+def test_set_default(broker) -> None:
+ app = FastStream(broker)
+ level = "wrong_level"
+ set_log_level(get_log_level(level), app)
+ assert app.logger.level is logging.INFO
@pytest.mark.parametrize(
- ("level", "broker"),
- ( # noqa: PT007
+ ("app"),
+ (
pytest.param(
- logging.CRITICAL,
- FastStream(),
- id="empty app",
- ),
- pytest.param(
- logging.CRITICAL,
FastStream(RabbitBroker(), logger=None),
id="app without logger",
),
pytest.param(
- logging.CRITICAL,
FastStream(RabbitBroker(logger=None)),
id="broker without logger",
),
pytest.param(
- logging.CRITICAL,
FastStream(RabbitBroker(logger=None), logger=None),
id="both without logger",
),
),
)
-def test_set_level_to_none(level, app: FastStream):
- set_log_level(get_log_level(level), app)
-
-
-def test_set_default():
- app = FastStream()
- level = "wrong_level"
- set_log_level(get_log_level(level), app)
- assert app.logger.level is logging.INFO
+def test_set_level_to_none(app: FastStream) -> None:
+ app._setup()
+ set_log_level(logging.CRITICAL, app)
diff --git a/tests/cli/supervisors/test_base_reloader.py b/tests/cli/supervisors/test_base_reloader.py
index ebc29e1d4e..77f50271d3 100644
--- a/tests/cli/supervisors/test_base_reloader.py
+++ b/tests/cli/supervisors/test_base_reloader.py
@@ -2,7 +2,7 @@
import pytest
-from faststream.cli.supervisors.basereload import BaseReload
+from faststream._internal.cli.supervisors.basereload import BaseReload
class PatchedBaseReload(BaseReload):
@@ -14,16 +14,16 @@ def should_restart(self) -> bool:
return True
-def empty(*args, **kwargs):
+def empty(*args, **kwargs) -> None:
pass
-@pytest.mark.slow
-def test_base():
+@pytest.mark.slow()
+def test_base() -> None:
processor = PatchedBaseReload(target=empty, args=())
processor._args = (processor.pid,)
processor.run()
code = abs(processor._process.exitcode or 0)
- assert code == signal.SIGTERM.value or code == 0
+ assert code in {signal.SIGTERM.value, 0}
diff --git a/tests/cli/supervisors/test_multiprocess.py b/tests/cli/supervisors/test_multiprocess.py
index d50ce7995d..a00897a790 100644
--- a/tests/cli/supervisors/test_multiprocess.py
+++ b/tests/cli/supervisors/test_multiprocess.py
@@ -4,20 +4,20 @@
import pytest
-from faststream.cli.supervisors.multiprocess import Multiprocess
+from faststream._internal.cli.supervisors.multiprocess import Multiprocess
-def exit(parent_id): # pragma: no cover
+def exit(parent_id) -> None: # pragma: no cover
os.kill(parent_id, signal.SIGINT)
-@pytest.mark.slow
+@pytest.mark.slow()
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-def test_base():
+def test_base() -> None:
processor = Multiprocess(target=exit, args=(), workers=5)
processor._args = (processor.pid,)
processor.run()
for p in processor.processes:
code = abs(p.exitcode)
- assert code == signal.SIGTERM.value or code == 0
+ assert code in {signal.SIGTERM.value, 0}
diff --git a/tests/cli/supervisors/test_watchfiles.py b/tests/cli/supervisors/test_watchfiles.py
index 511ccac0a2..8812bbf11a 100644
--- a/tests/cli/supervisors/test_watchfiles.py
+++ b/tests/cli/supervisors/test_watchfiles.py
@@ -7,37 +7,37 @@
import pytest
-from faststream.cli.supervisors.watchfiles import WatchReloader
+from faststream._internal.cli.supervisors.watchfiles import WatchReloader
DIR = Path(__file__).resolve().parent
-def exit(parent_id): # pragma: no cover
+def exit(parent_id) -> None: # pragma: no cover
os.kill(parent_id, signal.SIGINT)
-@pytest.mark.slow
+@pytest.mark.slow()
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-def test_base():
+def test_base() -> None:
processor = WatchReloader(target=exit, args=(), reload_dirs=[DIR])
processor._args = (processor.pid,)
processor.run()
code = abs(processor._process.exitcode)
- assert code == signal.SIGTERM.value or code == 0
+ assert code in {signal.SIGTERM.value, 0}
-def touch_file(file: Path): # pragma: no cover
+def touch_file(file: Path) -> None: # pragma: no cover
while True:
time.sleep(0.1)
with file.open("a") as f:
f.write("hello")
-@pytest.mark.slow
+@pytest.mark.slow()
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-def test_restart(mock: Mock):
+def test_restart(mock: Mock) -> None:
file = DIR / "file.py"
processor = WatchReloader(target=touch_file, args=(file,), reload_dirs=[DIR])
diff --git a/tests/cli/test_app_state.py b/tests/cli/test_app_state.py
new file mode 100644
index 0000000000..9896b8839a
--- /dev/null
+++ b/tests/cli/test_app_state.py
@@ -0,0 +1,31 @@
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+from faststream import FastStream
+
+
+@pytest.mark.asyncio()
+async def test_state_running(app: FastStream) -> None:
+ with patch(
+ "faststream._internal.application.Application.start", new_callable=AsyncMock
+ ):
+ await app._startup()
+
+ assert app.running
+
+
+@pytest.mark.asyncio()
+async def test_state_stopped(app: FastStream) -> None:
+ with (
+ patch(
+ "faststream._internal.application.Application.start", new_callable=AsyncMock
+ ),
+ patch(
+ "faststream._internal.application.Application.stop", new_callable=AsyncMock
+ ),
+ ):
+ await app._startup()
+ await app._shutdown()
+
+ assert not app.running
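
Editor's note: the new test_app_state.py asserts that the application's running flag flips across the internal _startup()/_shutdown() hooks. A minimal sketch of the same lifecycle check, assuming (as the tests do) that an AsyncMock can stand in for the broker and that running reflects these hooks:

import asyncio
from unittest.mock import AsyncMock

from faststream import FastStream

async def main() -> None:
    app = FastStream(AsyncMock())  # AsyncMock stands in for a real broker
    await app._startup()
    assert app.running
    await app._shutdown()
    assert not app.running

asyncio.run(main())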
diff --git a/tests/cli/test_asyncapi_docs.py b/tests/cli/test_asyncapi_docs.py
index 816710c9ad..344ba54013 100644
--- a/tests/cli/test_asyncapi_docs.py
+++ b/tests/cli/test_asyncapi_docs.py
@@ -1,5 +1,6 @@
import json
import sys
+import traceback
from http.server import HTTPServer
from pathlib import Path
from unittest.mock import Mock
@@ -9,22 +10,25 @@
from typer.testing import CliRunner
from docs.docs_src.getting_started.asyncapi.serve import (
- gen_json_cmd,
- gen_yaml_cmd,
- serve_cmd,
+ asyncapi_serve_cmd,
+ gen_asyncapi_json_cmd,
+ gen_asyncapi_yaml_cmd,
)
-from faststream.cli.main import cli
+from faststream._internal.cli.main import cli
from tests.marks import require_aiokafka
-GEN_JSON_CMD = gen_json_cmd.split(" ")[1:-1]
-GEN_YAML_CMD = gen_yaml_cmd.split(" ")[1:-1]
-SERVE_CMD = serve_cmd.split(" ")[1:-1]
+GEN_JSON_CMD = gen_asyncapi_json_cmd.split(" ")[1:-1]
+GEN_YAML_CMD = gen_asyncapi_yaml_cmd.split(" ")[1:-1]
+SERVE_CMD = asyncapi_serve_cmd.split(" ")[1:-1]
@require_aiokafka
-def test_gen_asyncapi_json_for_kafka_app(runner: CliRunner, kafka_basic_project: Path):
+def test_gen_asyncapi_json_for_kafka_app(
+    runner: CliRunner, kafka_asyncapi_project: str
+) -> None:
r = runner.invoke(
- cli, [*GEN_JSON_CMD, "--out", "schema.json", str(kafka_basic_project)]
+ cli,
+        [*GEN_JSON_CMD, "--out", "schema.json", kafka_asyncapi_project],
)
assert r.exit_code == 0
@@ -39,8 +43,10 @@ def test_gen_asyncapi_json_for_kafka_app(runner: CliRunner, kafka_basic_project:
@require_aiokafka
-def test_gen_asyncapi_yaml_for_kafka_app(runner: CliRunner, kafka_basic_project: Path):
- r = runner.invoke(cli, GEN_YAML_CMD + [str(kafka_basic_project)]) # noqa: RUF005
+def test_gen_asyncapi_yaml_for_kafka_app(
+    runner: CliRunner, kafka_asyncapi_project: str
+) -> None:
+    r = runner.invoke(cli, GEN_YAML_CMD + [kafka_asyncapi_project])  # noqa: RUF005
assert r.exit_code == 0
schema_path = Path.cwd() / "asyncapi.yaml"
@@ -53,7 +59,7 @@ def test_gen_asyncapi_yaml_for_kafka_app(runner: CliRunner, kafka_basic_project:
schema_path.unlink()
-def test_gen_wrong_path(runner: CliRunner):
+def test_gen_wrong_path(runner: CliRunner) -> None:
r = runner.invoke(cli, GEN_JSON_CMD + ["basic:app1"]) # noqa: RUF005
assert r.exit_code == 2
assert "No such file or directory" in r.stdout
@@ -62,15 +68,15 @@ def test_gen_wrong_path(runner: CliRunner):
@require_aiokafka
def test_serve_asyncapi_docs(
runner: CliRunner,
- kafka_basic_project: Path,
- monkeypatch,
+    kafka_asyncapi_project: str,
+ monkeypatch: pytest.MonkeyPatch,
mock: Mock,
-):
+) -> None:
with monkeypatch.context() as m:
m.setattr(HTTPServer, "serve_forever", mock)
- r = runner.invoke(cli, SERVE_CMD + [str(kafka_basic_project)]) # noqa: RUF005
+        r = runner.invoke(cli, SERVE_CMD + [kafka_asyncapi_project])  # noqa: RUF005
- assert r.exit_code == 0
+ assert r.exit_code == 0, r.exc_info
mock.assert_called_once()
@@ -78,18 +84,18 @@ def test_serve_asyncapi_docs(
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_serve_asyncapi_json_schema(
runner: CliRunner,
- kafka_basic_project: Path,
- monkeypatch,
+    kafka_asyncapi_project: str,
+ monkeypatch: pytest.MonkeyPatch,
mock: Mock,
-):
- r = runner.invoke(cli, GEN_JSON_CMD + [str(kafka_basic_project)]) # noqa: RUF005
+) -> None:
+    r = runner.invoke(cli, GEN_JSON_CMD + [kafka_asyncapi_project])  # noqa: RUF005
schema_path = Path.cwd() / "asyncapi.json"
with monkeypatch.context() as m:
m.setattr(HTTPServer, "serve_forever", mock)
r = runner.invoke(cli, SERVE_CMD + [str(schema_path)]) # noqa: RUF005
- assert r.exit_code == 0
+ assert r.exit_code == 0, traceback.format_tb(r.exc_info[2])
mock.assert_called_once()
schema_path.unlink()
@@ -99,18 +105,18 @@ def test_serve_asyncapi_json_schema(
@pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_serve_asyncapi_yaml_schema(
runner: CliRunner,
- kafka_basic_project: Path,
- monkeypatch,
+    kafka_asyncapi_project: str,
+ monkeypatch: pytest.MonkeyPatch,
mock: Mock,
-):
- r = runner.invoke(cli, GEN_YAML_CMD + [str(kafka_basic_project)]) # noqa: RUF005
+) -> None:
+    r = runner.invoke(cli, GEN_YAML_CMD + [kafka_asyncapi_project])  # noqa: RUF005
schema_path = Path.cwd() / "asyncapi.yaml"
with monkeypatch.context() as m:
m.setattr(HTTPServer, "serve_forever", mock)
r = runner.invoke(cli, SERVE_CMD + [str(schema_path)]) # noqa: RUF005
- assert r.exit_code == 0
+ assert r.exit_code == 0, traceback.format_tb(r.exc_info[2])
mock.assert_called_once()
schema_path.unlink()
diff --git a/tests/cli/test_logs.py b/tests/cli/test_logs.py
new file mode 100644
index 0000000000..85a7aa10f6
--- /dev/null
+++ b/tests/cli/test_logs.py
@@ -0,0 +1,63 @@
+import logging
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+from typer.testing import CliRunner
+
+from faststream import FastStream
+from faststream._internal.cli.main import cli as faststream_app
+from faststream._internal.cli.utils.logs import get_log_level
+
+
+@pytest.mark.parametrize(
+ (
+ "level",
+ "expected_level",
+ ),
+ (
+ pytest.param("critical", logging.CRITICAL),
+ pytest.param("fatal", logging.FATAL),
+ pytest.param("error", logging.ERROR),
+ pytest.param("warning", logging.WARNING),
+ pytest.param("warn", logging.WARNING),
+ pytest.param("info", logging.INFO),
+ pytest.param("debug", logging.DEBUG),
+ pytest.param("notset", logging.NOTSET),
+ ),
+)
+def test_get_level(level: str, expected_level: int) -> None:
+ assert get_log_level(level) == expected_level
+
+
+def test_run_with_log_level(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
+ app.run = AsyncMock()
+
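+    # patch the import helper so the CLI resolves "faststream:app" to our mock app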
+ with patch(
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
+ return_value=(None, app),
+ ):
+ result = runner.invoke(
+ faststream_app,
+ ["run", "-l", "warning", "faststream:app"],
+ )
+
+ assert result.exit_code == 0, result.output
+
+ assert app.logger.level == logging.WARNING
+
+
+def test_run_with_wrong_log_level(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
+ app.run = AsyncMock()
+
+ with patch(
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
+ return_value=(None, app),
+ ):
+ result = runner.invoke(
+ faststream_app,
+ ["run", "-l", "30", "faststream:app"],
+ )
+
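+    # exit code 2 is the usage-error code: "30" is not an accepted level name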
+ assert result.exit_code == 2, result.output
diff --git a/tests/cli/test_publish.py b/tests/cli/test_publish.py
index 0887e95c8c..3f55bd6e82 100644
--- a/tests/cli/test_publish.py
+++ b/tests/cli/test_publish.py
@@ -1,9 +1,11 @@
+from typing import TYPE_CHECKING
from unittest.mock import AsyncMock, patch
-from dirty_equals import IsPartialDict
+from typer.testing import CliRunner
from faststream import FastStream
-from faststream.cli.main import cli as faststream_app
+from faststream._internal.cli.main import cli as faststream_app
+from faststream.response.publish_type import PublishType
from tests.marks import (
require_aiokafka,
require_aiopika,
@@ -12,25 +14,36 @@
require_redis,
)
+if TYPE_CHECKING:
+ from faststream.confluent.response import (
+ KafkaPublishCommand as ConfluentPublishCommand,
+ )
+ from faststream.kafka.response import KafkaPublishCommand
+ from faststream.nats.response import NatsPublishCommand
+ from faststream.rabbit.response import RabbitPublishCommand
+ from faststream.redis.response import RedisPublishCommand
-def get_mock_app(broker_type, producer_type) -> FastStream:
+
+def get_mock_app(broker_type, producer_type) -> tuple[FastStream, AsyncMock]:
broker = broker_type()
broker.connect = AsyncMock()
mock_producer = AsyncMock(spec=producer_type)
mock_producer.publish = AsyncMock()
- broker._producer = mock_producer
- return FastStream(broker)
+ mock_producer._parser = AsyncMock()
+ mock_producer._decoder = AsyncMock()
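+    # patch the broker's state so publishes flow through the inspectable mock producer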
+ broker._state.patch_value(producer=mock_producer)
+ return FastStream(broker), mock_producer
@require_redis
-def test_publish_command_with_redis_options(runner):
+def test_publish_command_with_redis_options(runner) -> None:
from faststream.redis import RedisBroker
from faststream.redis.publisher.producer import RedisFastProducer
- mock_app = get_mock_app(RedisBroker, RedisFastProducer)
+ mock_app, producer_mock = get_mock_app(RedisBroker, RedisFastProducer)
with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
return_value=(None, mock_app),
):
result = runner.invoke(
@@ -40,13 +53,9 @@ def test_publish_command_with_redis_options(runner):
"fastream:app",
"hello world",
"--channel",
- "test channel",
+ "channelname",
"--reply_to",
"tester",
- "--list",
- "0.1",
- "--stream",
- "stream url",
"--correlation_id",
"someId",
],
@@ -54,26 +63,22 @@ def test_publish_command_with_redis_options(runner):
assert result.exit_code == 0
- assert mock_app.broker._producer.publish.call_args.args[0] == "hello world"
- assert mock_app.broker._producer.publish.call_args.kwargs == IsPartialDict(
- channel="test channel",
- reply_to="tester",
- list="0.1",
- stream="stream url",
- correlation_id="someId",
- rpc=False,
- )
+ cmd: RedisPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.reply_to == "tester"
+ assert cmd.destination == "channelname"
+ assert cmd.correlation_id == "someId"
@require_confluent
-def test_publish_command_with_confluent_options(runner):
+def test_publish_command_with_confluent_options(runner) -> None:
from faststream.confluent import KafkaBroker as ConfluentBroker
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
- mock_app = get_mock_app(ConfluentBroker, AsyncConfluentFastProducer)
+ mock_app, producer_mock = get_mock_app(ConfluentBroker, AsyncConfluentFastProducer)
with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
return_value=(None, mock_app),
):
result = runner.invoke(
@@ -83,30 +88,29 @@ def test_publish_command_with_confluent_options(runner):
"fastream:app",
"hello world",
"--topic",
- "confluent topic",
+ "topicname",
"--correlation_id",
"someId",
],
)
assert result.exit_code == 0
- assert mock_app.broker._producer.publish.call_args.args[0] == "hello world"
- assert mock_app.broker._producer.publish.call_args.kwargs == IsPartialDict(
- topic="confluent topic",
- correlation_id="someId",
- rpc=False,
- )
+
+ cmd: ConfluentPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "topicname"
+ assert cmd.correlation_id == "someId"
@require_aiokafka
-def test_publish_command_with_kafka_options(runner):
+def test_publish_command_with_kafka_options(runner) -> None:
from faststream.kafka import KafkaBroker
from faststream.kafka.publisher.producer import AioKafkaFastProducer
- mock_app = get_mock_app(KafkaBroker, AioKafkaFastProducer)
+ mock_app, producer_mock = get_mock_app(KafkaBroker, AioKafkaFastProducer)
with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
return_value=(None, mock_app),
):
result = runner.invoke(
@@ -116,30 +120,29 @@ def test_publish_command_with_kafka_options(runner):
"fastream:app",
"hello world",
"--topic",
- "kafka topic",
+ "topicname",
"--correlation_id",
"someId",
],
)
assert result.exit_code == 0
- assert mock_app.broker._producer.publish.call_args.args[0] == "hello world"
- assert mock_app.broker._producer.publish.call_args.kwargs == IsPartialDict(
- topic="kafka topic",
- correlation_id="someId",
- rpc=False,
- )
+
+ cmd: KafkaPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "topicname"
+ assert cmd.correlation_id == "someId"
@require_nats
-def test_publish_command_with_nats_options(runner):
+def test_publish_command_with_nats_options(runner) -> None:
from faststream.nats import NatsBroker
from faststream.nats.publisher.producer import NatsFastProducer
- mock_app = get_mock_app(NatsBroker, NatsFastProducer)
+ mock_app, producer_mock = get_mock_app(NatsBroker, NatsFastProducer)
with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
return_value=(None, mock_app),
):
result = runner.invoke(
@@ -149,7 +152,7 @@ def test_publish_command_with_nats_options(runner):
"fastream:app",
"hello world",
"--subject",
- "nats subject",
+ "subjectname",
"--reply_to",
"tester",
"--correlation_id",
@@ -159,24 +162,22 @@ def test_publish_command_with_nats_options(runner):
assert result.exit_code == 0
- assert mock_app.broker._producer.publish.call_args.args[0] == "hello world"
- assert mock_app.broker._producer.publish.call_args.kwargs == IsPartialDict(
- subject="nats subject",
- reply_to="tester",
- correlation_id="someId",
- rpc=False,
- )
+ cmd: NatsPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "subjectname"
+ assert cmd.reply_to == "tester"
+ assert cmd.correlation_id == "someId"
@require_aiopika
-def test_publish_command_with_rabbit_options(runner):
+def test_publish_command_with_rabbit_options(runner) -> None:
from faststream.rabbit import RabbitBroker
from faststream.rabbit.publisher.producer import AioPikaFastProducer
- mock_app = get_mock_app(RabbitBroker, AioPikaFastProducer)
+ mock_app, producer_mock = get_mock_app(RabbitBroker, AioPikaFastProducer)
with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
return_value=(None, mock_app),
):
result = runner.invoke(
@@ -185,20 +186,48 @@ def test_publish_command_with_rabbit_options(runner):
"publish",
"fastream:app",
"hello world",
+ "--queue",
+ "queuename",
"--correlation_id",
"someId",
- "--raise_timeout",
- "True",
],
)
assert result.exit_code == 0
- assert mock_app.broker._producer.publish.call_args.args[0] == "hello world"
- assert mock_app.broker._producer.publish.call_args.kwargs == IsPartialDict(
- {
- "correlation_id": "someId",
- "raise_timeout": "True",
- "rpc": False,
- }
+ cmd: RabbitPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "queuename"
+ assert cmd.correlation_id == "someId"
+
+
+@require_nats
+def test_publish_nats_request_command(runner: CliRunner) -> None:
+ from faststream.nats import NatsBroker
+ from faststream.nats.publisher.producer import NatsFastProducer
+
+ mock_app, producer_mock = get_mock_app(NatsBroker, NatsFastProducer)
+
+ with patch(
+ "faststream._internal.cli.utils.imports._import_object_or_factory",
+ return_value=(None, mock_app),
+ ):
+ runner.invoke(
+ faststream_app,
+ [
+ "publish",
+ "fastream:app",
+ "hello world",
+ "--subject",
+ "subjectname",
+ "--rpc",
+ "--timeout",
+ "1.0",
+ ],
)
+
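+    # --rpc switches the CLI to request semantics, so the command lands in producer.request()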
+ cmd: NatsPublishCommand = producer_mock.request.call_args.args[0]
+
+ assert cmd.destination == "subjectname"
+ assert cmd.timeout == 1.0
+ assert cmd.publish_type is PublishType.REQUEST
diff --git a/tests/cli/test_run.py b/tests/cli/test_run.py
index eb736baaeb..294f0aee56 100644
--- a/tests/cli/test_run.py
+++ b/tests/cli/test_run.py
@@ -1,200 +1,175 @@
import logging
-from unittest.mock import AsyncMock, Mock, patch
+from unittest.mock import AsyncMock, MagicMock, patch
-import pytest
+from dirty_equals import IsPartialDict
from typer.testing import CliRunner
-from faststream._internal.application import Application
-from faststream.app import FastStream
-from faststream.asgi import AsgiFastStream
-from faststream.cli.main import cli as faststream_app
-from faststream.cli.utils.logs import get_log_level
+from faststream import FastStream
+from faststream._internal.cli.main import cli as faststream_app
-
-@pytest.mark.parametrize(
- "app", [pytest.param(FastStream()), pytest.param(AsgiFastStream())]
+IMPORT_FUNCTION_MOCK_PATH = (
+ "faststream._internal.cli.utils.imports._import_object_or_factory"
)
-def test_run(runner: CliRunner, app: Application):
+
+
+def test_run(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
app.run = AsyncMock()
- with patch(
- "faststream.cli.utils.imports._import_obj_or_factory", return_value=(None, app)
- ):
+ with patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app)):
result = runner.invoke(
faststream_app,
[
"run",
"faststream:app",
- "--host",
- "0.0.0.0",
- "--port",
- "8000",
+ "--extra",
+ "1",
],
)
+
+ assert result.exit_code == 0
+
app.run.assert_awaited_once_with(
- logging.INFO, {"host": "0.0.0.0", "port": "8000"}
+ logging.INFO,
+ {"extra": "1"},
)
- assert result.exit_code == 0
-@pytest.mark.parametrize("app", [pytest.param(AsgiFastStream())])
-def test_run_as_asgi_with_single_worker(runner: CliRunner, app: Application):
+def test_run_factory(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
app.run = AsyncMock()
+ app_factory = MagicMock(return_value=app)
- with patch(
- "faststream.cli.utils.imports._import_obj_or_factory", return_value=(None, app)
- ):
+ with patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app_factory)):
result = runner.invoke(
faststream_app,
[
"run",
"faststream:app",
- "--host",
- "0.0.0.0",
- "--port",
- "8000",
- "--workers",
- "1",
+ "-f",
],
)
- app.run.assert_awaited_once_with(
- logging.INFO, {"host": "0.0.0.0", "port": "8000"}
- )
+
assert result.exit_code == 0
+ app_factory.assert_called_once()
+ app.run.assert_awaited_once()
+
-@pytest.mark.parametrize("workers", [3, 5, 7])
-@pytest.mark.parametrize("app", [pytest.param(AsgiFastStream())])
-def test_run_as_asgi_with_many_workers(
- runner: CliRunner, workers: int, app: Application
-):
- asgi_multiprocess = "faststream.cli.supervisors.asgi_multiprocess.ASGIMultiprocess"
- _import_obj_or_factory = "faststream.cli.utils.imports._import_obj_or_factory"
+def test_run_workers(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
+ app.run = AsyncMock()
- with patch(asgi_multiprocess) as asgi_runner, patch(
- _import_obj_or_factory, return_value=(None, app)
+ with (
+ patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app)),
+ patch(
+ "faststream._internal.cli.supervisors.multiprocess.Multiprocess",
+ ) as mock,
):
+ app_str = "faststream:app"
result = runner.invoke(
faststream_app,
- [
- "run",
- "faststream:app",
- "--host",
- "0.0.0.0",
- "--port",
- "8000",
- "--workers",
- str(workers),
- ],
+ ["run", app_str, "-w", "2"],
)
+
assert result.exit_code == 0
- asgi_runner.assert_called_once()
- asgi_runner.assert_called_once_with(
- target="faststream:app",
- args=("faststream:app", {"host": "0.0.0.0", "port": "8000"}, False, 0),
- workers=workers,
- )
- asgi_runner().run.assert_called_once()
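+    # Multiprocess should receive the serialized run arguments and the worker count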
+ assert mock.call_args.kwargs == IsPartialDict({
+ "args": (app_str, {}, False, logging.NOTSET, logging.DEBUG),
+ "workers": 2,
+ })
-@pytest.mark.parametrize(
- "log_level",
- ["critical", "fatal", "error", "warning", "warn", "info", "debug", "notset"],
-)
-@pytest.mark.parametrize("app", [pytest.param(AsgiFastStream())])
-def test_run_as_asgi_mp_with_log_level(
- runner: CliRunner, app: Application, log_level: str
-):
- asgi_multiprocess = "faststream.cli.supervisors.asgi_multiprocess.ASGIMultiprocess"
- _import_obj_or_factory = "faststream.cli.utils.imports._import_obj_or_factory"
-
- with patch(asgi_multiprocess) as asgi_runner, patch(
- _import_obj_or_factory, return_value=(None, app)
+def test_run_factory_with_workers(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
+ app.run = AsyncMock()
+ app_factory = MagicMock(return_value=app)
+
+ with (
+ patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app_factory)),
+ patch(
+ "faststream._internal.cli.supervisors.multiprocess.Multiprocess",
+ ) as mock,
):
+ app_str = "faststream:app"
result = runner.invoke(
faststream_app,
- [
- "run",
- "faststream:app",
- "--host",
- "0.0.0.0",
- "--port",
- "8000",
- "--workers",
- "3",
- "--log-level",
- log_level,
- ],
+ ["run", app_str, "-f", "-w", "2"],
)
+
assert result.exit_code == 0
- asgi_runner.assert_called_once()
- asgi_runner.assert_called_once_with(
- target="faststream:app",
- args=(
- "faststream:app",
- {"host": "0.0.0.0", "port": "8000"},
- False,
- get_log_level(log_level),
- ),
- workers=3,
- )
- asgi_runner().run.assert_called_once()
+ assert mock.call_args.kwargs == IsPartialDict({
+ "args": (app_str, {}, True, logging.NOTSET, logging.DEBUG),
+ "workers": 2,
+ })
-@pytest.mark.parametrize(
- "app", [pytest.param(FastStream()), pytest.param(AsgiFastStream())]
-)
-def test_run_as_factory(runner: CliRunner, app: Application):
+def test_run_reloader(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
app.run = AsyncMock()
- app_factory = Mock(return_value=app)
-
- with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
- return_value=(None, app_factory),
+ with (
+ patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app)),
+ patch(
+ "faststream._internal.cli.supervisors.watchfiles.WatchReloader",
+ ) as mock,
):
+ app_str = "faststream:app"
+
result = runner.invoke(
faststream_app,
[
"run",
- "faststream:app",
- "--host",
- "0.0.0.0",
- "--port",
- "8000",
- "--factory",
+ app_str,
+ "-r",
+ "--app-dir",
+ "test",
+ "--extension",
+ "yaml",
],
)
- app_factory.assert_called()
- app.run.assert_awaited_once_with(
- logging.INFO, {"host": "0.0.0.0", "port": "8000"}
- )
+
assert result.exit_code == 0
+ assert mock.call_args.kwargs == IsPartialDict({
+ "args": (app_str, {}, False, logging.NOTSET),
+ "reload_dirs": ["test"],
+ "extra_extensions": ["yaml"],
+ })
-@pytest.mark.parametrize(
- "app", [pytest.param(FastStream()), pytest.param(AsgiFastStream())]
-)
-def test_run_app_like_factory_but_its_fake(runner: CliRunner, app: Application):
+
+def test_run_reloader_with_factory(runner: CliRunner) -> None:
+ app = FastStream(MagicMock())
app.run = AsyncMock()
+ app_factory = MagicMock(return_value=app)
- with patch(
- "faststream.cli.utils.imports._import_obj_or_factory",
- return_value=(None, app),
+ with (
+ patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app_factory)),
+ patch(
+ "faststream._internal.cli.supervisors.watchfiles.WatchReloader",
+ ) as mock,
):
+ app_str = "faststream:app"
+
result = runner.invoke(
faststream_app,
[
"run",
- "faststream:app",
- "--host",
- "0.0.0.0",
- "--port",
- "8000",
- "--factory",
+ app_str,
+ "-f",
+ "-r",
+ "--app-dir",
+ "test",
+ "--extension",
+ "yaml",
],
)
- app.run.assert_not_called()
- assert result.exit_code != 0
+
+ assert result.exit_code == 0
+
+ assert mock.call_args.kwargs == IsPartialDict({
+ "args": (app_str, {}, True, logging.NOTSET),
+ "reload_dirs": ["test"],
+ "extra_extensions": ["yaml"],
+ })
diff --git a/tests/cli/test_run_asgi.py b/tests/cli/test_run_asgi.py
new file mode 100644
index 0000000000..eb0e34d28a
--- /dev/null
+++ b/tests/cli/test_run_asgi.py
@@ -0,0 +1,126 @@
+import logging
+from unittest.mock import AsyncMock, MagicMock, patch
+
+from typer.testing import CliRunner
+
+from faststream._internal.cli.main import cli as faststream_app
+from faststream.asgi import AsgiFastStream
+
+IMPORT_FUNCTION_MOCK_PATH = (
+ "faststream._internal.cli.utils.imports._import_object_or_factory"
+)
+
+
+def test_run_as_asgi(runner: CliRunner) -> None:
+ app = AsgiFastStream(AsyncMock())
+ app.run = AsyncMock()
+
+ with patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app)):
+ result = runner.invoke(
+ faststream_app,
+ [
+ "run",
+ "faststream:app",
+ "--host",
+ "0.0.0.0",
+ "--port",
+ "8000",
+ "--workers",
+ "1",
+ ],
+ )
+
+ assert result.exit_code == 0
+
+ app.run.assert_awaited_once_with(
+ logging.INFO,
+ {"host": "0.0.0.0", "port": "8000"},
+ )
+
+
+def test_run_as_asgi_with_workers(runner: CliRunner) -> None:
+ app = AsgiFastStream(AsyncMock())
+ app.run = AsyncMock()
+
+ asgi_multiprocess = (
+ "faststream._internal.cli.supervisors.asgi_multiprocess.ASGIMultiprocess"
+ )
+
+ with (
+ patch(asgi_multiprocess) as asgi_runner,
+ patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app)),
+ ):
+ workers = 2
+
+ result = runner.invoke(
+ faststream_app,
+ [
+ "run",
+ "faststream:app",
+ "-w",
+ str(workers),
+ ],
+ )
+
+ assert result.exit_code == 0
+
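+    # the app is handed over as an import string so each worker process can re-import it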
+ asgi_runner.assert_called_once_with(
+ target="faststream:app",
+ args=("faststream:app", {}, False, 0),
+ workers=workers,
+ )
+
+
+def test_run_as_asgi_factory(runner: CliRunner) -> None:
+ app = AsgiFastStream(AsyncMock())
+ app.run = AsyncMock()
+ app_factory = MagicMock(return_value=app)
+
+ with patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app_factory)):
+ result = runner.invoke(
+ faststream_app,
+ ["run", "-f", "faststream:app"],
+ )
+
+ assert result.exit_code == 0
+
+ app_factory.assert_called_once()
+ app.run.assert_awaited_once_with(logging.INFO, {})
+
+
+def test_run_as_asgi_multiprocess_with_log_level(runner: CliRunner) -> None:
+ app = AsgiFastStream(AsyncMock())
+ app.run = AsyncMock()
+
+ asgi_multiprocess = (
+ "faststream._internal.cli.supervisors.asgi_multiprocess.ASGIMultiprocess"
+ )
+
+ with (
+ patch(asgi_multiprocess) as asgi_runner,
+ patch(IMPORT_FUNCTION_MOCK_PATH, return_value=(None, app)),
+ ):
+ result = runner.invoke(
+ faststream_app,
+ [
+ "run",
+ "faststream:app",
+ "--workers",
+ "2",
+ "--log-level",
+ "critical",
+ ],
+ )
+ assert result.exit_code == 0
+
+ asgi_runner.assert_called_once_with(
+ target="faststream:app",
+ args=(
+ "faststream:app",
+ {},
+ False,
+ logging.CRITICAL,
+ ),
+ workers=2,
+ )
+ asgi_runner().run.assert_called_once()
diff --git a/tests/cli/test_version.py b/tests/cli/test_version.py
index 98fbfd5c23..a361177f21 100644
--- a/tests/cli/test_version.py
+++ b/tests/cli/test_version.py
@@ -1,9 +1,9 @@
import platform
-from faststream.cli.main import cli
+from faststream._internal.cli.main import cli
-def test_version(runner, version):
+def test_version(runner, version) -> None:
result = runner.invoke(cli, ["--version"])
assert result.exit_code == 0
assert version in result.stdout
diff --git a/tests/cli/utils/test_imports.py b/tests/cli/utils/test_imports.py
index f97e26c0ff..11263bc3c7 100644
--- a/tests/cli/utils/test_imports.py
+++ b/tests/cli/utils/test_imports.py
@@ -3,20 +3,24 @@
import pytest
from typer import BadParameter
+from faststream._internal.cli.utils.imports import (
+ _get_obj_path,
+ _import_object,
+ import_from_string,
+)
from faststream.app import FastStream
-from faststream.cli.utils.imports import get_app_path, import_from_string, import_object
from tests.marks import require_aiokafka, require_aiopika, require_nats
-def test_import_wrong():
- dir, app = get_app_path("tests:test_object")
+def test_import_wrong() -> None:
+ dir, app = _get_obj_path("tests:test_object")
with pytest.raises(FileNotFoundError):
- import_object(dir, app)
+ _import_object(dir, app)
@pytest.mark.parametrize(
("test_input", "exp_module", "exp_app"),
- ( # noqa: PT007
+ (
pytest.param(
"module:app",
"module",
@@ -31,25 +35,25 @@ def test_import_wrong():
),
),
)
-def test_get_app_path(test_input, exp_module, exp_app):
- dir, app = get_app_path(test_input)
+def test_get_obj_path(test_input: str, exp_module: str, exp_app: str) -> None:
+ dir, app = _get_obj_path(test_input)
assert app == exp_app
assert dir == Path.cwd() / exp_module
-def test_get_app_path_wrong():
- with pytest.raises(ValueError, match="`module.app` is not a FastStream"):
- get_app_path("module.app")
+def test_get_obj_path_wrong() -> None:
+ with pytest.raises(ValueError, match=r"`module.app` is not a path to object"):
+ _get_obj_path("module.app")
-def test_import_from_string_import_wrong():
+def test_import_from_string_import_wrong() -> None:
with pytest.raises(BadParameter):
import_from_string("tests:test_object")
@pytest.mark.parametrize(
("test_input", "exp_module"),
- ( # noqa: PT007
+ (
pytest.param("examples.kafka.testing:app", "examples/kafka/testing.py"),
pytest.param("examples.nats.e01_basic:app", "examples/nats/e01_basic.py"),
pytest.param("examples.rabbit.topic:app", "examples/rabbit/topic.py"),
@@ -58,7 +62,7 @@ def test_import_from_string_import_wrong():
@require_nats
@require_aiopika
@require_aiokafka
-def test_import_from_string(test_input, exp_module):
+def test_import_from_string(test_input: str, exp_module: str) -> None:
module, app = import_from_string(test_input)
assert isinstance(app, FastStream)
assert module == (Path.cwd() / exp_module).parent
@@ -66,7 +70,7 @@ def test_import_from_string(test_input, exp_module):
@pytest.mark.parametrize(
("test_input", "exp_module"),
- ( # noqa: PT007
+ (
pytest.param(
"examples.kafka:app",
"examples/kafka/__init__.py",
@@ -87,12 +91,12 @@ def test_import_from_string(test_input, exp_module):
@require_nats
@require_aiopika
@require_aiokafka
-def test_import_module(test_input, exp_module):
+def test_import_module(test_input: str, exp_module: str) -> None:
module, app = import_from_string(test_input)
assert isinstance(app, FastStream)
assert module == (Path.cwd() / exp_module).parent
-def test_import_from_string_wrong():
+def test_import_from_string_wrong() -> None:
with pytest.raises(BadParameter):
import_from_string("module.app")
diff --git a/tests/cli/utils/test_parser.py b/tests/cli/utils/test_parser.py
index c6bc939c01..449233a612 100644
--- a/tests/cli/utils/test_parser.py
+++ b/tests/cli/utils/test_parser.py
@@ -1,8 +1,6 @@
-from typing import Tuple
-
import pytest
-from faststream.cli.utils.parser import is_bind_arg, parse_cli_args
+from faststream._internal.cli.utils.parser import is_bind_arg, parse_cli_args
APPLICATION = "module:app"
@@ -28,18 +26,22 @@
@pytest.mark.parametrize(
"args",
- ( # noqa: PT007
- (APPLICATION, *ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8),
- (*ARG1, APPLICATION, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8),
- (*ARG1, *ARG2, APPLICATION, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8),
- (*ARG1, *ARG2, *ARG3, APPLICATION, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8),
- (*ARG1, *ARG2, *ARG3, *ARG4, APPLICATION, *ARG5, *ARG6, *ARG7, *ARG8),
- (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, APPLICATION, *ARG6, *ARG7, *ARG8),
- (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, APPLICATION, *ARG7, *ARG8),
- (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8, APPLICATION),
+ (
+ pytest.param(
+ (APPLICATION, *ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8),
+ id="app first",
+ ),
+ pytest.param(
+ (*ARG1, *ARG2, *ARG3, APPLICATION, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8),
+ id="app middle",
+ ),
+ pytest.param(
+ (*ARG1, *ARG2, *ARG3, *ARG4, *ARG5, *ARG6, *ARG7, *ARG8, APPLICATION),
+ id="app last",
+ ),
),
)
-def test_custom_argument_parsing(args: Tuple[str]):
+def test_custom_argument_parsing(args: tuple[str, ...]) -> None:
app_name, extra = parse_cli_args(*args)
assert app_name == APPLICATION
assert extra == {
@@ -55,14 +57,25 @@ def test_custom_argument_parsing(args: Tuple[str]):
@pytest.mark.parametrize(
- "args", ["0.0.0.0:8000", "[::]:8000", "fd://2", "unix:/tmp/socket.sock"]
+ "args",
+ (
+ pytest.param("0.0.0.0:8000"),
+ pytest.param("[::]:8000"),
+ pytest.param("fd://2"),
+ pytest.param("unix:/tmp/socket.sock"),
+ ),
)
def test_bind_arg(args: str):
assert is_bind_arg(args) is True
@pytest.mark.parametrize(
- "args", ["main:app", "src.main:app", "examples.nats.e01_basic:app2"]
+ "args",
+ (
+ pytest.param("main:app"),
+ pytest.param("src.main:app"),
+ pytest.param("examples.nats.e01_basic:app2"),
+ ),
)
def test_not_bind_arg(args: str):
assert is_bind_arg(args) is False
diff --git a/tests/conftest.py b/tests/conftest.py
index e6b227941e..02f1c26724 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -6,26 +6,26 @@
from typer.testing import CliRunner
from faststream.__about__ import __version__
-from faststream.utils import context as global_context
+from faststream._internal.context import ContextRepo
@pytest.hookimpl(tryfirst=True)
-def pytest_keyboard_interrupt(excinfo): # pragma: no cover
+def pytest_keyboard_interrupt(excinfo) -> None: # pragma: no cover
pytest.mark.skip("Interrupted Test Session")
-def pytest_collection_modifyitems(items):
+def pytest_collection_modifyitems(items) -> None:
for item in items:
item.add_marker("all")
-@pytest.fixture
-def queue():
+@pytest.fixture()
+def queue() -> str:
return str(uuid4())
-@pytest.fixture
-def event():
+@pytest.fixture()
+def event() -> asyncio.Event:
return asyncio.Event()
@@ -34,31 +34,35 @@ def runner() -> CliRunner:
return CliRunner()
-@pytest.fixture
-def mock():
+@pytest.fixture()
+def mock() -> MagicMock:
m = MagicMock()
yield m
m.reset_mock()
-@pytest.fixture
-def async_mock():
+@pytest.fixture()
+def async_mock() -> AsyncMock:
m = AsyncMock()
yield m
m.reset_mock()
@pytest.fixture(scope="session")
-def version():
+def version() -> str:
return __version__
-@pytest.fixture
-def context():
- yield global_context
- global_context.clear()
+@pytest.fixture()
+def context() -> ContextRepo:
+ return ContextRepo()
-@pytest.fixture
-def kafka_basic_project():
+@pytest.fixture()
+def kafka_basic_project() -> str:
return "docs.docs_src.kafka.basic.basic:app"
+
+
+@pytest.fixture()
+def kafka_asyncapi_project() -> str:
+ return "docs.docs_src.kafka.basic.basic:asyncapi"
diff --git a/tests/a_docs/confluent/consumes_basics/__init__.py b/tests/docs/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/consumes_basics/__init__.py
rename to tests/docs/__init__.py
diff --git a/tests/docs/confluent/__init__.py b/tests/docs/confluent/__init__.py
new file mode 100644
index 0000000000..c4a1803708
--- /dev/null
+++ b/tests/docs/confluent/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("confluent_kafka")
diff --git a/tests/a_docs/confluent/publish_batch/__init__.py b/tests/docs/confluent/ack/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/publish_batch/__init__.py
rename to tests/docs/confluent/ack/__init__.py
diff --git a/tests/docs/confluent/ack/test_errors.py b/tests/docs/confluent/ack/test_errors.py
new file mode 100644
index 0000000000..e3b40a4d75
--- /dev/null
+++ b/tests/docs/confluent/ack/test_errors.py
@@ -0,0 +1,24 @@
+from unittest.mock import patch
+
+import pytest
+
+from faststream.confluent import TestApp, TestKafkaBroker
+from faststream.confluent.client import AsyncConfluentConsumer
+from tests.tools import spy_decorator
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+@pytest.mark.slow()
+async def test_ack_exc() -> None:
+ from docs.docs_src.confluent.ack.errors import app, broker, handle
+
+ with patch.object(
+ AsyncConfluentConsumer,
+ "commit",
+ spy_decorator(AsyncConfluentConsumer.commit),
+ ) as m:
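+        # spy_decorator keeps the real commit() behavior while recording each call on m.mock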
+ async with TestKafkaBroker(broker, with_real=True), TestApp(app):
+ await handle.wait_call(20)
+
+ assert m.mock.call_count
diff --git a/tests/a_docs/confluent/publish_example/__init__.py b/tests/docs/confluent/additional_config/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/publish_example/__init__.py
rename to tests/docs/confluent/additional_config/__init__.py
diff --git a/tests/docs/confluent/additional_config/test_app.py b/tests/docs/confluent/additional_config/test_app.py
new file mode 100644
index 0000000000..6756792d0f
--- /dev/null
+++ b/tests/docs/confluent/additional_config/test_app.py
@@ -0,0 +1,15 @@
+import pytest
+
+from docs.docs_src.confluent.additional_config.app import (
+ HelloWorld,
+ broker,
+ on_hello_world,
+)
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(HelloWorld(msg="First Hello"), "hello_world")
+ on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello")))
diff --git a/tests/a_docs/confluent/publish_with_partition_key/__init__.py b/tests/docs/confluent/basic/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/publish_with_partition_key/__init__.py
rename to tests/docs/confluent/basic/__init__.py
diff --git a/tests/docs/confluent/basic/test_basic.py b/tests/docs/confluent/basic/test_basic.py
new file mode 100644
index 0000000000..014065408b
--- /dev/null
+++ b/tests/docs/confluent/basic/test_basic.py
@@ -0,0 +1,15 @@
+import pytest
+
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_basic() -> None:
+ from docs.docs_src.confluent.basic.basic import broker, on_input_data
+
+ publisher = broker._publishers[0]
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish({"data": 1.0}, "input_data")
+ on_input_data.mock.assert_called_once_with({"data": 1.0})
+ publisher.mock.assert_called_once_with({"data": 2.0})
diff --git a/tests/docs/confluent/basic/test_cmd_run.py b/tests/docs/confluent/basic/test_cmd_run.py
new file mode 100644
index 0000000000..bee99ab1fc
--- /dev/null
+++ b/tests/docs/confluent/basic/test_cmd_run.py
@@ -0,0 +1,38 @@
+from unittest.mock import Mock
+
+import pytest
+from typer.testing import CliRunner
+
+from faststream._internal.cli.main import cli
+from faststream.app import FastStream
+
+
+@pytest.fixture()
+def confluent_basic_project() -> str:
+ return "docs.docs_src.confluent.basic.basic:app"
+
+
+@pytest.mark.confluent()
+def test_run_cmd(
+ runner: CliRunner,
+ mock: Mock,
+ monkeypatch: pytest.MonkeyPatch,
+    confluent_basic_project: str,
+) -> None:
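+    # replace FastStream.run with a quick start/stop cycle so the CLI call returns immediately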
+ async def patched_run(self: FastStream, *args, **kwargs) -> None:
+ await self.start()
+ await self.stop()
+ mock()
+
+ with monkeypatch.context() as m:
+ m.setattr(FastStream, "run", patched_run)
+ r = runner.invoke(
+ cli,
+ [
+ "run",
+ confluent_basic_project,
+ ],
+ )
+
+ assert r.exit_code == 0
+ mock.assert_called_once()
diff --git a/tests/a_docs/confluent/publisher_object/__init__.py b/tests/docs/confluent/batch_consuming_pydantic/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/publisher_object/__init__.py
rename to tests/docs/confluent/batch_consuming_pydantic/__init__.py
diff --git a/tests/docs/confluent/batch_consuming_pydantic/test_app.py b/tests/docs/confluent/batch_consuming_pydantic/test_app.py
new file mode 100644
index 0000000000..95474417d9
--- /dev/null
+++ b/tests/docs/confluent/batch_consuming_pydantic/test_app.py
@@ -0,0 +1,21 @@
+import pytest
+
+from docs.docs_src.confluent.batch_consuming_pydantic.app import (
+ HelloWorld,
+ broker,
+ handle_batch,
+)
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_me() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish_batch(
+ HelloWorld(msg="First Hello"),
+ HelloWorld(msg="Second Hello"),
+ topic="test_batch",
+ )
+ handle_batch.mock.assert_called_with(
+ [dict(HelloWorld(msg="First Hello")), dict(HelloWorld(msg="Second Hello"))],
+ )
diff --git a/tests/a_docs/confluent/raw_publish/__init__.py b/tests/docs/confluent/consumes_basics/__init__.py
similarity index 100%
rename from tests/a_docs/confluent/raw_publish/__init__.py
rename to tests/docs/confluent/consumes_basics/__init__.py
diff --git a/tests/docs/confluent/consumes_basics/test_app.py b/tests/docs/confluent/consumes_basics/test_app.py
new file mode 100644
index 0000000000..c79885711b
--- /dev/null
+++ b/tests/docs/confluent/consumes_basics/test_app.py
@@ -0,0 +1,15 @@
+import pytest
+
+from docs.docs_src.confluent.consumes_basics.app import (
+ HelloWorld,
+ broker,
+ on_hello_world,
+)
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(HelloWorld(msg="First Hello"), "hello_world")
+ on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello")))
diff --git a/tests/a_docs/getting_started/__init__.py b/tests/docs/confluent/publish_batch/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/__init__.py
rename to tests/docs/confluent/publish_batch/__init__.py
diff --git a/tests/docs/confluent/publish_batch/test_app.py b/tests/docs/confluent/publish_batch/test_app.py
new file mode 100644
index 0000000000..d8042ba94d
--- /dev/null
+++ b/tests/docs/confluent/publish_batch/test_app.py
@@ -0,0 +1,32 @@
+import pytest
+
+from docs.docs_src.confluent.publish_batch.app import (
+ Data,
+ broker,
+ decrease_and_increase,
+ on_input_data_1,
+ on_input_data_2,
+)
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_batch_publish_decorator() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=2.0), "input_data_1")
+
+ on_input_data_1.mock.assert_called_once_with(dict(Data(data=2.0)))
+ decrease_and_increase.mock.assert_called_once_with(
+ [dict(Data(data=1.0)), dict(Data(data=4.0))],
+ )
+
+
+@pytest.mark.asyncio()
+async def test_batch_publish_call() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=2.0), "input_data_2")
+
+ on_input_data_2.mock.assert_called_once_with(dict(Data(data=2.0)))
+ decrease_and_increase.mock.assert_called_once_with(
+ [dict(Data(data=1.0)), dict(Data(data=4.0))],
+ )
diff --git a/tests/docs/confluent/publish_batch/test_issues.py b/tests/docs/confluent/publish_batch/test_issues.py
new file mode 100644
index 0000000000..3c511da0d8
--- /dev/null
+++ b/tests/docs/confluent/publish_batch/test_issues.py
@@ -0,0 +1,22 @@
+import pytest
+
+from faststream import FastStream
+from faststream.confluent import KafkaBroker, TestKafkaBroker
+
+broker = KafkaBroker()
+batch_producer = broker.publisher("response", batch=True)
+
+
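+# a list returned from the handler is published to "response" as a single batch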
+@batch_producer
+@broker.subscriber("test")
+async def handle(msg: str) -> list[int]:
+ return [1, 2, 3]
+
+
+app = FastStream(broker)
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish("", "test")
diff --git a/tests/a_docs/getting_started/asyncapi/__init__.py b/tests/docs/confluent/publish_example/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/asyncapi/__init__.py
rename to tests/docs/confluent/publish_example/__init__.py
diff --git a/tests/docs/confluent/publish_example/test_app.py b/tests/docs/confluent/publish_example/test_app.py
new file mode 100644
index 0000000000..beb23d4b36
--- /dev/null
+++ b/tests/docs/confluent/publish_example/test_app.py
@@ -0,0 +1,18 @@
+import pytest
+
+from docs.docs_src.confluent.publish_example.app import (
+ Data,
+ broker,
+ on_input_data,
+ to_output_data,
+)
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=0.2), "input_data")
+
+ on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
+ to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
diff --git a/tests/a_docs/getting_started/cli/__init__.py b/tests/docs/confluent/publish_with_partition_key/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/cli/__init__.py
rename to tests/docs/confluent/publish_with_partition_key/__init__.py
diff --git a/tests/docs/confluent/publish_with_partition_key/test_app.py b/tests/docs/confluent/publish_with_partition_key/test_app.py
new file mode 100644
index 0000000000..517bd7940c
--- /dev/null
+++ b/tests/docs/confluent/publish_with_partition_key/test_app.py
@@ -0,0 +1,30 @@
+import pytest
+
+from docs.docs_src.confluent.publish_with_partition_key.app import (
+ Data,
+ broker,
+ on_input_data,
+ to_output_data,
+)
+from faststream.confluent import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
+
+ on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
+ to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
+
+
+@pytest.mark.skip("we are not checking the key")
+@pytest.mark.asyncio()
+async def test_keys() -> None:
+ async with TestKafkaBroker(broker):
+ # we should be able to publish a message with the key
+ await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
+
+ # we need to check the key as well
+ on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)), key=b"my_key")
+ to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)), key=b"key")
diff --git a/tests/a_docs/getting_started/context/__init__.py b/tests/docs/confluent/publisher_object/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/context/__init__.py
rename to tests/docs/confluent/publisher_object/__init__.py
diff --git a/tests/a_docs/confluent/publisher_object/test_publisher_object.py b/tests/docs/confluent/publisher_object/test_publisher_object.py
similarity index 100%
rename from tests/a_docs/confluent/publisher_object/test_publisher_object.py
rename to tests/docs/confluent/publisher_object/test_publisher_object.py
diff --git a/tests/a_docs/getting_started/dependencies/__init__.py b/tests/docs/confluent/raw_publish/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/dependencies/__init__.py
rename to tests/docs/confluent/raw_publish/__init__.py
diff --git a/tests/a_docs/confluent/raw_publish/test_raw_publish.py b/tests/docs/confluent/raw_publish/test_raw_publish.py
similarity index 100%
rename from tests/a_docs/confluent/raw_publish/test_raw_publish.py
rename to tests/docs/confluent/raw_publish/test_raw_publish.py
diff --git a/tests/docs/confluent/test_security.py b/tests/docs/confluent/test_security.py
new file mode 100644
index 0000000000..5d78c1391b
--- /dev/null
+++ b/tests/docs/confluent/test_security.py
@@ -0,0 +1,139 @@
+import pytest
+
+from tests.brokers.confluent.test_security import patch_aio_consumer_and_producer
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_base_security() -> None:
+ from docs.docs_src.confluent.security.basic import broker as basic_broker
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with basic_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+
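+        # dict-items "<=" is a subset check: every expected kwarg must match the producer call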
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_scram256() -> None:
+ from docs.docs_src.confluent.security.sasl_scram256 import (
+ broker as scram256_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with scram256_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "SCRAM-SHA-256"
+ call_kwargs["sasl_plain_username"] = "admin"
+ call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert (
+ producer_call_kwargs["security_protocol"]
+ == call_kwargs["security_protocol"]
+ )
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_scram512() -> None:
+ from docs.docs_src.confluent.security.sasl_scram512 import (
+ broker as scram512_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with scram512_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "SCRAM-SHA-512"
+ call_kwargs["sasl_plain_username"] = "admin"
+ call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert (
+ producer_call_kwargs["security_protocol"]
+ == call_kwargs["security_protocol"]
+ )
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_plaintext() -> None:
+ from docs.docs_src.confluent.security.plaintext import (
+ broker as plaintext_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with plaintext_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "PLAIN"
+ call_kwargs["sasl_plain_username"] = "admin"
+ call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert (
+ producer_call_kwargs["security_protocol"]
+ == call_kwargs["security_protocol"]
+ )
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_oauthbearer() -> None:
+ from docs.docs_src.confluent.security.sasl_oauthbearer import (
+ broker as oauthbearer_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with oauthbearer_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "OAUTHBEARER"
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert (
+ producer_call_kwargs["security_protocol"]
+ == call_kwargs["security_protocol"]
+ )
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+async def test_gssapi() -> None:
+ from docs.docs_src.confluent.security.sasl_gssapi import (
+ broker as gssapi_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with gssapi_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {
+ "sasl_mechanism": "GSSAPI",
+ "security_protocol": "SASL_SSL",
+ }
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert (
+ producer_call_kwargs["security_protocol"]
+ == call_kwargs["security_protocol"]
+ )
diff --git a/tests/a_docs/getting_started/dependencies/basic/__init__.py b/tests/docs/getting_started/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/dependencies/basic/__init__.py
rename to tests/docs/getting_started/__init__.py
diff --git a/tests/a_docs/getting_started/index/__init__.py b/tests/docs/getting_started/asyncapi/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/index/__init__.py
rename to tests/docs/getting_started/asyncapi/__init__.py
diff --git a/tests/a_docs/getting_started/cli/kafka/__init__.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/cli/kafka/__init__.py
rename to tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py
diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_basic.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_basic.py
new file mode 100644
index 0000000000..05577fbbb2
--- /dev/null
+++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_basic.py
@@ -0,0 +1,65 @@
+from docs.docs_src.getting_started.asyncapi.asyncapi_customization.basic import app
+from faststream.specification.asyncapi import AsyncAPI
+
+
+def test_basic_customization() -> None:
+ schema = AsyncAPI(app.broker, schema_version="2.6.0").to_jsonable()
+
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {
+ "input_data:OnInputData": {
+ "bindings": {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "input_data"},
+ },
+ "servers": ["development"],
+ "publish": {
+ "message": {
+ "$ref": "#/components/messages/input_data:OnInputData:Message",
+ },
+ },
+ },
+ "output_data:Publisher": {
+ "bindings": {
+ "kafka": {"bindingVersion": "0.4.0", "topic": "output_data"},
+ },
+ "subscribe": {
+ "message": {
+ "$ref": "#/components/messages/output_data:Publisher:Message",
+ },
+ },
+ "servers": ["development"],
+ },
+ },
+ "components": {
+ "messages": {
+ "input_data:OnInputData:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/OnInputData:Message:Payload",
+ },
+ "title": "input_data:OnInputData:Message",
+ },
+ "output_data:Publisher:Message": {
+ "correlationId": {"location": "$message.header#/correlation_id"},
+ "payload": {
+ "$ref": "#/components/schemas/output_data:PublisherPayload",
+ },
+ "title": "output_data:Publisher:Message",
+ },
+ },
+ "schemas": {
+ "OnInputData:Message:Payload": {"title": "OnInputData:Message:Payload"},
+ "output_data:PublisherPayload": {},
+ },
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "kafka",
+ "protocolVersion": "auto",
+ "url": "localhost:9092",
+ },
+ },
+ }
diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_broker.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_broker.py
new file mode 100644
index 0000000000..81c8610437
--- /dev/null
+++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_broker.py
@@ -0,0 +1,16 @@
+from docs.docs_src.getting_started.asyncapi.asyncapi_customization.custom_broker import (
+ docs_obj,
+)
+
+
+def test_broker_customization() -> None:
+ schema = docs_obj.to_jsonable()
+
+ assert schema["servers"] == {
+ "development": {
+ "url": "non-sensitive-url:9092",
+ "protocol": "kafka",
+ "description": "Kafka broker running locally",
+ "protocolVersion": "auto",
+ },
+ }
diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/test_handler.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
new file mode 100644
index 0000000000..c7499bb15e
--- /dev/null
+++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
@@ -0,0 +1,38 @@
+from dirty_equals import IsPartialDict
+
+from docs.docs_src.getting_started.asyncapi.asyncapi_customization.custom_handler import (
+ docs_obj,
+)
+
+
+def test_handler_customization() -> None:
+ schema = docs_obj.to_jsonable()
+
+ (subscriber_key, subscriber_value), (publisher_key, publisher_value) = schema[
+ "channels"
+ ].items()
+
+ assert subscriber_key == "input_data:Consume", subscriber_key
+ assert subscriber_value == IsPartialDict({
+ "servers": ["development"],
+ "bindings": {"kafka": {"topic": "input_data", "bindingVersion": "0.4.0"}},
+ "publish": {
+ "message": {"$ref": "#/components/messages/input_data:Consume:Message"},
+ },
+ }), subscriber_value
+ desc = subscriber_value["description"]
+ assert ( # noqa: PT018
+ "Consumer function\n\n" in desc
+ and "Args:\n" in desc
+ and " msg: input msg" in desc
+ ), desc
+
+ assert publisher_key == "output_data:Produce", publisher_key
+ assert publisher_value == {
+ "description": "My publisher description",
+ "servers": ["development"],
+ "bindings": {"kafka": {"topic": "output_data", "bindingVersion": "0.4.0"}},
+ "subscribe": {
+ "message": {"$ref": "#/components/messages/output_data:Produce:Message"}
+ },
+ }
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_info.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_info.py
similarity index 77%
rename from tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_info.py
rename to tests/docs/getting_started/asyncapi/asyncapi_customization/test_info.py
index 7d7a02a886..5fccbda829 100644
--- a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_info.py
+++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_info.py
@@ -1,11 +1,10 @@
from docs.docs_src.getting_started.asyncapi.asyncapi_customization.custom_info import (
- app,
+ docs_obj,
)
-from faststream.asyncapi.generate import get_app_schema
-def test_info_customization():
- schema = get_app_schema(app).to_jsonable()
+def test_info_customization() -> None:
+ schema = docs_obj.to_jsonable()
assert schema["info"] == {
"title": "My App",
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_payload.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_payload.py
similarity index 75%
rename from tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_payload.py
rename to tests/docs/getting_started/asyncapi/asyncapi_customization/test_payload.py
index 5b4d693321..ae7e94eb92 100644
--- a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_payload.py
+++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/test_payload.py
@@ -1,11 +1,10 @@
from docs.docs_src.getting_started.asyncapi.asyncapi_customization.payload_info import (
- app,
+ docs_obj,
)
-from faststream.asyncapi.generate import get_app_schema
-def test_payload_customization():
- schema = get_app_schema(app).to_jsonable()
+def test_payload_customization() -> None:
+ schema = docs_obj.to_jsonable()
assert schema["components"]["schemas"] == {
"DataBasic": {
@@ -16,10 +15,10 @@ def test_payload_customization():
"minimum": 0,
"title": "Data",
"type": "number",
- }
+ },
},
"required": ["data"],
"title": "DataBasic",
"type": "object",
- }
+ },
}
diff --git a/tests/a_docs/getting_started/lifespan/__init__.py b/tests/docs/getting_started/cli/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/lifespan/__init__.py
rename to tests/docs/getting_started/cli/__init__.py
diff --git a/tests/docs/getting_started/cli/confluent/__init__.py b/tests/docs/getting_started/cli/confluent/__init__.py
new file mode 100644
index 0000000000..c4a1803708
--- /dev/null
+++ b/tests/docs/getting_started/cli/confluent/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("confluent_kafka")
diff --git a/tests/docs/getting_started/cli/confluent/test_confluent_context.py b/tests/docs/getting_started/cli/confluent/test_confluent_context.py
new file mode 100644
index 0000000000..3615e32d80
--- /dev/null
+++ b/tests/docs/getting_started/cli/confluent/test_confluent_context.py
@@ -0,0 +1,16 @@
+import pytest
+
+from faststream import TestApp
+from faststream.confluent import TestKafkaBroker
+from tests.marks import pydantic_v2
+from tests.mocks import mock_pydantic_settings_env
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+async def test() -> None:
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.cli.confluent_context import app, broker
+
+ async with TestKafkaBroker(broker), TestApp(app, {"env": ""}):
+ assert app.context.get("settings").host == "localhost"
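
mock_pydantic_settings_env comes from tests.mocks, whose body is not part of this diff. A plausible sketch, assuming it simply patches the process environment so the docs example's pydantic-settings class picks the value up at import time (the name and semantics below are assumptions):

    import os
    from contextlib import contextmanager
    from typing import Dict, Iterator
    from unittest import mock


    @contextmanager
    def mock_pydantic_settings_env(env: Dict[str, str]) -> Iterator[None]:
        # pydantic-settings reads os.environ when the Settings class is
        # instantiated; that is why each test defers the docs-module
        # import into the `with` block, so the patch is active then
        with mock.patch.dict(os.environ, env, clear=False):
            yield
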
diff --git a/tests/a_docs/kafka/__init__.py b/tests/docs/getting_started/cli/kafka/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/__init__.py
rename to tests/docs/getting_started/cli/kafka/__init__.py
diff --git a/tests/docs/getting_started/cli/kafka/test_kafka_context.py b/tests/docs/getting_started/cli/kafka/test_kafka_context.py
new file mode 100644
index 0000000000..9b26e90f34
--- /dev/null
+++ b/tests/docs/getting_started/cli/kafka/test_kafka_context.py
@@ -0,0 +1,16 @@
+import pytest
+
+from faststream import TestApp
+from faststream.kafka import TestKafkaBroker
+from tests.marks import pydantic_v2
+from tests.mocks import mock_pydantic_settings_env
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+async def test() -> None:
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.cli.kafka_context import app, broker
+
+ async with TestKafkaBroker(broker), TestApp(app, {"env": ""}):
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/docs/getting_started/cli/nats/__init__.py b/tests/docs/getting_started/cli/nats/__init__.py
new file mode 100644
index 0000000000..87ead90ee6
--- /dev/null
+++ b/tests/docs/getting_started/cli/nats/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("nats")
diff --git a/tests/docs/getting_started/cli/nats/test_nats_context.py b/tests/docs/getting_started/cli/nats/test_nats_context.py
new file mode 100644
index 0000000000..fcb8ed5bb9
--- /dev/null
+++ b/tests/docs/getting_started/cli/nats/test_nats_context.py
@@ -0,0 +1,16 @@
+import pytest
+
+from faststream import TestApp
+from faststream.nats import TestNatsBroker
+from tests.marks import pydantic_v2
+from tests.mocks import mock_pydantic_settings_env
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+async def test() -> None:
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.cli.nats_context import app, broker
+
+ async with TestNatsBroker(broker), TestApp(app, {"env": ""}):
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/docs/getting_started/cli/rabbit/__init__.py b/tests/docs/getting_started/cli/rabbit/__init__.py
new file mode 100644
index 0000000000..ebec43fcd5
--- /dev/null
+++ b/tests/docs/getting_started/cli/rabbit/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("aio_pika")
diff --git a/tests/docs/getting_started/cli/rabbit/test_rabbit_context.py b/tests/docs/getting_started/cli/rabbit/test_rabbit_context.py
new file mode 100644
index 0000000000..2fef4df0cd
--- /dev/null
+++ b/tests/docs/getting_started/cli/rabbit/test_rabbit_context.py
@@ -0,0 +1,21 @@
+import pytest
+
+from faststream import TestApp
+from faststream.rabbit import TestRabbitBroker
+from tests.marks import pydantic_v2
+from tests.mocks import mock_pydantic_settings_env
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+async def test() -> None:
+ with mock_pydantic_settings_env(
+ {"host": "amqp://guest:guest@localhost:5673/"}, # pragma: allowlist secret
+ ):
+ from docs.docs_src.getting_started.cli.rabbit_context import app, broker
+
+ async with TestRabbitBroker(broker), TestApp(app, {"env": ".env"}):
+ assert (
+ app.context.get("settings").host
+ == "amqp://guest:guest@localhost:5673/" # pragma: allowlist secret
+ )
diff --git a/tests/docs/getting_started/cli/redis/__init__.py b/tests/docs/getting_started/cli/redis/__init__.py
new file mode 100644
index 0000000000..4752ef19b1
--- /dev/null
+++ b/tests/docs/getting_started/cli/redis/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("redis")
diff --git a/tests/docs/getting_started/cli/redis/test_redis_context.py b/tests/docs/getting_started/cli/redis/test_redis_context.py
new file mode 100644
index 0000000000..07536cbfdc
--- /dev/null
+++ b/tests/docs/getting_started/cli/redis/test_redis_context.py
@@ -0,0 +1,16 @@
+import pytest
+
+from faststream import TestApp
+from faststream.redis import TestRedisBroker
+from tests.marks import pydantic_v2
+from tests.mocks import mock_pydantic_settings_env
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+async def test() -> None:
+ with mock_pydantic_settings_env({"host": "redis://localhost:6380"}):
+ from docs.docs_src.getting_started.cli.redis_context import app, broker
+
+ async with TestRedisBroker(broker), TestApp(app, {"env": ".env"}):
+ assert app.context.get("settings").host == "redis://localhost:6380"
diff --git a/tests/a_docs/getting_started/publishing/__init__.py b/tests/docs/getting_started/config/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/publishing/__init__.py
rename to tests/docs/getting_started/config/__init__.py
diff --git a/tests/a_docs/getting_started/config/test_settings_base_1.py b/tests/docs/getting_started/config/test_settings_base_1.py
similarity index 82%
rename from tests/a_docs/getting_started/config/test_settings_base_1.py
rename to tests/docs/getting_started/config/test_settings_base_1.py
index fd42ba6533..66927a45a4 100644
--- a/tests/a_docs/getting_started/config/test_settings_base_1.py
+++ b/tests/docs/getting_started/config/test_settings_base_1.py
@@ -2,7 +2,7 @@
@pydantic_v1
-def test_exists_and_valid():
+def test_exists_and_valid() -> None:
from docs.docs_src.getting_started.config.settings_base_1 import settings
assert settings.queue == "test-queue"
diff --git a/tests/a_docs/getting_started/config/test_settings_base_2.py b/tests/docs/getting_started/config/test_settings_base_2.py
similarity index 90%
rename from tests/a_docs/getting_started/config/test_settings_base_2.py
rename to tests/docs/getting_started/config/test_settings_base_2.py
index 780f73f278..584a746ef9 100644
--- a/tests/a_docs/getting_started/config/test_settings_base_2.py
+++ b/tests/docs/getting_started/config/test_settings_base_2.py
@@ -3,7 +3,7 @@
@pydantic_v2
-def test_exists_and_valid():
+def test_exists_and_valid() -> None:
with mock_pydantic_settings_env({"url": "localhost:9092"}):
from docs.docs_src.getting_started.config.settings_base_2 import settings
diff --git a/tests/a_docs/getting_started/config/test_settings_env.py b/tests/docs/getting_started/config/test_settings_env.py
similarity index 90%
rename from tests/a_docs/getting_started/config/test_settings_env.py
rename to tests/docs/getting_started/config/test_settings_env.py
index 960485ed4c..bc3efbe3d2 100644
--- a/tests/a_docs/getting_started/config/test_settings_env.py
+++ b/tests/docs/getting_started/config/test_settings_env.py
@@ -3,7 +3,7 @@
@pydantic_v2
-def test_exists_and_valid():
+def test_exists_and_valid() -> None:
with mock_pydantic_settings_env({"url": "localhost:9092"}):
from docs.docs_src.getting_started.config.settings_env import settings
diff --git a/tests/a_docs/getting_started/config/test_usage.py b/tests/docs/getting_started/config/test_usage.py
similarity index 90%
rename from tests/a_docs/getting_started/config/test_usage.py
rename to tests/docs/getting_started/config/test_usage.py
index 2ae34dda33..6459f1d1d3 100644
--- a/tests/a_docs/getting_started/config/test_usage.py
+++ b/tests/docs/getting_started/config/test_usage.py
@@ -4,7 +4,7 @@
@pydantic_v2
@require_aiopika
-def test_exists_and_valid():
+def test_exists_and_valid() -> None:
with mock_pydantic_settings_env({"url": "localhost:9092"}):
from docs.docs_src.getting_started.config.usage import settings
diff --git a/tests/a_docs/getting_started/routers/__init__.py b/tests/docs/getting_started/context/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/routers/__init__.py
rename to tests/docs/getting_started/context/__init__.py
diff --git a/tests/docs/getting_started/context/test_annotated.py b/tests/docs/getting_started/context/test_annotated.py
new file mode 100644
index 0000000000..5dd5886d1c
--- /dev/null
+++ b/tests/docs/getting_started/context/test_annotated.py
@@ -0,0 +1,90 @@
+import pytest
+
+from tests.marks import (
+ python39,
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@python39
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_annotated_kafka() -> None:
+ from docs.docs_src.getting_started.context.kafka.annotated import (
+ base_handler,
+ broker,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@python39
+@pytest.mark.asyncio()
+@require_confluent
+async def test_annotated_confluent() -> None:
+ from docs.docs_src.getting_started.context.confluent.annotated import (
+ base_handler,
+ broker,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@python39
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_annotated_rabbit() -> None:
+ from docs.docs_src.getting_started.context.rabbit.annotated import (
+ base_handler,
+ broker,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@python39
+@pytest.mark.asyncio()
+@require_nats
+async def test_annotated_nats() -> None:
+ from docs.docs_src.getting_started.context.nats.annotated import (
+ base_handler,
+ broker,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@python39
+@pytest.mark.asyncio()
+@require_redis
+async def test_annotated_redis() -> None:
+ from docs.docs_src.getting_started.context.redis.annotated import (
+ base_handler,
+ broker,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
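
The annotated variants exercise typing.Annotated-based context injection, which is why they carry the @python39 gate. The docs modules themselves are outside this diff; a minimal sketch of what the Kafka one presumably contains:

    from typing import Annotated

    from faststream import Context
    from faststream.kafka import KafkaBroker, KafkaMessage

    broker = KafkaBroker("localhost:9092")


    @broker.subscriber("test")
    async def base_handler(
        body: str,
        # the raw message is injected from the handler context
        # via the Annotated metadata, not parsed from the payload
        message: Annotated[KafkaMessage, Context()],
    ) -> None:
        ...
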
diff --git a/tests/docs/getting_started/context/test_base.py b/tests/docs/getting_started/context/test_base.py
new file mode 100644
index 0000000000..8785fbb7bc
--- /dev/null
+++ b/tests/docs/getting_started/context/test_base.py
@@ -0,0 +1,72 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_base_kafka() -> None:
+ from docs.docs_src.getting_started.context.kafka.base import base_handler, broker
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_base_confluent() -> None:
+ from docs.docs_src.getting_started.context.confluent.base import (
+ base_handler,
+ broker,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_base_rabbit() -> None:
+ from docs.docs_src.getting_started.context.rabbit.base import base_handler, broker
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_base_nats() -> None:
+ from docs.docs_src.getting_started.context.nats.base import base_handler, broker
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_base_redis() -> None:
+ from docs.docs_src.getting_started.context.redis.base import base_handler, broker
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish("Hi!", "test")
+
+ base_handler.mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/context/test_cast.py b/tests/docs/getting_started/context/test_cast.py
similarity index 88%
rename from tests/a_docs/getting_started/context/test_cast.py
rename to tests/docs/getting_started/context/test_cast.py
index 33cbfbedcc..ebc15b0579 100644
--- a/tests/a_docs/getting_started/context/test_cast.py
+++ b/tests/docs/getting_started/context/test_cast.py
@@ -9,9 +9,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_cast_kafka():
+async def test_cast_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.cast import (
broker,
handle,
@@ -29,9 +29,9 @@ async def test_cast_kafka():
handle_int.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_cast_confluent():
+async def test_cast_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.cast import (
broker,
handle,
@@ -49,9 +49,9 @@ async def test_cast_confluent():
handle_int.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_cast_rabbit():
+async def test_cast_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.cast import (
broker,
handle,
@@ -69,9 +69,9 @@ async def test_cast_rabbit():
handle_int.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_cast_nats():
+async def test_cast_nats() -> None:
from docs.docs_src.getting_started.context.nats.cast import (
broker,
handle,
@@ -89,9 +89,9 @@ async def test_cast_nats():
handle_int.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_cast_redis():
+async def test_cast_redis() -> None:
from docs.docs_src.getting_started.context.redis.cast import (
broker,
handle,
diff --git a/tests/a_docs/getting_started/context/test_custom_global.py b/tests/docs/getting_started/context/test_custom_global.py
similarity index 83%
rename from tests/a_docs/getting_started/context/test_custom_global.py
rename to tests/docs/getting_started/context/test_custom_global.py
index 1089195a20..80a996e7d9 100644
--- a/tests/a_docs/getting_started/context/test_custom_global.py
+++ b/tests/docs/getting_started/context/test_custom_global.py
@@ -10,9 +10,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_custom_global_context_kafka():
+async def test_custom_global_context_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.custom_global_context import (
app,
broker,
@@ -26,9 +26,9 @@ async def test_custom_global_context_kafka():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_custom_global_context_confluent():
+async def test_custom_global_context_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.custom_global_context import (
app,
broker,
@@ -42,9 +42,9 @@ async def test_custom_global_context_confluent():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_custom_global_context_rabbit():
+async def test_custom_global_context_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.custom_global_context import (
app,
broker,
@@ -58,9 +58,9 @@ async def test_custom_global_context_rabbit():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_custom_global_context_nats():
+async def test_custom_global_context_nats() -> None:
from docs.docs_src.getting_started.context.nats.custom_global_context import (
app,
broker,
@@ -74,9 +74,9 @@ async def test_custom_global_context_nats():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_custom_global_context_redis():
+async def test_custom_global_context_redis() -> None:
from docs.docs_src.getting_started.context.redis.custom_global_context import (
app,
broker,
diff --git a/tests/a_docs/getting_started/context/test_custom_local.py b/tests/docs/getting_started/context/test_custom_local.py
similarity index 82%
rename from tests/a_docs/getting_started/context/test_custom_local.py
rename to tests/docs/getting_started/context/test_custom_local.py
index 4761aa3b5a..a1fce85f4c 100644
--- a/tests/a_docs/getting_started/context/test_custom_local.py
+++ b/tests/docs/getting_started/context/test_custom_local.py
@@ -9,9 +9,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_custom_local_context_kafka():
+async def test_custom_local_context_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.custom_local_context import (
broker,
handle,
@@ -24,9 +24,9 @@ async def test_custom_local_context_kafka():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_custom_local_context_confluent():
+async def test_custom_local_context_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.custom_local_context import (
broker,
handle,
@@ -39,9 +39,9 @@ async def test_custom_local_context_confluent():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_custom_local_context_rabbit():
+async def test_custom_local_context_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.custom_local_context import (
broker,
handle,
@@ -54,9 +54,9 @@ async def test_custom_local_context_rabbit():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_custom_local_context_nats():
+async def test_custom_local_context_nats() -> None:
from docs.docs_src.getting_started.context.nats.custom_local_context import (
broker,
handle,
@@ -69,9 +69,9 @@ async def test_custom_local_context_nats():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_custom_local_context_redis():
+async def test_custom_local_context_redis() -> None:
from docs.docs_src.getting_started.context.redis.custom_local_context import (
broker,
handle,
diff --git a/tests/a_docs/getting_started/context/test_default_arguments.py b/tests/docs/getting_started/context/test_default_arguments.py
similarity index 83%
rename from tests/a_docs/getting_started/context/test_default_arguments.py
rename to tests/docs/getting_started/context/test_default_arguments.py
index 0f0360bbb6..31acc73b2c 100644
--- a/tests/a_docs/getting_started/context/test_default_arguments.py
+++ b/tests/docs/getting_started/context/test_default_arguments.py
@@ -9,9 +9,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_default_arguments_kafka():
+async def test_default_arguments_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.default_arguments import (
broker,
handle,
@@ -24,9 +24,9 @@ async def test_default_arguments_kafka():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_default_arguments_confluent():
+async def test_default_arguments_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.default_arguments import (
broker,
handle,
@@ -39,9 +39,9 @@ async def test_default_arguments_confluent():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_default_arguments_rabbit():
+async def test_default_arguments_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.default_arguments import (
broker,
handle,
@@ -54,9 +54,9 @@ async def test_default_arguments_rabbit():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_default_arguments_nats():
+async def test_default_arguments_nats() -> None:
from docs.docs_src.getting_started.context.nats.default_arguments import (
broker,
handle,
@@ -69,9 +69,9 @@ async def test_default_arguments_nats():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_default_arguments_redis():
+async def test_default_arguments_redis() -> None:
from docs.docs_src.getting_started.context.redis.default_arguments import (
broker,
handle,
diff --git a/tests/a_docs/getting_started/context/test_existed_context.py b/tests/docs/getting_started/context/test_existed_context.py
similarity index 80%
rename from tests/a_docs/getting_started/context/test_existed_context.py
rename to tests/docs/getting_started/context/test_existed_context.py
index 22c14f4760..ad9211fda7 100644
--- a/tests/a_docs/getting_started/context/test_existed_context.py
+++ b/tests/docs/getting_started/context/test_existed_context.py
@@ -9,16 +9,16 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_existed_context_kafka():
+async def test_existed_context_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.existed_context import (
broker_object,
)
from faststream.kafka import TestKafkaBroker
@broker_object.subscriber("response")
- async def resp(): ...
+ async def resp() -> None: ...
async with TestKafkaBroker(broker_object) as br:
await br.publish("Hi!", "test-topic")
@@ -27,16 +27,16 @@ async def resp(): ...
assert resp.mock.call_count == 2
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_existed_context_confluent():
+async def test_existed_context_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.existed_context import (
broker_object,
)
from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
@broker_object.subscriber("response")
- async def resp(): ...
+ async def resp() -> None: ...
async with TestConfluentKafkaBroker(broker_object) as br:
await br.publish("Hi!", "test-topic")
@@ -45,16 +45,16 @@ async def resp(): ...
assert resp.mock.call_count == 2
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_existed_context_rabbit():
+async def test_existed_context_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.existed_context import (
broker_object,
)
from faststream.rabbit import TestRabbitBroker
@broker_object.subscriber("response")
- async def resp(): ...
+ async def resp() -> None: ...
async with TestRabbitBroker(broker_object) as br:
await br.publish("Hi!", "test-queue")
@@ -63,16 +63,16 @@ async def resp(): ...
assert resp.mock.call_count == 2
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_existed_context_nats():
+async def test_existed_context_nats() -> None:
from docs.docs_src.getting_started.context.nats.existed_context import (
broker_object,
)
from faststream.nats import TestNatsBroker
@broker_object.subscriber("response")
- async def resp(): ...
+ async def resp() -> None: ...
async with TestNatsBroker(broker_object) as br:
await br.publish("Hi!", "test-subject")
@@ -81,16 +81,16 @@ async def resp(): ...
assert resp.mock.call_count == 2
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_existed_context_redis():
+async def test_existed_context_redis() -> None:
from docs.docs_src.getting_started.context.redis.existed_context import (
broker_object,
)
from faststream.redis import TestRedisBroker
@broker_object.subscriber("response")
- async def resp(): ...
+ async def resp() -> None: ...
async with TestRedisBroker(broker_object) as br:
await br.publish("Hi!", "test-channel")
diff --git a/tests/a_docs/getting_started/context/test_fields_access.py b/tests/docs/getting_started/context/test_fields_access.py
similarity index 84%
rename from tests/a_docs/getting_started/context/test_fields_access.py
rename to tests/docs/getting_started/context/test_fields_access.py
index 084ade7abb..10b6d07bd7 100644
--- a/tests/a_docs/getting_started/context/test_fields_access.py
+++ b/tests/docs/getting_started/context/test_fields_access.py
@@ -9,9 +9,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_fields_access_kafka():
+async def test_fields_access_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.fields_access import (
broker,
handle,
@@ -24,9 +24,9 @@ async def test_fields_access_kafka():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_fields_access_confluent():
+async def test_fields_access_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.fields_access import (
broker,
handle,
@@ -39,9 +39,9 @@ async def test_fields_access_confluent():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_fields_access_rabbit():
+async def test_fields_access_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.fields_access import (
broker,
handle,
@@ -54,9 +54,9 @@ async def test_fields_access_rabbit():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_fields_access_nats():
+async def test_fields_access_nats() -> None:
from docs.docs_src.getting_started.context.nats.fields_access import (
broker,
handle,
@@ -69,9 +69,9 @@ async def test_fields_access_nats():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_fields_access_redis():
+async def test_fields_access_redis() -> None:
from docs.docs_src.getting_started.context.redis.fields_access import (
broker,
handle,
diff --git a/tests/docs/getting_started/context/test_initial.py b/tests/docs/getting_started/context/test_initial.py
new file mode 100644
index 0000000000..7b973b8dfc
--- /dev/null
+++ b/tests/docs/getting_started/context/test_initial.py
@@ -0,0 +1,85 @@
+import pytest
+
+from tests.marks import (
+ python39,
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@python39
+@require_aiokafka
+async def test_kafka() -> None:
+ from docs.docs_src.getting_started.context.kafka.initial import broker
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish("", "test-topic")
+ await br.publish("", "test-topic")
+
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_confluent
+async def test_confluent() -> None:
+ from docs.docs_src.getting_started.context.confluent.initial import broker
+ from faststream.confluent import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish("", "test-topic")
+ await br.publish("", "test-topic")
+
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_aiopika
+async def test_rabbit() -> None:
+ from docs.docs_src.getting_started.context.rabbit.initial import broker
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br:
+ await br.publish("", "test-queue")
+ await br.publish("", "test-queue")
+
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_nats
+async def test_nats() -> None:
+ from docs.docs_src.getting_started.context.nats.initial import broker
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br:
+ await br.publish("", "test-subject")
+ await br.publish("", "test-subject")
+
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_redis
+async def test_redis() -> None:
+ from docs.docs_src.getting_started.context.redis.initial import broker
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish("", "test-channel")
+ await br.publish("", "test-channel")
+
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
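
The initial tests target handlers that accumulate message bodies in a context-held list; since the broker context outlives the in-memory test broker, each test clears it afterwards to stay independent. The docs module presumably follows FastStream's Context(initial=...) pattern:

    from faststream import Context
    from faststream.kafka import KafkaBroker

    broker = KafkaBroker()


    @broker.subscriber("test-topic")
    async def handle(
        msg: str,
        # the first access creates the list via the `initial` factory;
        # subsequent messages see and mutate the same context value
        collector: list = Context(initial=list),
    ) -> None:
        collector.append(msg)
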
diff --git a/tests/a_docs/getting_started/context/test_manual_local_context.py b/tests/docs/getting_started/context/test_manual_local_context.py
similarity index 82%
rename from tests/a_docs/getting_started/context/test_manual_local_context.py
rename to tests/docs/getting_started/context/test_manual_local_context.py
index f6c119132e..950ff32a60 100644
--- a/tests/a_docs/getting_started/context/test_manual_local_context.py
+++ b/tests/docs/getting_started/context/test_manual_local_context.py
@@ -9,9 +9,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_manual_local_context_kafka():
+async def test_manual_local_context_kafka() -> None:
from docs.docs_src.getting_started.context.kafka.manual_local_context import (
broker,
handle,
@@ -24,9 +24,9 @@ async def test_manual_local_context_kafka():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_manual_local_context_confluent():
+async def test_manual_local_context_confluent() -> None:
from docs.docs_src.getting_started.context.confluent.manual_local_context import (
broker,
handle,
@@ -39,9 +39,9 @@ async def test_manual_local_context_confluent():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_manual_local_context_rabbit():
+async def test_manual_local_context_rabbit() -> None:
from docs.docs_src.getting_started.context.rabbit.manual_local_context import (
broker,
handle,
@@ -54,9 +54,9 @@ async def test_manual_local_context_rabbit():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_manual_local_context_nats():
+async def test_manual_local_context_nats() -> None:
from docs.docs_src.getting_started.context.nats.manual_local_context import (
broker,
handle,
@@ -69,9 +69,9 @@ async def test_manual_local_context_nats():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_manual_local_context_redis():
+async def test_manual_local_context_redis() -> None:
from docs.docs_src.getting_started.context.redis.manual_local_context import (
broker,
handle,
diff --git a/tests/a_docs/getting_started/context/test_nested.py b/tests/docs/getting_started/context/test_nested.py
similarity index 87%
rename from tests/a_docs/getting_started/context/test_nested.py
rename to tests/docs/getting_started/context/test_nested.py
index c782af41a1..070bc45e51 100644
--- a/tests/a_docs/getting_started/context/test_nested.py
+++ b/tests/docs/getting_started/context/test_nested.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test():
+async def test() -> None:
from docs.docs_src.getting_started.context.nested import broker, handler
from faststream.rabbit import TestRabbitBroker
diff --git a/tests/a_docs/getting_started/serialization/__init__.py b/tests/docs/getting_started/dependencies/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/serialization/__init__.py
rename to tests/docs/getting_started/dependencies/__init__.py
diff --git a/tests/a_docs/getting_started/subscription/__init__.py b/tests/docs/getting_started/dependencies/basic/__init__.py
similarity index 100%
rename from tests/a_docs/getting_started/subscription/__init__.py
rename to tests/docs/getting_started/dependencies/basic/__init__.py
diff --git a/tests/a_docs/getting_started/dependencies/basic/test_base.py b/tests/docs/getting_started/dependencies/basic/test_base.py
similarity index 100%
rename from tests/a_docs/getting_started/dependencies/basic/test_base.py
rename to tests/docs/getting_started/dependencies/basic/test_base.py
diff --git a/tests/docs/getting_started/dependencies/basic/test_depends.py b/tests/docs/getting_started/dependencies/basic/test_depends.py
new file mode 100644
index 0000000000..4a54aa65a6
--- /dev/null
+++ b/tests/docs/getting_started/dependencies/basic/test_depends.py
@@ -0,0 +1,79 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_depends_kafka() -> None:
+ from docs.docs_src.getting_started.dependencies.basic.kafka.depends import (
+ broker,
+ handler,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker):
+ await broker.publish({}, "test")
+ handler.mock.assert_called_once_with({})
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_depends_confluent() -> None:
+ from docs.docs_src.getting_started.dependencies.basic.confluent.depends import (
+ broker,
+ handler,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker):
+ await broker.publish({}, "test")
+ handler.mock.assert_called_once_with({})
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_depends_rabbit() -> None:
+ from docs.docs_src.getting_started.dependencies.basic.rabbit.depends import (
+ broker,
+ handler,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker):
+ await broker.publish({}, "test")
+ handler.mock.assert_called_once_with({})
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_depends_nats() -> None:
+ from docs.docs_src.getting_started.dependencies.basic.nats.depends import (
+ broker,
+ handler,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker):
+ await broker.publish({}, "test")
+ handler.mock.assert_called_once_with({})
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_depends_redis() -> None:
+ from docs.docs_src.getting_started.dependencies.basic.redis.depends import (
+ broker,
+ handler,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker):
+ await broker.publish({}, "test")
+ handler.mock.assert_called_once_with({})
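
These depends tests publish an empty dict and only assert that the handler fired, so the dependency's effect stays inside the handler. The docs module is not shown; presumably it is the standard fast-depends integration, along these lines:

    from faststream import Depends
    from faststream.kafka import KafkaBroker

    broker = KafkaBroker()


    def simple_dependency() -> str:
        # hypothetical dependency, resolved once per incoming message
        return "Dependency called!"


    @broker.subscriber("test")
    async def handler(body: dict, dep: str = Depends(simple_dependency)) -> None:
        assert dep == "Dependency called!"
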
diff --git a/tests/a_docs/getting_started/dependencies/basic/test_nested_depends.py b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py
similarity index 83%
rename from tests/a_docs/getting_started/dependencies/basic/test_nested_depends.py
rename to tests/docs/getting_started/dependencies/basic/test_nested_depends.py
index a09d1aa27d..4acdadb0d9 100644
--- a/tests/a_docs/getting_started/dependencies/basic/test_nested_depends.py
+++ b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py
@@ -9,9 +9,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_nested_depends_kafka():
+async def test_nested_depends_kafka() -> None:
from docs.docs_src.getting_started.dependencies.basic.kafka.nested_depends import (
broker,
handler,
@@ -23,9 +23,9 @@ async def test_nested_depends_kafka():
handler.mock.assert_called_once_with({})
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_nested_depends_confluent():
+async def test_nested_depends_confluent() -> None:
from docs.docs_src.getting_started.dependencies.basic.confluent.nested_depends import (
broker,
handler,
@@ -37,9 +37,9 @@ async def test_nested_depends_confluent():
handler.mock.assert_called_once_with({})
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_nested_depends_rabbit():
+async def test_nested_depends_rabbit() -> None:
from docs.docs_src.getting_started.dependencies.basic.rabbit.nested_depends import (
broker,
handler,
@@ -51,9 +51,9 @@ async def test_nested_depends_rabbit():
handler.mock.assert_called_once_with({})
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_nested_depends_nats():
+async def test_nested_depends_nats() -> None:
from docs.docs_src.getting_started.dependencies.basic.nats.nested_depends import (
broker,
handler,
@@ -65,9 +65,9 @@ async def test_nested_depends_nats():
handler.mock.assert_called_once_with({})
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_nested_depends_redis():
+async def test_nested_depends_redis() -> None:
from docs.docs_src.getting_started.dependencies.basic.redis.nested_depends import (
broker,
handler,
diff --git a/tests/docs/getting_started/dependencies/test_basic.py b/tests/docs/getting_started/dependencies/test_basic.py
new file mode 100644
index 0000000000..98acd5658f
--- /dev/null
+++ b/tests/docs/getting_started/dependencies/test_basic.py
@@ -0,0 +1,23 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import require_aiokafka
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_basic_kafka() -> None:
+ from docs.docs_src.getting_started.dependencies.basic_kafka import (
+ app,
+ broker,
+ handle,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with(
+ {
+ "name": "John",
+ "user_id": 1,
+ },
+ )
diff --git a/tests/a_docs/getting_started/dependencies/test_class.py b/tests/docs/getting_started/dependencies/test_class.py
similarity index 85%
rename from tests/a_docs/getting_started/dependencies/test_class.py
rename to tests/docs/getting_started/dependencies/test_class.py
index 5bbfd16850..74fe4d89e3 100644
--- a/tests/a_docs/getting_started/dependencies/test_class.py
+++ b/tests/docs/getting_started/dependencies/test_class.py
@@ -4,9 +4,9 @@
from tests.marks import require_aiokafka
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_basic_kafka():
+async def test_basic_kafka() -> None:
from docs.docs_src.getting_started.dependencies.class_kafka import (
app,
broker,
@@ -19,5 +19,5 @@ async def test_basic_kafka():
{
"name": "John",
"user_id": 1,
- }
+ },
)
diff --git a/tests/a_docs/getting_started/dependencies/test_global.py b/tests/docs/getting_started/dependencies/test_global.py
similarity index 88%
rename from tests/a_docs/getting_started/dependencies/test_global.py
rename to tests/docs/getting_started/dependencies/test_global.py
index 4d543aabb4..528ed0954b 100644
--- a/tests/a_docs/getting_started/dependencies/test_global.py
+++ b/tests/docs/getting_started/dependencies/test_global.py
@@ -4,9 +4,9 @@
from tests.marks import require_aiokafka
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_global_kafka():
+async def test_global_kafka() -> None:
from docs.docs_src.getting_started.dependencies.global_kafka import (
app,
broker,
@@ -19,7 +19,7 @@ async def test_global_kafka():
{
"name": "John",
"user_id": 1,
- }
+ },
)
with pytest.raises(ValueError): # noqa: PT011
diff --git a/tests/a_docs/getting_started/dependencies/test_global_broker.py b/tests/docs/getting_started/dependencies/test_global_broker.py
similarity index 87%
rename from tests/a_docs/getting_started/dependencies/test_global_broker.py
rename to tests/docs/getting_started/dependencies/test_global_broker.py
index c0b9de9295..5f033c8135 100644
--- a/tests/a_docs/getting_started/dependencies/test_global_broker.py
+++ b/tests/docs/getting_started/dependencies/test_global_broker.py
@@ -4,9 +4,9 @@
from tests.marks import require_aiokafka
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_global_broker_kafka():
+async def test_global_broker_kafka() -> None:
from docs.docs_src.getting_started.dependencies.global_broker_kafka import (
app,
broker,
@@ -19,7 +19,7 @@ async def test_global_broker_kafka():
{
"name": "John",
"user_id": 1,
- }
+ },
)
with pytest.raises(ValueError): # noqa: PT011
diff --git a/tests/a_docs/getting_started/dependencies/test_sub_dep.py b/tests/docs/getting_started/dependencies/test_sub_dep.py
similarity index 88%
rename from tests/a_docs/getting_started/dependencies/test_sub_dep.py
rename to tests/docs/getting_started/dependencies/test_sub_dep.py
index 832b0c853c..e83ad3d034 100644
--- a/tests/a_docs/getting_started/dependencies/test_sub_dep.py
+++ b/tests/docs/getting_started/dependencies/test_sub_dep.py
@@ -4,9 +4,9 @@
from tests.marks import require_aiokafka
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_sub_dep_kafka():
+async def test_sub_dep_kafka() -> None:
from docs.docs_src.getting_started.dependencies.sub_dep_kafka import (
app,
broker,
@@ -19,7 +19,7 @@ async def test_sub_dep_kafka():
{
"name": "John",
"user_id": 1,
- }
+ },
)
with pytest.raises(AssertionError):
diff --git a/tests/a_docs/getting_started/dependencies/test_yield.py b/tests/docs/getting_started/dependencies/test_yield.py
similarity index 86%
rename from tests/a_docs/getting_started/dependencies/test_yield.py
rename to tests/docs/getting_started/dependencies/test_yield.py
index 7ad8615a3f..3079c2d98b 100644
--- a/tests/a_docs/getting_started/dependencies/test_yield.py
+++ b/tests/docs/getting_started/dependencies/test_yield.py
@@ -4,9 +4,9 @@
from tests.marks import require_aiokafka
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_yield_kafka():
+async def test_yield_kafka() -> None:
from docs.docs_src.getting_started.dependencies.yield_kafka import (
app,
broker,
diff --git a/tests/a_docs/index/__init__.py b/tests/docs/getting_started/index/__init__.py
similarity index 100%
rename from tests/a_docs/index/__init__.py
rename to tests/docs/getting_started/index/__init__.py
diff --git a/tests/docs/getting_started/index/test_basic.py b/tests/docs/getting_started/index/test_basic.py
new file mode 100644
index 0000000000..5abf32818d
--- /dev/null
+++ b/tests/docs/getting_started/index/test_basic.py
@@ -0,0 +1,69 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_quickstart_index_kafka() -> None:
+ from docs.docs_src.getting_started.index.base_kafka import base_handler, broker
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish("", "test")
+
+ base_handler.mock.assert_called_once_with("")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_quickstart_index_confluent() -> None:
+ from docs.docs_src.getting_started.index.base_confluent import base_handler, broker
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker) as br:
+ await br.publish("", "test")
+
+ base_handler.mock.assert_called_once_with("")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_quickstart_index_rabbit() -> None:
+ from docs.docs_src.getting_started.index.base_rabbit import base_handler, broker
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br:
+ await br.publish("", "test")
+
+ base_handler.mock.assert_called_once_with("")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_quickstart_index_nats() -> None:
+ from docs.docs_src.getting_started.index.base_nats import base_handler, broker
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br:
+ await br.publish("", "test")
+
+ base_handler.mock.assert_called_once_with("")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_quickstart_index_redis() -> None:
+ from docs.docs_src.getting_started.index.base_redis import base_handler, broker
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish("", "test")
+
+ base_handler.mock.assert_called_once_with("")
diff --git a/tests/a_docs/integration/__init__.py b/tests/docs/getting_started/lifespan/__init__.py
similarity index 100%
rename from tests/a_docs/integration/__init__.py
rename to tests/docs/getting_started/lifespan/__init__.py
diff --git a/tests/docs/getting_started/lifespan/test_basic.py b/tests/docs/getting_started/lifespan/test_basic.py
new file mode 100644
index 0000000000..97706e94ab
--- /dev/null
+++ b/tests/docs/getting_started/lifespan/test_basic.py
@@ -0,0 +1,77 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ pydantic_v2,
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+from tests.mocks import mock_pydantic_settings_env
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_rabbit_basic_lifespan() -> None:
+ from faststream.rabbit import TestRabbitBroker
+
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.lifespan.rabbit.basic import app, broker
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ assert app.context.get("settings").host == "localhost"
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_kafka_basic_lifespan() -> None:
+ from faststream.kafka import TestKafkaBroker
+
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.lifespan.kafka.basic import app, broker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ assert app.context.get("settings").host == "localhost"
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+@require_confluent
+async def test_confluent_basic_lifespan() -> None:
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.lifespan.confluent.basic import app, broker
+
+ async with TestConfluentKafkaBroker(broker), TestApp(app):
+ assert app.context.get("settings").host == "localhost"
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+@require_nats
+async def test_nats_basic_lifespan() -> None:
+ from faststream.nats import TestNatsBroker
+
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.lifespan.nats.basic import app, broker
+
+ async with TestNatsBroker(broker), TestApp(app):
+ assert app.context.get("settings").host == "localhost"
+
+
+@pydantic_v2
+@pytest.mark.asyncio()
+@require_redis
+async def test_redis_basic_lifespan() -> None:
+ from faststream.redis import TestRedisBroker
+
+ with mock_pydantic_settings_env({"host": "localhost"}):
+ from docs.docs_src.getting_started.lifespan.redis.basic import app, broker
+
+ async with TestRedisBroker(broker), TestApp(app):
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/docs/getting_started/lifespan/test_ml.py b/tests/docs/getting_started/lifespan/test_ml.py
new file mode 100644
index 0000000000..7c205172a6
--- /dev/null
+++ b/tests/docs/getting_started/lifespan/test_ml.py
@@ -0,0 +1,70 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_rabbit_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.rabbit.ml import app, broker, predict
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_kafka_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.kafka.ml import app, broker, predict
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_confluent_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.confluent.ml import app, broker, predict
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_nats_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.nats.ml import app, broker, predict
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_redis_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.redis.ml import app, broker, predict
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
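
The ml lifespan tests lean on the request/reply API: broker.request(...) publishes and awaits the response message, and .decode() yields the deserialized payload. For the assertion above to hold, the docs handler presumably multiplies its input and replies with a dict, roughly:

    from faststream import FastStream
    from faststream.kafka import KafkaBroker

    broker = KafkaBroker()
    app = FastStream(broker)
    ml_models = {}  # stand-in for state loaded during the lifespan


    @app.on_startup
    async def setup_model() -> None:
        # hypothetical "model": 1.0 in, 42.0 out, matching the test
        ml_models["answer"] = lambda x: x * 42.0


    @broker.subscriber("test")
    async def predict(x: float) -> dict:
        # the return value becomes the reply to broker.request(...)
        return {"result": ml_models["answer"](x)}
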
diff --git a/tests/docs/getting_started/lifespan/test_ml_context.py b/tests/docs/getting_started/lifespan/test_ml_context.py
new file mode 100644
index 0000000000..4765957d61
--- /dev/null
+++ b/tests/docs/getting_started/lifespan/test_ml_context.py
@@ -0,0 +1,90 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_rabbit_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.rabbit.ml_context import (
+ app,
+ broker,
+ predict,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_kafka_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.kafka.ml_context import (
+ app,
+ broker,
+ predict,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_confluent_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.confluent.ml_context import (
+ app,
+ broker,
+ predict,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_nats_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.nats.ml_context import (
+ app,
+ broker,
+ predict,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_redis_ml_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.redis.ml_context import (
+ app,
+ broker,
+ predict,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker), TestApp(app):
+ assert await (await broker.request(1.0, "test")).decode() == {"result": 42.0}
+
+ predict.mock.assert_called_once_with(1.0)
diff --git a/tests/docs/getting_started/lifespan/test_multi.py b/tests/docs/getting_started/lifespan/test_multi.py
new file mode 100644
index 0000000000..8d4b0e2a98
--- /dev/null
+++ b/tests/docs/getting_started/lifespan/test_multi.py
@@ -0,0 +1,11 @@
+import pytest
+
+from faststream import TestApp
+
+
+@pytest.mark.asyncio()
+async def test_multi_lifespan() -> None:
+ from docs.docs_src.getting_started.lifespan.multiple import app
+
+ async with TestApp(app):
+ assert app.context.get("field") == 1
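
test_multi is the only new lifespan test with no broker mark at all, which fits a docs example demonstrating several startup hooks sharing one application context. A sketch under that assumption (the real multiple.py is not in this diff):

    from faststream import ContextRepo, FastStream

    app = FastStream()  # assumed: no broker is needed for a pure lifespan demo


    @app.on_startup
    async def set_field(context: ContextRepo) -> None:
        # one hook publishes a value into the global context...
        context.set_global("field", 1)


    @app.on_startup
    async def read_field(context: ContextRepo) -> None:
        # ...and a later hook (or the test) reads it back
        assert context.get("field") == 1
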
diff --git a/tests/docs/getting_started/lifespan/test_testing.py b/tests/docs/getting_started/lifespan/test_testing.py
new file mode 100644
index 0000000000..83621708c7
--- /dev/null
+++ b/tests/docs/getting_started/lifespan/test_testing.py
@@ -0,0 +1,65 @@
+import pytest
+
+from tests.marks import (
+ python39,
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@python39
+@require_redis
+async def test_lifespan_redis() -> None:
+ from docs.docs_src.getting_started.lifespan.redis.testing import (
+ test_lifespan as _test_lifespan_red,
+ )
+
+ await _test_lifespan_red()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_confluent
+async def test_lifespan_confluent() -> None:
+ from docs.docs_src.getting_started.lifespan.confluent.testing import (
+ test_lifespan as _test_lifespan_confluent,
+ )
+
+ await _test_lifespan_confluent()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_aiokafka
+async def test_lifespan_kafka() -> None:
+ from docs.docs_src.getting_started.lifespan.kafka.testing import (
+ test_lifespan as _test_lifespan_k,
+ )
+
+ await _test_lifespan_k()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_aiopika
+async def test_lifespan_rabbit() -> None:
+ from docs.docs_src.getting_started.lifespan.rabbit.testing import (
+ test_lifespan as _test_lifespan_r,
+ )
+
+ await _test_lifespan_r()
+
+
+@pytest.mark.asyncio()
+@python39
+@require_nats
+async def test_lifespan_nats() -> None:
+ from docs.docs_src.getting_started.lifespan.nats.testing import (
+ test_lifespan as _test_lifespan_n,
+ )
+
+ await _test_lifespan_n()
diff --git a/tests/a_docs/integration/fastapi/__init__.py b/tests/docs/getting_started/publishing/__init__.py
similarity index 100%
rename from tests/a_docs/integration/fastapi/__init__.py
rename to tests/docs/getting_started/publishing/__init__.py
diff --git a/tests/docs/getting_started/publishing/test_broker.py b/tests/docs/getting_started/publishing/test_broker.py
new file mode 100644
index 0000000000..e60b37bd19
--- /dev/null
+++ b/tests/docs/getting_started/publishing/test_broker.py
@@ -0,0 +1,90 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_broker_kafka() -> None:
+ from docs.docs_src.getting_started.publishing.kafka.broker import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_broker_confluent() -> None:
+ from docs.docs_src.getting_started.publishing.confluent.broker import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_broker_rabbit() -> None:
+ from docs.docs_src.getting_started.publishing.rabbit.broker import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_broker_nats() -> None:
+ from docs.docs_src.getting_started.publishing.nats.broker import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_broker_redis() -> None:
+ from docs.docs_src.getting_started.publishing.redis.broker import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
diff --git a/tests/a_docs/getting_started/publishing/test_broker_context.py b/tests/docs/getting_started/publishing/test_broker_context.py
similarity index 81%
rename from tests/a_docs/getting_started/publishing/test_broker_context.py
rename to tests/docs/getting_started/publishing/test_broker_context.py
index 9893ef819c..0128b2bbc5 100644
--- a/tests/a_docs/getting_started/publishing/test_broker_context.py
+++ b/tests/docs/getting_started/publishing/test_broker_context.py
@@ -10,10 +10,10 @@
)
-@pytest.mark.asyncio
-@pytest.mark.kafka
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
@require_aiokafka
-async def test_broker_context_kafka():
+async def test_broker_context_kafka() -> None:
from docs.docs_src.getting_started.publishing.kafka.broker_context import (
app,
broker,
@@ -26,10 +26,10 @@ async def test_broker_context_kafka():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
-@pytest.mark.confluent
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
@require_confluent
-async def test_broker_context_confluent():
+async def test_broker_context_confluent() -> None:
from docs.docs_src.getting_started.publishing.confluent.broker_context import (
app,
broker,
@@ -42,10 +42,10 @@ async def test_broker_context_confluent():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
-@pytest.mark.nats
+@pytest.mark.asyncio()
+@pytest.mark.nats()
@require_nats
-async def test_broker_context_nats():
+async def test_broker_context_nats() -> None:
from docs.docs_src.getting_started.publishing.nats.broker_context import (
app,
broker,
@@ -58,10 +58,10 @@ async def test_broker_context_nats():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
-@pytest.mark.rabbit
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
@require_aiopika
-async def test_broker_context_rabbit():
+async def test_broker_context_rabbit() -> None:
from docs.docs_src.getting_started.publishing.rabbit.broker_context import (
app,
broker,
@@ -74,10 +74,10 @@ async def test_broker_context_rabbit():
handle.mock.assert_called_once_with("Hi!")
-@pytest.mark.asyncio
-@pytest.mark.redis
+@pytest.mark.asyncio()
+@pytest.mark.redis()
@require_redis
-async def test_broker_context_redis():
+async def test_broker_context_redis() -> None:
from docs.docs_src.getting_started.publishing.redis.broker_context import (
app,
broker,
diff --git a/tests/docs/getting_started/publishing/test_decorator.py b/tests/docs/getting_started/publishing/test_decorator.py
new file mode 100644
index 0000000000..1ded7fdf57
--- /dev/null
+++ b/tests/docs/getting_started/publishing/test_decorator.py
@@ -0,0 +1,95 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_decorator_kafka() -> None:
+ from docs.docs_src.getting_started.publishing.kafka.decorator import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+ next(iter(broker._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_decorator_confluent() -> None:
+ from docs.docs_src.getting_started.publishing.confluent.decorator import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+ next(iter(broker._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_decorator_rabbit() -> None:
+ from docs.docs_src.getting_started.publishing.rabbit.decorator import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+ next(iter(broker._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_decorator_nats() -> None:
+ from docs.docs_src.getting_started.publishing.nats.decorator import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+ next(iter(broker._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_decorator_redis() -> None:
+ from docs.docs_src.getting_started.publishing.redis.decorator import (
+ app,
+ broker,
+ handle,
+ handle_next,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with("")
+ handle_next.mock.assert_called_once_with("Hi!")
+ next(iter(broker._publishers)).mock.assert_called_once_with("Hi!")
diff --git a/tests/docs/getting_started/publishing/test_direct.py b/tests/docs/getting_started/publishing/test_direct.py
new file mode 100644
index 0000000000..de6030506a
--- /dev/null
+++ b/tests/docs/getting_started/publishing/test_direct.py
@@ -0,0 +1,59 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_handle_kafka() -> None:
+ from docs.docs_src.getting_started.publishing.kafka.direct_testing import (
+ test_handle as test_handle_k,
+ )
+
+ await test_handle_k()
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_handle_confluent() -> None:
+ from docs.docs_src.getting_started.publishing.confluent.direct_testing import (
+ test_handle as test_handle_confluent,
+ )
+
+ await test_handle_confluent()
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_handle_rabbit() -> None:
+ from docs.docs_src.getting_started.publishing.rabbit.direct_testing import (
+ test_handle as test_handle_r,
+ )
+
+ await test_handle_r()
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_handle_nats() -> None:
+ from docs.docs_src.getting_started.publishing.nats.direct_testing import (
+ test_handle as test_handle_n,
+ )
+
+ await test_handle_n()
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_handle_redis() -> None:
+ from docs.docs_src.getting_started.publishing.redis.direct_testing import (
+ test_handle as test_handle_red,
+ )
+
+ await test_handle_red()
diff --git a/tests/docs/getting_started/publishing/test_object.py b/tests/docs/getting_started/publishing/test_object.py
new file mode 100644
index 0000000000..0b26e89ab3
--- /dev/null
+++ b/tests/docs/getting_started/publishing/test_object.py
@@ -0,0 +1,59 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_handle_kafka() -> None:
+ from docs.docs_src.getting_started.publishing.kafka.object_testing import (
+ test_handle as test_handle_k,
+ )
+
+ await test_handle_k()
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_handle_confluent() -> None:
+ from docs.docs_src.getting_started.publishing.confluent.object_testing import (
+ test_handle as test_handle_confluent,
+ )
+
+ await test_handle_confluent()
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_handle_rabbit() -> None:
+ from docs.docs_src.getting_started.publishing.rabbit.object_testing import (
+ test_handle as test_handle_r,
+ )
+
+ await test_handle_r()
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_handle_nats() -> None:
+ from docs.docs_src.getting_started.publishing.nats.object_testing import (
+ test_handle as test_handle_n,
+ )
+
+ await test_handle_n()
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_handle_redis() -> None:
+ from docs.docs_src.getting_started.publishing.redis.object_testing import (
+ test_handle as test_handle_red,
+ )
+
+ await test_handle_red()
diff --git a/tests/a_docs/integration/http/__init__.py b/tests/docs/getting_started/routers/__init__.py
similarity index 100%
rename from tests/a_docs/integration/http/__init__.py
rename to tests/docs/getting_started/routers/__init__.py
diff --git a/tests/docs/getting_started/routers/test_base.py b/tests/docs/getting_started/routers/test_base.py
new file mode 100644
index 0000000000..2422d0319b
--- /dev/null
+++ b/tests/docs/getting_started/routers/test_base.py
@@ -0,0 +1,90 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_base_router_kafka() -> None:
+ from docs.docs_src.getting_started.routers.kafka.router import (
+ app,
+ broker,
+ handle,
+ handle_response,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+ handle_response.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_base_router_confluent() -> None:
+ from docs.docs_src.getting_started.routers.confluent.router import (
+ app,
+ broker,
+ handle,
+ handle_response,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+ handle_response.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_base_router_rabbit() -> None:
+ from docs.docs_src.getting_started.routers.rabbit.router import (
+ app,
+ broker,
+ handle,
+ handle_response,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+ handle_response.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_base_router_nats() -> None:
+ from docs.docs_src.getting_started.routers.nats.router import (
+ app,
+ broker,
+ handle,
+ handle_response,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+ handle_response.mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_base_router_redis() -> None:
+ from docs.docs_src.getting_started.routers.redis.router import (
+ app,
+ broker,
+ handle,
+ handle_response,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker), TestApp(app):
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+ handle_response.mock.assert_called_once_with("Hi!")
diff --git a/tests/docs/getting_started/routers/test_delay.py b/tests/docs/getting_started/routers/test_delay.py
new file mode 100644
index 0000000000..8b7c119a5b
--- /dev/null
+++ b/tests/docs/getting_started/routers/test_delay.py
@@ -0,0 +1,95 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_delay_router_kafka() -> None:
+ from docs.docs_src.getting_started.routers.kafka.router_delay import (
+ app,
+ broker,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_delay_router_confluent() -> None:
+ from docs.docs_src.getting_started.routers.confluent.router_delay import (
+ app,
+ broker,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_delay_router_rabbit() -> None:
+ from docs.docs_src.getting_started.routers.rabbit.router_delay import (
+ app,
+ broker,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_delay_router_nats() -> None:
+ from docs.docs_src.getting_started.routers.nats.router_delay import (
+ app,
+ broker,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_delay_router_redis() -> None:
+ from docs.docs_src.getting_started.routers.redis.router_delay import (
+ app,
+ broker,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
diff --git a/tests/docs/getting_started/routers/test_delay_equal.py b/tests/docs/getting_started/routers/test_delay_equal.py
new file mode 100644
index 0000000000..52259ba511
--- /dev/null
+++ b/tests/docs/getting_started/routers/test_delay_equal.py
@@ -0,0 +1,125 @@
+import pytest
+
+from faststream import TestApp
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_delay_router_kafka() -> None:
+ from docs.docs_src.getting_started.routers.kafka.delay_equal import (
+ app,
+ broker,
+ )
+ from docs.docs_src.getting_started.routers.kafka.router_delay import (
+ broker as control_broker,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ assert len(broker._subscribers) == len(control_broker._subscribers)
+ assert len(broker._publishers) == len(control_broker._publishers)
+
+ async with TestKafkaBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_delay_router_confluent() -> None:
+ from docs.docs_src.getting_started.routers.confluent.delay_equal import (
+ app,
+ broker,
+ )
+ from docs.docs_src.getting_started.routers.confluent.router_delay import (
+ broker as control_broker,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ assert len(broker._subscribers) == len(control_broker._subscribers)
+ assert len(broker._publishers) == len(control_broker._publishers)
+
+ async with TestConfluentKafkaBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_delay_router_rabbit() -> None:
+ from docs.docs_src.getting_started.routers.rabbit.delay_equal import (
+ app,
+ broker,
+ )
+ from docs.docs_src.getting_started.routers.rabbit.router_delay import (
+ broker as control_broker,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ assert len(broker._subscribers) == len(control_broker._subscribers)
+ assert len(broker._publishers) == len(control_broker._publishers)
+
+ async with TestRabbitBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_delay_router_nats() -> None:
+ from docs.docs_src.getting_started.routers.nats.delay_equal import (
+ app,
+ broker,
+ )
+ from docs.docs_src.getting_started.routers.nats.router_delay import (
+ broker as control_broker,
+ )
+ from faststream.nats import TestNatsBroker
+
+ assert len(broker._subscribers) == len(control_broker._subscribers)
+ assert len(broker._publishers) == len(control_broker._publishers)
+
+ async with TestNatsBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_delay_router_redis() -> None:
+ from docs.docs_src.getting_started.routers.redis.delay_equal import (
+ app,
+ broker,
+ )
+ from docs.docs_src.getting_started.routers.redis.router_delay import (
+ broker as control_broker,
+ )
+ from faststream.redis import TestRedisBroker
+
+ assert len(broker._subscribers) == len(control_broker._subscribers)
+ assert len(broker._publishers) == len(control_broker._publishers)
+
+ async with TestRedisBroker(broker) as br, TestApp(app):
+ next(iter(br._subscribers)).calls[0].handler.mock.assert_called_once_with(
+ {"name": "John", "user_id": 1},
+ )
+
+ next(iter(br._publishers)).mock.assert_called_once_with("Hi!")
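# NOTE: the *_equal tests above guard a single invariant: the delay_equal
# variant of each docs example must register exactly the same number of
# subscribers and publishers as the reference router_delay module. The
# length checks run before any test broker is started, so registration
# regressions surface as a cheap structural failure. A minimal sketch of
# that kind of check (illustrative names; like the tests, it relies on the
# private `_subscribers`/`_publishers` collections):
#
#     from faststream.kafka import KafkaBroker, KafkaRouter
#
#     router = KafkaRouter()
#
#     @router.subscriber("in")
#     @router.publisher("out")
#     async def handle(name: str) -> str:
#         return "Hi!"
#
#     broker = KafkaBroker()
#     broker.include_router(router)
#
#     assert len(broker._subscribers) == 1
#     assert len(broker._publishers) == 1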
diff --git a/tests/a_docs/kafka/ack/__init__.py b/tests/docs/getting_started/serialization/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/ack/__init__.py
rename to tests/docs/getting_started/serialization/__init__.py
diff --git a/tests/a_docs/getting_started/serialization/test_parser.py b/tests/docs/getting_started/serialization/test_parser.py
similarity index 84%
rename from tests/a_docs/getting_started/serialization/test_parser.py
rename to tests/docs/getting_started/serialization/test_parser.py
index 3a68f8abf0..73a909c722 100644
--- a/tests/a_docs/getting_started/serialization/test_parser.py
+++ b/tests/docs/getting_started/serialization/test_parser.py
@@ -10,9 +10,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_parser_nats():
+async def test_parser_nats() -> None:
from docs.docs_src.getting_started.serialization.parser_nats import (
app,
broker,
@@ -24,9 +24,9 @@ async def test_parser_nats():
handle.mock.assert_called_once_with("")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_parser_kafka():
+async def test_parser_kafka() -> None:
from docs.docs_src.getting_started.serialization.parser_kafka import (
app,
broker,
@@ -38,9 +38,9 @@ async def test_parser_kafka():
handle.mock.assert_called_once_with("")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_parser_confluent():
+async def test_parser_confluent() -> None:
from docs.docs_src.getting_started.serialization.parser_confluent import (
app,
broker,
@@ -52,9 +52,9 @@ async def test_parser_confluent():
handle.mock.assert_called_once_with("")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_parser_rabbit():
+async def test_parser_rabbit() -> None:
from docs.docs_src.getting_started.serialization.parser_rabbit import (
app,
broker,
@@ -66,9 +66,9 @@ async def test_parser_rabbit():
handle.mock.assert_called_once_with("")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_parser_redis():
+async def test_parser_redis() -> None:
from docs.docs_src.getting_started.serialization.parser_redis import (
app,
broker,
diff --git a/tests/a_docs/kafka/basic/__init__.py b/tests/docs/getting_started/subscription/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/basic/__init__.py
rename to tests/docs/getting_started/subscription/__init__.py
diff --git a/tests/docs/getting_started/subscription/test_annotated.py b/tests/docs/getting_started/subscription/test_annotated.py
new file mode 100644
index 0000000000..0c9d24a927
--- /dev/null
+++ b/tests/docs/getting_started/subscription/test_annotated.py
@@ -0,0 +1,110 @@
+from typing import Any
+
+import pytest
+from fast_depends.exceptions import ValidationError
+from typing_extensions import TypeAlias
+
+from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream._internal.testing.broker import TestBroker
+from tests.marks import (
+ python39,
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+Setup: TypeAlias = tuple[
+ BrokerUsecase[Any, Any],
+ SubscriberUsecase[Any],
+ type[TestBroker],
+]
+
+
+@pytest.mark.asyncio()
+@python39
+class BaseCase:
+ async def test_handle(self, setup: Setup) -> None:
+ broker, handle, test_class = setup
+
+ async with test_class(broker) as br:
+ await br.publish({"name": "John", "user_id": 1}, "test")
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+
+ assert handle.mock is None
+
+ async def test_validation_error(self, setup: Setup) -> None:
+ broker, handle, test_class = setup
+
+ async with test_class(broker) as br:
+ with pytest.raises(ValidationError):
+ await br.publish("wrong message", "test")
+
+ handle.mock.assert_called_once_with("wrong message")
+
+
+@require_aiokafka
+class TestKafka(BaseCase):
+ @pytest.fixture(scope="class")
+ def setup(self) -> Setup:
+ from docs.docs_src.getting_started.subscription.kafka.pydantic_annotated_fields import (
+ broker,
+ handle,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ return (broker, handle, TestKafkaBroker)
+
+
+@require_confluent
+class TestConfluent(BaseCase):
+ @pytest.fixture(scope="class")
+ def setup(self) -> Setup:
+ from docs.docs_src.getting_started.subscription.confluent.pydantic_annotated_fields import (
+ broker,
+ handle,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ return (broker, handle, TestConfluentKafkaBroker)
+
+
+@require_aiopika
+class TestRabbit(BaseCase):
+ @pytest.fixture(scope="class")
+ def setup(self) -> Setup:
+ from docs.docs_src.getting_started.subscription.rabbit.pydantic_annotated_fields import (
+ broker,
+ handle,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ return (broker, handle, TestRabbitBroker)
+
+
+@require_nats
+class TestNats(BaseCase):
+ @pytest.fixture(scope="class")
+ def setup(self) -> Setup:
+ from docs.docs_src.getting_started.subscription.nats.pydantic_annotated_fields import (
+ broker,
+ handle,
+ )
+ from faststream.nats import TestNatsBroker
+
+ return (broker, handle, TestNatsBroker)
+
+
+@require_redis
+class TestRedis(BaseCase):
+ @pytest.fixture(scope="class")
+ def setup(self) -> Setup:
+ from docs.docs_src.getting_started.subscription.redis.pydantic_annotated_fields import (
+ broker,
+ handle,
+ )
+ from faststream.redis import TestRedisBroker
+
+ return (broker, handle, TestRedisBroker)
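# NOTE: test_annotated.py uses a class-based fan-out. BaseCase owns the
# test coroutines, and each broker-specific Test* subclass contributes only
# a class-scoped `setup` fixture returning (broker, handler, TestBroker
# subclass); pytest collects the inherited tests once per subclass, so one
# assertion set runs against all five brokers. The idiom in miniature
# (illustrative names):
#
#     import pytest
#
#     class BaseCase:
#         def test_answer(self, setup: int) -> None:
#             assert setup == 42
#
#     class TestConcrete(BaseCase):
#         @pytest.fixture(scope="class")
#         def setup(self) -> int:
#             return 42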
diff --git a/tests/a_docs/getting_started/subscription/test_filter.py b/tests/docs/getting_started/subscription/test_filter.py
similarity index 87%
rename from tests/a_docs/getting_started/subscription/test_filter.py
rename to tests/docs/getting_started/subscription/test_filter.py
index 4789c6dda8..60264d60cc 100644
--- a/tests/a_docs/getting_started/subscription/test_filter.py
+++ b/tests/docs/getting_started/subscription/test_filter.py
@@ -10,9 +10,9 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_kafka_filtering():
+async def test_kafka_filtering() -> None:
from docs.docs_src.getting_started.subscription.kafka.filter import (
app,
broker,
@@ -26,9 +26,9 @@ async def test_kafka_filtering():
default_handler.mock.assert_called_once_with("Hello, FastStream!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_confluent_filtering():
+async def test_confluent_filtering() -> None:
from docs.docs_src.getting_started.subscription.confluent.filter import (
app,
broker,
@@ -42,9 +42,9 @@ async def test_confluent_filtering():
default_handler.mock.assert_called_once_with("Hello, FastStream!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_rabbit_filtering():
+async def test_rabbit_filtering() -> None:
from docs.docs_src.getting_started.subscription.rabbit.filter import (
app,
broker,
@@ -58,9 +58,9 @@ async def test_rabbit_filtering():
default_handler.mock.assert_called_once_with("Hello, FastStream!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_nats_filtering():
+async def test_nats_filtering() -> None:
from docs.docs_src.getting_started.subscription.nats.filter import (
app,
broker,
@@ -74,9 +74,9 @@ async def test_nats_filtering():
default_handler.mock.assert_called_once_with("Hello, FastStream!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_redis_filtering():
+async def test_redis_filtering() -> None:
from docs.docs_src.getting_started.subscription.redis.filter import (
app,
broker,
diff --git a/tests/docs/getting_started/subscription/test_pydantic.py b/tests/docs/getting_started/subscription/test_pydantic.py
new file mode 100644
index 0000000000..cef46891c3
--- /dev/null
+++ b/tests/docs/getting_started/subscription/test_pydantic.py
@@ -0,0 +1,79 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_pydantic_model_rabbit() -> None:
+ from docs.docs_src.getting_started.subscription.rabbit.pydantic_model import (
+ broker,
+ handle,
+ )
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br:
+ await br.publish({"name": "John", "user_id": 1}, "test-queue")
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_pydantic_model_kafka() -> None:
+ from docs.docs_src.getting_started.subscription.kafka.pydantic_model import (
+ broker,
+ handle,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish({"name": "John", "user_id": 1}, "test-topic")
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_pydantic_model_confluent() -> None:
+ from docs.docs_src.getting_started.subscription.confluent.pydantic_model import (
+ broker,
+ handle,
+ )
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker) as br:
+ await br.publish({"name": "John", "user_id": 1}, "test-topic")
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_pydantic_model_nats() -> None:
+ from docs.docs_src.getting_started.subscription.nats.pydantic_model import (
+ broker,
+ handle,
+ )
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br:
+ await br.publish({"name": "John", "user_id": 1}, "test-subject")
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_pydantic_model_redis() -> None:
+ from docs.docs_src.getting_started.subscription.redis.pydantic_model import (
+ broker,
+ handle,
+ )
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish({"name": "John", "user_id": 1}, "test-channel")
+ handle.mock.assert_called_once_with({"name": "John", "user_id": 1})
diff --git a/tests/docs/getting_started/subscription/test_real.py b/tests/docs/getting_started/subscription/test_real.py
new file mode 100644
index 0000000000..f033a0a11e
--- /dev/null
+++ b/tests/docs/getting_started/subscription/test_real.py
@@ -0,0 +1,119 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.kafka()
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_handle_kafka() -> None:
+ from docs.docs_src.getting_started.subscription.kafka.real_testing import (
+ test_handle as test_handle_k,
+ )
+
+ await test_handle_k()
+
+
+@pytest.mark.kafka()
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_validate_kafka() -> None:
+ from docs.docs_src.getting_started.subscription.kafka.real_testing import (
+ test_validation_error as test_validation_error_k,
+ )
+
+ await test_validation_error_k()
+
+
+@pytest.mark.confluent()
+@pytest.mark.asyncio()
+@require_confluent
+async def test_handle_confluent() -> None:
+ from docs.docs_src.getting_started.subscription.confluent.real_testing import (
+ test_handle as test_handle_confluent,
+ )
+
+ await test_handle_confluent()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+@require_confluent
+async def test_validate_confluent() -> None:
+ from docs.docs_src.getting_started.subscription.confluent.real_testing import (
+ test_validation_error as test_validation_error_confluent,
+ )
+
+ await test_validation_error_confluent()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+@require_aiopika
+async def test_handle_rabbit() -> None:
+ from docs.docs_src.getting_started.subscription.rabbit.real_testing import (
+ test_handle as test_handle_r,
+ )
+
+ await test_handle_r()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+@require_aiopika
+async def test_validate_rabbit() -> None:
+ from docs.docs_src.getting_started.subscription.rabbit.real_testing import (
+ test_validation_error as test_validation_error_r,
+ )
+
+ await test_validation_error_r()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+@require_nats
+async def test_handle_nats() -> None:
+ from docs.docs_src.getting_started.subscription.nats.real_testing import (
+ test_handle as test_handle_n,
+ )
+
+ await test_handle_n()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+@require_nats
+async def test_validate_nats() -> None:
+ from docs.docs_src.getting_started.subscription.nats.real_testing import (
+ test_validation_error as test_validation_error_n,
+ )
+
+ await test_validation_error_n()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.redis()
+@require_redis
+async def test_handle_redis() -> None:
+ from docs.docs_src.getting_started.subscription.redis.real_testing import (
+ test_handle as test_handle_red,
+ )
+
+ await test_handle_red()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.redis()
+@require_redis
+async def test_validate_redis() -> None:
+ from docs.docs_src.getting_started.subscription.redis.real_testing import (
+ test_validation_error as test_validation_error_red,
+ )
+
+ await test_validation_error_red()
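# NOTE: two orthogonal gates appear on each test above. pytest.mark.kafka()
# and friends tag tests that need a live service, so CI can select or
# deselect them with -m; the require_* decorators skip a test when the
# optional client library is not installed. A plausible shape for such a
# mark (the actual tests/marks.py implementation may differ):
#
#     import pytest
#
#     try:
#         import aiokafka  # noqa: F401
#
#         HAS_AIOKAFKA = True
#     except ImportError:
#         HAS_AIOKAFKA = False
#
#     require_aiokafka = pytest.mark.skipif(
#         not HAS_AIOKAFKA,
#         reason="aiokafka is not installed",
#     )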
diff --git a/tests/docs/getting_started/subscription/test_testing.py b/tests/docs/getting_started/subscription/test_testing.py
new file mode 100644
index 0000000000..1c1a786b21
--- /dev/null
+++ b/tests/docs/getting_started/subscription/test_testing.py
@@ -0,0 +1,119 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.kafka()
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_handle_kafka() -> None:
+ from docs.docs_src.getting_started.subscription.kafka.testing import (
+ test_handle as test_handle_k,
+ )
+
+ await test_handle_k()
+
+
+@pytest.mark.kafka()
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_validate_kafka() -> None:
+ from docs.docs_src.getting_started.subscription.kafka.testing import (
+ test_validation_error as test_validation_error_k,
+ )
+
+ await test_validation_error_k()
+
+
+@pytest.mark.confluent()
+@pytest.mark.asyncio()
+@require_confluent
+async def test_handle_confluent() -> None:
+ from docs.docs_src.getting_started.subscription.confluent.testing import (
+ test_handle as test_handle_confluent,
+ )
+
+ await test_handle_confluent()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.confluent()
+@require_confluent
+async def test_validate_confluent() -> None:
+ from docs.docs_src.getting_started.subscription.confluent.testing import (
+ test_validation_error as test_validation_error_confluent,
+ )
+
+ await test_validation_error_confluent()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+@require_aiopika
+async def test_handle_rabbit() -> None:
+ from docs.docs_src.getting_started.subscription.rabbit.testing import (
+ test_handle as test_handle_r,
+ )
+
+ await test_handle_r()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+@require_aiopika
+async def test_validate_rabbit() -> None:
+ from docs.docs_src.getting_started.subscription.rabbit.testing import (
+ test_validation_error as test_validation_error_r,
+ )
+
+ await test_validation_error_r()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+@require_nats
+async def test_handle_nats() -> None:
+ from docs.docs_src.getting_started.subscription.nats.testing import (
+ test_handle as test_handle_n,
+ )
+
+ await test_handle_n()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+@require_nats
+async def test_validate_nats() -> None:
+ from docs.docs_src.getting_started.subscription.nats.testing import (
+ test_validation_error as test_validation_error_n,
+ )
+
+ await test_validation_error_n()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.redis()
+@require_redis
+async def test_handle_redis() -> None:
+ from docs.docs_src.getting_started.subscription.redis.testing import (
+ test_handle as test_handle_rd,
+ )
+
+ await test_handle_rd()
+
+
+@pytest.mark.asyncio()
+@pytest.mark.redis()
+@require_redis
+async def test_validate_redis() -> None:
+ from docs.docs_src.getting_started.subscription.redis.testing import (
+ test_validation_error as test_validation_error_rd,
+ )
+
+ await test_validation_error_rd()
diff --git a/tests/a_docs/kafka/batch_consuming_pydantic/__init__.py b/tests/docs/index/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/batch_consuming_pydantic/__init__.py
rename to tests/docs/index/__init__.py
diff --git a/tests/docs/index/test_basic.py b/tests/docs/index/test_basic.py
new file mode 100644
index 0000000000..a518aff1ff
--- /dev/null
+++ b/tests/docs/index/test_basic.py
@@ -0,0 +1,89 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_index_kafka_base() -> None:
+ from docs.docs_src.index.kafka.basic import broker, handle_msg
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish({"user": "John", "user_id": 1}, "in-topic")
+
+ handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
+
+ list(br._publishers)[0].mock.assert_called_once_with( # noqa: RUF015
+ "User: 1 - John registered",
+ )
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_index_confluent_base() -> None:
+ from docs.docs_src.index.confluent.basic import broker, handle_msg
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(broker) as br:
+ await br.publish({"user": "John", "user_id": 1}, "in-topic")
+
+ handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
+
+ list(br._publishers)[0].mock.assert_called_once_with( # noqa: RUF015
+ "User: 1 - John registered",
+ )
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_index_rabbit_base() -> None:
+ from docs.docs_src.index.rabbit.basic import broker, handle_msg
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(broker) as br:
+ await br.publish({"user": "John", "user_id": 1}, "in-queue")
+
+ handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
+
+ list(br._publishers)[0].mock.assert_called_once_with( # noqa: RUF015
+ "User: 1 - John registered",
+ )
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_index_nats_base() -> None:
+ from docs.docs_src.index.nats.basic import broker, handle_msg
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(broker) as br:
+ await br.publish({"user": "John", "user_id": 1}, "in-subject")
+
+ handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
+
+ list(br._publishers)[0].mock.assert_called_once_with( # noqa: RUF015
+ "User: 1 - John registered",
+ )
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_index_redis_base() -> None:
+ from docs.docs_src.index.redis.basic import broker, handle_msg
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish({"user": "John", "user_id": 1}, "in-channel")
+
+ handle_msg.mock.assert_called_once_with({"user": "John", "user_id": 1})
+
+ list(br._publishers)[0].mock.assert_called_once_with( # noqa: RUF015
+ "User: 1 - John registered",
+ )
diff --git a/tests/a_docs/index/test_dependencies.py b/tests/docs/index/test_dependencies.py
similarity index 87%
rename from tests/a_docs/index/test_dependencies.py
rename to tests/docs/index/test_dependencies.py
index c0ba757a81..8d8eedad25 100644
--- a/tests/a_docs/index/test_dependencies.py
+++ b/tests/docs/index/test_dependencies.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiokafka
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_index_dep():
+async def test_index_dep() -> None:
from docs.docs_src.index.dependencies import base_handler, broker
from faststream.kafka import TestKafkaBroker
diff --git a/tests/docs/index/test_pydantic.py b/tests/docs/index/test_pydantic.py
new file mode 100644
index 0000000000..2267d15675
--- /dev/null
+++ b/tests/docs/index/test_pydantic.py
@@ -0,0 +1,93 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_kafka_correct() -> None:
+ from docs.docs_src.index.kafka.test import test_correct as test_k_correct
+
+ await test_k_correct()
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_kafka_invalid() -> None:
+ from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid
+
+ await test_k_invalid()
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_confluent_correct() -> None:
+ from docs.docs_src.index.confluent.test import (
+ test_correct as test_confluent_correct,
+ )
+
+ await test_confluent_correct()
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_confluent_invalid() -> None:
+ from docs.docs_src.index.confluent.test import (
+ test_invalid as test_confluent_invalid,
+ )
+
+ await test_confluent_invalid()
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_rabbit_correct() -> None:
+ from docs.docs_src.index.rabbit.test import test_correct as test_r_correct
+
+ await test_r_correct()
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_rabbit_invalid() -> None:
+ from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid
+
+ await test_r_invalid()
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_nats_correct() -> None:
+ from docs.docs_src.index.nats.test import test_correct as test_n_correct
+
+ await test_n_correct()
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_nats_invalid() -> None:
+ from docs.docs_src.index.nats.test import test_invalid as test_n_invalid
+
+ await test_n_invalid()
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_redis_correct() -> None:
+ from docs.docs_src.index.redis.test import test_correct as test_red_correct
+
+ await test_red_correct()
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_redis_invalid() -> None:
+ from docs.docs_src.index.redis.test import test_invalid as test_red_invalid
+
+ await test_red_invalid()
diff --git a/tests/a_docs/kafka/consumes_basics/__init__.py b/tests/docs/integration/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/consumes_basics/__init__.py
rename to tests/docs/integration/__init__.py
diff --git a/tests/a_docs/kafka/publish_batch/__init__.py b/tests/docs/integration/fastapi/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/publish_batch/__init__.py
rename to tests/docs/integration/fastapi/__init__.py
diff --git a/tests/docs/integration/fastapi/test_base.py b/tests/docs/integration/fastapi/test_base.py
new file mode 100644
index 0000000000..223df2514d
--- /dev/null
+++ b/tests/docs/integration/fastapi/test_base.py
@@ -0,0 +1,105 @@
+import pytest
+from fastapi.testclient import TestClient
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_fastapi_kafka_base() -> None:
+ from docs.docs_src.integrations.fastapi.kafka.base import app, hello, router
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ await br.publish({"m": {}}, "test")
+
+ hello.mock.assert_called_once_with({"m": {}})
+
+ list(br._publishers)[0].mock.assert_called_with( # noqa: RUF015
+ {"response": "Hello, Kafka!"},
+ )
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_fastapi_confluent_base() -> None:
+ from docs.docs_src.integrations.fastapi.confluent.base import app, hello, router
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ async with TestConfluentKafkaBroker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ await br.publish({"m": {}}, "test")
+
+ hello.mock.assert_called_once_with({"m": {}})
+
+ list(br._publishers)[0].mock.assert_called_with( # noqa: RUF015
+ {"response": "Hello, Kafka!"},
+ )
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_fastapi_rabbit_base() -> None:
+ from docs.docs_src.integrations.fastapi.rabbit.base import app, hello, router
+ from faststream.rabbit import TestRabbitBroker
+
+ async with TestRabbitBroker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ await br.publish({"m": {}}, "test")
+
+ hello.mock.assert_called_once_with({"m": {}})
+
+ list(br._publishers)[0].mock.assert_called_with( # noqa: RUF015
+ {"response": "Hello, Rabbit!"},
+ )
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_fastapi_nats_base() -> None:
+ from docs.docs_src.integrations.fastapi.nats.base import app, hello, router
+ from faststream.nats import TestNatsBroker
+
+ async with TestNatsBroker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ await br.publish({"m": {}}, "test")
+
+ hello.mock.assert_called_once_with({"m": {}})
+
+ list(br._publishers)[0].mock.assert_called_with( # noqa: RUF015
+ {"response": "Hello, NATS!"},
+ )
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_fastapi_redis_base() -> None:
+ from docs.docs_src.integrations.fastapi.redis.base import app, hello, router
+ from faststream.redis import TestRedisBroker
+
+ async with TestRedisBroker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ await br.publish({"m": {}}, "test")
+
+ hello.mock.assert_called_once_with({"m": {}})
+
+ list(br._publishers)[0].mock.assert_called_with( # noqa: RUF015
+ {"response": "Hello, Redis!"},
+ )
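# NOTE: the FastAPI-integration tests above lean on the fact that
# faststream's fastapi routers expose their broker as `router.broker`.
# Patching it with a Test*Broker before entering fastapi.testclient's
# TestClient means the app's startup runs against the in-memory broker, so
# a single test covers both the HTTP route and the subscriber.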
diff --git a/tests/docs/integration/fastapi/test_depends.py b/tests/docs/integration/fastapi/test_depends.py
new file mode 100644
index 0000000000..7361b3ad5f
--- /dev/null
+++ b/tests/docs/integration/fastapi/test_depends.py
@@ -0,0 +1,90 @@
+import pytest
+from fastapi.testclient import TestClient
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_fastapi_kafka_depends() -> None:
+ from docs.docs_src.integrations.fastapi.kafka.depends import app, router
+ from faststream.kafka import TestKafkaBroker
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ async with TestKafkaBroker(router.broker):
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ handler.mock.assert_called_once_with("Hello, Kafka!")
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_fastapi_confluent_depends() -> None:
+ from docs.docs_src.integrations.fastapi.confluent.depends import app, router
+ from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ async with TestConfluentKafkaBroker(router.broker):
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ handler.mock.assert_called_once_with("Hello, Kafka!")
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_fastapi_rabbit_depends() -> None:
+ from docs.docs_src.integrations.fastapi.rabbit.depends import app, router
+ from faststream.rabbit import TestRabbitBroker
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ async with TestRabbitBroker(router.broker):
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ handler.mock.assert_called_once_with("Hello, Rabbit!")
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_fastapi_nats_depends() -> None:
+ from docs.docs_src.integrations.fastapi.nats.depends import app, router
+ from faststream.nats import TestNatsBroker
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ async with TestNatsBroker(router.broker):
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ handler.mock.assert_called_once_with("Hello, NATS!")
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_fastapi_redis_depends() -> None:
+ from docs.docs_src.integrations.fastapi.redis.depends import app, router
+ from faststream.redis import TestRedisBroker
+
+ @router.subscriber("test")
+ async def handler() -> None: ...
+
+ async with TestRedisBroker(router.broker):
+ with TestClient(app) as client:
+ assert client.get("/").text == '"Hello, HTTP!"'
+
+ handler.mock.assert_called_once_with("Hello, Redis!")
diff --git a/tests/a_docs/integration/fastapi/test_routers.py b/tests/docs/integration/fastapi/test_routers.py
similarity index 89%
rename from tests/a_docs/integration/fastapi/test_routers.py
rename to tests/docs/integration/fastapi/test_routers.py
index c8122b0414..e8429612ea 100644
--- a/tests/a_docs/integration/fastapi/test_routers.py
+++ b/tests/docs/integration/fastapi/test_routers.py
@@ -11,10 +11,10 @@
class BaseCase:
- def test_running(self, data):
+ def test_running(self, data) -> None:
app, broker = data
- handlers = broker._subscribers.values()
+ handlers = broker._subscribers
assert len(handlers) == 2
for h in handlers:
@@ -25,7 +25,7 @@ def test_running(self, data):
assert h.running
-@pytest.mark.kafka
+@pytest.mark.kafka()
@require_aiokafka
class TestKafka(BaseCase):
@pytest.fixture(scope="class")
@@ -35,7 +35,7 @@ def data(self):
return (app, core_router.broker)
-@pytest.mark.confluent
+@pytest.mark.confluent()
@require_confluent
class TestConfluent(BaseCase):
@pytest.fixture(scope="class")
@@ -48,7 +48,7 @@ def data(self):
return (app, core_router.broker)
-@pytest.mark.nats
+@pytest.mark.nats()
@require_nats
class TestNats(BaseCase):
@pytest.fixture(scope="class")
@@ -58,7 +58,7 @@ def data(self):
return (app, core_router.broker)
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
@require_aiopika
class TestRabbit(BaseCase):
@pytest.fixture(scope="class")
@@ -68,7 +68,7 @@ def data(self):
return (app, core_router.broker)
-@pytest.mark.redis
+@pytest.mark.redis()
@require_redis
class TestRedis(BaseCase):
@pytest.fixture(scope="class")
diff --git a/tests/a_docs/integration/fastapi/test_send.py b/tests/docs/integration/fastapi/test_send.py
similarity index 80%
rename from tests/a_docs/integration/fastapi/test_send.py
rename to tests/docs/integration/fastapi/test_send.py
index 13bc7376ef..a0ca67ee64 100644
--- a/tests/a_docs/integration/fastapi/test_send.py
+++ b/tests/docs/integration/fastapi/test_send.py
@@ -10,14 +10,14 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_fastapi_kafka_send():
+async def test_fastapi_kafka_send() -> None:
from docs.docs_src.integrations.fastapi.kafka.send import app, router
from faststream.kafka import TestKafkaBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestKafkaBroker(router.broker):
with TestClient(app) as client:
@@ -26,14 +26,14 @@ async def handler(): ...
handler.mock.assert_called_once_with("Hello, Kafka!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_fastapi_confluent_send():
+async def test_fastapi_confluent_send() -> None:
from docs.docs_src.integrations.fastapi.confluent.send import app, router
from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestConfluentKafkaBroker(router.broker):
with TestClient(app) as client:
@@ -42,14 +42,14 @@ async def handler(): ...
handler.mock.assert_called_once_with("Hello, Kafka!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_fastapi_rabbit_send():
+async def test_fastapi_rabbit_send() -> None:
from docs.docs_src.integrations.fastapi.rabbit.send import app, router
from faststream.rabbit import TestRabbitBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestRabbitBroker(router.broker):
with TestClient(app) as client:
@@ -58,14 +58,14 @@ async def handler(): ...
handler.mock.assert_called_once_with("Hello, Rabbit!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_fastapi_nats_send():
+async def test_fastapi_nats_send() -> None:
from docs.docs_src.integrations.fastapi.nats.send import app, router
from faststream.nats import TestNatsBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestNatsBroker(router.broker):
with TestClient(app) as client:
@@ -74,14 +74,14 @@ async def handler(): ...
handler.mock.assert_called_once_with("Hello, NATS!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_fastapi_redis_send():
+async def test_fastapi_redis_send() -> None:
from docs.docs_src.integrations.fastapi.redis.send import app, router
from faststream.redis import TestRedisBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestRedisBroker(router.broker):
with TestClient(app) as client:
diff --git a/tests/a_docs/integration/fastapi/test_startup.py b/tests/docs/integration/fastapi/test_startup.py
similarity index 77%
rename from tests/a_docs/integration/fastapi/test_startup.py
rename to tests/docs/integration/fastapi/test_startup.py
index 011cea69fc..80e62afbeb 100644
--- a/tests/a_docs/integration/fastapi/test_startup.py
+++ b/tests/docs/integration/fastapi/test_startup.py
@@ -10,70 +10,70 @@
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_fastapi_kafka_startup():
+async def test_fastapi_kafka_startup() -> None:
from docs.docs_src.integrations.fastapi.kafka.startup import app, hello, router
from faststream.kafka import TestKafkaBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestKafkaBroker(router.broker):
with TestClient(app):
hello.mock.assert_called_once_with("Hello!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_confluent
-async def test_fastapi_confluent_startup():
+async def test_fastapi_confluent_startup() -> None:
from docs.docs_src.integrations.fastapi.confluent.startup import app, hello, router
from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestConfluentKafkaBroker(router.broker):
with TestClient(app):
hello.mock.assert_called_once_with("Hello!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_fastapi_rabbit_startup():
+async def test_fastapi_rabbit_startup() -> None:
from docs.docs_src.integrations.fastapi.rabbit.startup import app, hello, router
from faststream.rabbit import TestRabbitBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestRabbitBroker(router.broker):
with TestClient(app):
hello.mock.assert_called_once_with("Hello!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_fastapi_nats_startup():
+async def test_fastapi_nats_startup() -> None:
from docs.docs_src.integrations.fastapi.nats.startup import app, hello, router
from faststream.nats import TestNatsBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestNatsBroker(router.broker):
with TestClient(app):
hello.mock.assert_called_once_with("Hello!")
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_fastapi_redis_startup():
+async def test_fastapi_redis_startup() -> None:
from docs.docs_src.integrations.fastapi.redis.startup import app, hello, router
from faststream.redis import TestRedisBroker
@router.subscriber("test")
- async def handler(): ...
+ async def handler() -> None: ...
async with TestRedisBroker(router.broker):
with TestClient(app):
diff --git a/tests/docs/integration/fastapi/test_test.py b/tests/docs/integration/fastapi/test_test.py
new file mode 100644
index 0000000000..55e682b079
--- /dev/null
+++ b/tests/docs/integration/fastapi/test_test.py
@@ -0,0 +1,49 @@
+import pytest
+
+from tests.marks import (
+ require_aiokafka,
+ require_aiopika,
+ require_confluent,
+ require_nats,
+ require_redis,
+)
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_kafka() -> None:
+ from docs.docs_src.integrations.fastapi.kafka.test import test_router
+
+ await test_router()
+
+
+@pytest.mark.asyncio()
+@require_confluent
+async def test_confluent() -> None:
+ from docs.docs_src.integrations.fastapi.confluent.test import test_router
+
+ await test_router()
+
+
+@pytest.mark.asyncio()
+@require_aiopika
+async def test_rabbit() -> None:
+ from docs.docs_src.integrations.fastapi.rabbit.test import test_router
+
+ await test_router()
+
+
+@pytest.mark.asyncio()
+@require_nats
+async def test_nats() -> None:
+ from docs.docs_src.integrations.fastapi.nats.test import test_router
+
+ await test_router()
+
+
+@pytest.mark.asyncio()
+@require_redis
+async def test_redis() -> None:
+ from docs.docs_src.integrations.fastapi.redis.test import test_router
+
+ await test_router()
diff --git a/tests/a_docs/kafka/publish_example/__init__.py b/tests/docs/integration/http/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/publish_example/__init__.py
rename to tests/docs/integration/http/__init__.py
diff --git a/tests/docs/integration/http/test_fastapi.py b/tests/docs/integration/http/test_fastapi.py
new file mode 100644
index 0000000000..3e51eb1401
--- /dev/null
+++ b/tests/docs/integration/http/test_fastapi.py
@@ -0,0 +1,23 @@
+import pytest
+from fastapi.testclient import TestClient
+
+from tests.marks import require_aiokafka
+
+
+@pytest.mark.asyncio()
+@require_aiokafka
+async def test_fastapi_raw_integration() -> None:
+ from docs.docs_src.integrations.http_frameworks_integrations.fastapi import (
+ app,
+ base_handler,
+ broker,
+ )
+ from faststream.kafka import TestKafkaBroker
+
+ async with TestKafkaBroker(broker):
+ with TestClient(app) as client:
+ assert client.get("/").json() == {"Hello": "World"}
+
+ await broker.publish("", "test")
+
+ base_handler.mock.assert_called_once_with("")
diff --git a/tests/docs/kafka/__init__.py b/tests/docs/kafka/__init__.py
new file mode 100644
index 0000000000..bd6bc708fc
--- /dev/null
+++ b/tests/docs/kafka/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("aiokafka")
diff --git a/tests/a_docs/kafka/publish_with_partition_key/__init__.py b/tests/docs/kafka/ack/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/publish_with_partition_key/__init__.py
rename to tests/docs/kafka/ack/__init__.py
diff --git a/tests/docs/kafka/ack/test_errors.py b/tests/docs/kafka/ack/test_errors.py
new file mode 100644
index 0000000000..d150c2bf0d
--- /dev/null
+++ b/tests/docs/kafka/ack/test_errors.py
@@ -0,0 +1,24 @@
+from unittest.mock import patch
+
+import pytest
+from aiokafka import AIOKafkaConsumer
+
+from faststream.kafka import TestApp, TestKafkaBroker
+from tests.tools import spy_decorator
+
+
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
+@pytest.mark.slow()
+async def test_ack_exc() -> None:
+ from docs.docs_src.kafka.ack.errors import app, broker, handle
+
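+    # spy_decorator wraps the real commit coroutine so calls are recorded on
+    # ``m.mock`` while the message is still actually committed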
+ with patch.object(
+ AIOKafkaConsumer,
+ "commit",
+ spy_decorator(AIOKafkaConsumer.commit),
+ ) as m:
+ async with TestKafkaBroker(broker, with_real=True), TestApp(app):
+ await handle.wait_call(10)
+
+ assert m.mock.call_count
diff --git a/tests/a_docs/kafka/publisher_object/__init__.py b/tests/docs/kafka/basic/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/publisher_object/__init__.py
rename to tests/docs/kafka/basic/__init__.py
diff --git a/tests/docs/kafka/basic/test_basic.py b/tests/docs/kafka/basic/test_basic.py
new file mode 100644
index 0000000000..7121749a4e
--- /dev/null
+++ b/tests/docs/kafka/basic/test_basic.py
@@ -0,0 +1,15 @@
+import pytest
+
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_basic() -> None:
+ from docs.docs_src.kafka.basic.basic import broker, on_input_data
+
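+    # grab the publisher created by the example so we can assert on its mock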
+ publisher = list(broker._publishers)[0] # noqa: RUF015
+
+ async with TestKafkaBroker(broker) as br:
+ await br.publish({"data": 1.0}, "input_data")
+ on_input_data.mock.assert_called_once_with({"data": 1.0})
+ publisher.mock.assert_called_once_with({"data": 2.0})
diff --git a/tests/docs/kafka/basic/test_cmd_run.py b/tests/docs/kafka/basic/test_cmd_run.py
new file mode 100644
index 0000000000..85d534cc1e
--- /dev/null
+++ b/tests/docs/kafka/basic/test_cmd_run.py
@@ -0,0 +1,35 @@
+import traceback
+from typing import Any
+from unittest.mock import Mock
+
+import pytest
+from typer.testing import CliRunner
+
+from faststream._internal.cli.main import cli
+from faststream.app import FastStream
+
+
+@pytest.mark.kafka()
+def test_run_cmd(
+ runner: CliRunner,
+ mock: Mock,
+ monkeypatch: pytest.MonkeyPatch,
+ kafka_basic_project: str,
+) -> None:
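+    # replace FastStream.run with a stub that starts and stops the app once,
+    # so the CLI invocation below returns instead of serving forever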
+ async def patched_run(self: FastStream, *args: Any, **kwargs: Any) -> None:
+ await self.start()
+ await self.stop()
+ mock()
+
+ with monkeypatch.context() as m:
+ m.setattr(FastStream, "run", patched_run)
+ r = runner.invoke(
+ cli,
+ [
+ "run",
+ kafka_basic_project,
+ ],
+ )
+
+ assert r.exit_code == 0, (r.output, traceback.format_exception(r.exception))
+ mock.assert_called_once()
diff --git a/tests/a_docs/kafka/raw_publish/__init__.py b/tests/docs/kafka/batch_consuming_pydantic/__init__.py
similarity index 100%
rename from tests/a_docs/kafka/raw_publish/__init__.py
rename to tests/docs/kafka/batch_consuming_pydantic/__init__.py
diff --git a/tests/docs/kafka/batch_consuming_pydantic/test_app.py b/tests/docs/kafka/batch_consuming_pydantic/test_app.py
new file mode 100644
index 0000000000..f080f6d94a
--- /dev/null
+++ b/tests/docs/kafka/batch_consuming_pydantic/test_app.py
@@ -0,0 +1,21 @@
+import pytest
+
+from docs.docs_src.kafka.batch_consuming_pydantic.app import (
+ HelloWorld,
+ broker,
+ handle_batch,
+)
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_me() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish_batch(
+ HelloWorld(msg="First Hello"),
+ HelloWorld(msg="Second Hello"),
+ topic="test_batch",
+ )
+ handle_batch.mock.assert_called_with(
+ [dict(HelloWorld(msg="First Hello")), dict(HelloWorld(msg="Second Hello"))],
+ )
diff --git a/tests/a_docs/nats/ack/__init__.py b/tests/docs/kafka/consumes_basics/__init__.py
similarity index 100%
rename from tests/a_docs/nats/ack/__init__.py
rename to tests/docs/kafka/consumes_basics/__init__.py
diff --git a/tests/docs/kafka/consumes_basics/test_app.py b/tests/docs/kafka/consumes_basics/test_app.py
new file mode 100644
index 0000000000..6a258153dd
--- /dev/null
+++ b/tests/docs/kafka/consumes_basics/test_app.py
@@ -0,0 +1,15 @@
+import pytest
+
+from docs.docs_src.kafka.consumes_basics.app import (
+ HelloWorld,
+ broker,
+ on_hello_world,
+)
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(HelloWorld(msg="First Hello"), "hello_world")
+ on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello")))
diff --git a/tests/a_docs/nats/js/__init__.py b/tests/docs/kafka/publish_batch/__init__.py
similarity index 100%
rename from tests/a_docs/nats/js/__init__.py
rename to tests/docs/kafka/publish_batch/__init__.py
diff --git a/tests/docs/kafka/publish_batch/test_app.py b/tests/docs/kafka/publish_batch/test_app.py
new file mode 100644
index 0000000000..d9fea8cfda
--- /dev/null
+++ b/tests/docs/kafka/publish_batch/test_app.py
@@ -0,0 +1,32 @@
+import pytest
+
+from docs.docs_src.kafka.publish_batch.app import (
+ Data,
+ broker,
+ decrease_and_increase,
+ on_input_data_1,
+ on_input_data_2,
+)
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_batch_publish_decorator() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=2.0), "input_data_1")
+
+ on_input_data_1.mock.assert_called_once_with(dict(Data(data=2.0)))
+ decrease_and_increase.mock.assert_called_once_with(
+ [dict(Data(data=1.0)), dict(Data(data=4.0))],
+ )
+
+
+@pytest.mark.asyncio()
+async def test_batch_publish_call() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=2.0), "input_data_2")
+
+ on_input_data_2.mock.assert_called_once_with(dict(Data(data=2.0)))
+ decrease_and_increase.mock.assert_called_once_with(
+ [dict(Data(data=1.0)), dict(Data(data=4.0))],
+ )
diff --git a/tests/docs/kafka/publish_batch/test_issues.py b/tests/docs/kafka/publish_batch/test_issues.py
new file mode 100644
index 0000000000..031d12929e
--- /dev/null
+++ b/tests/docs/kafka/publish_batch/test_issues.py
@@ -0,0 +1,22 @@
+import pytest
+
+from faststream import FastStream
+from faststream.kafka import KafkaBroker, TestKafkaBroker
+
+broker = KafkaBroker()
+batch_producer = broker.publisher("response", batch=True)
+
+
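+# the handler's returned list is re-published to "response" as a single batch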
+@batch_producer
+@broker.subscriber("test")
+async def handle(msg: str) -> list[int]:
+ return [1, 2, 3]
+
+
+app = FastStream(broker)
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish("", "test")
diff --git a/tests/a_docs/rabbit/ack/__init__.py b/tests/docs/kafka/publish_example/__init__.py
similarity index 100%
rename from tests/a_docs/rabbit/ack/__init__.py
rename to tests/docs/kafka/publish_example/__init__.py
diff --git a/tests/docs/kafka/publish_example/test_app.py b/tests/docs/kafka/publish_example/test_app.py
new file mode 100644
index 0000000000..e9ab2a2038
--- /dev/null
+++ b/tests/docs/kafka/publish_example/test_app.py
@@ -0,0 +1,18 @@
+import pytest
+
+from docs.docs_src.kafka.publish_example.app import (
+ Data,
+ broker,
+ on_input_data,
+ to_output_data,
+)
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_base_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=0.2), "input_data")
+
+ on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
+ to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
diff --git a/tests/a_docs/rabbit/subscription/__init__.py b/tests/docs/kafka/publish_with_partition_key/__init__.py
similarity index 100%
rename from tests/a_docs/rabbit/subscription/__init__.py
rename to tests/docs/kafka/publish_with_partition_key/__init__.py
diff --git a/tests/docs/kafka/publish_with_partition_key/test_app.py b/tests/docs/kafka/publish_with_partition_key/test_app.py
new file mode 100644
index 0000000000..5dd2cac875
--- /dev/null
+++ b/tests/docs/kafka/publish_with_partition_key/test_app.py
@@ -0,0 +1,30 @@
+import pytest
+
+from docs.docs_src.kafka.publish_with_partition_key.app import (
+ Data,
+ broker,
+ on_input_data,
+ to_output_data,
+)
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio()
+async def test_app() -> None:
+ async with TestKafkaBroker(broker):
+ await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
+
+ on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)))
+ to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)))
+
+
+@pytest.mark.skip("we are not checking the key")
+@pytest.mark.asyncio()
+async def test_keys() -> None:
+ async with TestKafkaBroker(broker):
+ # we should be able to publish a message with the key
+ await broker.publish(Data(data=0.2), "input_data", key=b"my_key")
+
+ # we need to check the key as well
+ on_input_data.mock.assert_called_once_with(dict(Data(data=0.2)), key=b"my_key")
+ to_output_data.mock.assert_called_once_with(dict(Data(data=1.2)), key=b"key")
diff --git a/tests/a_docs/redis/list/__init__.py b/tests/docs/kafka/publisher_object/__init__.py
similarity index 100%
rename from tests/a_docs/redis/list/__init__.py
rename to tests/docs/kafka/publisher_object/__init__.py
diff --git a/tests/a_docs/kafka/publisher_object/test_publisher_object.py b/tests/docs/kafka/publisher_object/test_publisher_object.py
similarity index 100%
rename from tests/a_docs/kafka/publisher_object/test_publisher_object.py
rename to tests/docs/kafka/publisher_object/test_publisher_object.py
diff --git a/tests/a_docs/redis/pub_sub/__init__.py b/tests/docs/kafka/raw_publish/__init__.py
similarity index 100%
rename from tests/a_docs/redis/pub_sub/__init__.py
rename to tests/docs/kafka/raw_publish/__init__.py
diff --git a/tests/a_docs/kafka/raw_publish/test_raw_publish.py b/tests/docs/kafka/raw_publish/test_raw_publish.py
similarity index 100%
rename from tests/a_docs/kafka/raw_publish/test_raw_publish.py
rename to tests/docs/kafka/raw_publish/test_raw_publish.py
diff --git a/tests/docs/kafka/test_security.py b/tests/docs/kafka/test_security.py
new file mode 100644
index 0000000000..0eedbff4bc
--- /dev/null
+++ b/tests/docs/kafka/test_security.py
@@ -0,0 +1,120 @@
+import ssl
+from collections.abc import Iterator
+from contextlib import contextmanager
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+
+
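+# Patch AIOKafkaProducer with a MagicMock so entering the broker context only
+# records the constructor kwargs instead of opening a real connection.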
+@contextmanager
+def patch_aio_consumer_and_producer() -> Iterator[MagicMock]:
+    producer = MagicMock(return_value=AsyncMock())
+
+    with patch("aiokafka.AIOKafkaProducer", new=producer):
+        yield producer
+
+
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
+async def test_base_security() -> None:
+ from docs.docs_src.kafka.security.basic import broker as basic_broker
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with basic_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["security_protocol"] = "SSL"
+
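+        # dict-items subset check: every expected kwarg must appear in the real call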
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
+
+
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
+async def test_scram256() -> None:
+ from docs.docs_src.kafka.security.sasl_scram256 import (
+ broker as scram256_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with scram256_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "SCRAM-SHA-256"
+ call_kwargs["sasl_plain_username"] = "admin"
+ call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
+
+
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
+async def test_scram512() -> None:
+ from docs.docs_src.kafka.security.sasl_scram512 import (
+ broker as scram512_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with scram512_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "SCRAM-SHA-512"
+ call_kwargs["sasl_plain_username"] = "admin"
+ call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
+
+
+@pytest.mark.asyncio()
+@pytest.mark.kafka()
+async def test_plaintext() -> None:
+ from docs.docs_src.kafka.security.plaintext import (
+ broker as plaintext_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with plaintext_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {}
+ call_kwargs["sasl_mechanism"] = "PLAIN"
+ call_kwargs["sasl_plain_username"] = "admin"
+ call_kwargs["sasl_plain_password"] = "password" # pragma: allowlist secret
+ call_kwargs["security_protocol"] = "SASL_SSL"
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
+
+
+@pytest.mark.kafka()
+@pytest.mark.asyncio()
+async def test_gssapi() -> None:
+ from docs.docs_src.kafka.security.sasl_gssapi import (
+ broker as gssapi_broker,
+ )
+
+ with patch_aio_consumer_and_producer() as producer:
+ async with gssapi_broker:
+ producer_call_kwargs = producer.call_args.kwargs
+
+ call_kwargs = {
+ "sasl_mechanism": "GSSAPI",
+ "security_protocol": "SASL_SSL",
+ }
+
+ assert call_kwargs.items() <= producer_call_kwargs.items()
+
+ assert type(producer_call_kwargs["ssl_context"]) is ssl.SSLContext
diff --git a/tests/docs/nats/__init__.py b/tests/docs/nats/__init__.py
new file mode 100644
index 0000000000..87ead90ee6
--- /dev/null
+++ b/tests/docs/nats/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("nats")
diff --git a/tests/a_docs/redis/stream/__init__.py b/tests/docs/nats/ack/__init__.py
similarity index 100%
rename from tests/a_docs/redis/stream/__init__.py
rename to tests/docs/nats/ack/__init__.py
diff --git a/tests/docs/nats/ack/test_errors.py b/tests/docs/nats/ack/test_errors.py
new file mode 100644
index 0000000000..1c91969794
--- /dev/null
+++ b/tests/docs/nats/ack/test_errors.py
@@ -0,0 +1,19 @@
+from unittest.mock import patch
+
+import pytest
+from nats.aio.msg import Msg
+
+from faststream.nats import TestApp, TestNatsBroker
+from tests.tools import spy_decorator
+
+
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_ack_exc() -> None:
+ from docs.docs_src.nats.ack.errors import app, broker, handle
+
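+    # spy on the real Msg.ack to verify the message is still acked even when
+    # the documented handler fails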
+ with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m:
+ async with TestNatsBroker(broker, with_real=True), TestApp(app):
+ await handle.wait_call(3)
+
+ assert m.mock.call_count
diff --git a/tests/docs/nats/js/__init__.py b/tests/docs/nats/js/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/a_docs/nats/js/test_kv.py b/tests/docs/nats/js/test_kv.py
similarity index 81%
rename from tests/a_docs/nats/js/test_kv.py
rename to tests/docs/nats/js/test_kv.py
index 791db040cd..2cf01ecd07 100644
--- a/tests/a_docs/nats/js/test_kv.py
+++ b/tests/docs/nats/js/test_kv.py
@@ -4,9 +4,9 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_basic():
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_basic() -> None:
from docs.docs_src.nats.js.key_value import app, broker, handler
async with TestNatsBroker(broker, with_real=True), TestApp(app):
diff --git a/tests/a_docs/nats/js/test_main.py b/tests/docs/nats/js/test_main.py
similarity index 82%
rename from tests/a_docs/nats/js/test_main.py
rename to tests/docs/nats/js/test_main.py
index 0972d938ad..70eef4f7fa 100644
--- a/tests/a_docs/nats/js/test_main.py
+++ b/tests/docs/nats/js/test_main.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_main():
+@pytest.mark.asyncio()
+async def test_main() -> None:
from docs.docs_src.nats.js.main import app, broker, handler
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/docs/nats/js/test_object.py b/tests/docs/nats/js/test_object.py
new file mode 100644
index 0000000000..ac7dbade90
--- /dev/null
+++ b/tests/docs/nats/js/test_object.py
@@ -0,0 +1,17 @@
+import pytest
+
+from faststream import TestApp
+from faststream.nats import TestNatsBroker
+
+
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_basic() -> None:
+ from docs.docs_src.nats.js.object import app, broker, handler
+
+ async with (
+ TestNatsBroker(broker, with_real=True, connect_only=True),
+ TestApp(app),
+ ):
+ await handler.wait_call(3.0)
+ handler.mock.assert_called_once_with("file.txt")
diff --git a/tests/a_docs/nats/js/test_pull_sub.py b/tests/docs/nats/js/test_pull_sub.py
similarity index 83%
rename from tests/a_docs/nats/js/test_pull_sub.py
rename to tests/docs/nats/js/test_pull_sub.py
index a9fc8a5919..989508467d 100644
--- a/tests/a_docs/nats/js/test_pull_sub.py
+++ b/tests/docs/nats/js/test_pull_sub.py
@@ -3,8 +3,8 @@
from faststream.nats import TestApp, TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from docs.docs_src.nats.js.pull_sub import app, broker, handle
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/docs/nats/test_direct.py b/tests/docs/nats/test_direct.py
new file mode 100644
index 0000000000..6ca15a5f5a
--- /dev/null
+++ b/tests/docs/nats/test_direct.py
@@ -0,0 +1,19 @@
+import pytest
+
+from faststream.nats import TestApp, TestNatsBroker
+
+
+@pytest.mark.asyncio()
+async def test_direct() -> None:
+ from docs.docs_src.nats.direct import (
+ app,
+ base_handler1,
+ base_handler2,
+ base_handler3,
+ broker,
+ )
+
+ async with TestNatsBroker(broker), TestApp(app):
+ assert base_handler1.mock.call_count == 2
+ assert base_handler2.mock.call_count == 0
+ assert base_handler3.mock.call_count == 1
diff --git a/tests/a_docs/nats/test_pattern.py b/tests/docs/nats/test_pattern.py
similarity index 88%
rename from tests/a_docs/nats/test_pattern.py
rename to tests/docs/nats/test_pattern.py
index b00029d3b0..9be3baddca 100644
--- a/tests/a_docs/nats/test_pattern.py
+++ b/tests/docs/nats/test_pattern.py
@@ -3,8 +3,8 @@
from faststream.nats import TestApp, TestNatsBroker
-@pytest.mark.asyncio
-async def test_pattern():
+@pytest.mark.asyncio()
+async def test_pattern() -> None:
from docs.docs_src.nats.pattern import (
app,
base_handler1,
diff --git a/tests/docs/rabbit/__init__.py b/tests/docs/rabbit/__init__.py
new file mode 100644
index 0000000000..ebec43fcd5
--- /dev/null
+++ b/tests/docs/rabbit/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("aio_pika")
diff --git a/tests/docs/rabbit/ack/__init__.py b/tests/docs/rabbit/ack/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/docs/rabbit/ack/test_errors.py b/tests/docs/rabbit/ack/test_errors.py
new file mode 100644
index 0000000000..b930a77964
--- /dev/null
+++ b/tests/docs/rabbit/ack/test_errors.py
@@ -0,0 +1,19 @@
+from unittest.mock import patch
+
+import pytest
+from aio_pika import IncomingMessage
+
+from faststream.rabbit import TestApp, TestRabbitBroker
+from tests.tools import spy_decorator
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_ack_exc() -> None:
+ from docs.docs_src.rabbit.ack.errors import app, broker, handle
+
+ with patch.object(IncomingMessage, "ack", spy_decorator(IncomingMessage.ack)) as m:
+ async with TestRabbitBroker(broker, with_real=True), TestApp(app):
+ await handle.wait_call(3)
+
+ m.mock.assert_called_once()
diff --git a/tests/docs/rabbit/subscription/__init__.py b/tests/docs/rabbit/subscription/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/docs/rabbit/subscription/test_direct.py b/tests/docs/rabbit/subscription/test_direct.py
new file mode 100644
index 0000000000..948dc8e0cb
--- /dev/null
+++ b/tests/docs/rabbit/subscription/test_direct.py
@@ -0,0 +1,17 @@
+import pytest
+
+from faststream.rabbit import TestApp, TestRabbitBroker
+
+
+@pytest.mark.asyncio()
+async def test_index() -> None:
+ from docs.docs_src.rabbit.subscription.direct import (
+ app,
+ base_handler1,
+ base_handler3,
+ broker,
+ )
+
+ async with TestRabbitBroker(broker), TestApp(app):
+ base_handler1.mock.assert_called_with(b"")
+ base_handler3.mock.assert_called_once_with(b"")
diff --git a/tests/a_docs/rabbit/subscription/test_fanout.py b/tests/docs/rabbit/subscription/test_fanout.py
similarity index 85%
rename from tests/a_docs/rabbit/subscription/test_fanout.py
rename to tests/docs/rabbit/subscription/test_fanout.py
index 29dc6c2cd3..70761ec2f6 100644
--- a/tests/a_docs/rabbit/subscription/test_fanout.py
+++ b/tests/docs/rabbit/subscription/test_fanout.py
@@ -3,9 +3,9 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_index():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_index() -> None:
from docs.docs_src.rabbit.subscription.fanout import (
app,
base_handler1,
diff --git a/tests/a_docs/rabbit/subscription/test_header.py b/tests/docs/rabbit/subscription/test_header.py
similarity index 87%
rename from tests/a_docs/rabbit/subscription/test_header.py
rename to tests/docs/rabbit/subscription/test_header.py
index 1b96b26a19..ccea7663cc 100644
--- a/tests/a_docs/rabbit/subscription/test_header.py
+++ b/tests/docs/rabbit/subscription/test_header.py
@@ -3,8 +3,8 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-async def test_index():
+@pytest.mark.asyncio()
+async def test_index() -> None:
from docs.docs_src.rabbit.subscription.header import (
app,
base_handler1,
diff --git a/tests/a_docs/rabbit/subscription/test_index.py b/tests/docs/rabbit/subscription/test_index.py
similarity index 82%
rename from tests/a_docs/rabbit/subscription/test_index.py
rename to tests/docs/rabbit/subscription/test_index.py
index 185ab942e5..09d1a0eacf 100644
--- a/tests/a_docs/rabbit/subscription/test_index.py
+++ b/tests/docs/rabbit/subscription/test_index.py
@@ -3,8 +3,8 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-async def test_index():
+@pytest.mark.asyncio()
+async def test_index() -> None:
from docs.docs_src.rabbit.subscription.index import app, broker, handle
async with TestRabbitBroker(broker), TestApp(app):
diff --git a/tests/a_docs/rabbit/subscription/test_stream.py b/tests/docs/rabbit/subscription/test_stream.py
similarity index 79%
rename from tests/a_docs/rabbit/subscription/test_stream.py
rename to tests/docs/rabbit/subscription/test_stream.py
index 80e244ca1f..381ecf40ce 100644
--- a/tests/a_docs/rabbit/subscription/test_stream.py
+++ b/tests/docs/rabbit/subscription/test_stream.py
@@ -3,9 +3,9 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_stream():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_stream() -> None:
from docs.docs_src.rabbit.subscription.stream import app, broker, handle
async with TestRabbitBroker(broker, with_real=True), TestApp(app):
diff --git a/tests/a_docs/rabbit/subscription/test_topic.py b/tests/docs/rabbit/subscription/test_topic.py
similarity index 86%
rename from tests/a_docs/rabbit/subscription/test_topic.py
rename to tests/docs/rabbit/subscription/test_topic.py
index 45e2f51c9f..28d9590605 100644
--- a/tests/a_docs/rabbit/subscription/test_topic.py
+++ b/tests/docs/rabbit/subscription/test_topic.py
@@ -3,8 +3,8 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-async def test_index():
+@pytest.mark.asyncio()
+async def test_index() -> None:
from docs.docs_src.rabbit.subscription.topic import (
app,
base_handler1,
diff --git a/tests/a_docs/rabbit/test_bind.py b/tests/docs/rabbit/test_bind.py
similarity index 95%
rename from tests/a_docs/rabbit/test_bind.py
rename to tests/docs/rabbit/test_bind.py
index d2656a6f5c..76c7b8d6fd 100644
--- a/tests/a_docs/rabbit/test_bind.py
+++ b/tests/docs/rabbit/test_bind.py
@@ -7,8 +7,8 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
-@pytest.mark.rabbit
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
@require_aiopika
async def test_bind(monkeypatch, async_mock: AsyncMock):
from docs.docs_src.rabbit.bind import app, broker, some_exchange, some_queue
diff --git a/tests/a_docs/rabbit/test_declare.py b/tests/docs/rabbit/test_declare.py
similarity index 78%
rename from tests/a_docs/rabbit/test_declare.py
rename to tests/docs/rabbit/test_declare.py
index d3720a1171..6cad81ae37 100644
--- a/tests/a_docs/rabbit/test_declare.py
+++ b/tests/docs/rabbit/test_declare.py
@@ -3,9 +3,9 @@
from faststream import TestApp
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_declare():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_declare() -> None:
from docs.docs_src.rabbit.declare import app, broker
async with TestApp(app):
diff --git a/tests/docs/rabbit/test_security.py b/tests/docs/rabbit/test_security.py
new file mode 100644
index 0000000000..86bbd1252b
--- /dev/null
+++ b/tests/docs/rabbit/test_security.py
@@ -0,0 +1,65 @@
+import pytest
+from aiormq.exceptions import AMQPConnectionError
+
+from faststream.specification.asyncapi import AsyncAPI
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_base_security() -> None:
+ from docs.docs_src.rabbit.security.basic import broker
+
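+    # the TLS connect attempt is expected to fail against the test server;
+    # the broker object itself is all we need to render the AsyncAPI schema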
+ with pytest.raises(AMQPConnectionError):
+ async with broker:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqps",
+ "protocolVersion": "0.9.1",
+ "security": [],
+ "url": "amqps://guest:guest@localhost:5672/", # pragma: allowlist secret
+ },
+ },
+ }
+
+
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_plaintext_security() -> None:
+ from docs.docs_src.rabbit.security.plaintext import broker
+
+ with pytest.raises(AMQPConnectionError):
+ async with broker:
+ pass
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+ assert (
+ schema
+ == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "amqps",
+ "protocolVersion": "0.9.1",
+ "security": [{"user-password": []}],
+ "url": "amqps://admin:password@localhost:5672/", # pragma: allowlist secret
+ },
+ },
+ }
+ )
diff --git a/tests/docs/redis/__init__.py b/tests/docs/redis/__init__.py
new file mode 100644
index 0000000000..4752ef19b1
--- /dev/null
+++ b/tests/docs/redis/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("redis")
diff --git a/tests/docs/redis/list/__init__.py b/tests/docs/redis/list/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/docs/redis/list/test_list_pub.py b/tests/docs/redis/list/test_list_pub.py
new file mode 100644
index 0000000000..cf3b094882
--- /dev/null
+++ b/tests/docs/redis/list/test_list_pub.py
@@ -0,0 +1,15 @@
+import pytest
+
+from faststream.redis import TestRedisBroker
+
+
+@pytest.mark.asyncio()
+async def test_list_publisher() -> None:
+ from docs.docs_src.redis.list.list_pub import broker, on_input_data
+
+ publisher = list(broker._publishers)[0] # noqa: RUF015
+
+ async with TestRedisBroker(broker) as br:
+ await br.publish({"data": 1.0}, list="input-list")
+ on_input_data.mock.assert_called_once_with({"data": 1.0})
+ publisher.mock.assert_called_once_with({"data": 2.0})
diff --git a/tests/a_docs/redis/list/test_list_sub.py b/tests/docs/redis/list/test_list_sub.py
similarity index 83%
rename from tests/a_docs/redis/list/test_list_sub.py
rename to tests/docs/redis/list/test_list_sub.py
index 30ec9320af..b9d8e66e03 100644
--- a/tests/a_docs/redis/list/test_list_sub.py
+++ b/tests/docs/redis/list/test_list_sub.py
@@ -3,8 +3,8 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_list():
+@pytest.mark.asyncio()
+async def test_list() -> None:
from docs.docs_src.redis.list.list_sub import broker, handle
async with TestRedisBroker(broker) as br:
diff --git a/tests/a_docs/redis/list/test_sub_batch.py b/tests/docs/redis/list/test_sub_batch.py
similarity index 86%
rename from tests/a_docs/redis/list/test_sub_batch.py
rename to tests/docs/redis/list/test_sub_batch.py
index 2bceec8aa8..0fe5560719 100644
--- a/tests/a_docs/redis/list/test_sub_batch.py
+++ b/tests/docs/redis/list/test_sub_batch.py
@@ -4,9 +4,9 @@
from tests.marks import python39
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@python39
-async def test_batch():
+async def test_batch() -> None:
from docs.docs_src.redis.list.sub_batch import broker, handle
async with TestRedisBroker(broker) as br:
diff --git a/tests/docs/redis/pub_sub/__init__.py b/tests/docs/redis/pub_sub/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/a_docs/redis/pub_sub/test_channel_sub.py b/tests/docs/redis/pub_sub/test_channel_sub.py
similarity index 82%
rename from tests/a_docs/redis/pub_sub/test_channel_sub.py
rename to tests/docs/redis/pub_sub/test_channel_sub.py
index 8a01f62acc..1f756024c2 100644
--- a/tests/a_docs/redis/pub_sub/test_channel_sub.py
+++ b/tests/docs/redis/pub_sub/test_channel_sub.py
@@ -3,8 +3,8 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_channel():
+@pytest.mark.asyncio()
+async def test_channel() -> None:
from docs.docs_src.redis.pub_sub.channel_sub import broker, handle
async with TestRedisBroker(broker) as br:
diff --git a/tests/a_docs/redis/pub_sub/test_channel_sub_pattern.py b/tests/docs/redis/pub_sub/test_channel_sub_pattern.py
similarity index 83%
rename from tests/a_docs/redis/pub_sub/test_channel_sub_pattern.py
rename to tests/docs/redis/pub_sub/test_channel_sub_pattern.py
index a4bc91236f..cb0bbf8b47 100644
--- a/tests/a_docs/redis/pub_sub/test_channel_sub_pattern.py
+++ b/tests/docs/redis/pub_sub/test_channel_sub_pattern.py
@@ -3,8 +3,8 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_pattern():
+@pytest.mark.asyncio()
+async def test_pattern() -> None:
from docs.docs_src.redis.pub_sub.channel_sub_pattern import broker, handle_test
async with TestRedisBroker(broker) as br:
diff --git a/tests/a_docs/redis/pub_sub/test_pattern_data.py b/tests/docs/redis/pub_sub/test_pattern_data.py
similarity index 82%
rename from tests/a_docs/redis/pub_sub/test_pattern_data.py
rename to tests/docs/redis/pub_sub/test_pattern_data.py
index 3f12374b6e..b532f1b2f9 100644
--- a/tests/a_docs/redis/pub_sub/test_pattern_data.py
+++ b/tests/docs/redis/pub_sub/test_pattern_data.py
@@ -3,8 +3,8 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_pattern_data():
+@pytest.mark.asyncio()
+async def test_pattern_data() -> None:
from docs.docs_src.redis.pub_sub.pattern_data import broker, handle_test
async with TestRedisBroker(broker) as br:
diff --git a/tests/a_docs/redis/pub_sub/test_publihser_object.py b/tests/docs/redis/pub_sub/test_publihser_object.py
similarity index 100%
rename from tests/a_docs/redis/pub_sub/test_publihser_object.py
rename to tests/docs/redis/pub_sub/test_publihser_object.py
diff --git a/tests/a_docs/redis/pub_sub/test_publisher_decorator.py b/tests/docs/redis/pub_sub/test_publisher_decorator.py
similarity index 87%
rename from tests/a_docs/redis/pub_sub/test_publisher_decorator.py
rename to tests/docs/redis/pub_sub/test_publisher_decorator.py
index 0a15552556..c5af28db57 100644
--- a/tests/a_docs/redis/pub_sub/test_publisher_decorator.py
+++ b/tests/docs/redis/pub_sub/test_publisher_decorator.py
@@ -3,8 +3,8 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_publisher():
+@pytest.mark.asyncio()
+async def test_publisher() -> None:
from docs.docs_src.redis.pub_sub.publisher_decorator import (
broker,
on_input_data,
diff --git a/tests/a_docs/redis/pub_sub/test_raw_publish.py b/tests/docs/redis/pub_sub/test_raw_publish.py
similarity index 100%
rename from tests/a_docs/redis/pub_sub/test_raw_publish.py
rename to tests/docs/redis/pub_sub/test_raw_publish.py
diff --git a/tests/docs/redis/stream/__init__.py b/tests/docs/redis/stream/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/a_docs/redis/stream/test_ack_errors.py b/tests/docs/redis/stream/test_ack_errors.py
similarity index 86%
rename from tests/a_docs/redis/stream/test_ack_errors.py
rename to tests/docs/redis/stream/test_ack_errors.py
index 2e76294916..3adb5bc280 100644
--- a/tests/a_docs/redis/stream/test_ack_errors.py
+++ b/tests/docs/redis/stream/test_ack_errors.py
@@ -7,9 +7,9 @@
from tests.tools import spy_decorator
-@pytest.mark.redis
-@pytest.mark.asyncio
-async def test_stream_ack():
+@pytest.mark.redis()
+@pytest.mark.asyncio()
+async def test_stream_ack() -> None:
from docs.docs_src.redis.stream.ack_errors import app, broker, handle
with patch.object(Redis, "xack", spy_decorator(Redis.xack)) as m:
diff --git a/tests/a_docs/redis/stream/test_batch_sub.py b/tests/docs/redis/stream/test_batch_sub.py
similarity index 84%
rename from tests/a_docs/redis/stream/test_batch_sub.py
rename to tests/docs/redis/stream/test_batch_sub.py
index 24908211dc..22c3740344 100644
--- a/tests/a_docs/redis/stream/test_batch_sub.py
+++ b/tests/docs/redis/stream/test_batch_sub.py
@@ -4,9 +4,9 @@
from tests.marks import python39
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@python39
-async def test_stream_batch():
+async def test_stream_batch() -> None:
from docs.docs_src.redis.stream.batch_sub import broker, handle
async with TestRedisBroker(broker) as br:
diff --git a/tests/a_docs/redis/stream/test_group.py b/tests/docs/redis/stream/test_group.py
similarity index 79%
rename from tests/a_docs/redis/stream/test_group.py
rename to tests/docs/redis/stream/test_group.py
index 7573a28312..d8c620cf70 100644
--- a/tests/a_docs/redis/stream/test_group.py
+++ b/tests/docs/redis/stream/test_group.py
@@ -3,8 +3,8 @@
from faststream.redis import TestApp, TestRedisBroker
-@pytest.mark.asyncio
-async def test_stream_group():
+@pytest.mark.asyncio()
+async def test_stream_group() -> None:
from docs.docs_src.redis.stream.group import app, broker, handle
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/a_docs/redis/stream/test_pub.py b/tests/docs/redis/stream/test_pub.py
similarity index 75%
rename from tests/a_docs/redis/stream/test_pub.py
rename to tests/docs/redis/stream/test_pub.py
index 0656267f7b..cc4d51cbfa 100644
--- a/tests/a_docs/redis/stream/test_pub.py
+++ b/tests/docs/redis/stream/test_pub.py
@@ -3,11 +3,11 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_stream_pub():
+@pytest.mark.asyncio()
+async def test_stream_pub() -> None:
from docs.docs_src.redis.stream.pub import broker, on_input_data
- publisher = list(broker._publishers.values())[0] # noqa: RUF015
+ publisher = list(broker._publishers)[0] # noqa: RUF015
async with TestRedisBroker(broker) as br:
await br.publish({"data": 1.0}, stream="input-stream")
diff --git a/tests/a_docs/redis/stream/test_sub.py b/tests/docs/redis/stream/test_sub.py
similarity index 82%
rename from tests/a_docs/redis/stream/test_sub.py
rename to tests/docs/redis/stream/test_sub.py
index f3d002b6fb..be4c9a993a 100644
--- a/tests/a_docs/redis/stream/test_sub.py
+++ b/tests/docs/redis/stream/test_sub.py
@@ -3,8 +3,8 @@
from faststream.redis import TestRedisBroker
-@pytest.mark.asyncio
-async def test_stream_sub():
+@pytest.mark.asyncio()
+async def test_stream_sub() -> None:
from docs.docs_src.redis.stream.sub import broker, handle
async with TestRedisBroker(broker) as br:
diff --git a/tests/docs/redis/test_rpc.py b/tests/docs/redis/test_rpc.py
new file mode 100644
index 0000000000..5784e8b2fd
--- /dev/null
+++ b/tests/docs/redis/test_rpc.py
@@ -0,0 +1,14 @@
+import pytest
+
+from faststream.redis import TestApp, TestRedisBroker
+
+
+@pytest.mark.asyncio()
+async def test_rpc() -> None:
+ from docs.docs_src.redis.rpc.app import (
+ app,
+ broker,
+ )
+
+ async with TestRedisBroker(broker), TestApp(app):
+ pass
diff --git a/tests/docs/redis/test_security.py b/tests/docs/redis/test_security.py
new file mode 100644
index 0000000000..c13630c808
--- /dev/null
+++ b/tests/docs/redis/test_security.py
@@ -0,0 +1,89 @@
+from collections.abc import Iterator
+from contextlib import contextmanager
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+from redis.exceptions import AuthenticationError
+
+from faststream.specification.asyncapi import AsyncAPI
+
+
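+# Stub asyncio.open_connection so redis-py talks to a scripted reader/writer
+# pair: the reader always answers with the RESP integer reply ``:1``.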
+@contextmanager
+def patch_asyncio_open_connection() -> Iterator[AsyncMock]:
+    reader = MagicMock()
+    reader.readline = AsyncMock(return_value=b":1\r\n")
+    reader.read = AsyncMock(return_value=b"")
+
+    writer = MagicMock()
+    writer.drain = AsyncMock()
+    writer.wait_closed = AsyncMock()
+
+    open_connection = AsyncMock(return_value=(reader, writer))
+
+    with patch("asyncio.open_connection", new=open_connection):
+        yield open_connection
+
+
+@pytest.mark.asyncio()
+@pytest.mark.redis()
+async def test_base_security() -> None:
+ with patch_asyncio_open_connection() as connection:
+ from docs.docs_src.redis.security.basic import broker
+
+ async with broker:
+ await broker.ping(0.01)
+
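+        # the patched open_connection must have been asked for a TLS socket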
+ assert connection.call_args.kwargs["ssl"]
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {"messages": {}, "schemas": {}, "securitySchemes": {}},
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "security": [],
+ "url": "redis://localhost:6379",
+ },
+ },
+ }
+
+
+@pytest.mark.asyncio()
+@pytest.mark.redis()
+async def test_plaintext_security() -> None:
+ with patch_asyncio_open_connection() as connection:
+ from docs.docs_src.redis.security.plaintext import broker
+
+ with pytest.raises(AuthenticationError):
+ async with broker:
+ await broker._connection.ping()
+
+ assert connection.call_args.kwargs["ssl"]
+
+ schema = AsyncAPI(broker, schema_version="2.6.0").to_jsonable()
+ assert schema == {
+ "asyncapi": "2.6.0",
+ "channels": {},
+ "components": {
+ "messages": {},
+ "schemas": {},
+ "securitySchemes": {"user-password": {"type": "userPassword"}},
+ },
+ "defaultContentType": "application/json",
+ "info": {"description": "", "title": "FastStream", "version": "0.1.0"},
+ "servers": {
+ "development": {
+ "protocol": "redis",
+ "protocolVersion": "custom",
+ "security": [{"user-password": []}],
+ "url": "redis://localhost:6379",
+ },
+ },
+ }
diff --git a/tests/examples/fastapi_integration/test_app.py b/tests/examples/fastapi_integration/test_app.py
index 24fb0bd6ac..3f4e69e582 100644
--- a/tests/examples/fastapi_integration/test_app.py
+++ b/tests/examples/fastapi_integration/test_app.py
@@ -3,22 +3,26 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_handler():
- from examples.fastapi_integration.testing import router
- from examples.fastapi_integration.testing import test_handler as test_
+async def test_handler() -> None:
+ from examples.fastapi_integration.testing import (
+ router,
+ test_handler as test_,
+ )
from faststream.rabbit import TestRabbitBroker
async with TestRabbitBroker(router.broker) as br:
await test_(br)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_incorrect():
- from examples.fastapi_integration.testing import router
- from examples.fastapi_integration.testing import test_incorrect as test_
+async def test_incorrect() -> None:
+ from examples.fastapi_integration.testing import (
+ router,
+ test_incorrect as test_,
+ )
from faststream.rabbit import TestRabbitBroker
async with TestRabbitBroker(router.broker) as br:
diff --git a/tests/examples/kafka/test_ack.py b/tests/examples/kafka/test_ack.py
index 064bfd4130..c2402f2f6a 100644
--- a/tests/examples/kafka/test_ack.py
+++ b/tests/examples/kafka/test_ack.py
@@ -4,12 +4,14 @@
from examples.kafka.ack_after_process import app, broker
from faststream.kafka import TestApp, TestKafkaBroker
-from faststream.kafka.message import KafkaMessage
+from faststream.kafka.message import KafkaAckableMessage
from tests.tools import spy_decorator
-@pytest.mark.asyncio
-async def test_ack():
- with patch.object(KafkaMessage, "ack", spy_decorator(KafkaMessage.ack)) as m:
+@pytest.mark.asyncio()
+async def test_ack() -> None:
+    with patch.object(
+        KafkaAckableMessage,
+        "ack",
+        spy_decorator(KafkaAckableMessage.ack),
+    ) as m:
async with TestKafkaBroker(broker), TestApp(app):
m.mock.assert_called_once()
diff --git a/tests/examples/kafka/test_batch_consume.py b/tests/examples/kafka/test_batch_consume.py
index 0f1dade28c..53d61197bc 100644
--- a/tests/examples/kafka/test_batch_consume.py
+++ b/tests/examples/kafka/test_batch_consume.py
@@ -4,8 +4,8 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
+@pytest.mark.asyncio()
+async def test_example() -> None:
async with TestKafkaBroker(broker), TestApp(app):
await handle.wait_call(3)
assert set(handle.mock.call_args[0][0]) == {"hi", "FastStream"}
diff --git a/tests/examples/kafka/test_batch_publish_1.py b/tests/examples/kafka/test_batch_publish_1.py
index 88bee705ee..015019cbe4 100644
--- a/tests/examples/kafka/test_batch_publish_1.py
+++ b/tests/examples/kafka/test_batch_publish_1.py
@@ -4,8 +4,8 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
+@pytest.mark.asyncio()
+async def test_example() -> None:
async with TestKafkaBroker(broker), TestApp(app):
await handle.wait_call(3)
assert set(handle.mock.call_args[0][0]) == {"hi", "FastStream"}
diff --git a/tests/examples/kafka/test_batch_publish_2.py b/tests/examples/kafka/test_batch_publish_2.py
index 974e279eab..3c66d26ff2 100644
--- a/tests/examples/kafka/test_batch_publish_2.py
+++ b/tests/examples/kafka/test_batch_publish_2.py
@@ -4,8 +4,8 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
+@pytest.mark.asyncio()
+async def test_example() -> None:
async with TestKafkaBroker(broker), TestApp(app):
await handle.wait_call(3)
assert set(handle.mock.call_args[0][0]) == {"hi", "FastStream"}
diff --git a/tests/examples/kafka/test_batch_publish_3.py b/tests/examples/kafka/test_batch_publish_3.py
index 62d91d0389..e453e4e5ec 100644
--- a/tests/examples/kafka/test_batch_publish_3.py
+++ b/tests/examples/kafka/test_batch_publish_3.py
@@ -4,8 +4,8 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
+@pytest.mark.asyncio()
+async def test_example() -> None:
async with TestKafkaBroker(broker), TestApp(app):
await handle.wait_call(3)
await handle_response.wait_call(3)
diff --git a/tests/examples/nats/test_e01_basic.py b/tests/examples/nats/test_e01_basic.py
index cd487c6184..fefb49c621 100644
--- a/tests/examples/nats/test_e01_basic.py
+++ b/tests/examples/nats/test_e01_basic.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e01_basic import app, broker, handler
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/nats/test_e02_basic_rpc.py b/tests/examples/nats/test_e02_basic_rpc.py
index f7a5e81100..28e64e46c2 100644
--- a/tests/examples/nats/test_e02_basic_rpc.py
+++ b/tests/examples/nats/test_e02_basic_rpc.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e02_basic_rpc import app, broker, handler
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/nats/test_e03_publisher.py b/tests/examples/nats/test_e03_publisher.py
index 870552189a..7bb17ec340 100644
--- a/tests/examples/nats/test_e03_publisher.py
+++ b/tests/examples/nats/test_e03_publisher.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e03_publisher import app, broker, handler, response_handler
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/nats/test_e04_js_basic.py b/tests/examples/nats/test_e04_js_basic.py
index b806b5ad51..f89865b110 100644
--- a/tests/examples/nats/test_e04_js_basic.py
+++ b/tests/examples/nats/test_e04_js_basic.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e04_js_basic import app, broker, handler
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/nats/test_e05_basic_and_js.py b/tests/examples/nats/test_e05_basic_and_js.py
index 3be0c91c4b..a94d260371 100644
--- a/tests/examples/nats/test_e05_basic_and_js.py
+++ b/tests/examples/nats/test_e05_basic_and_js.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e05_basic_and_js import app, broker, core_handler, js_handler
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/nats/test_e06_key_value.py b/tests/examples/nats/test_e06_key_value.py
index b8150b9ef6..e1977bbaf4 100644
--- a/tests/examples/nats/test_e06_key_value.py
+++ b/tests/examples/nats/test_e06_key_value.py
@@ -4,9 +4,9 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_basic():
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_basic() -> None:
from examples.nats.e06_key_value import app, broker, handler
async with TestNatsBroker(broker, with_real=True), TestApp(app):
diff --git a/tests/examples/nats/test_e07_object_storage.py b/tests/examples/nats/test_e07_object_storage.py
index c8783daaa0..bd2a0dfcb2 100644
--- a/tests/examples/nats/test_e07_object_storage.py
+++ b/tests/examples/nats/test_e07_object_storage.py
@@ -4,9 +4,9 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-@pytest.mark.nats
-async def test_basic():
+@pytest.mark.asyncio()
+@pytest.mark.nats()
+async def test_basic() -> None:
from examples.nats.e07_object_storage import app, broker, handler
async with TestNatsBroker(broker, with_real=True):
diff --git a/tests/examples/nats/test_e08_wildcards.py b/tests/examples/nats/test_e08_wildcards.py
index 38680dc7a9..b970866a65 100644
--- a/tests/examples/nats/test_e08_wildcards.py
+++ b/tests/examples/nats/test_e08_wildcards.py
@@ -4,8 +4,8 @@
from faststream.nats import TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e08_wildcards import app, broker, handler, handler_match
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/nats/test_e09_pull_sub.py b/tests/examples/nats/test_e09_pull_sub.py
index 67ff897278..198470db5c 100644
--- a/tests/examples/nats/test_e09_pull_sub.py
+++ b/tests/examples/nats/test_e09_pull_sub.py
@@ -3,8 +3,8 @@
from faststream.nats import TestApp, TestNatsBroker
-@pytest.mark.asyncio
-async def test_basic():
+@pytest.mark.asyncio()
+async def test_basic() -> None:
from examples.nats.e09_pull_sub import app, broker, handle
async with TestNatsBroker(broker), TestApp(app):
diff --git a/tests/examples/rabbit/test_direct.py b/tests/examples/rabbit/test_direct.py
index 8407924274..10a91fbff0 100644
--- a/tests/examples/rabbit/test_direct.py
+++ b/tests/examples/rabbit/test_direct.py
@@ -3,8 +3,8 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-async def test_index():
+@pytest.mark.asyncio()
+async def test_index() -> None:
from examples.rabbit.direct import (
app,
base_handler1,
diff --git a/tests/examples/rabbit/test_fanout.py b/tests/examples/rabbit/test_fanout.py
index f758c46176..f1d200425f 100644
--- a/tests/examples/rabbit/test_fanout.py
+++ b/tests/examples/rabbit/test_fanout.py
@@ -3,9 +3,9 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_index():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_index() -> None:
from examples.rabbit.fanout import (
app,
base_handler1,
diff --git a/tests/examples/rabbit/test_header.py b/tests/examples/rabbit/test_header.py
index 7fd0beb36e..8c2786913b 100644
--- a/tests/examples/rabbit/test_header.py
+++ b/tests/examples/rabbit/test_header.py
@@ -3,8 +3,8 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-async def test_index():
+@pytest.mark.asyncio()
+async def test_index() -> None:
from examples.rabbit.header import (
app,
base_handler1,
diff --git a/tests/examples/rabbit/test_stream.py b/tests/examples/rabbit/test_stream.py
index 4cb91a7d71..8273babccf 100644
--- a/tests/examples/rabbit/test_stream.py
+++ b/tests/examples/rabbit/test_stream.py
@@ -3,9 +3,9 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-@pytest.mark.rabbit
-async def test_stream():
+@pytest.mark.asyncio()
+@pytest.mark.rabbit()
+async def test_stream() -> None:
from examples.rabbit.stream import app, broker, handle
async with TestRabbitBroker(broker, with_real=True), TestApp(app):
diff --git a/tests/examples/rabbit/test_topic.py b/tests/examples/rabbit/test_topic.py
index a3458ad8d5..ff327cbd97 100644
--- a/tests/examples/rabbit/test_topic.py
+++ b/tests/examples/rabbit/test_topic.py
@@ -3,8 +3,8 @@
from faststream.rabbit import TestApp, TestRabbitBroker
-@pytest.mark.asyncio
-async def test_index():
+@pytest.mark.asyncio()
+async def test_index() -> None:
from examples.rabbit.topic import (
app,
base_handler1,
diff --git a/tests/examples/redis/test_channel_sub.py b/tests/examples/redis/test_channel_sub.py
index 5e97e69885..7de2522448 100644
--- a/tests/examples/redis/test_channel_sub.py
+++ b/tests/examples/redis/test_channel_sub.py
@@ -3,8 +3,8 @@
from faststream.redis import TestApp, TestRedisBroker
-@pytest.mark.asyncio
-async def test_channel():
+@pytest.mark.asyncio()
+async def test_channel() -> None:
from examples.redis.channel_sub import app, broker, handle
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/examples/redis/test_channel_sub_pattern.py b/tests/examples/redis/test_channel_sub_pattern.py
index 4e29dfa1f1..80a59fdfa8 100644
--- a/tests/examples/redis/test_channel_sub_pattern.py
+++ b/tests/examples/redis/test_channel_sub_pattern.py
@@ -3,8 +3,8 @@
from faststream.redis import TestApp, TestRedisBroker
-@pytest.mark.asyncio
-async def test_pattern():
+@pytest.mark.asyncio()
+async def test_pattern() -> None:
from examples.redis.channel_sub_pattern import app, broker, handle_test
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/examples/redis/test_list_sub.py b/tests/examples/redis/test_list_sub.py
index 4d81a471ba..c2e427dfa2 100644
--- a/tests/examples/redis/test_list_sub.py
+++ b/tests/examples/redis/test_list_sub.py
@@ -3,8 +3,8 @@
from faststream.redis import TestApp, TestRedisBroker
-@pytest.mark.asyncio
-async def test_list():
+@pytest.mark.asyncio()
+async def test_list() -> None:
from examples.redis.list_sub import app, broker, handle
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/examples/redis/test_list_sub_batch.py b/tests/examples/redis/test_list_sub_batch.py
index 3450c4366d..0d15f3fe31 100644
--- a/tests/examples/redis/test_list_sub_batch.py
+++ b/tests/examples/redis/test_list_sub_batch.py
@@ -4,9 +4,9 @@
from tests.marks import python39
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@python39
-async def test_batch():
+async def test_batch() -> None:
from examples.redis.list_sub_batch import app, broker, handle
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/examples/redis/test_rpc.py b/tests/examples/redis/test_rpc.py
index cc71bc2081..e7ec71a34f 100644
--- a/tests/examples/redis/test_rpc.py
+++ b/tests/examples/redis/test_rpc.py
@@ -3,8 +3,8 @@
from faststream.redis import TestApp, TestRedisBroker
-@pytest.mark.asyncio
-async def test_rpc():
+@pytest.mark.asyncio()
+async def test_rpc() -> None:
from examples.redis.rpc import (
app,
broker,
diff --git a/tests/examples/redis/test_stream_batch_sub.py b/tests/examples/redis/test_stream_batch_sub.py
index f9871ff99f..e6d8ff7039 100644
--- a/tests/examples/redis/test_stream_batch_sub.py
+++ b/tests/examples/redis/test_stream_batch_sub.py
@@ -4,9 +4,9 @@
from tests.marks import python39
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@python39
-async def test_stream_batch():
+async def test_stream_batch() -> None:
from examples.redis.stream_sub_batch import app, broker, handle
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/examples/redis/test_stream_sub.py b/tests/examples/redis/test_stream_sub.py
index bf260013f5..761914fd66 100644
--- a/tests/examples/redis/test_stream_sub.py
+++ b/tests/examples/redis/test_stream_sub.py
@@ -3,8 +3,8 @@
from faststream.redis import TestApp, TestRedisBroker
-@pytest.mark.asyncio
-async def test_stream_sub():
+@pytest.mark.asyncio()
+async def test_stream_sub() -> None:
from examples.redis.stream_sub import app, broker, handle
async with TestRedisBroker(broker), TestApp(app):
diff --git a/tests/examples/router/test_basic_consume.py b/tests/examples/router/test_basic_consume.py
index 07475f0e56..3a5fcb4044 100644
--- a/tests/examples/router/test_basic_consume.py
+++ b/tests/examples/router/test_basic_consume.py
@@ -4,8 +4,8 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
+@pytest.mark.asyncio()
+async def test_example() -> None:
async with TestKafkaBroker(broker), TestApp(app):
await handle.wait_call(3)
diff --git a/tests/examples/router/test_basic_publish.py b/tests/examples/router/test_basic_publish.py
index 973dec7982..0b5224e556 100644
--- a/tests/examples/router/test_basic_publish.py
+++ b/tests/examples/router/test_basic_publish.py
@@ -4,8 +4,8 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
+@pytest.mark.asyncio()
+async def test_example() -> None:
async with TestKafkaBroker(broker), TestApp(app):
await handle.wait_call(3)
await handle_response.wait_call(3)
diff --git a/tests/examples/router/test_delay_registration.py b/tests/examples/router/test_delay_registration.py
index dc8197b8bd..045aebd27d 100644
--- a/tests/examples/router/test_delay_registration.py
+++ b/tests/examples/router/test_delay_registration.py
@@ -4,9 +4,9 @@
from faststream.kafka import TestApp, TestKafkaBroker
-@pytest.mark.asyncio
-async def test_example():
- sub = next(iter(broker._subscribers.values()))
+@pytest.mark.asyncio()
+async def test_example() -> None:
+ sub = next(iter(broker._subscribers))
sub.topic = "prefix_in"
handle = sub.calls[0].handler
diff --git a/tests/examples/test_e01_basic_consume.py b/tests/examples/test_e01_basic_consume.py
index 70157fc328..0ca69e1333 100644
--- a/tests/examples/test_e01_basic_consume.py
+++ b/tests/examples/test_e01_basic_consume.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e01_basic_consume import app, broker, handle
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e02_1_basic_publisher.py b/tests/examples/test_e02_1_basic_publisher.py
index b6c1b5d703..6ba632668b 100644
--- a/tests/examples/test_e02_1_basic_publisher.py
+++ b/tests/examples/test_e02_1_basic_publisher.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e02_1_basic_publisher import app, broker, handle, handle_response
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e02_2_basic_publisher.py b/tests/examples/test_e02_2_basic_publisher.py
index 38498dc1bd..94ce18164b 100644
--- a/tests/examples/test_e02_2_basic_publisher.py
+++ b/tests/examples/test_e02_2_basic_publisher.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e02_2_basic_publisher import app, broker, handle, handle_response
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e02_3_basic_publisher.py b/tests/examples/test_e02_3_basic_publisher.py
index 73664f7641..9842332f64 100644
--- a/tests/examples/test_e02_3_basic_publisher.py
+++ b/tests/examples/test_e02_3_basic_publisher.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e02_3_basic_publisher import app, broker, handle, handle_response
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e03_miltiple_pubsub.py b/tests/examples/test_e03_miltiple_pubsub.py
index 4e624343d0..fbcecf0004 100644
--- a/tests/examples/test_e03_miltiple_pubsub.py
+++ b/tests/examples/test_e03_miltiple_pubsub.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e03_miltiple_pubsub import (
app,
broker,
diff --git a/tests/examples/test_e04_msg_filter.py b/tests/examples/test_e04_msg_filter.py
index 489d672bac..08dc87f59c 100644
--- a/tests/examples/test_e04_msg_filter.py
+++ b/tests/examples/test_e04_msg_filter.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e04_msg_filter import app, broker, handle_json, handle_other_messages
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e05_rpc_request.py b/tests/examples/test_e05_rpc_request.py
index b005bda0f8..a72f1be7c6 100644
--- a/tests/examples/test_e05_rpc_request.py
+++ b/tests/examples/test_e05_rpc_request.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e05_rpc_request import app, broker, handle
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e06_manual_ack.py b/tests/examples/test_e06_manual_ack.py
index 4272c15663..9fc261b837 100644
--- a/tests/examples/test_e06_manual_ack.py
+++ b/tests/examples/test_e06_manual_ack.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e06_manual_ack import app, broker, handle
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e07_ack_immediately.py b/tests/examples/test_e07_ack_immediately.py
index 3413ec320c..fb701b1a0b 100644
--- a/tests/examples/test_e07_ack_immediately.py
+++ b/tests/examples/test_e07_ack_immediately.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e07_ack_immediately import app, broker, handle
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e08_testing.py b/tests/examples/test_e08_testing.py
index 52613be0c3..fb72181dc2 100644
--- a/tests/examples/test_e08_testing.py
+++ b/tests/examples/test_e08_testing.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_handle():
+async def test_handle() -> None:
from examples.e08_testing import test_handle as _test
await _test()
diff --git a/tests/examples/test_e09_testing_mocks.py b/tests/examples/test_e09_testing_mocks.py
index 83b097f186..1a26328258 100644
--- a/tests/examples/test_e09_testing_mocks.py
+++ b/tests/examples/test_e09_testing_mocks.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_handle():
+async def test_handle() -> None:
from examples.e09_testing_mocks import test_handle as _test
await _test()
diff --git a/tests/examples/test_e10_middlewares.py b/tests/examples/test_e10_middlewares.py
index f6f1220392..b2db14d434 100644
--- a/tests/examples/test_e10_middlewares.py
+++ b/tests/examples/test_e10_middlewares.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e10_middlewares import app, broker, handle
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/examples/test_e11_settings.py b/tests/examples/test_e11_settings.py
index 5992637beb..8e2a19d8af 100644
--- a/tests/examples/test_e11_settings.py
+++ b/tests/examples/test_e11_settings.py
@@ -3,9 +3,9 @@
from tests.marks import require_aiopika
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_example():
+async def test_example() -> None:
from examples.e11_settings import app, broker, handle
from faststream.rabbit import TestApp, TestRabbitBroker
diff --git a/tests/log/test_formatter.py b/tests/log/test_formatter.py
index e8f21a9e52..efc50ee27e 100644
--- a/tests/log/test_formatter.py
+++ b/tests/log/test_formatter.py
@@ -1,10 +1,10 @@
import logging
-from faststream.log.formatter import ColourizedFormatter
+from faststream._internal.log.formatter import ColourizedFormatter
-def test_formatter():
- logger = logging.getLogger(__file__)
+def test_formatter() -> None:
+ logger = logging.getLogger(__name__)
handler = logging.Handler()
formatter = ColourizedFormatter("%(message)s")
handler.setFormatter(formatter)
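
Two fixes land together in this file: the formatter import moves under faststream._internal, and the test logger is named by module rather than by file path. The latter matters because logging builds its hierarchy from dotted names, so levels and handlers configured on parent packages only propagate when loggers use module names; a stdlib-only sketch:

import logging

# __file__ resolves to a filesystem path and sits outside the dotted-name
# hierarchy; __name__ is the importable module name, so configuration on
# parent packages (e.g. "tests") applies to this logger as well.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(message)s"))
logger.addHandler(handler)
logger.warning("emitted through the module-named logger")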
diff --git a/tests/marks.py b/tests/marks.py
index 07bde035b0..ad698dac45 100644
--- a/tests/marks.py
+++ b/tests/marks.py
@@ -2,7 +2,7 @@
import pytest
-from faststream._compat import PYDANTIC_V2
+from faststream._internal._compat import PYDANTIC_V2
python39 = pytest.mark.skipif(
sys.version_info < (3, 9),
diff --git a/tests/mocks.py b/tests/mocks.py
index 3482444b68..fc3d8b5cf6 100644
--- a/tests/mocks.py
+++ b/tests/mocks.py
@@ -1,16 +1,18 @@
+from collections.abc import Iterator, Mapping
from contextlib import contextmanager
-from typing import Any, Mapping
+from typing import Any
from unittest.mock import Mock
from pytest import MonkeyPatch # noqa: PT013
@contextmanager
-def mock_pydantic_settings_env(env_mapping: Mapping[str, Any]):
+def mock_pydantic_settings_env(env_mapping: Mapping[str, Any]) -> Iterator[None]:
with MonkeyPatch().context() as c:
mock = Mock()
mock.return_value = env_mapping
c.setattr(
- "pydantic_settings.sources.DotEnvSettingsSource._read_env_files", mock
+ "pydantic_settings.sources.DotEnvSettingsSource._read_env_files",
+ mock,
)
yield
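
Since the decorated function is a generator, its annotation describes the iterator it yields: contextlib.contextmanager consumes a function returning Iterator[None] and hands back a context-manager factory. A self-contained sketch of the shape:

from collections.abc import Iterator
from contextlib import contextmanager

@contextmanager
def managed() -> Iterator[None]:
    # setup runs before the yield, teardown after; annotating the raw
    # function "-> None" would be wrong, because it returns a generator
    print("enter")
    try:
        yield
    finally:
        print("exit")

with managed():
    print("inside")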
diff --git a/tests/mypy/kafka.py b/tests/mypy/kafka.py
index eeeef066ed..4a15f33422 100644
--- a/tests/mypy/kafka.py
+++ b/tests/mypy/kafka.py
@@ -1,10 +1,14 @@
-from typing import Awaitable, Callable
+from collections.abc import Awaitable
+from typing import Callable
+import prometheus_client
from aiokafka import ConsumerRecord
+from faststream._internal.basic_types import DecodedMessage
from faststream.kafka import KafkaBroker, KafkaMessage, KafkaRoute, KafkaRouter
from faststream.kafka.fastapi import KafkaRouter as FastAPIRouter
-from faststream.types import DecodedMessage
+from faststream.kafka.opentelemetry import KafkaTelemetryMiddleware
+from faststream.kafka.prometheus import KafkaPrometheusMiddleware
def sync_decoder(msg: KafkaMessage) -> DecodedMessage:
@@ -16,7 +20,8 @@ async def async_decoder(msg: KafkaMessage) -> DecodedMessage:
async def custom_decoder(
- msg: KafkaMessage, original: Callable[[KafkaMessage], Awaitable[DecodedMessage]]
+ msg: KafkaMessage,
+ original: Callable[[KafkaMessage], Awaitable[DecodedMessage]],
) -> DecodedMessage:
return await original(msg)
@@ -27,15 +32,16 @@ async def custom_decoder(
def sync_parser(msg: ConsumerRecord) -> KafkaMessage:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def async_parser(msg: ConsumerRecord) -> KafkaMessage:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def custom_parser(
- msg: ConsumerRecord, original: Callable[[ConsumerRecord], Awaitable[KafkaMessage]]
+ msg: ConsumerRecord,
+ original: Callable[[ConsumerRecord], Awaitable[KafkaMessage]],
) -> KafkaMessage:
return await original(msg)
@@ -197,7 +203,7 @@ def async_handler() -> None: ...
parser=custom_parser,
decoder=custom_decoder,
),
- )
+ ),
)
@@ -263,3 +269,13 @@ def handle20() -> None: ...
@fastapi_router.subscriber("test")
@fastapi_router.publisher("test2")
async def handle21() -> None: ...
+
+
+otlp_middleware = KafkaTelemetryMiddleware()
+KafkaBroker().add_middleware(otlp_middleware)
+KafkaBroker(middlewares=[otlp_middleware])
+
+
+prometheus_middleware = KafkaPrometheusMiddleware(registry=prometheus_client.REGISTRY)
+KafkaBroker().add_middleware(prometheus_middleware)
+KafkaBroker(middlewares=[prometheus_middleware])
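
These trailing additions are not runtime tests: files under tests/mypy/ are type-checking fixtures, so the new lines assert that the telemetry and Prometheus middlewares are accepted by both registration styles. Condensed, the contract being checked is:

import prometheus_client

from faststream.kafka import KafkaBroker
from faststream.kafka.prometheus import KafkaPrometheusMiddleware

# both paths must type-check: post-hoc registration and the constructor argument
middleware = KafkaPrometheusMiddleware(registry=prometheus_client.REGISTRY)
KafkaBroker().add_middleware(middleware)
KafkaBroker(middlewares=[middleware])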
diff --git a/tests/mypy/nats.py b/tests/mypy/nats.py
index 955458eada..01d3210f8a 100644
--- a/tests/mypy/nats.py
+++ b/tests/mypy/nats.py
@@ -1,10 +1,14 @@
-from typing import Awaitable, Callable
+from collections.abc import Awaitable
+from typing import Callable
+import prometheus_client
from nats.aio.msg import Msg
+from faststream._internal.basic_types import DecodedMessage
from faststream.nats import NatsBroker, NatsMessage, NatsRoute, NatsRouter
from faststream.nats.fastapi import NatsRouter as FastAPIRouter
-from faststream.types import DecodedMessage
+from faststream.nats.opentelemetry import NatsTelemetryMiddleware
+from faststream.nats.prometheus import NatsPrometheusMiddleware
def sync_decoder(msg: NatsMessage) -> DecodedMessage:
@@ -16,7 +20,8 @@ async def async_decoder(msg: NatsMessage) -> DecodedMessage:
async def custom_decoder(
- msg: NatsMessage, original: Callable[[NatsMessage], Awaitable[DecodedMessage]]
+ msg: NatsMessage,
+ original: Callable[[NatsMessage], Awaitable[DecodedMessage]],
) -> DecodedMessage:
return await original(msg)
@@ -27,15 +32,16 @@ async def custom_decoder(
def sync_parser(msg: Msg) -> NatsMessage:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def async_parser(msg: Msg) -> NatsMessage:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def custom_parser(
- msg: Msg, original: Callable[[Msg], Awaitable[NatsMessage]]
+ msg: Msg,
+ original: Callable[[Msg], Awaitable[NatsMessage]],
) -> NatsMessage:
return await original(msg)
@@ -198,7 +204,7 @@ def async_handler() -> None: ...
parser=custom_parser,
decoder=custom_decoder,
),
- )
+ ),
)
@@ -264,3 +270,13 @@ def handle20() -> None: ...
@fastapi_router.subscriber("test")
@fastapi_router.publisher("test2")
async def handle21() -> None: ...
+
+
+otlp_middleware = NatsTelemetryMiddleware()
+NatsBroker().add_middleware(otlp_middleware)
+NatsBroker(middlewares=[otlp_middleware])
+
+
+prometheus_middleware = NatsPrometheusMiddleware(registry=prometheus_client.REGISTRY)
+NatsBroker().add_middleware(prometheus_middleware)
+NatsBroker(middlewares=[prometheus_middleware])
diff --git a/tests/mypy/rabbit.py b/tests/mypy/rabbit.py
index 064f6faad7..d70733ece1 100644
--- a/tests/mypy/rabbit.py
+++ b/tests/mypy/rabbit.py
@@ -1,10 +1,14 @@
-from typing import Awaitable, Callable
+from collections.abc import Awaitable
+from typing import Callable
+import prometheus_client
from aio_pika import IncomingMessage
+from faststream._internal.basic_types import DecodedMessage
from faststream.rabbit import RabbitBroker, RabbitMessage, RabbitRoute, RabbitRouter
from faststream.rabbit.fastapi import RabbitRouter as FastAPIRouter
-from faststream.types import DecodedMessage
+from faststream.rabbit.opentelemetry import RabbitTelemetryMiddleware
+from faststream.rabbit.prometheus import RabbitPrometheusMiddleware
def sync_decoder(msg: RabbitMessage) -> DecodedMessage:
@@ -16,7 +20,8 @@ async def async_decoder(msg: RabbitMessage) -> DecodedMessage:
async def custom_decoder(
- msg: RabbitMessage, original: Callable[[RabbitMessage], Awaitable[DecodedMessage]]
+ msg: RabbitMessage,
+ original: Callable[[RabbitMessage], Awaitable[DecodedMessage]],
) -> DecodedMessage:
return await original(msg)
@@ -27,11 +32,11 @@ async def custom_decoder(
def sync_parser(msg: IncomingMessage) -> RabbitMessage:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def async_parser(msg: IncomingMessage) -> RabbitMessage:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def custom_parser(
@@ -198,7 +203,7 @@ def async_handler() -> None: ...
parser=custom_parser,
decoder=custom_decoder,
),
- )
+ ),
)
@@ -265,3 +270,13 @@ def handle20() -> None: ...
@fastapi_router.subscriber("test")
@fastapi_router.publisher("test2")
async def handle21() -> None: ...
+
+
+otlp_middleware = RabbitTelemetryMiddleware()
+RabbitBroker().add_middleware(otlp_middleware)
+RabbitBroker(middlewares=[otlp_middleware])
+
+
+prometheus_middleware = RabbitPrometheusMiddleware(registry=prometheus_client.REGISTRY)
+RabbitBroker().add_middleware(prometheus_middleware)
+RabbitBroker(middlewares=[prometheus_middleware])
diff --git a/tests/mypy/redis.py b/tests/mypy/redis.py
index 58a3da36cd..6c85fff856 100644
--- a/tests/mypy/redis.py
+++ b/tests/mypy/redis.py
@@ -1,12 +1,19 @@
-from typing import Awaitable, Callable
+from collections.abc import Awaitable
+from typing import Callable
-from faststream.redis import RedisBroker as Broker
-from faststream.redis import RedisMessage as Message
-from faststream.redis import RedisRoute as Route
-from faststream.redis import RedisRouter as StreamRouter
+import prometheus_client
+
+from faststream._internal.basic_types import DecodedMessage
+from faststream.redis import (
+ RedisBroker as Broker,
+ RedisMessage as Message,
+ RedisRoute as Route,
+ RedisRouter as StreamRouter,
+)
from faststream.redis.fastapi import RedisRouter as FastAPIRouter
from faststream.redis.message import RedisMessage as Msg
-from faststream.types import DecodedMessage
+from faststream.redis.opentelemetry import RedisTelemetryMiddleware
+from faststream.redis.prometheus import RedisPrometheusMiddleware
def sync_decoder(msg: Message) -> DecodedMessage:
@@ -18,7 +25,8 @@ async def async_decoder(msg: Message) -> DecodedMessage:
async def custom_decoder(
- msg: Message, original: Callable[[Message], Awaitable[DecodedMessage]]
+ msg: Message,
+ original: Callable[[Message], Awaitable[DecodedMessage]],
) -> DecodedMessage:
return await original(msg)
@@ -29,15 +37,16 @@ async def custom_decoder(
def sync_parser(msg: Msg) -> Message:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def async_parser(msg: Msg) -> Message:
- return "" # type: ignore
+ return "" # type: ignore[return-value]
async def custom_parser(
- msg: Msg, original: Callable[[Msg], Awaitable[Message]]
+ msg: Msg,
+ original: Callable[[Msg], Awaitable[Message]],
) -> Message:
return await original(msg)
@@ -201,7 +210,7 @@ def async_handler() -> None: ...
parser=custom_parser,
decoder=custom_decoder,
),
- )
+ ),
)
@@ -267,3 +276,13 @@ def handle20() -> None: ...
@fastapi_router.subscriber("test")
@fastapi_router.publisher("test2")
async def handle21() -> None: ...
+
+
+otlp_middleware = RedisTelemetryMiddleware()
+Broker().add_middleware(otlp_middleware)
+Broker(middlewares=[otlp_middleware])
+
+
+prometheus_middleware = RedisPrometheusMiddleware(registry=prometheus_client.REGISTRY)
+Broker().add_middleware(prometheus_middleware)
+Broker(middlewares=[prometheus_middleware])
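
One detail repeats across all four mypy fixtures: bare "# type: ignore" comments become "# type: ignore[return-value]". Scoping the ignore to a single error code keeps mypy honest about everything else on the line; a tiny illustration (hypothetical function, stdlib only):

def parse(raw: bytes) -> int:
    # deliberately wrong return type, as in the parser stubs above; only the
    # return-value error is silenced, so an undefined name here would still fail
    return ""  # type: ignore[return-value]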
diff --git a/tests/opentelemetry/basic.py b/tests/opentelemetry/basic.py
index d3a28ab0ce..2b0396b8f7 100644
--- a/tests/opentelemetry/basic.py
+++ b/tests/opentelemetry/basic.py
@@ -1,5 +1,5 @@
import asyncio
-from typing import List, Optional, Tuple, Type, cast
+from typing import Any, Optional, cast
from unittest.mock import Mock
import pytest
@@ -16,41 +16,52 @@
from opentelemetry.semconv.trace import SpanAttributes as SpanAttr
from opentelemetry.trace import SpanKind, get_current_span
-from faststream.broker.core.usecase import BrokerUsecase
+from faststream._internal.broker.broker import BrokerUsecase
from faststream.opentelemetry import Baggage, CurrentBaggage, CurrentSpan
from faststream.opentelemetry.consts import (
ERROR_TYPE,
MESSAGING_DESTINATION_PUBLISH_NAME,
)
-from faststream.opentelemetry.middleware import MessageAction as Action
-from faststream.opentelemetry.middleware import TelemetryMiddleware
+from faststream.opentelemetry.middleware import (
+ MessageAction as Action,
+ TelemetryMiddleware,
+)
from tests.brokers.base.basic import BaseTestcaseConfig
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class LocalTelemetryTestcase(BaseTestcaseConfig):
messaging_system: str
include_messages_counters: bool
- broker_class: Type[BrokerUsecase]
resource: Resource = Resource.create(attributes={"service.name": "faststream.test"})
-
telemetry_middleware_class: TelemetryMiddleware
- def patch_broker(self, broker: BrokerUsecase) -> BrokerUsecase:
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs: Any,
+ ) -> BrokerUsecase[Any, Any]:
+ raise NotImplementedError
+
+ def patch_broker(
+ self,
+ broker: BrokerUsecase[Any, Any],
+ **kwargs: Any,
+ ) -> BrokerUsecase[Any, Any]:
return broker
def destination_name(self, queue: str) -> str:
return queue
@staticmethod
- def get_spans(exporter: InMemorySpanExporter) -> List[Span]:
- spans = cast(Tuple[Span, ...], exporter.get_finished_spans())
- return sorted(spans, key=lambda s: s.start_time)
+ def get_spans(exporter: InMemorySpanExporter) -> list[Span]:
+ spans = cast("tuple[Span, ...]", exporter.get_finished_spans())
+ return sorted(spans, key=lambda s: s.start_time or 0)
@staticmethod
def get_metrics(
reader: InMemoryMetricReader,
- ) -> List[Metric]:
+ ) -> list[Metric]:
"""Get sorted metrics.
Return order:
@@ -62,24 +73,23 @@ def get_metrics(
metrics = reader.get_metrics_data()
metrics = metrics.resource_metrics[0].scope_metrics[0].metrics
metrics = sorted(metrics, key=lambda m: m.name)
- return cast(List[Metric], metrics)
+ return cast("list[Metric]", metrics)
- @pytest.fixture
+ @pytest.fixture()
def tracer_provider(self) -> TracerProvider:
- tracer_provider = TracerProvider(resource=self.resource)
- return tracer_provider
+ return TracerProvider(resource=self.resource)
- @pytest.fixture
+ @pytest.fixture()
def trace_exporter(self, tracer_provider: TracerProvider) -> InMemorySpanExporter:
exporter = InMemorySpanExporter()
tracer_provider.add_span_processor(SimpleSpanProcessor(exporter))
return exporter
- @pytest.fixture
+ @pytest.fixture()
def metric_reader(self) -> InMemoryMetricReader:
return InMemoryMetricReader()
- @pytest.fixture
+ @pytest.fixture()
def meter_provider(self, metric_reader: InMemoryMetricReader) -> MeterProvider:
return MeterProvider(metric_readers=(metric_reader,), resource=self.resource)
@@ -99,14 +109,14 @@ def assert_span(
SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID
]
assert span.name == f"{self.destination_name(queue)} {action}", span.name
- assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER), span.kind
+ assert span.kind in {SpanKind.CONSUMER, SpanKind.PRODUCER}, span.kind
- if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH):
+ if span.kind == SpanKind.PRODUCER and action in {Action.CREATE, Action.PUBLISH}:
assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == queue, attrs[
SpanAttr.MESSAGING_DESTINATION_NAME
]
- if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS):
+ if span.kind == SpanKind.CONSUMER and action in {Action.CREATE, Action.PROCESS}:
assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == queue, attrs[
MESSAGING_DESTINATION_PUBLISH_NAME
]
@@ -116,7 +126,7 @@ def assert_span(
if action == Action.PROCESS:
assert attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES] == len(
- msg
+ msg,
), attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES]
assert attrs[SpanAttr.MESSAGING_OPERATION] == action, attrs[
SpanAttr.MESSAGING_OPERATION
@@ -132,7 +142,7 @@ def assert_span(
def assert_metrics(
self,
- metrics: List[Metric],
+ metrics: list[Metric],
count: int = 1,
error_type: Optional[str] = None,
) -> None:
@@ -158,19 +168,20 @@ def assert_metrics(
async def test_subscriber_create_publish_process_span(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
mock(m)
event.set()
@@ -197,14 +208,15 @@ async def handler(m):
async def test_chain_subscriber_publisher(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,))
first_queue = queue
second_queue = queue + "2"
@@ -219,7 +231,7 @@ async def handler1(m):
args2, kwargs2 = self.get_subscriber_params(second_queue)
@broker.subscriber(*args2, **kwargs2)
- async def handler2(m):
+ async def handler2(m) -> None:
mock(m)
event.set()
@@ -257,19 +269,20 @@ async def handler2(m):
async def test_no_trace_context_create_process_span(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
mock(m)
event.set()
@@ -278,7 +291,7 @@ async def handler(m):
async with broker:
await broker.start()
- broker._middlewares = ()
+ broker.middlewares = ()
tasks = (
asyncio.create_task(broker.publish(msg, queue)),
asyncio.create_task(event.wait()),
@@ -296,19 +309,20 @@ async def handler(m):
async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(meter_provider=meter_provider)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,))
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
mock(m)
event.set()
@@ -332,20 +346,21 @@ async def handler(m):
async def test_error_metrics(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(meter_provider=meter_provider)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,))
expected_value_type = "ValueError"
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
try:
raise ValueError
finally:
@@ -372,19 +387,20 @@ async def handler(m):
async def test_span_in_context(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(m, span: CurrentSpan):
+ async def handler(m, span: CurrentSpan) -> None:
assert span is get_current_span()
mock(m)
event.set()
@@ -405,18 +421,19 @@ async def handler(m, span: CurrentSpan):
async def test_get_baggage(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_baggage = {"foo": "bar"}
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler1(m, baggage: CurrentBaggage):
+ async def handler1(m, baggage: CurrentBaggage) -> None:
assert baggage.get("foo") == "bar"
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == []
@@ -432,8 +449,10 @@ async def handler1(m, baggage: CurrentBaggage):
tasks = (
asyncio.create_task(
broker.publish(
- msg, queue, headers=Baggage({"foo": "bar"}).to_headers()
- )
+ msg,
+ queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
+ ),
),
asyncio.create_task(event.wait()),
)
@@ -444,12 +463,13 @@ async def handler1(m, baggage: CurrentBaggage):
async def test_clear_baggage(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
first_queue = queue + "1"
second_queue = queue + "2"
@@ -466,7 +486,7 @@ async def handler1(m, baggage: CurrentBaggage):
args2, kwargs2 = self.get_subscriber_params(second_queue)
@broker.subscriber(*args2, **kwargs2)
- async def handler2(m, baggage: CurrentBaggage):
+ async def handler2(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == {}
mock(m)
event.set()
@@ -479,8 +499,10 @@ async def handler2(m, baggage: CurrentBaggage):
tasks = (
asyncio.create_task(
broker.publish(
- msg, first_queue, headers=Baggage({"foo": "bar"}).to_headers()
- )
+ msg,
+ first_queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
+ ),
),
asyncio.create_task(event.wait()),
)
@@ -491,12 +513,13 @@ async def handler2(m, baggage: CurrentBaggage):
async def test_modify_baggage(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_baggage = {"baz": "bar", "bar": "baz"}
first_queue = queue + "1"
@@ -515,7 +538,7 @@ async def handler1(m, baggage: CurrentBaggage):
args2, kwargs2 = self.get_subscriber_params(second_queue)
@broker.subscriber(*args2, **kwargs2)
- async def handler2(m, baggage: CurrentBaggage):
+ async def handler2(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
mock(m)
event.set()
@@ -528,8 +551,10 @@ async def handler2(m, baggage: CurrentBaggage):
tasks = (
asyncio.create_task(
broker.publish(
- msg, first_queue, headers=Baggage({"foo": "bar"}).to_headers()
- )
+ msg,
+ first_queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
+ ),
),
asyncio.create_task(event.wait()),
)
@@ -540,11 +565,12 @@ async def handler2(m, baggage: CurrentBaggage):
async def test_get_baggage_from_headers(
self,
- event: asyncio.Event,
queue: str,
):
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
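
The broader refactor in this file replaces the broker_class attribute with a get_broker() factory, keeps patch_broker() as the single hook suites override, and builds asyncio.Event() locally in each test instead of taking it as a fixture. A hedged sketch of how a concrete suite plugs in (names taken from the broker-specific hunks below):

from typing import Any

from faststream.kafka import KafkaBroker
from faststream.kafka.opentelemetry import KafkaTelemetryMiddleware
from tests.opentelemetry.basic import LocalTelemetryTestcase

class TestKafkaTelemetry(LocalTelemetryTestcase):
    messaging_system = "kafka"
    include_messages_counters = True
    telemetry_middleware_class = KafkaTelemetryMiddleware

    def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
        # each suite owns construction, so the base class can request
        # apply_types=True or extra middlewares without knowing broker details
        return KafkaBroker(apply_types=apply_types, **kwargs)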
diff --git a/tests/opentelemetry/confluent/test_confluent.py b/tests/opentelemetry/confluent/test_confluent.py
index 9eb52d9742..088e1d551c 100644
--- a/tests/opentelemetry/confluent/test_confluent.py
+++ b/tests/opentelemetry/confluent/test_confluent.py
@@ -1,5 +1,5 @@
import asyncio
-from typing import Optional
+from typing import Any, Optional
from unittest.mock import Mock
import pytest
@@ -17,17 +17,18 @@
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
from faststream.opentelemetry.middleware import MessageAction as Action
from tests.brokers.confluent.basic import ConfluentTestcaseConfig
+from tests.opentelemetry.basic import LocalTelemetryTestcase
-from ..basic import LocalTelemetryTestcase
-
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestTelemetry(ConfluentTestcaseConfig, LocalTelemetryTestcase):
messaging_system = "kafka"
include_messages_counters = True
- broker_class = KafkaBroker
telemetry_middleware_class = KafkaTelemetryMiddleware
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
+ return KafkaBroker(apply_types=apply_types, **kwargs)
+
def assert_span(
self,
span: Span,
@@ -40,12 +41,12 @@ def assert_span(
assert attrs[SpanAttr.MESSAGING_SYSTEM] == self.messaging_system
assert attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID
assert span.name == f"{self.destination_name(queue)} {action}"
- assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER)
+ assert span.kind in {SpanKind.CONSUMER, SpanKind.PRODUCER}
- if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH):
+ if span.kind == SpanKind.PRODUCER and action in {Action.CREATE, Action.PUBLISH}:
assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == queue
- if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS):
+ if span.kind == SpanKind.CONSUMER and action in {Action.CREATE, Action.PROCESS}:
assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == queue
assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsStr(regex=r"0-.+")
assert attrs[SpanAttr.MESSAGING_KAFKA_DESTINATION_PARTITION] == 0
@@ -63,48 +64,48 @@ def assert_span(
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_msg_count = 3
expected_link_count = 1
expected_link_attrs = {"messaging.batch.message_count": 3}
expected_baggage = {"with_batch": "True", "foo": "bar"}
expected_baggage_batch = [
- {"with_batch": "True", "foo": "bar"}
+ {"with_batch": "True", "foo": "bar"},
] * expected_msg_count
args, kwargs = self.get_subscriber_params(queue, batch=True)
@broker.subscriber(*args, **kwargs)
- async def handler(m, baggage: CurrentBaggage):
+ async def handler(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == expected_baggage_batch
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
tasks = (
asyncio.create_task(
- broker.publish_batch(
+ br.publish_batch(
1,
"hi",
3,
topic=queue,
headers=Baggage({"foo": "bar"}).to_headers(),
- )
+ ),
),
asyncio.create_task(event.wait()),
)
@@ -136,11 +137,12 @@ async def test_batch_publish_with_single_consume(
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
msgs_queue = asyncio.Queue(maxsize=3)
expected_msg_count = 3
expected_link_count = 1
@@ -151,17 +153,19 @@ async def test_batch_publish_with_single_consume(
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(msg, baggage: CurrentBaggage):
+ async def handler(msg, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == []
await msgs_queue.put(msg)
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
- await broker.publish_batch(
- 1, "hi", 3, topic=queue, headers=Baggage({"foo": "bar"}).to_headers()
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish_batch(
+ 1,
+ "hi",
+ 3,
+ topic=queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
)
result, _ = await asyncio.wait(
(
@@ -195,18 +199,20 @@ async def handler(msg, baggage: CurrentBaggage):
async def test_single_publish_with_batch_consume(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_msg_count = 2
expected_link_count = 2
expected_span_count = 6
@@ -216,27 +222,29 @@ async def test_single_publish_with_batch_consume(
args, kwargs = self.get_subscriber_params(queue, batch=True)
@broker.subscriber(*args, **kwargs)
- async def handler(m, baggage: CurrentBaggage):
+ async def handler(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert len(baggage.get_all_batch()) == expected_msg_count
m.sort()
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
tasks = (
asyncio.create_task(
- broker.publish(
- "hi", topic=queue, headers=Baggage({"foo": "bar"}).to_headers()
- )
+ br.publish(
+ "hi",
+ topic=queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
+ ),
),
asyncio.create_task(
- broker.publish(
- "buy", topic=queue, headers=Baggage({"bar": "baz"}).to_headers()
- )
+ br.publish(
+ "buy",
+ topic=queue,
+ headers=Baggage({"bar": "baz"}).to_headers(),
+ ),
),
asyncio.create_task(event.wait()),
)
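
Functionally, these batch tests exercise baggage propagation: the publisher serializes key/value baggage into plain message headers via Baggage(...).to_headers(), and the telemetry middleware restores it on the consumer side, where it is injected as CurrentBaggage. A condensed round-trip sketch (queue name illustrative; apply_types=True is required for the injection):

import asyncio

from faststream.kafka import KafkaBroker
from faststream.kafka.opentelemetry import KafkaTelemetryMiddleware
from faststream.opentelemetry import Baggage, CurrentBaggage

broker = KafkaBroker(middlewares=(KafkaTelemetryMiddleware(),), apply_types=True)

@broker.subscriber("baggage-demo")
async def handler(msg: str, baggage: CurrentBaggage) -> None:
    assert baggage.get("foo") == "bar"

async def main() -> None:
    async with broker:
        await broker.start()
        # to_headers() flattens the baggage so it survives the transport
        await broker.publish("hi", "baggage-demo", headers=Baggage({"foo": "bar"}).to_headers())

asyncio.run(main())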
diff --git a/tests/opentelemetry/kafka/test_kafka.py b/tests/opentelemetry/kafka/test_kafka.py
index cc38a66281..79e93b82df 100644
--- a/tests/opentelemetry/kafka/test_kafka.py
+++ b/tests/opentelemetry/kafka/test_kafka.py
@@ -1,5 +1,5 @@
import asyncio
-from typing import Optional
+from typing import Any, Optional
from unittest.mock import Mock
import pytest
@@ -18,17 +18,18 @@
from faststream.opentelemetry.middleware import MessageAction as Action
from tests.brokers.kafka.test_consume import TestConsume
from tests.brokers.kafka.test_publish import TestPublish
+from tests.opentelemetry.basic import LocalTelemetryTestcase
-from ..basic import LocalTelemetryTestcase
-
-@pytest.mark.kafka
+@pytest.mark.kafka()
class TestTelemetry(LocalTelemetryTestcase):
messaging_system = "kafka"
include_messages_counters = True
- broker_class = KafkaBroker
telemetry_middleware_class = KafkaTelemetryMiddleware
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
+ return KafkaBroker(apply_types=apply_types, **kwargs)
+
def assert_span(
self,
span: Span,
@@ -41,12 +42,12 @@ def assert_span(
assert attrs[SpanAttr.MESSAGING_SYSTEM] == self.messaging_system
assert attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID
assert span.name == f"{self.destination_name(queue)} {action}"
- assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER)
+ assert span.kind in {SpanKind.CONSUMER, SpanKind.PRODUCER}
- if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH):
+ if span.kind == SpanKind.PRODUCER and action in {Action.CREATE, Action.PUBLISH}:
assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == queue
- if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS):
+ if span.kind == SpanKind.CONSUMER and action in {Action.CREATE, Action.PROCESS}:
assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == queue
assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsStr(regex=r"0-.+")
assert attrs[SpanAttr.MESSAGING_KAFKA_DESTINATION_PARTITION] == 0
@@ -64,48 +65,48 @@ def assert_span(
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_msg_count = 3
expected_link_count = 1
expected_link_attrs = {"messaging.batch.message_count": 3}
expected_baggage = {"with_batch": "True", "foo": "bar"}
expected_baggage_batch = [
- {"with_batch": "True", "foo": "bar"}
+ {"with_batch": "True", "foo": "bar"},
] * expected_msg_count
args, kwargs = self.get_subscriber_params(queue, batch=True)
@broker.subscriber(*args, **kwargs)
- async def handler(m, baggage: CurrentBaggage):
+ async def handler(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == expected_baggage_batch
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
tasks = (
asyncio.create_task(
- broker.publish_batch(
+ br.publish_batch(
1,
"hi",
3,
topic=queue,
headers=Baggage({"foo": "bar"}).to_headers(),
- )
+ ),
),
asyncio.create_task(event.wait()),
)
@@ -137,11 +138,12 @@ async def test_batch_publish_with_single_consume(
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
msgs_queue = asyncio.Queue(maxsize=3)
expected_msg_count = 3
expected_link_count = 1
@@ -152,17 +154,19 @@ async def test_batch_publish_with_single_consume(
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def handler(msg, baggage: CurrentBaggage):
+ async def handler(msg, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == []
await msgs_queue.put(msg)
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
- await broker.publish_batch(
- 1, "hi", 3, topic=queue, headers=Baggage({"foo": "bar"}).to_headers()
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish_batch(
+ 1,
+ "hi",
+ 3,
+ topic=queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
)
result, _ = await asyncio.wait(
(
@@ -196,18 +200,20 @@ async def handler(msg, baggage: CurrentBaggage):
async def test_single_publish_with_batch_consume(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_msg_count = 2
expected_link_count = 2
expected_span_count = 6
@@ -217,27 +223,29 @@ async def test_single_publish_with_batch_consume(
args, kwargs = self.get_subscriber_params(queue, batch=True)
@broker.subscriber(*args, **kwargs)
- async def handler(m, baggage: CurrentBaggage):
+ async def handler(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert len(baggage.get_all_batch()) == expected_msg_count
m.sort()
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
tasks = (
asyncio.create_task(
- broker.publish(
- "hi", topic=queue, headers=Baggage({"foo": "bar"}).to_headers()
- )
+ br.publish(
+ "hi",
+ topic=queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
+ ),
),
asyncio.create_task(
- broker.publish(
- "buy", topic=queue, headers=Baggage({"bar": "baz"}).to_headers()
- )
+ br.publish(
+ "buy",
+ topic=queue,
+ headers=Baggage({"bar": "baz"}).to_headers(),
+ ),
),
asyncio.create_task(event.wait()),
)
@@ -259,9 +267,9 @@ async def handler(m, baggage: CurrentBaggage):
mock.assert_called_once_with(["buy", "hi"])
-@pytest.mark.kafka
+@pytest.mark.kafka()
class TestPublishWithTelemetry(TestPublish):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
return KafkaBroker(
middlewares=(KafkaTelemetryMiddleware(),),
apply_types=apply_types,
@@ -269,9 +277,9 @@ def get_broker(self, apply_types: bool = False, **kwargs):
)
-@pytest.mark.kafka
+@pytest.mark.kafka()
class TestConsumeWithTelemetry(TestConsume):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
return KafkaBroker(
middlewares=(KafkaTelemetryMiddleware(),),
apply_types=apply_types,
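
All of these suites share the same capture harness: telemetry stays in memory instead of being exported, and finished spans are sorted by start time, with the "or 0" guard covering spans whose start_time is still None. Reduced to its core:

from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

provider = TracerProvider(resource=Resource.create({"service.name": "faststream.test"}))
exporter = InMemorySpanExporter()
provider.add_span_processor(SimpleSpanProcessor(exporter))

# ... run broker code with tracer_provider=provider ...

spans = sorted(exporter.get_finished_spans(), key=lambda s: s.start_time or 0)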
diff --git a/tests/opentelemetry/nats/test_nats.py b/tests/opentelemetry/nats/test_nats.py
index d1262c7e6a..efa5153a50 100644
--- a/tests/opentelemetry/nats/test_nats.py
+++ b/tests/opentelemetry/nats/test_nats.py
@@ -1,4 +1,5 @@
import asyncio
+from typing import Any
from unittest.mock import Mock
import pytest
@@ -12,25 +13,25 @@
from faststream.nats.opentelemetry import NatsTelemetryMiddleware
from tests.brokers.nats.test_consume import TestConsume
from tests.brokers.nats.test_publish import TestPublish
+from tests.opentelemetry.basic import LocalTelemetryTestcase
-from ..basic import LocalTelemetryTestcase
-
-@pytest.fixture
+@pytest.fixture()
def stream(queue):
return JStream(queue)
-@pytest.mark.nats
+@pytest.mark.nats()
class TestTelemetry(LocalTelemetryTestcase):
messaging_system = "nats"
include_messages_counters = True
- broker_class = NatsBroker
telemetry_middleware_class = NatsTelemetryMiddleware
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> NatsBroker:
+ return NatsBroker(apply_types=apply_types, **kwargs)
+
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
stream: JStream,
@@ -38,11 +39,14 @@ async def test_batch(
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,))
expected_msg_count = 1
expected_span_count = 4
expected_proc_batch_count = 1
@@ -54,16 +58,14 @@ async def test_batch(
)
@broker.subscriber(*args, **kwargs)
- async def handler(m):
+ async def handler(m) -> None:
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
tasks = (
- asyncio.create_task(broker.publish("hi", queue)),
+ asyncio.create_task(br.publish("hi", queue)),
asyncio.create_task(event.wait()),
)
await asyncio.wait(tasks, timeout=self.timeout)
@@ -89,19 +91,21 @@ async def handler(m):
mock.assert_called_once_with(["hi"])
-@pytest.mark.nats
+@pytest.mark.nats()
class TestPublishWithTelemetry(TestPublish):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> NatsBroker:
return NatsBroker(
middlewares=(NatsTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
-@pytest.mark.nats
+@pytest.mark.nats()
class TestConsumeWithTelemetry(TestConsume):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> NatsBroker:
return NatsBroker(
middlewares=(NatsTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
diff --git a/tests/opentelemetry/rabbit/test_rabbit.py b/tests/opentelemetry/rabbit/test_rabbit.py
index 59d77c3b70..b921f45735 100644
--- a/tests/opentelemetry/rabbit/test_rabbit.py
+++ b/tests/opentelemetry/rabbit/test_rabbit.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Any, Optional
import pytest
from dirty_equals import IsInt, IsUUID
@@ -12,22 +12,23 @@
from faststream.rabbit.opentelemetry import RabbitTelemetryMiddleware
from tests.brokers.rabbit.test_consume import TestConsume
from tests.brokers.rabbit.test_publish import TestPublish
+from tests.opentelemetry.basic import LocalTelemetryTestcase
-from ..basic import LocalTelemetryTestcase
-
-@pytest.fixture
+@pytest.fixture()
def exchange(queue):
return RabbitExchange(name=queue)
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
class TestTelemetry(LocalTelemetryTestcase):
messaging_system = "rabbitmq"
include_messages_counters = False
- broker_class = RabbitBroker
telemetry_middleware_class = RabbitTelemetryMiddleware
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
+ return RabbitBroker(apply_types=apply_types, **kwargs)
+
def destination_name(self, queue: str) -> str:
return f"default.{queue}"
@@ -44,12 +45,12 @@ def assert_span(
assert attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID
assert attrs[SpanAttr.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY] == queue
assert span.name == f"{self.destination_name(queue)} {action}"
- assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER)
+ assert span.kind in {SpanKind.CONSUMER, SpanKind.PRODUCER}
- if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH):
+ if span.kind == SpanKind.PRODUCER and action in {Action.CREATE, Action.PUBLISH}:
assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == ""
- if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS):
+ if span.kind == SpanKind.CONSUMER and action in {Action.CREATE, Action.PROCESS}:
assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == ""
assert attrs["messaging.rabbitmq.message.delivery_tag"] == IsInt
assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsUUID
@@ -65,19 +66,21 @@ def assert_span(
assert span.parent.span_id == parent_span_id
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
class TestPublishWithTelemetry(TestPublish):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
return RabbitBroker(
middlewares=(RabbitTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
class TestConsumeWithTelemetry(TestConsume):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
return RabbitBroker(
middlewares=(RabbitTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
diff --git a/tests/opentelemetry/redis/test_redis.py b/tests/opentelemetry/redis/test_redis.py
index 9729b0a27f..bdfc49ceb1 100644
--- a/tests/opentelemetry/redis/test_redis.py
+++ b/tests/opentelemetry/redis/test_redis.py
@@ -1,4 +1,5 @@
import asyncio
+from typing import Any
from unittest.mock import Mock
import pytest
@@ -17,31 +18,34 @@
TestConsumeStream,
)
from tests.brokers.redis.test_publish import TestPublish
+from tests.opentelemetry.basic import LocalTelemetryTestcase
-from ..basic import LocalTelemetryTestcase
-
-@pytest.mark.redis
+@pytest.mark.redis()
class TestTelemetry(LocalTelemetryTestcase):
messaging_system = "redis"
include_messages_counters = True
- broker_class = RedisBroker
telemetry_middleware_class = RedisTelemetryMiddleware
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
+ return RedisBroker(apply_types=apply_types, **kwargs)
+
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_msg_count = 3
expected_link_count = 1
expected_link_attrs = {"messaging.batch.message_count": 3}
@@ -51,18 +55,16 @@ async def test_batch(
args, kwargs = self.get_subscriber_params(list=ListSub(queue, batch=True))
@broker.subscriber(*args, **kwargs)
- async def handler(m, baggage: CurrentBaggage):
+ async def handler(m, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == expected_baggage_batch
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
+ async with self.patch_broker(broker) as br:
+ await br.start()
tasks = (
- asyncio.create_task(broker.publish_batch(1, "hi", 3, list=queue)),
+ asyncio.create_task(br.publish_batch(1, "hi", 3, list=queue)),
asyncio.create_task(event.wait()),
)
await asyncio.wait(tasks, timeout=self.timeout)
@@ -93,11 +95,12 @@ async def test_batch_publish_with_single_consume(
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
msgs_queue = asyncio.Queue(maxsize=3)
expected_msg_count = 3
expected_link_count = 1
@@ -109,16 +112,14 @@ async def test_batch_publish_with_single_consume(
args, kwargs = self.get_subscriber_params(list=ListSub(queue))
@broker.subscriber(*args, **kwargs)
- async def handler(msg, baggage: CurrentBaggage):
+ async def handler(msg, baggage: CurrentBaggage) -> None:
assert baggage.get_all() == expected_baggage
assert baggage.get_all_batch() == expected_baggage_batch
await msgs_queue.put(msg)
- broker = self.patch_broker(broker)
-
- async with broker:
- await broker.start()
- await broker.publish_batch(1, "hi", 3, list=queue)
+ async with self.patch_broker(broker) as br:
+ await br.start()
+ await br.publish_batch(1, "hi", 3, list=queue)
result, _ = await asyncio.wait(
(
asyncio.create_task(msgs_queue.get()),
@@ -151,18 +152,20 @@ async def handler(msg, baggage: CurrentBaggage):
async def test_single_publish_with_batch_consume(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
- ):
+ ) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
- meter_provider=meter_provider, tracer_provider=tracer_provider
+ meter_provider=meter_provider,
+ tracer_provider=tracer_provider,
)
- broker = self.broker_class(middlewares=(mid,))
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_msg_count = 2
expected_link_count = 2
expected_span_count = 6
@@ -172,32 +175,35 @@ async def test_single_publish_with_batch_consume(
args, kwargs = self.get_subscriber_params(list=ListSub(queue, batch=True))
@broker.subscriber(*args, **kwargs)
- async def handler(m, baggage: CurrentBaggage):
+ async def handler(m, baggage: CurrentBaggage) -> None:
assert len(baggage.get_all_batch()) == expected_msg_count
assert baggage.get_all() == expected_baggage
m.sort()
mock(m)
event.set()
- broker = self.patch_broker(broker)
-
- async with broker:
+ async with self.patch_broker(broker) as br:
tasks = (
asyncio.create_task(
- broker.publish(
- "hi", list=queue, headers=Baggage({"foo": "bar"}).to_headers()
- )
+ br.publish(
+ "hi",
+ list=queue,
+ headers=Baggage({"foo": "bar"}).to_headers(),
+ ),
),
asyncio.create_task(
- broker.publish(
- "buy", list=queue, headers=Baggage({"bar": "baz"}).to_headers()
- )
+ br.publish(
+ "buy",
+ list=queue,
+ headers=Baggage({"bar": "baz"}).to_headers(),
+ ),
),
)
await asyncio.wait(tasks, timeout=self.timeout)
await broker.start()
await asyncio.wait(
- (asyncio.create_task(event.wait()),), timeout=self.timeout
+ (asyncio.create_task(event.wait()),),
+ timeout=self.timeout,
)
metrics = self.get_metrics(metric_reader)
@@ -216,37 +222,41 @@ async def handler(m, baggage: CurrentBaggage):
mock.assert_called_once_with(["buy", "hi"])
-@pytest.mark.redis
+@pytest.mark.redis()
class TestPublishWithTelemetry(TestPublish):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
return RedisBroker(
middlewares=(RedisTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
-@pytest.mark.redis
+@pytest.mark.redis()
class TestConsumeWithTelemetry(TestConsume):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
return RedisBroker(
middlewares=(RedisTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
-@pytest.mark.redis
+@pytest.mark.redis()
class TestConsumeListWithTelemetry(TestConsumeList):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
return RedisBroker(
middlewares=(RedisTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
-@pytest.mark.redis
+@pytest.mark.redis()
class TestConsumeStreamWithTelemetry(TestConsumeStream):
- def get_broker(self, apply_types: bool = False):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
return RedisBroker(
middlewares=(RedisTelemetryMiddleware(),),
apply_types=apply_types,
+ **kwargs,
)
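
The Prometheus suite below applies the same playbook and goes further: instead of patching in a mocked metrics manager, it wires a real CollectorRegistry into the middleware and asserts on the samples that were actually collected (via the helpers imported from tests.prometheus.utils). Reading a counter back out of a registry reduces to the following (metric and label names here are illustrative, not the middleware's real ones):

from prometheus_client import CollectorRegistry, Counter

registry = CollectorRegistry()
received = Counter(
    "received_messages",
    "Count of received messages",
    labelnames=("broker", "handler"),
    registry=registry,
)
received.labels(broker="kafka", handler="queue").inc(3)

# assertions read collected data back with get_sample_value; Counter samples
# carry the auto-appended "_total" suffix
value = registry.get_sample_value(
    "received_messages_total",
    labels={"broker": "kafka", "handler": "queue"},
)
assert value == 3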
diff --git a/tests/prometheus/basic.py b/tests/prometheus/basic.py
index 9f80a86b01..a48d54906f 100644
--- a/tests/prometheus/basic.py
+++ b/tests/prometheus/basic.py
@@ -1,76 +1,89 @@
import asyncio
-from typing import Any, Optional, Type
-from unittest.mock import ANY, Mock, call
+from typing import Any, Optional, cast
import pytest
+from dirty_equals import IsList, IsPositiveFloat, IsStr
from prometheus_client import CollectorRegistry
from faststream import Context
-from faststream.broker.message import AckStatus
from faststream.exceptions import IgnoredException, RejectMessage
+from faststream.message import AckStatus
+from faststream.prometheus import MetricsSettingsProvider
from faststream.prometheus.middleware import (
PROCESSING_STATUS_BY_ACK_STATUS,
PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP,
BasePrometheusMiddleware,
)
-from faststream.prometheus.types import ProcessingStatus
+from faststream.prometheus.types import ProcessingStatus, PublishingStatus
from tests.brokers.base.basic import BaseTestcaseConfig
+from tests.prometheus.utils import (
+ get_published_messages_duration_seconds_metric,
+ get_published_messages_exceptions_metric,
+ get_published_messages_metric,
+ get_received_messages_in_process_metric,
+ get_received_messages_metric,
+ get_received_messages_size_bytes_metric,
+ get_received_processed_messages_duration_seconds_metric,
+ get_received_processed_messages_exceptions_metric,
+ get_received_processed_messages_metric,
+)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
class LocalPrometheusTestcase(BaseTestcaseConfig):
- def get_broker(self, apply_types=False, **kwargs):
+ def get_middleware(self, **kwargs: Any) -> BasePrometheusMiddleware:
raise NotImplementedError
- def get_middleware(self, **kwargs) -> BasePrometheusMiddleware:
+ def get_settings_provider(self) -> MetricsSettingsProvider[Any]:
raise NotImplementedError
- @staticmethod
- def consume_destination_name(queue: str) -> str:
- return queue
-
- @property
- def settings_provider_factory(self):
- return self.get_middleware(
- registry=CollectorRegistry()
- )._settings_provider_factory
-
@pytest.mark.parametrize(
(
"status",
"exception_class",
),
- [
+ (
pytest.param(
- AckStatus.acked,
+ AckStatus.ACKED,
RejectMessage,
id="acked status with reject message exception",
),
pytest.param(
- AckStatus.acked, Exception, id="acked status with not handler exception"
+ AckStatus.ACKED,
+ Exception,
+ id="acked status with not handler exception",
+ ),
+ pytest.param(
+ AckStatus.ACKED,
+ None,
+ id="acked status without exception",
+ ),
+ pytest.param(
+ AckStatus.NACKED,
+ None,
+ id="nacked status without exception",
),
- pytest.param(AckStatus.acked, None, id="acked status without exception"),
- pytest.param(AckStatus.nacked, None, id="nacked status without exception"),
pytest.param(
- AckStatus.rejected, None, id="rejected status without exception"
+ AckStatus.REJECTED,
+ None,
+ id="rejected status without exception",
),
pytest.param(
- AckStatus.acked,
+ AckStatus.ACKED,
IgnoredException,
id="acked status with ignore exception",
),
- ],
+ ),
)
async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
status: AckStatus,
- exception_class: Optional[Type[Exception]],
- ):
- middleware = self.get_middleware(registry=CollectorRegistry())
- metrics_manager_mock = Mock()
- middleware._metrics_manager = metrics_manager_mock
+ exception_class: Optional[type[Exception]],
+ ) -> None:
+ event = asyncio.Event()
+ registry = CollectorRegistry()
+ middleware = self.get_middleware(registry=registry)
broker = self.get_broker(apply_types=True, middlewares=(middleware,))
@@ -79,7 +92,7 @@ async def test_metrics(
message = None
@broker.subscriber(*args, **kwargs)
- async def handler(m=Context("message")):
+ async def handler(m=Context("message")) -> None:
event.set()
nonlocal message
@@ -88,11 +101,11 @@ async def handler(m=Context("message")):
if exception_class:
raise exception_class
- if status == AckStatus.acked:
+ if status == AckStatus.ACKED:
await message.ack()
- elif status == AckStatus.nacked:
+ elif status == AckStatus.NACKED:
await message.nack()
- elif status == AckStatus.rejected:
+ elif status == AckStatus.REJECTED:
await message.reject()
async with broker:
@@ -104,62 +117,69 @@ async def handler(m=Context("message")):
await asyncio.wait(tasks, timeout=self.timeout)
assert event.is_set()
- self.assert_consume_metrics(
- metrics_manager=metrics_manager_mock,
+ self.assert_metrics(
+ registry=registry,
message=message,
exception_class=exception_class,
)
- self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
- def assert_consume_metrics(
+ def assert_metrics(
self,
*,
- metrics_manager: Any,
+ registry: CollectorRegistry,
message: Any,
- exception_class: Optional[Type[Exception]],
- ):
- settings_provider = self.settings_provider_factory(message.raw_message)
+ exception_class: Optional[type[Exception]],
+ ) -> None:
+ settings_provider = self.get_settings_provider()
consume_attrs = settings_provider.get_consume_attrs_from_message(message)
- assert metrics_manager.add_received_message.mock_calls == [
- call(
- amount=consume_attrs["messages_count"],
- broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
- ),
- ]
- assert metrics_manager.observe_received_messages_size.mock_calls == [
- call(
- size=consume_attrs["message_size"],
- broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
- ),
- ]
+ received_messages_metric = get_received_messages_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=consume_attrs["destination_name"],
+ messages_amount=consume_attrs["messages_count"],
+ )
- assert metrics_manager.add_received_message_in_process.mock_calls == [
- call(
- amount=consume_attrs["messages_count"],
- broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
+ received_messages_size_bytes_metric = get_received_messages_size_bytes_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=consume_attrs["destination_name"],
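+ # Expected to match the middleware's default size buckets (an assumption
+ # of this test): powers of two from 16 B (2**4) to 16 MiB (2**24), plus +Inf.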
+ buckets=(
+ 2.0**4,
+ 2.0**6,
+ 2.0**8,
+ 2.0**10,
+ 2.0**12,
+ 2.0**14,
+ 2.0**16,
+ 2.0**18,
+ 2.0**20,
+ 2.0**22,
+ 2.0**24,
+ float("inf"),
),
- ]
- assert metrics_manager.remove_received_message_in_process.mock_calls == [
- call(
- amount=consume_attrs["messages_count"],
+ size=consume_attrs["message_size"],
+ messages_amount=1,
+ )
+
+ received_messages_in_process_metric = get_received_messages_in_process_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=consume_attrs["destination_name"],
+ messages_amount=0,
+ )
+
+ received_processed_messages_duration_seconds_metric = (
+ get_received_processed_messages_duration_seconds_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
+ queue=consume_attrs["destination_name"],
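+ # IsPositiveFloat compares equal to any positive duration;
+ # cast() only narrows the type for static checkers.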
+ duration=cast("float", IsPositiveFloat),
)
- ]
-
- assert (
- metrics_manager.observe_received_processed_message_duration.mock_calls
- == [
- call(
- duration=ANY,
- broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
- ),
- ]
)
status = ProcessingStatus.acked
@@ -172,51 +192,131 @@ def assert_consume_metrics(
elif message.committed:
status = PROCESSING_STATUS_BY_ACK_STATUS[message.committed]
- assert metrics_manager.add_received_processed_message.mock_calls == [
- call(
- amount=consume_attrs["messages_count"],
- broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
- status=status.value,
- ),
- ]
+ received_processed_messages_metric = get_received_processed_messages_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=consume_attrs["destination_name"],
+ messages_amount=consume_attrs["messages_count"],
+ status=status,
+ )
+
+ exception_type: Optional[str] = None
if exception_class and not issubclass(exception_class, IgnoredException):
- assert (
- metrics_manager.add_received_processed_message_exception.mock_calls
- == [
- call(
- broker=settings_provider.messaging_system,
- handler=consume_attrs["destination_name"],
- exception_type=exception_class.__name__,
- ),
- ]
- )
- else:
- assert (
- metrics_manager.add_received_processed_message_exception.mock_calls
- == []
+ exception_type = exception_class.__name__
+
+ received_processed_messages_exceptions_metric = (
+ get_received_processed_messages_exceptions_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=consume_attrs["destination_name"],
+ exception_type=exception_type,
+ exceptions_amount=consume_attrs["messages_count"],
)
+ )
- def assert_publish_metrics(self, metrics_manager: Any):
- settings_provider = self.settings_provider_factory(None)
- assert metrics_manager.observe_published_message_duration.mock_calls == [
- call(
- duration=ANY, broker=settings_provider.messaging_system, destination=ANY
- ),
- ]
- assert metrics_manager.add_published_message.mock_calls == [
- call(
- amount=ANY,
+ published_messages_metric = get_published_messages_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=cast("str", IsStr),
+ status=PublishingStatus.success,
+ messages_amount=consume_attrs["messages_count"],
+ )
+
+ published_messages_duration_seconds_metric = (
+ get_published_messages_duration_seconds_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
broker=settings_provider.messaging_system,
- destination=ANY,
- status="success",
- ),
- ]
+ queue=cast("str", IsStr),
+ duration=cast("float", IsPositiveFloat),
+ )
+ )
+
+ published_messages_exceptions_metric = get_published_messages_exceptions_metric(
+ metrics_prefix="faststream",
+ app_name="faststream",
+ broker=settings_provider.messaging_system,
+ queue=cast("str", IsStr),
+ exception_type=None,
+ )
+
+ expected_metrics = IsList(
+ received_messages_metric,
+ received_messages_size_bytes_metric,
+ received_messages_in_process_metric,
+ received_processed_messages_metric,
+ received_processed_messages_duration_seconds_metric,
+ received_processed_messages_exceptions_metric,
+ published_messages_metric,
+ published_messages_duration_seconds_metric,
+ published_messages_exceptions_metric,
+ check_order=False,
+ )
+ real_metrics = list(registry.collect())
- async def test_one_registry_for_some_middlewares(
- self, event: asyncio.Event, queue: str
+ assert real_metrics == expected_metrics
+
+
+class LocalRPCPrometheusTestcase:
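+ # Checks that the same metrics are recorded when messages flow through
+ # the RPC (request/response) path instead of plain publish/consume.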
+ @pytest.mark.asyncio()
+ async def test_rpc_request(
+ self,
+ queue: str,
) -> None:
+ event = asyncio.Event()
+ registry = CollectorRegistry()
+
+ middleware = self.get_middleware(registry=registry)
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ message = None
+
+ @broker.subscriber(queue)
+ async def handle(m=Context("message")) -> str:
+ event.set()
+
+ nonlocal message
+ message = m
+
+ return ""
+
+ async with self.patch_broker(broker) as br:
+ await br.start()
+
+ await asyncio.wait_for(
+ br.request("", queue),
+ timeout=3,
+ )
+
+ assert event.is_set()
+
+ self.assert_metrics(
+ registry=registry,
+ message=message,
+ exception_class=None,
+ )
+
+
+class LocalMetricsSettingsProviderTestcase:
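+ # Broker-specific settings mixins define messaging_system and override
+ # the two factory methods below.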
+ messaging_system: str
+
+ def get_middleware(self, **kwargs: Any) -> BasePrometheusMiddleware:
+ raise NotImplementedError
+
+ def get_settings_provider(self) -> MetricsSettingsProvider[Any]:
+ raise NotImplementedError
+
+ def test_messaging_system(self) -> None:
+ provider = self.get_settings_provider()
+ assert provider.messaging_system == self.messaging_system
+
+ def test_one_registry_for_some_middlewares(self) -> None:
registry = CollectorRegistry()
middleware_1 = self.get_middleware(registry=registry)
diff --git a/tests/prometheus/confluent/basic.py b/tests/prometheus/confluent/basic.py
new file mode 100644
index 0000000000..0852877951
--- /dev/null
+++ b/tests/prometheus/confluent/basic.py
@@ -0,0 +1,42 @@
+from typing import Any
+
+from faststream import AckPolicy
+from faststream.confluent.prometheus import KafkaPrometheusMiddleware
+from faststream.confluent.prometheus.provider import (
+ BatchConfluentMetricsSettingsProvider,
+ ConfluentMetricsSettingsProvider,
+)
+from tests.brokers.confluent.basic import ConfluentTestcaseConfig
+
+
+class BaseConfluentPrometheusSettings(ConfluentTestcaseConfig):
+ messaging_system = "kafka"
+
+ def get_middleware(self, **kwargs: Any) -> KafkaPrometheusMiddleware:
+ return KafkaPrometheusMiddleware(**kwargs)
+
+ def get_subscriber_params(
+ self,
+ *topics: Any,
+ **kwargs: Any,
+ ) -> tuple[
+ tuple[Any, ...],
+ dict[str, Any],
+ ]:
+ topics, kwargs = super().get_subscriber_params(*topics, **kwargs)
+
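+ # Every test subscriber joins a single consumer group and rejects
+ # messages on handler errors, keeping ack status deterministic.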
+ return topics, {
+ "group_id": "test",
+ "ack_policy": AckPolicy.REJECT_ON_ERROR,
+ **kwargs,
+ }
+
+
+class ConfluentPrometheusSettings(BaseConfluentPrometheusSettings):
+ def get_settings_provider(self) -> ConfluentMetricsSettingsProvider:
+ return ConfluentMetricsSettingsProvider()
+
+
+class BatchConfluentPrometheusSettings(BaseConfluentPrometheusSettings):
+ def get_settings_provider(self) -> BatchConfluentMetricsSettingsProvider:
+ return BatchConfluentMetricsSettingsProvider()
diff --git a/tests/prometheus/confluent/test_confluent.py b/tests/prometheus/confluent/test_confluent.py
index d1e3034ad6..6a5dc897c5 100644
--- a/tests/prometheus/confluent/test_confluent.py
+++ b/tests/prometheus/confluent/test_confluent.py
@@ -1,5 +1,5 @@
import asyncio
-from unittest.mock import Mock
+from typing import Any
import pytest
from prometheus_client import CollectorRegistry
@@ -7,28 +7,23 @@
from faststream import Context
from faststream.confluent import KafkaBroker
from faststream.confluent.prometheus.middleware import KafkaPrometheusMiddleware
-from tests.brokers.confluent.basic import ConfluentTestcaseConfig
from tests.brokers.confluent.test_consume import TestConsume
from tests.brokers.confluent.test_publish import TestPublish
from tests.prometheus.basic import LocalPrometheusTestcase
+from .basic import BatchConfluentPrometheusSettings, ConfluentPrometheusSettings
-@pytest.mark.confluent
-class TestPrometheus(ConfluentTestcaseConfig, LocalPrometheusTestcase):
- def get_broker(self, apply_types=False, **kwargs):
- return KafkaBroker(apply_types=apply_types, **kwargs)
- def get_middleware(self, **kwargs):
- return KafkaPrometheusMiddleware(**kwargs)
-
- async def test_metrics_batch(
+@pytest.mark.confluent()
+class TestBatchPrometheus(BatchConfluentPrometheusSettings, LocalPrometheusTestcase):
+ async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
- ):
+ ) -> None:
- middleware = self.get_middleware(registry=CollectorRegistry())
- metrics_manager_mock = Mock()
- middleware._metrics_manager = metrics_manager_mock
+ event = asyncio.Event()
+
+ registry = CollectorRegistry()
+ middleware = self.get_middleware(registry=registry)
broker = self.get_broker(apply_types=True, middlewares=(middleware,))
@@ -53,15 +48,21 @@ async def handler(m=Context("message")):
await asyncio.wait(tasks, timeout=self.timeout)
assert event.is_set()
- self.assert_consume_metrics(
- metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ self.assert_metrics(
+ registry=registry,
+ message=message,
+ exception_class=None,
)
- self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
-@pytest.mark.confluent
+@pytest.mark.confluent()
+class TestPrometheus(ConfluentPrometheusSettings, LocalPrometheusTestcase): ...
+
+
+@pytest.mark.confluent()
class TestPublishWithPrometheus(TestPublish):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
return KafkaBroker(
middlewares=(KafkaPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
@@ -69,9 +70,9 @@ def get_broker(self, apply_types: bool = False, **kwargs):
)
-@pytest.mark.confluent
+@pytest.mark.confluent()
class TestConsumeWithPrometheus(TestConsume):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
return KafkaBroker(
middlewares=(KafkaPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
diff --git a/tests/prometheus/confluent/test_provider.py b/tests/prometheus/confluent/test_provider.py
new file mode 100644
index 0000000000..a1099c3886
--- /dev/null
+++ b/tests/prometheus/confluent/test_provider.py
@@ -0,0 +1,97 @@
+import random
+from types import SimpleNamespace
+
+import pytest
+
+from faststream.confluent.prometheus.provider import (
+ BatchConfluentMetricsSettingsProvider,
+ ConfluentMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+from .basic import BatchConfluentPrometheusSettings, ConfluentPrometheusSettings
+
+
+class LocalBaseConfluentMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase,
+):
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ provider = self.get_settings_provider()
+ command = SimpleNamespace(destination=queue)
+
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestConfluentMetricsSettingsProvider(
+ ConfluentPrometheusSettings, LocalBaseConfluentMetricsSettingsProviderTestcase
+):
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue,
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+
+ message = SimpleNamespace(
+ body=body, raw_message=SimpleNamespace(topic=lambda: queue)
+ )
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchConfluentMetricsSettingsProvider(
+ BatchConfluentPrometheusSettings, LocalBaseConfluentMetricsSettingsProviderTestcase
+):
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = [b"Hi ", b"again, ", b"FastStream!"]
+ message = SimpleNamespace(
+ body=body,
+ raw_message=[
+ SimpleNamespace(topic=lambda: queue)
+ for _ in range(random.randint(a=2, b=10))
+ ],
+ )
+ expected_attrs = {
+ "destination_name": message.raw_message[0].topic(),
+ "message_size": len(bytearray().join(body)),
+ "messages_count": len(message.raw_message),
+ }
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
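+# settings_provider_factory picks the batch provider for sequences of raw
+# messages and the default single-message provider otherwise (including None).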
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ (SimpleNamespace(), SimpleNamespace()),
+ BatchConfluentMetricsSettingsProvider(),
+ id="batch message",
+ ),
+ pytest.param(
+ SimpleNamespace(),
+ ConfluentMetricsSettingsProvider(),
+ id="single message",
+ ),
+ pytest.param(
+ None,
+ ConfluentMetricsSettingsProvider(),
+ id="None message",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/kafka/basic.py b/tests/prometheus/kafka/basic.py
new file mode 100644
index 0000000000..b4b2661037
--- /dev/null
+++ b/tests/prometheus/kafka/basic.py
@@ -0,0 +1,41 @@
+from typing import Any
+
+from faststream import AckPolicy
+from faststream.kafka.prometheus import KafkaPrometheusMiddleware
+from faststream.kafka.prometheus.provider import (
+ BatchKafkaMetricsSettingsProvider,
+ KafkaMetricsSettingsProvider,
+)
+from tests.brokers.kafka.basic import KafkaTestcaseConfig
+
+
+class BaseKafkaPrometheusSettings(KafkaTestcaseConfig):
+ messaging_system = "kafka"
+
+ def get_middleware(self, **kwargs: Any) -> KafkaPrometheusMiddleware:
+ return KafkaPrometheusMiddleware(**kwargs)
+
+ def get_subscriber_params(
+ self,
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[
+ tuple[Any, ...],
+ dict[str, Any],
+ ]:
+ args, kwargs = super().get_subscriber_params(*args, **kwargs)
+ return args, {
+ "group_id": "test",
+ "ack_policy": AckPolicy.REJECT_ON_ERROR,
+ **kwargs,
+ }
+
+
+class KafkaPrometheusSettings(BaseKafkaPrometheusSettings):
+ def get_settings_provider(self) -> KafkaMetricsSettingsProvider:
+ return KafkaMetricsSettingsProvider()
+
+
+class BatchKafkaPrometheusSettings(BaseKafkaPrometheusSettings):
+ def get_settings_provider(self) -> BatchKafkaMetricsSettingsProvider:
+ return BatchKafkaMetricsSettingsProvider()
diff --git a/tests/prometheus/kafka/test_kafka.py b/tests/prometheus/kafka/test_kafka.py
index abb5c86b3f..e2e0580b52 100644
--- a/tests/prometheus/kafka/test_kafka.py
+++ b/tests/prometheus/kafka/test_kafka.py
@@ -1,5 +1,4 @@
import asyncio
-from unittest.mock import Mock
import pytest
from prometheus_client import CollectorRegistry
@@ -11,23 +10,19 @@
from tests.brokers.kafka.test_publish import TestPublish
from tests.prometheus.basic import LocalPrometheusTestcase
+from .basic import BatchKafkaPrometheusSettings, KafkaPrometheusSettings
-@pytest.mark.kafka
-class TestPrometheus(LocalPrometheusTestcase):
- def get_broker(self, apply_types=False, **kwargs):
- return KafkaBroker(apply_types=apply_types, **kwargs)
- def get_middleware(self, **kwargs):
- return KafkaPrometheusMiddleware(**kwargs)
-
- async def test_metrics_batch(
+@pytest.mark.kafka()
+class TestBatchPrometheus(BatchKafkaPrometheusSettings, LocalPrometheusTestcase):
+ async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
- ):
+ ) -> None:
- middleware = self.get_middleware(registry=CollectorRegistry())
- metrics_manager_mock = Mock()
- middleware._metrics_manager = metrics_manager_mock
+ event = asyncio.Event()
+
+ registry = CollectorRegistry()
+ middleware = self.get_middleware(registry=registry)
broker = self.get_broker(apply_types=True, middlewares=(middleware,))
@@ -52,13 +47,18 @@ async def handler(m=Context("message")):
await asyncio.wait(tasks, timeout=self.timeout)
assert event.is_set()
- self.assert_consume_metrics(
- metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ self.assert_metrics(
+ registry=registry,
+ message=message,
+ exception_class=None,
)
- self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
-@pytest.mark.kafka
+@pytest.mark.kafka()
+class TestPrometheus(KafkaPrometheusSettings, LocalPrometheusTestcase): ...
+
+
+@pytest.mark.kafka()
class TestPublishWithPrometheus(TestPublish):
def get_broker(
self,
@@ -72,7 +72,7 @@ def get_broker(
)
-@pytest.mark.kafka
+@pytest.mark.kafka()
class TestConsumeWithPrometheus(TestConsume):
def get_broker(self, apply_types: bool = False, **kwargs):
return KafkaBroker(
diff --git a/tests/prometheus/kafka/test_provider.py b/tests/prometheus/kafka/test_provider.py
new file mode 100644
index 0000000000..e046737f85
--- /dev/null
+++ b/tests/prometheus/kafka/test_provider.py
@@ -0,0 +1,96 @@
+import random
+from types import SimpleNamespace
+
+import pytest
+
+from faststream.kafka.prometheus.provider import (
+ BatchKafkaMetricsSettingsProvider,
+ KafkaMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+from .basic import BatchKafkaPrometheusSettings, KafkaPrometheusSettings
+
+
+class LocalBaseKafkaMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase,
+):
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ provider = self.get_settings_provider()
+ command = SimpleNamespace(destination=queue)
+
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestKafkaMetricsSettingsProvider(
+ KafkaPrometheusSettings,
+ LocalBaseKafkaMetricsSettingsProviderTestcase,
+):
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue,
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+
+ message = SimpleNamespace(body=body, raw_message=SimpleNamespace(topic=queue))
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchKafkaMetricsSettingsProvider(
+ BatchKafkaPrometheusSettings,
+ LocalBaseKafkaMetricsSettingsProviderTestcase,
+):
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = [b"Hi ", b"again, ", b"FastStream!"]
+ message = SimpleNamespace(
+ body=body,
+ raw_message=[
+ SimpleNamespace(topic=queue) for _ in range(random.randint(a=2, b=10))
+ ],
+ )
+ expected_attrs = {
+ "destination_name": message.raw_message[0].topic,
+ "message_size": len(bytearray().join(body)),
+ "messages_count": len(message.raw_message),
+ }
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ (SimpleNamespace(), SimpleNamespace()),
+ BatchKafkaMetricsSettingsProvider(),
+ id="batch message",
+ ),
+ pytest.param(
+ SimpleNamespace(),
+ KafkaMetricsSettingsProvider(),
+ id="single message",
+ ),
+ pytest.param(
+ None,
+ KafkaMetricsSettingsProvider(),
+ id="None message",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/nats/basic.py b/tests/prometheus/nats/basic.py
new file mode 100644
index 0000000000..4fda6435e7
--- /dev/null
+++ b/tests/prometheus/nats/basic.py
@@ -0,0 +1,25 @@
+from typing import Any
+
+from faststream.nats.prometheus import NatsPrometheusMiddleware
+from faststream.nats.prometheus.provider import (
+ BatchNatsMetricsSettingsProvider,
+ NatsMetricsSettingsProvider,
+)
+from tests.brokers.nats.basic import NatsTestcaseConfig
+
+
+class BaseNatsPrometheusSettings(NatsTestcaseConfig):
+ messaging_system = "nats"
+
+ def get_middleware(self, **kwargs: Any) -> NatsPrometheusMiddleware:
+ return NatsPrometheusMiddleware(**kwargs)
+
+
+class NatsPrometheusSettings(BaseNatsPrometheusSettings):
+ def get_settings_provider(self) -> NatsMetricsSettingsProvider:
+ return NatsMetricsSettingsProvider()
+
+
+class BatchNatsPrometheusSettings(BaseNatsPrometheusSettings):
+ def get_settings_provider(self) -> BatchNatsMetricsSettingsProvider:
+ return BatchNatsMetricsSettingsProvider()
diff --git a/tests/prometheus/nats/test_nats.py b/tests/prometheus/nats/test_nats.py
index f65eb41e85..b9ea9c89a2 100644
--- a/tests/prometheus/nats/test_nats.py
+++ b/tests/prometheus/nats/test_nats.py
@@ -1,5 +1,5 @@
import asyncio
-from unittest.mock import Mock
+from typing import Any
import pytest
from prometheus_client import CollectorRegistry
@@ -9,31 +9,27 @@
from faststream.nats.prometheus.middleware import NatsPrometheusMiddleware
from tests.brokers.nats.test_consume import TestConsume
from tests.brokers.nats.test_publish import TestPublish
-from tests.prometheus.basic import LocalPrometheusTestcase
+from tests.prometheus.basic import LocalPrometheusTestcase, LocalRPCPrometheusTestcase
+from .basic import BatchNatsPrometheusSettings, NatsPrometheusSettings
-@pytest.fixture
+
+@pytest.fixture()
def stream(queue):
return JStream(queue)
-@pytest.mark.nats
-class TestPrometheus(LocalPrometheusTestcase):
- def get_broker(self, apply_types=False, **kwargs):
- return NatsBroker(apply_types=apply_types, **kwargs)
-
- def get_middleware(self, **kwargs):
- return NatsPrometheusMiddleware(**kwargs)
-
- async def test_metrics_batch(
+@pytest.mark.nats()
+class TestBatchPrometheus(BatchNatsPrometheusSettings, LocalPrometheusTestcase):
+ async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
stream: JStream,
- ):
- middleware = self.get_middleware(registry=CollectorRegistry())
- metrics_manager_mock = Mock()
- middleware._metrics_manager = metrics_manager_mock
+ ) -> None:
+ event = asyncio.Event()
+
+ registry = CollectorRegistry()
+ middleware = self.get_middleware(registry=registry)
broker = self.get_broker(apply_types=True, middlewares=(middleware,))
@@ -60,15 +56,24 @@ async def handler(m=Context("message")):
await asyncio.wait(tasks, timeout=self.timeout)
assert event.is_set()
- self.assert_consume_metrics(
- metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ self.assert_metrics(
+ registry=registry,
+ message=message,
+ exception_class=None,
)
- self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
-@pytest.mark.nats
+@pytest.mark.nats()
+class TestPrometheus(
+ NatsPrometheusSettings,
+ LocalPrometheusTestcase,
+ LocalRPCPrometheusTestcase,
+): ...
+
+
+@pytest.mark.nats()
class TestPublishWithPrometheus(TestPublish):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> NatsBroker:
return NatsBroker(
middlewares=(NatsPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
@@ -76,9 +81,9 @@ def get_broker(self, apply_types: bool = False, **kwargs):
)
-@pytest.mark.nats
+@pytest.mark.nats()
class TestConsumeWithPrometheus(TestConsume):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> NatsBroker:
return NatsBroker(
middlewares=(NatsPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
diff --git a/tests/prometheus/nats/test_provider.py b/tests/prometheus/nats/test_provider.py
new file mode 100644
index 0000000000..3c94a7e129
--- /dev/null
+++ b/tests/prometheus/nats/test_provider.py
@@ -0,0 +1,100 @@
+import random
+from types import SimpleNamespace
+
+import pytest
+from nats.aio.msg import Msg
+
+from faststream.nats.prometheus.provider import (
+ BatchNatsMetricsSettingsProvider,
+ NatsMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+from .basic import BatchNatsPrometheusSettings, NatsPrometheusSettings
+
+
+class LocalBaseNatsMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase,
+):
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ command = SimpleNamespace(destination=queue)
+
+ provider = self.get_settings_provider()
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestNatsMetricsSettingsProvider(
+ NatsPrometheusSettings, LocalBaseNatsMetricsSettingsProviderTestcase
+):
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue,
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+ message = SimpleNamespace(body=body, raw_message=SimpleNamespace(subject=queue))
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchNatsMetricsSettingsProvider(
+ BatchNatsPrometheusSettings, LocalBaseNatsMetricsSettingsProviderTestcase
+):
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ raw_messages = [
+ SimpleNamespace(subject=queue) for _ in range(random.randint(a=2, b=10))
+ ]
+
+ expected_attrs = {
+ "destination_name": raw_messages[0].subject,
+ "message_size": len(body),
+ "messages_count": len(raw_messages),
+ }
+ message = SimpleNamespace(body=body, raw_message=raw_messages)
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
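+# The NATS factory additionally returns None for objects that are not Msg
+# instances, so the expected provider below may itself be None.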
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ (Msg(SimpleNamespace()), Msg(SimpleNamespace())),
+ BatchNatsMetricsSettingsProvider(),
+ id="message is sequence",
+ ),
+ pytest.param(
+ Msg(
+ SimpleNamespace(),
+ ),
+ NatsMetricsSettingsProvider(),
+ id="single message",
+ ),
+ pytest.param(
+ None,
+ NatsMetricsSettingsProvider(),
+ id="message is None",
+ ),
+ pytest.param(
+ SimpleNamespace(),
+ None,
+ id="message is not Msg instance",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/rabbit/basic.py b/tests/prometheus/rabbit/basic.py
new file mode 100644
index 0000000000..ee8a2d20a9
--- /dev/null
+++ b/tests/prometheus/rabbit/basic.py
@@ -0,0 +1,16 @@
+from typing import Any
+
+from faststream.prometheus import MetricsSettingsProvider
+from faststream.rabbit.prometheus import RabbitPrometheusMiddleware
+from faststream.rabbit.prometheus.provider import RabbitMetricsSettingsProvider
+from tests.brokers.rabbit.basic import RabbitTestcaseConfig
+
+
+class RabbitPrometheusSettings(RabbitTestcaseConfig):
+ messaging_system = "rabbitmq"
+
+ def get_middleware(self, **kwargs: Any) -> RabbitPrometheusMiddleware:
+ return RabbitPrometheusMiddleware(**kwargs)
+
+ def get_settings_provider(self) -> MetricsSettingsProvider[Any]:
+ return RabbitMetricsSettingsProvider()
diff --git a/tests/prometheus/rabbit/test_provider.py b/tests/prometheus/rabbit/test_provider.py
new file mode 100644
index 0000000000..f31ea11a35
--- /dev/null
+++ b/tests/prometheus/rabbit/test_provider.py
@@ -0,0 +1,62 @@
+from types import SimpleNamespace
+from typing import Union
+
+import pytest
+
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+from .basic import RabbitPrometheusSettings
+
+
+class TestRabbitMetricsSettingsProvider(
+ RabbitPrometheusSettings,
+ LocalMetricsSettingsProviderTestcase,
+):
+ @pytest.mark.parametrize(
+ "exchange",
+ (
+ pytest.param("my_exchange", id="with exchange"),
+ pytest.param(None, id="without exchange"),
+ ),
+ )
+ def test_get_consume_attrs_from_message(
+ self,
+ exchange: Union[str, None],
+ queue: str,
+ ) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": f"{exchange or 'default'}.{queue}",
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+ message = SimpleNamespace(
+ body=body, raw_message=SimpleNamespace(exchange=exchange, routing_key=queue)
+ )
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+ @pytest.mark.parametrize(
+ "exchange",
+ (
+ pytest.param("my_exchange", id="with exchange"),
+ pytest.param(None, id="without exchange"),
+ ),
+ )
+ def test_get_publish_destination_name_from_cmd(
+ self,
+ exchange: Union[str, None],
+ queue: str,
+ ) -> None:
+ expected_destination_name = f"{exchange or 'default'}.{queue}"
+ command = SimpleNamespace(
+ exchange=SimpleNamespace(name=exchange), destination=queue
+ )
+
+ provider = self.get_settings_provider()
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
diff --git a/tests/prometheus/rabbit/test_rabbit.py b/tests/prometheus/rabbit/test_rabbit.py
index 6eef6d224f..dff264063a 100644
--- a/tests/prometheus/rabbit/test_rabbit.py
+++ b/tests/prometheus/rabbit/test_rabbit.py
@@ -1,3 +1,5 @@
+from typing import Any
+
import pytest
from prometheus_client import CollectorRegistry
@@ -5,26 +7,26 @@
from faststream.rabbit.prometheus.middleware import RabbitPrometheusMiddleware
from tests.brokers.rabbit.test_consume import TestConsume
from tests.brokers.rabbit.test_publish import TestPublish
-from tests.prometheus.basic import LocalPrometheusTestcase
+from tests.prometheus.basic import LocalPrometheusTestcase, LocalRPCPrometheusTestcase
+
+from .basic import RabbitPrometheusSettings
-@pytest.fixture
+@pytest.fixture()
def exchange(queue):
return RabbitExchange(name=queue)
-@pytest.mark.rabbit
-class TestPrometheus(LocalPrometheusTestcase):
- def get_broker(self, apply_types=False, **kwargs):
- return RabbitBroker(apply_types=apply_types, **kwargs)
-
- def get_middleware(self, **kwargs):
- return RabbitPrometheusMiddleware(**kwargs)
+@pytest.mark.rabbit()
+class TestPrometheus(
+ RabbitPrometheusSettings,
+ LocalPrometheusTestcase,
+ LocalRPCPrometheusTestcase,
+): ...
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
class TestPublishWithPrometheus(TestPublish):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
return RabbitBroker(
middlewares=(RabbitPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
@@ -32,9 +34,9 @@ def get_broker(self, apply_types: bool = False, **kwargs):
)
-@pytest.mark.rabbit
+@pytest.mark.rabbit()
class TestConsumeWithPrometheus(TestConsume):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
return RabbitBroker(
middlewares=(RabbitPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
diff --git a/tests/prometheus/redis/basic.py b/tests/prometheus/redis/basic.py
new file mode 100644
index 0000000000..79e3bb477b
--- /dev/null
+++ b/tests/prometheus/redis/basic.py
@@ -0,0 +1,25 @@
+from typing import Any
+
+from faststream.redis.prometheus import RedisPrometheusMiddleware
+from faststream.redis.prometheus.provider import (
+ BatchRedisMetricsSettingsProvider,
+ RedisMetricsSettingsProvider,
+)
+from tests.brokers.redis.basic import RedisTestcaseConfig
+
+
+class BaseRedisPrometheusSettings(RedisTestcaseConfig):
+ messaging_system = "redis"
+
+ def get_middleware(self, **kwargs: Any) -> RedisPrometheusMiddleware:
+ return RedisPrometheusMiddleware(**kwargs)
+
+
+class RedisPrometheusSettings(BaseRedisPrometheusSettings):
+ def get_settings_provider(self) -> RedisMetricsSettingsProvider:
+ return RedisMetricsSettingsProvider()
+
+
+class BatchRedisPrometheusSettings(BaseRedisPrometheusSettings):
+ def get_settings_provider(self) -> BatchRedisMetricsSettingsProvider:
+ return BatchRedisMetricsSettingsProvider()
diff --git a/tests/prometheus/redis/test_provider.py b/tests/prometheus/redis/test_provider.py
new file mode 100644
index 0000000000..1e4fef4581
--- /dev/null
+++ b/tests/prometheus/redis/test_provider.py
@@ -0,0 +1,154 @@
+from types import SimpleNamespace
+
+import pytest
+
+from faststream.redis.message import (
+ BatchListMessage,
+ BatchStreamMessage,
+ DefaultListMessage,
+ DefaultStreamMessage,
+ PubSubMessage,
+)
+from faststream.redis.prometheus.provider import (
+ BatchRedisMetricsSettingsProvider,
+ RedisMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+from .basic import BatchRedisPrometheusSettings, RedisPrometheusSettings
+
+
+class LocalBaseRedisMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase,
+):
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ provider = self.get_settings_provider()
+ command = SimpleNamespace(destination=queue)
+
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestRedisMetricsSettingsProvider(
+ RedisPrometheusSettings, LocalBaseRedisMetricsSettingsProviderTestcase
+):
+ @pytest.mark.parametrize(
+ "destination",
+ (
+ pytest.param("channel", id="destination is channel"),
+ pytest.param("list", id="destination is list"),
+ pytest.param("stream", id="destination is stream"),
+ pytest.param("", id="destination is blank"),
+ ),
+ )
+ def test_get_consume_attrs_from_message(self, queue: str, destination: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue if destination else "",
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+
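+ # The provider reads the destination from whichever of the "channel",
+ # "list" or "stream" keys is present in the raw message.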
+ raw_message = {"data": body}
+ if destination:
+ raw_message[destination] = queue
+
+ message = SimpleNamespace(body=body, raw_message=raw_message)
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchRedisMetricsSettingsProvider(
+ BatchRedisPrometheusSettings,
+ LocalBaseRedisMetricsSettingsProviderTestcase,
+):
+ @pytest.mark.parametrize(
+ "destination",
+ (
+ pytest.param("channel", id="destination is channel"),
+ pytest.param("list", id="destination is list"),
+ pytest.param("stream", id="destination is stream"),
+ pytest.param("", id="destination is blank"),
+ ),
+ )
+ def test_get_consume_attrs_from_message(self, queue: str, destination: str) -> None:
+ decoded_body = ["Hi ", "again, ", "FastStream!"]
+ body = str(decoded_body).encode()
+
+ expected_attrs = {
+ "destination_name": queue if destination else "",
+ "message_size": len(body),
+ "messages_count": len(decoded_body),
+ }
+
+ raw_message = {"data": decoded_body}
+
+ if destination:
+ raw_message[destination] = queue
+
+ message = SimpleNamespace(
+ body=body,
+ raw_message=raw_message,
+ )
+
+ provider = self.get_settings_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ PubSubMessage(
+ type="message",
+ channel="test-channel",
+ data=b"",
+ pattern=None,
+ ),
+ RedisMetricsSettingsProvider(),
+ id="PubSub message",
+ ),
+ pytest.param(
+ DefaultListMessage(type="list", channel="test-list", data=b""),
+ RedisMetricsSettingsProvider(),
+ id="Single List message",
+ ),
+ pytest.param(
+ BatchListMessage(type="blist", channel="test-list", data=[b"", b""]),
+ BatchRedisMetricsSettingsProvider(),
+ id="Batch List message",
+ ),
+ pytest.param(
+ DefaultStreamMessage(
+ type="stream",
+ channel="test-stream",
+ data=b"",
+ message_ids=[],
+ ),
+ RedisMetricsSettingsProvider(),
+ id="Single Stream message",
+ ),
+ pytest.param(
+ BatchStreamMessage(
+ type="bstream",
+ channel="test-stream",
+ data=[{b"": b""}, {b"": b""}],
+ message_ids=[],
+ ),
+ BatchRedisMetricsSettingsProvider(),
+ id="Batch Stream message",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/redis/test_redis.py b/tests/prometheus/redis/test_redis.py
index 4059c33d48..fae4932bec 100644
--- a/tests/prometheus/redis/test_redis.py
+++ b/tests/prometheus/redis/test_redis.py
@@ -1,5 +1,5 @@
import asyncio
-from unittest.mock import Mock
+from typing import Any
import pytest
from prometheus_client import CollectorRegistry
@@ -9,25 +9,21 @@
from faststream.redis.prometheus.middleware import RedisPrometheusMiddleware
from tests.brokers.redis.test_consume import TestConsume
from tests.brokers.redis.test_publish import TestPublish
-from tests.prometheus.basic import LocalPrometheusTestcase
+from tests.prometheus.basic import LocalPrometheusTestcase, LocalRPCPrometheusTestcase
+from .basic import BatchRedisPrometheusSettings, RedisPrometheusSettings
-@pytest.mark.redis
-class TestPrometheus(LocalPrometheusTestcase):
- def get_broker(self, apply_types=False, **kwargs):
- return RedisBroker(apply_types=apply_types, **kwargs)
- def get_middleware(self, **kwargs):
- return RedisPrometheusMiddleware(**kwargs)
-
- async def test_metrics_batch(
+@pytest.mark.redis()
+class TestBatchPrometheus(BatchRedisPrometheusSettings, LocalPrometheusTestcase):
+ async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
- ):
- middleware = self.get_middleware(registry=CollectorRegistry())
- metrics_manager_mock = Mock()
- middleware._metrics_manager = metrics_manager_mock
+ ) -> None:
+ event = asyncio.Event()
+
+ registry = CollectorRegistry()
+ middleware = self.get_middleware(registry=registry)
broker = self.get_broker(apply_types=True, middlewares=(middleware,))
@@ -45,21 +41,30 @@ async def handler(m=Context("message")):
async with broker:
await broker.start()
tasks = (
- asyncio.create_task(broker.publish_batch("hello", "world", list=queue)),
+ asyncio.create_task(broker.publish_batch(1, 2, list=queue)),
asyncio.create_task(event.wait()),
)
await asyncio.wait(tasks, timeout=self.timeout)
assert event.is_set()
- self.assert_consume_metrics(
- metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ self.assert_metrics(
+ registry=registry,
+ message=message,
+ exception_class=None,
)
- self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
-@pytest.mark.redis
+@pytest.mark.redis()
+class TestPrometheus(
+ RedisPrometheusSettings,
+ LocalPrometheusTestcase,
+ LocalRPCPrometheusTestcase,
+): ...
+
+
+@pytest.mark.redis()
class TestPublishWithPrometheus(TestPublish):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
return RedisBroker(
middlewares=(RedisPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
@@ -67,9 +72,9 @@ def get_broker(self, apply_types: bool = False, **kwargs):
)
-@pytest.mark.redis
+@pytest.mark.redis()
class TestConsumeWithPrometheus(TestConsume):
- def get_broker(self, apply_types: bool = False, **kwargs):
+ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
return RedisBroker(
middlewares=(RedisPrometheusMiddleware(registry=CollectorRegistry()),),
apply_types=apply_types,
diff --git a/tests/prometheus/test_metrics.py b/tests/prometheus/test_metrics.py
index 7f9aa85771..45dc6132fd 100644
--- a/tests/prometheus/test_metrics.py
+++ b/tests/prometheus/test_metrics.py
@@ -1,23 +1,31 @@
import random
-from typing import List, Optional
-from unittest.mock import ANY
+from typing import Any, Optional
import pytest
-from dirty_equals import IsPositiveFloat, IsStr
-from prometheus_client import CollectorRegistry, Histogram, Metric
-from prometheus_client.samples import Sample
+from prometheus_client import CollectorRegistry
from faststream.prometheus.container import MetricsContainer
from faststream.prometheus.manager import MetricsManager
from faststream.prometheus.types import ProcessingStatus, PublishingStatus
+from tests.prometheus.utils import (
+ get_published_messages_duration_seconds_metric,
+ get_published_messages_exceptions_metric,
+ get_published_messages_metric,
+ get_received_messages_in_process_metric,
+ get_received_messages_metric,
+ get_received_messages_size_bytes_metric,
+ get_received_processed_messages_duration_seconds_metric,
+ get_received_processed_messages_exceptions_metric,
+ get_received_processed_messages_metric,
+)
class TestCaseMetrics:
@staticmethod
def create_metrics_manager(
- app_name: Optional[str] = None,
- metrics_prefix: Optional[str] = None,
- received_messages_size_buckets: Optional[List[float]] = None,
+ app_name: str,
+ metrics_prefix: str,
+ received_messages_size_buckets: Optional[list[float]] = None,
) -> MetricsManager:
registry = CollectorRegistry()
container = MetricsContainer(
@@ -27,27 +35,27 @@ def create_metrics_manager(
)
return MetricsManager(container, app_name=app_name)
- @pytest.fixture
+ @pytest.fixture()
def app_name(self, request) -> str:
return "youtube"
- @pytest.fixture
+ @pytest.fixture()
def metrics_prefix(self, request) -> str:
return "fs"
- @pytest.fixture
+ @pytest.fixture()
def broker(self) -> str:
return "rabbit"
- @pytest.fixture
+ @pytest.fixture()
def queue(self) -> str:
return "default.test"
- @pytest.fixture
+ @pytest.fixture()
def messages_amount(self) -> int:
return random.randint(1, 10)
- @pytest.fixture
+ @pytest.fixture()
def exception_type(self) -> str:
return Exception.__name__
@@ -64,28 +72,13 @@ def test_add_received_message(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_received_messages",
- documentation="Count of received messages by broker and handler",
- unit="",
- typ="counter",
- )
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_received_messages_total",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=float(messages_amount),
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_messages_created",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=IsPositiveFloat,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_received_messages_metric(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ queue=queue,
+ broker=broker,
+ messages_amount=messages_amount,
+ )
manager.add_received_message(
amount=messages_amount, broker=broker, handler=queue
@@ -97,10 +90,10 @@ def test_add_received_message(
@pytest.mark.parametrize(
"is_default_buckets",
- [
+ (
pytest.param(True, id="with default buckets"),
pytest.param(False, id="with custom buckets"),
- ],
+ ),
)
def test_observe_received_messages_size(
self,
@@ -110,7 +103,7 @@ def test_observe_received_messages_size(
broker: str,
is_default_buckets: bool,
) -> None:
- manager_kwargs = {
+ manager_kwargs: dict[str, Any] = {
"app_name": app_name,
"metrics_prefix": metrics_prefix,
}
@@ -129,50 +122,15 @@ def test_observe_received_messages_size(
else custom_buckets
)
- expected = Metric(
- name=f"{metrics_prefix}_received_messages_size_bytes",
- documentation="Histogram of received messages size in bytes by broker and handler",
- unit="",
- typ="histogram",
- )
- expected.samples = [
- *[
- Sample(
- name=f"{metrics_prefix}_received_messages_size_bytes_bucket",
- labels={
- "app_name": app_name,
- "broker": broker,
- "handler": queue,
- "le": IsStr,
- },
- value=1.0,
- timestamp=None,
- exemplar=None,
- )
- for _ in buckets
- ],
- Sample(
- name=f"{metrics_prefix}_received_messages_size_bytes_count",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=1.0,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_messages_size_bytes_sum",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=size,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_messages_size_bytes_created",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=ANY,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_received_messages_size_bytes_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ buckets=buckets,
+ size=size,
+ messages_amount=1,
+ )
manager.observe_received_messages_size(size=size, broker=broker, handler=queue)
@@ -193,21 +151,13 @@ def test_add_received_message_in_process(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_received_messages_in_process",
- documentation="Gauge of received messages in process by broker and handler",
- unit="",
- typ="gauge",
+ expected = get_received_messages_in_process_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ messages_amount=messages_amount,
)
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_received_messages_in_process",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=float(messages_amount),
- timestamp=None,
- exemplar=None,
- ),
- ]
manager.add_received_message_in_process(
amount=messages_amount, broker=broker, handler=queue
@@ -230,21 +180,13 @@ def test_remove_received_message_in_process(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_received_messages_in_process",
- documentation="Gauge of received messages in process by broker and handler",
- unit="",
- typ="gauge",
+ expected = get_received_messages_in_process_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ messages_amount=messages_amount - 1,
)
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_received_messages_in_process",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=float(messages_amount - 1),
- timestamp=None,
- exemplar=None,
- ),
- ]
manager.add_received_message_in_process(
amount=messages_amount, broker=broker, handler=queue
@@ -259,13 +201,13 @@ def test_remove_received_message_in_process(
@pytest.mark.parametrize(
"status",
- [
+ (
pytest.param(ProcessingStatus.acked, id="acked status"),
pytest.param(ProcessingStatus.nacked, id="nacked status"),
pytest.param(ProcessingStatus.rejected, id="rejected status"),
pytest.param(ProcessingStatus.skipped, id="skipped status"),
pytest.param(ProcessingStatus.error, id="error status"),
- ],
+ ),
)
def test_add_received_processed_message(
self,
@@ -281,38 +223,14 @@ def test_add_received_processed_message(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_received_processed_messages",
- documentation="Count of received processed messages by broker, handler and status",
- unit="",
- typ="counter",
- )
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_total",
- labels={
- "app_name": app_name,
- "broker": broker,
- "handler": queue,
- "status": status.value,
- },
- value=float(messages_amount),
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_created",
- labels={
- "app_name": app_name,
- "broker": broker,
- "handler": queue,
- "status": status.value,
- },
- value=IsPositiveFloat,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_received_processed_messages_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ messages_amount=messages_amount,
+ status=status,
+ )
manager.add_received_processed_message(
amount=messages_amount,
@@ -339,50 +257,13 @@ def test_observe_received_processed_message_duration(
duration = 0.001
- expected = Metric(
- name=f"{metrics_prefix}_received_processed_messages_duration_seconds",
- documentation="Histogram of received processed messages duration in seconds by broker and handler",
- unit="",
- typ="histogram",
- )
- expected.samples = [
- *[
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_duration_seconds_bucket",
- labels={
- "app_name": app_name,
- "broker": broker,
- "handler": queue,
- "le": IsStr,
- },
- value=1.0,
- timestamp=None,
- exemplar=None,
- )
- for _ in Histogram.DEFAULT_BUCKETS
- ],
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_duration_seconds_count",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=1.0,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_duration_seconds_sum",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=duration,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_duration_seconds_created",
- labels={"app_name": app_name, "broker": broker, "handler": queue},
- value=ANY,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_received_processed_messages_duration_seconds_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ duration=duration,
+ )
manager.observe_received_processed_message_duration(
duration=duration,
@@ -409,38 +290,14 @@ def test_add_received_processed_message_exception(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_received_processed_messages_exceptions",
- documentation="Count of received processed messages exceptions by broker, handler and exception_type",
- unit="",
- typ="counter",
- )
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_exceptions_total",
- labels={
- "app_name": app_name,
- "broker": broker,
- "handler": queue,
- "exception_type": exception_type,
- },
- value=1.0,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_received_processed_messages_exceptions_created",
- labels={
- "app_name": app_name,
- "broker": broker,
- "handler": queue,
- "exception_type": exception_type,
- },
- value=IsPositiveFloat,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_received_processed_messages_exceptions_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ exception_type=exception_type,
+ exceptions_amount=1,
+ )
manager.add_received_processed_message_exception(
exception_type=exception_type,
@@ -456,10 +313,10 @@ def test_add_received_processed_message_exception(
@pytest.mark.parametrize(
"status",
- [
+ (
pytest.param(PublishingStatus.success, id="success status"),
pytest.param(PublishingStatus.error, id="error status"),
- ],
+ ),
)
def test_add_published_message(
self,
@@ -475,38 +332,14 @@ def test_add_published_message(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_published_messages",
- documentation="Count of published messages by destination and status",
- unit="",
- typ="counter",
- )
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_published_messages_total",
- labels={
- "app_name": app_name,
- "broker": broker,
- "destination": queue,
- "status": status.value,
- },
- value=1.0,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_published_messages_created",
- labels={
- "app_name": app_name,
- "broker": broker,
- "destination": queue,
- "status": status.value,
- },
- value=IsPositiveFloat,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_published_messages_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ status=status,
+ messages_amount=1,
+ )
manager.add_published_message(
status=status,
@@ -532,50 +365,13 @@ def test_observe_published_message_duration(
duration = 0.001
- expected = Metric(
- name=f"{metrics_prefix}_published_messages_duration_seconds",
- documentation="Histogram of published messages duration in seconds by broker and destination",
- unit="",
- typ="histogram",
- )
- expected.samples = [
- *[
- Sample(
- name=f"{metrics_prefix}_published_messages_duration_seconds_bucket",
- labels={
- "app_name": app_name,
- "broker": broker,
- "destination": queue,
- "le": IsStr,
- },
- value=1.0,
- timestamp=None,
- exemplar=None,
- )
- for _ in Histogram.DEFAULT_BUCKETS
- ],
- Sample(
- name=f"{metrics_prefix}_published_messages_duration_seconds_count",
- labels={"app_name": app_name, "broker": broker, "destination": queue},
- value=1.0,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_published_messages_duration_seconds_sum",
- labels={"app_name": app_name, "broker": broker, "destination": queue},
- value=duration,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_published_messages_duration_seconds_created",
- labels={"app_name": app_name, "broker": broker, "destination": queue},
- value=IsPositiveFloat,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_published_messages_duration_seconds_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ duration=duration,
+ )
manager.observe_published_message_duration(
duration=duration,
@@ -600,38 +396,13 @@ def test_add_published_message_exception(
metrics_prefix=metrics_prefix,
)
- expected = Metric(
- name=f"{metrics_prefix}_published_messages_exceptions",
- documentation="Count of published messages exceptions by broker, destination and exception_type",
- unit="",
- typ="counter",
- )
- expected.samples = [
- Sample(
- name=f"{metrics_prefix}_published_messages_exceptions_total",
- labels={
- "app_name": app_name,
- "broker": broker,
- "destination": queue,
- "exception_type": exception_type,
- },
- value=1.0,
- timestamp=None,
- exemplar=None,
- ),
- Sample(
- name=f"{metrics_prefix}_published_messages_exceptions_created",
- labels={
- "app_name": app_name,
- "broker": broker,
- "destination": queue,
- "exception_type": exception_type,
- },
- value=IsPositiveFloat,
- timestamp=None,
- exemplar=None,
- ),
- ]
+ expected = get_published_messages_exceptions_metric(
+ metrics_prefix=metrics_prefix,
+ app_name=app_name,
+ broker=broker,
+ queue=queue,
+ exception_type=exception_type,
+ )
manager.add_published_message_exception(
exception_type=exception_type,
diff --git a/tests/prometheus/utils.py b/tests/prometheus/utils.py
new file mode 100644
index 0000000000..29a813e927
--- /dev/null
+++ b/tests/prometheus/utils.py
@@ -0,0 +1,426 @@
+from collections.abc import Sequence
+from typing import Optional, cast
+
+from dirty_equals import IsFloat, IsPositiveFloat, IsStr
+from prometheus_client import Histogram, Metric
+from prometheus_client.samples import Sample
+
+from faststream.prometheus.types import ProcessingStatus, PublishingStatus
+
+
+def get_received_messages_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ messages_amount: int,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_received_messages",
+ documentation="Count of received messages by broker and handler",
+ unit="",
+ typ="counter",
+ )
+ metric.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_messages_total",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_messages_created",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_received_messages_size_bytes_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ buckets: Sequence[float],
+ size: int,
+ messages_amount: int,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_received_messages_size_bytes",
+ documentation="Histogram of received messages size in bytes by broker and handler",
+ unit="",
+ typ="histogram",
+ )
+ metric.samples = [
+ *[
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_bucket",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "le": cast("str", IsStr),
+ },
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ )
+ for _ in buckets
+ ],
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_count",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_sum",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(size),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_created",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_received_messages_in_process_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ messages_amount: int,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ documentation="Gauge of received messages in process by broker and handler",
+ unit="",
+ typ="gauge",
+ )
+ metric.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_received_processed_messages_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ messages_amount: int,
+ status: ProcessingStatus,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_received_processed_messages",
+ documentation="Count of received processed messages by broker, handler and status",
+ unit="",
+ typ="counter",
+ )
+ metric.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "status": status.value,
+ },
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "status": status.value,
+ },
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_received_processed_messages_duration_seconds_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ duration: float,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds",
+ documentation="Histogram of received processed messages duration in seconds by broker and handler",
+ unit="",
+ typ="histogram",
+ )
+ metric.samples = [
+ *[
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_bucket",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "le": cast("str", IsStr),
+ },
+ value=cast("float", IsFloat),
+ timestamp=None,
+ exemplar=None,
+ )
+ for _ in Histogram.DEFAULT_BUCKETS
+ ],
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_count",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_sum",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=duration,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_created",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_received_processed_messages_exceptions_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ exception_type: Optional[str],
+ exceptions_amount: int,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions",
+ documentation="Count of received processed messages exceptions by broker, handler and exception_type",
+ unit="",
+ typ="counter",
+ )
+ metric.samples = (
+ [
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "exception_type": exception_type,
+ },
+ value=float(exceptions_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "exception_type": exception_type,
+ },
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+ if exception_type is not None
+ else []
+ )
+
+ return metric
+
+
+def get_published_messages_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ messages_amount: int,
+ status: PublishingStatus,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_published_messages",
+ documentation="Count of published messages by destination and status",
+ unit="",
+ typ="counter",
+ )
+ metric.samples = [
+ Sample(
+ name=f"{metrics_prefix}_published_messages_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "status": status.value,
+ },
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "status": status.value,
+ },
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_published_messages_duration_seconds_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ duration: float,
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_published_messages_duration_seconds",
+ documentation="Histogram of published messages duration in seconds by broker and destination",
+ unit="",
+ typ="histogram",
+ )
+ metric.samples = [
+ *[
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_bucket",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "le": cast("str", IsStr),
+ },
+ value=cast("float", IsFloat),
+ timestamp=None,
+ exemplar=None,
+ )
+ for _ in Histogram.DEFAULT_BUCKETS
+ ],
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_count",
+ labels={"app_name": app_name, "broker": broker, "destination": queue},
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_sum",
+ labels={"app_name": app_name, "broker": broker, "destination": queue},
+ value=duration,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_created",
+ labels={"app_name": app_name, "broker": broker, "destination": queue},
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ return metric
+
+
+def get_published_messages_exceptions_metric(
+ *,
+ metrics_prefix: str,
+ app_name: str,
+ broker: str,
+ queue: str,
+ exception_type: Optional[str],
+) -> Metric:
+ metric = Metric(
+ name=f"{metrics_prefix}_published_messages_exceptions",
+ documentation="Count of published messages exceptions by broker, destination and exception_type",
+ unit="",
+ typ="counter",
+ )
+ metric.samples = (
+ [
+ Sample(
+ name=f"{metrics_prefix}_published_messages_exceptions_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "exception_type": exception_type,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_exceptions_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "exception_type": exception_type,
+ },
+ value=cast("float", IsPositiveFloat),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+ if exception_type is not None
+ else []
+ )
+
+ return metric
diff --git a/tests/tools.py b/tests/tools.py
index 3ff9f57186..c2682f2455 100644
--- a/tests/tools.py
+++ b/tests/tools.py
@@ -1,35 +1,36 @@
import inspect
from functools import wraps
-from typing import Any, Iterable
+from typing import Callable, Protocol, TypeVar
from unittest.mock import MagicMock
+from typing_extensions import ParamSpec
-def spy_decorator(method):
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+class SmartMock(Protocol[P, T]):
+ mock: MagicMock
+
+ def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
+
+
+def spy_decorator(method: Callable[P, T]) -> SmartMock[P, T]:
mock = MagicMock()
if inspect.iscoroutinefunction(method):
@wraps(method)
- async def wrapper(*args, **kwargs):
+ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
mock(*args, **kwargs)
return await method(*args, **kwargs)
+
else:
@wraps(method)
- def wrapper(*args, **kwargs):
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
mock(*args, **kwargs)
return method(*args, **kwargs)
wrapper.mock = mock
return wrapper
-
-
-class AsyncIterator:
- def __init__(self, iterable: Iterable[Any]) -> None:
- self.iter = iter(iterable)
-
- def __aiter__(self):
- return self
-
- async def __anext__(self):
- return next(self.iter)
diff --git a/tests/utils/context/test_alias.py b/tests/utils/context/test_alias.py
index 55590cea2f..a84e475a03 100644
--- a/tests/utils/context/test_alias.py
+++ b/tests/utils/context/test_alias.py
@@ -1,41 +1,41 @@
-from typing import Any
+from typing import Annotated, Any
import pytest
-from typing_extensions import Annotated
-from faststream.utils import Context, ContextRepo, apply_types
+from faststream import Context, ContextRepo
+from faststream._internal.utils import apply_types
-@pytest.mark.asyncio
-async def test_base_context_alias(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_base_context_alias(context: ContextRepo) -> None:
key = 1000
context.set_global("key", key)
- @apply_types
+ @apply_types(context__=context)
async def func(k=Context("key")):
return k is key
assert await func()
-@pytest.mark.asyncio
-async def test_context_cast(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_cast(context: ContextRepo) -> None:
key = 1000
context.set_global("key", key)
- @apply_types
+ @apply_types(context__=context)
async def func(k: float = Context("key", cast=True)):
return isinstance(k, float)
assert await func()
-@pytest.mark.asyncio
-async def test_nested_context_alias(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_nested_context_alias(context: ContextRepo) -> None:
model = SomeModel(field=SomeModel(field=1000))
context.set_global("model", model)
- @apply_types
+ @apply_types(context__=context)
async def func(
m=Context("model.field.field"),
m2=Context("model.not_existed", default=None),
@@ -54,12 +54,12 @@ async def func(
assert await func(model=model)
-@pytest.mark.asyncio
-async def test_annotated_alias(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_annotated_alias(context: ContextRepo) -> None:
model = SomeModel(field=SomeModel(field=1000))
context.set_global("model", model)
- @apply_types
+ @apply_types(context__=context)
async def func(m: Annotated[int, Context("model.field.field")]):
return m is model.field.field
@@ -70,5 +70,5 @@ class SomeModel:
field: Any = ""
another_field: Any = None
- def __init__(self, field):
+ def __init__(self, field) -> None:
self.field = field
diff --git a/tests/utils/context/test_depends.py b/tests/utils/context/test_depends.py
index b2c92434a9..942a7b5afe 100644
--- a/tests/utils/context/test_depends.py
+++ b/tests/utils/context/test_depends.py
@@ -1,7 +1,9 @@
+from typing import Annotated
+
import pytest
-from typing_extensions import Annotated
-from faststream.utils import Depends, apply_types
+from faststream import Depends
+from faststream._internal.utils import apply_types
def sync_dep(key):
@@ -12,8 +14,8 @@ async def async_dep(key):
return key
-@pytest.mark.asyncio
-async def test_sync_depends():
+@pytest.mark.asyncio()
+async def test_sync_depends() -> None:
key = 1000
@apply_types
@@ -23,17 +25,17 @@ def func(k=Depends(sync_dep)):
assert func(key=key)
-@pytest.mark.asyncio
-async def test_sync_with_async_depends():
+@pytest.mark.asyncio()
+async def test_sync_with_async_depends() -> None:
with pytest.raises(AssertionError):
@apply_types
- def func(k=Depends(async_dep)): # pragma: no cover
+ def func(k=Depends(async_dep)) -> None: # pragma: no cover
pass
-@pytest.mark.asyncio
-async def test_async_depends():
+@pytest.mark.asyncio()
+async def test_async_depends() -> None:
key = 1000
@apply_types
@@ -43,8 +45,8 @@ async def func(k=Depends(async_dep)):
assert await func(key=key)
-@pytest.mark.asyncio
-async def test_async_with_sync_depends():
+@pytest.mark.asyncio()
+async def test_async_with_sync_depends() -> None:
key = 1000
@apply_types
@@ -54,8 +56,8 @@ async def func(k=Depends(sync_dep)):
assert await func(key=key)
-@pytest.mark.asyncio
-async def test_annotated_depends():
+@pytest.mark.asyncio()
+async def test_annotated_depends() -> None:
D = Annotated[int, Depends(sync_dep)] # noqa: N806
key = 1000
diff --git a/tests/utils/context/test_headers.py b/tests/utils/context/test_headers.py
index fa6c716db9..bb694ddf36 100644
--- a/tests/utils/context/test_headers.py
+++ b/tests/utils/context/test_headers.py
@@ -4,9 +4,9 @@
from tests.marks import require_nats
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_nats_headers():
+async def test_nats_headers() -> None:
from faststream.nats import NatsBroker, TestNatsBroker
broker = NatsBroker()
@@ -15,22 +15,23 @@ async def test_nats_headers():
async def h(
name: str = Header(),
id_: int = Header("id"),
- ):
+ ) -> int:
assert name == "john"
assert id_ == 1
return 1
async with TestNatsBroker(broker) as br:
assert (
- await br.publish(
- "",
- "in",
- headers={
- "name": "john",
- "id": "1",
- },
- rpc=True,
- rpc_timeout=1.0,
- )
+ await (
+ await br.request(
+ "",
+ "in",
+ headers={
+ "name": "john",
+ "id": "1",
+ },
+ timeout=1.0,
+ )
+ ).decode()
== 1
)
diff --git a/tests/utils/context/test_main.py b/tests/utils/context/test_main.py
index 39e6434cec..c34317a879 100644
--- a/tests/utils/context/test_main.py
+++ b/tests/utils/context/test_main.py
@@ -1,10 +1,11 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
-from faststream.utils import Context, ContextRepo, apply_types
+from faststream import Context, ContextRepo
+from faststream._internal.utils import apply_types
-def test_context_getattr(context: ContextRepo):
+def test_context_getattr(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
@@ -12,117 +13,117 @@ def test_context_getattr(context: ContextRepo):
assert context.key2 is None
-@pytest.mark.asyncio
-async def test_context_apply(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_apply(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context()):
return key is a
assert await use()
-@pytest.mark.asyncio
-async def test_context_ignore(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_ignore(context: ContextRepo) -> None:
a = 3
context.set_global("key", a)
- @apply_types
- async def use():
+ @apply_types(context__=context)
+ async def use() -> None:
return None
assert await use() is None
-@pytest.mark.asyncio
-async def test_context_apply_multi(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_apply_multi(context: ContextRepo) -> None:
a = 1001
context.set_global("key_a", a)
b = 1000
context.set_global("key_b", b)
- @apply_types
+ @apply_types(context__=context)
async def use1(key_a=Context()):
return key_a is a
assert await use1()
- @apply_types
+ @apply_types(context__=context)
async def use2(key_b=Context()):
return key_b is b
assert await use2()
- @apply_types
+ @apply_types(context__=context)
async def use3(key_a=Context(), key_b=Context()):
return key_a is a and key_b is b
assert await use3()
-@pytest.mark.asyncio
-async def test_context_overrides(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_overrides(context: ContextRepo) -> None:
a = 1001
context.set_global("test", a)
b = 1000
context.set_global("test", b)
- @apply_types
+ @apply_types(context__=context)
async def use(test=Context()):
return test is b
assert await use()
-@pytest.mark.asyncio
-async def test_context_nested_apply(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_nested_apply(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
- @apply_types
+ @apply_types(context__=context)
def use_nested(key=Context()):
return key
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context()):
return key is use_nested() is a
assert await use()
-@pytest.mark.asyncio
-async def test_reset_global(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_reset_global(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
context.reset_global("key")
- @apply_types
- async def use(key=Context()): ...
+ @apply_types(context__=context)
+ async def use(key=Context()) -> None: ...
with pytest.raises(ValidationError):
await use()
-@pytest.mark.asyncio
-async def test_clear_context(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_clear_context(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
context.clear()
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context(default=None)):
return key is None
assert await use()
-def test_scope(context: ContextRepo):
- @apply_types
- def use(key=Context(), key2=Context()):
+def test_scope(context: ContextRepo) -> None:
+ @apply_types(context__=context)
+ def use(key=Context(), key2=Context()) -> None:
assert key == 1
assert key2 == 1
@@ -133,28 +134,33 @@ def use(key=Context(), key2=Context()):
assert context.get("key2") is None
-def test_default(context: ContextRepo):
- @apply_types
+def test_default(context: ContextRepo) -> None:
+ @apply_types(context__=context)
def use(
key=Context(),
key2=Context(),
key3=Context(default=1),
key4=Context("key.key4", default=1),
key5=Context("key5.key6"),
- ):
+ ) -> None:
assert key == 0
assert key2 is True
assert key3 == 1
assert key4 == 1
assert key5 is False
- with context.scope("key", 0), context.scope("key2", True), context.scope(
- "key5", {"key6": False}
+ with (
+ context.scope("key", 0),
+ context.scope("key2", True),
+ context.scope(
+ "key5",
+ {"key6": False},
+ ),
):
use()
-def test_local_default(context: ContextRepo):
+def test_local_default(context: ContextRepo) -> None:
key = "some-key"
tag = context.set_local(key, "useless")
@@ -163,8 +169,8 @@ def test_local_default(context: ContextRepo):
assert context.get_local(key, 1) == 1
-def test_initial():
- @apply_types
+def test_initial(context: ContextRepo) -> None:
+ @apply_types(context__=context)
def use(
a,
key=Context(initial=list),
@@ -176,10 +182,12 @@ def use(
assert use(2) == [1, 2]
-@pytest.mark.asyncio
-async def test_context_with_custom_object_implementing_comparison(context: ContextRepo):
+@pytest.mark.asyncio()
+async def test_context_with_custom_object_implementing_comparison(
+ context: ContextRepo,
+) -> None:
class User:
- def __init__(self, user_id: int):
+ def __init__(self, user_id: int) -> None:
self.user_id = user_id
def __eq__(self, other):
@@ -193,7 +201,7 @@ def __ne__(self, other):
user2 = User(user_id=2)
user3 = User(user_id=3)
- @apply_types
+ @apply_types(context__=context)
async def use(
key1=Context("user1"),
key2=Context("user2", default=user2),
@@ -205,7 +213,8 @@ async def use(
and key3 == User(user_id=4)
)
- with context.scope("user1", User(user_id=1)), context.scope(
- "user3", User(user_id=4)
+ with (
+ context.scope("user1", User(user_id=1)),
+ context.scope("user3", User(user_id=4)),
):
assert await use()
diff --git a/tests/utils/context/test_path.py b/tests/utils/context/test_path.py
index 5cfc8caf99..ff135cb1a3 100644
--- a/tests/utils/context/test_path.py
+++ b/tests/utils/context/test_path.py
@@ -7,9 +7,9 @@
from tests.marks import require_aiokafka, require_aiopika, require_nats, require_redis
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiokafka
-async def test_aiokafka_path():
+async def test_aiokafka_path() -> None:
from faststream.kafka import KafkaBroker, TestKafkaBroker
broker = KafkaBroker()
@@ -18,26 +18,27 @@ async def test_aiokafka_path():
async def h(
name: str = Path(),
id_: int = Path("id"),
- ):
+ ) -> int:
assert name == "john"
assert id_ == 1
return 1
async with TestKafkaBroker(broker) as br:
assert (
- await br.publish(
- "",
- "in.john.1",
- rpc=True,
- rpc_timeout=1.0,
- )
+ await (
+ await br.request(
+ "",
+ "in.john.1",
+ timeout=1.0,
+ )
+ ).decode()
== 1
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_nats_path():
+async def test_nats_path() -> None:
from faststream.nats import NatsBroker, TestNatsBroker
broker = NatsBroker()
@@ -46,31 +47,33 @@ async def test_nats_path():
async def h(
name: str = Path(),
id_: int = Path("id"),
- ):
+ ) -> int:
assert name == "john"
assert id_ == 1
return 1
async with TestNatsBroker(broker) as br:
assert (
- await br.publish(
- "",
- "in.john.1",
- rpc=True,
- rpc_timeout=1.0,
- )
+ await (
+ await br.request(
+ "",
+ "in.john.1",
+ timeout=1.0,
+ )
+ ).decode()
== 1
)
-@pytest.mark.asyncio
-@pytest.mark.nats
+@pytest.mark.asyncio()
+@pytest.mark.nats()
@require_nats
async def test_nats_kv_path(
queue: str,
- event: asyncio.Event,
mock: Mock,
-):
+) -> None:
+ event = asyncio.Event()
+
from faststream.nats import NatsBroker
broker = NatsBroker()
@@ -80,7 +83,7 @@ async def h(
msg: int,
name: str = Path(),
id_: int = Path("id"),
- ):
+ ) -> None:
mock(msg == 1 and name == "john" and id_ == 1)
event.set()
@@ -101,9 +104,9 @@ async def h(
mock.assert_called_once_with(True)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_nats
-async def test_nats_batch_path():
+async def test_nats_batch_path() -> None:
from faststream.nats import NatsBroker, PullSub, TestNatsBroker
broker = NatsBroker()
@@ -112,26 +115,27 @@ async def test_nats_batch_path():
async def h(
name: str = Path(),
id_: int = Path("id"),
- ):
+ ) -> int:
assert name == "john"
assert id_ == 1
return 1
async with TestNatsBroker(broker) as br:
assert (
- await br.publish(
- "",
- "in.john.1",
- rpc=True,
- rpc_timeout=1.0,
- )
+ await (
+ await br.request(
+ "",
+ "in.john.1",
+ timeout=1.0,
+ )
+ ).decode()
== 1
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_redis
-async def test_redis_path():
+async def test_redis_path() -> None:
from faststream.redis import RedisBroker, TestRedisBroker
broker = RedisBroker()
@@ -140,26 +144,27 @@ async def test_redis_path():
async def h(
name: str = Path(),
id_: int = Path("id"),
- ):
+ ) -> int:
assert name == "john"
assert id_ == 1
return 1
async with TestRedisBroker(broker) as br:
assert (
- await br.publish(
- "",
- "in.john.1",
- rpc=True,
- rpc_timeout=1.0,
- )
+ await (
+ await br.request(
+ "",
+ "in.john.1",
+ timeout=1.0,
+ )
+ ).decode()
== 1
)
-@pytest.mark.asyncio
+@pytest.mark.asyncio()
@require_aiopika
-async def test_rabbit_path():
+async def test_rabbit_path() -> None:
from faststream.rabbit import (
ExchangeType,
RabbitBroker,
@@ -183,19 +188,20 @@ async def test_rabbit_path():
async def h(
name: str = Path(),
id_: int = Path("id"),
- ):
+ ) -> int:
assert name == "john"
assert id_ == 1
return 1
async with TestRabbitBroker(broker) as br:
assert (
- await br.publish(
- "",
- "in.john.1",
- "test",
- rpc=True,
- rpc_timeout=1.0,
- )
+ await (
+ await br.request(
+ "",
+ "in.john.1",
+ "test",
+ timeout=1.0,
+ )
+ ).decode()
== 1
)
diff --git a/tests/utils/test_ast.py b/tests/utils/test_ast.py
index 6417425c90..d57d29e88f 100644
--- a/tests/utils/test_ast.py
+++ b/tests/utils/test_ast.py
@@ -1,6 +1,6 @@
import pytest
-from faststream.utils.ast import is_contains_context_name
+from faststream._internal.testing.ast import is_contains_context_name
class Context:
@@ -18,65 +18,65 @@ async def __aexit__(self, *args):
class A(Context):
- def __init__(self):
+ def __init__(self) -> None:
self.contains = is_contains_context_name(self.__class__.__name__, B.__name__)
class B(Context):
- def __init__(self):
+ def __init__(self) -> None:
pass
-def test_base():
+def test_base() -> None:
with A() as a, B():
assert a.contains
-@pytest.mark.asyncio
-async def test_base_async():
+@pytest.mark.asyncio()
+async def test_base_async() -> None:
async with A() as a, B():
assert a.contains
-def test_nested():
+def test_nested() -> None:
with A() as a, B():
assert a.contains
-@pytest.mark.asyncio
-async def test_nested_async():
+@pytest.mark.asyncio()
+async def test_nested_async() -> None:
async with A() as a, B():
assert a.contains
-@pytest.mark.asyncio
-async def test_async_A(): # noqa: N802
+@pytest.mark.asyncio()
+async def test_async_A() -> None: # noqa: N802
async with A() as a:
with B():
assert a.contains
-@pytest.mark.asyncio
-async def test_async_B(): # noqa: N802
+@pytest.mark.asyncio()
+async def test_async_B() -> None: # noqa: N802
with A() as a:
async with B():
assert a.contains
-def test_base_invalid():
+def test_base_invalid() -> None:
with B(), B(), A() as a:
assert not a.contains
-def test_nested_invalid():
+def test_nested_invalid() -> None:
with B(), A() as a:
assert not a.contains
-def test_not_broken():
+def test_not_broken() -> None:
with A() as a, B():
assert a.contains
# test ast processes another context correctly
with pytest.raises(ValueError): # noqa: PT011
- raise ValueError()
+ raise ValueError
diff --git a/tests/utils/test_classes.py b/tests/utils/test_classes.py
deleted file mode 100644
index 65d1a3bc8a..0000000000
--- a/tests/utils/test_classes.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from faststream.utils.classes import Singleton
-
-
-def test_singleton():
- assert Singleton() is Singleton()
-
-
-def test_drop():
- s1 = Singleton()
- s1._drop()
- assert Singleton() is not s1
diff --git a/tests/utils/test_functions.py b/tests/utils/test_functions.py
index 5b3cf8a57e..780aea2526 100644
--- a/tests/utils/test_functions.py
+++ b/tests/utils/test_functions.py
@@ -1,6 +1,6 @@
import pytest
-from faststream.utils.functions import call_or_await
+from faststream._internal.utils.functions import call_or_await
def sync_func(a):
@@ -11,11 +11,11 @@ async def async_func(a):
return a
-@pytest.mark.asyncio
-async def test_call():
+@pytest.mark.asyncio()
+async def test_call() -> None:
assert (await call_or_await(sync_func, a=3)) == 3
-@pytest.mark.asyncio
-async def test_await():
+@pytest.mark.asyncio()
+async def test_await() -> None:
assert (await call_or_await(async_func, a=3)) == 3
diff --git a/tests/utils/test_handler_lock.py b/tests/utils/test_handler_lock.py
index 814b606f46..dbe3a27724 100644
--- a/tests/utils/test_handler_lock.py
+++ b/tests/utils/test_handler_lock.py
@@ -4,11 +4,11 @@
import pytest
from anyio.abc import TaskStatus
-from faststream.broker.utils import MultiLock
+from faststream._internal.subscriber.utils import MultiLock
-@pytest.mark.asyncio
-async def test_base():
+@pytest.mark.asyncio()
+async def test_base() -> None:
lock = MultiLock()
with lock:
@@ -26,15 +26,15 @@ async def test_base():
assert lock.qsize == 0
-@pytest.mark.asyncio
-async def test_wait_correct():
+@pytest.mark.asyncio()
+async def test_wait_correct() -> None:
lock = MultiLock()
- async def func():
+ async def func() -> None:
with lock:
await asyncio.sleep(0.01)
- async def check(task_status: TaskStatus):
+ async def check(task_status: TaskStatus) -> None:
task_status.started()
assert not lock.empty
@@ -50,15 +50,15 @@ async def check(task_status: TaskStatus):
await tg.start(check)
-@pytest.mark.asyncio
-async def test_nowait_correct():
+@pytest.mark.asyncio()
+async def test_nowait_correct() -> None:
lock = MultiLock()
- async def func():
+ async def func() -> None:
with lock:
await asyncio.sleep(0.01)
- async def check(task_status: TaskStatus):
+ async def check(task_status: TaskStatus) -> None:
task_status.started()
assert not lock.empty
diff --git a/tests/utils/test_no_cast.py b/tests/utils/test_no_cast.py
index 9fbb2f850d..62c7fe6d24 100644
--- a/tests/utils/test_no_cast.py
+++ b/tests/utils/test_no_cast.py
@@ -1,10 +1,10 @@
from faststream import apply_types
-from faststream.annotations import NoCast
+from faststream.params import NoCast
-def test_no_cast():
+def test_no_cast() -> None:
@apply_types
- def handler(s: NoCast[str]):
+ def handler(s: NoCast[str]) -> None:
assert isinstance(s, int)
handler(1)
diff --git a/tests/utils/type_cast/test_base.py b/tests/utils/type_cast/test_base.py
index 150e0e68db..c973e2efdc 100644
--- a/tests/utils/type_cast/test_base.py
+++ b/tests/utils/type_cast/test_base.py
@@ -1,21 +1,19 @@
-from typing import Tuple
-
import pytest
-from faststream.utils import apply_types
+from faststream._internal.utils import apply_types
@apply_types
-def cast_int(t: int = 1) -> Tuple[bool, int]:
+def cast_int(t: int = 1) -> tuple[bool, int]:
return isinstance(t, int), t
@apply_types
-def cast_default(t: int = 1) -> Tuple[bool, int]:
+def cast_default(t: int = 1) -> tuple[bool, int]:
return isinstance(t, int), t
-def test_int():
+def test_int() -> None:
assert cast_int("1") == (True, 1)
assert cast_int(t=1.0) == (True, 1)
@@ -30,7 +28,7 @@ def test_int():
assert cast_int([])
-def test_cast_default():
+def test_cast_default() -> None:
assert cast_default("1") == (True, 1)
assert cast_default(t=1.0) == (True, 1)
diff --git a/tests/utils/type_cast/test_model.py b/tests/utils/type_cast/test_model.py
index 2cb57dca04..4c0465846e 100644
--- a/tests/utils/type_cast/test_model.py
+++ b/tests/utils/type_cast/test_model.py
@@ -1,9 +1,7 @@
-from typing import Tuple
-
import pytest
from pydantic import BaseModel
-from faststream.utils import apply_types
+from faststream._internal.utils import apply_types
class Base(BaseModel):
@@ -11,11 +9,11 @@ class Base(BaseModel):
@apply_types
-def cast_model(t: Base) -> Tuple[bool, Base]:
+def cast_model(t: Base) -> tuple[bool, Base]:
return isinstance(t, Base), t
-def test_model():
+def test_model() -> None:
is_casted, m = cast_model({"field": 1})
assert is_casted, m.field == (True, 1)