diff --git a/.github/workflows/docs_deploy.yaml b/.github/workflows/docs_deploy.yaml
index 4c5e6a43f9..6f7046a6c6 100644
--- a/.github/workflows/docs_deploy.yaml
+++ b/.github/workflows/docs_deploy.yaml
@@ -19,6 +19,9 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- uses: actions/setup-python@v5
with:
python-version: 3.x
@@ -28,7 +31,6 @@ jobs:
path: .cache
- run: |
set -ux
- python -m pip install uv
uv pip install --system -e .[dev]
uv pip uninstall --system email-validator # This is to fix broken link in docs
- run: ./scripts/build-docs.sh
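
Taken together, the docs workflows below converge on one install pattern: uv is provisioned by the pinned `astral-sh/setup-uv@v3` action instead of being bootstrapped through `pip` on every run. A minimal sketch of the resulting step sequence in `docs_deploy.yaml` (the cache step is elided; all values here appear in the hunks above):

```yaml
steps:
  - uses: actions/checkout@v4
    with:
      fetch-depth: 0
  # uv now comes from the official action rather than `python -m pip install uv`
  - uses: astral-sh/setup-uv@v3
    with:
      version: "latest"
  - uses: actions/setup-python@v5
    with:
      python-version: 3.x
  - run: |
      set -ux
      uv pip install --system -e .[dev]
      uv pip uninstall --system email-validator  # fixes a broken link in docs
  - run: ./scripts/build-docs.sh
```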
diff --git a/.github/workflows/docs_update-references.yaml b/.github/workflows/docs_update-references.yaml
index e83c97a363..2b2bc2732d 100644
--- a/.github/workflows/docs_update-references.yaml
+++ b/.github/workflows/docs_update-references.yaml
@@ -25,6 +25,9 @@ jobs:
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.head_ref }}
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
@@ -35,7 +38,6 @@ jobs:
# should install with `-e`
run: |
set -ux
- python -m pip install uv
uv pip install --system -e .[dev]
- name: Run build docs
run: bash scripts/build-docs.sh
diff --git a/.github/workflows/docs_update-release-notes.yaml b/.github/workflows/docs_update-release-notes.yaml
index b9344fa1cc..9192d23e98 100644
--- a/.github/workflows/docs_update-release-notes.yaml
+++ b/.github/workflows/docs_update-release-notes.yaml
@@ -20,7 +20,9 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
-
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Configure Git user
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
@@ -39,7 +41,6 @@ jobs:
- name: Install dependencies
run: |
- python -m pip install uv
uv pip install --system requests
- name: Run update_releases.py script
diff --git a/.github/workflows/pr_tests.yaml b/.github/workflows/pr_tests.yaml
index 7cb3f21fcc..58e75a252b 100644
--- a/.github/workflows/pr_tests.yaml
+++ b/.github/workflows/pr_tests.yaml
@@ -31,7 +31,6 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: |
- 3.8
3.9
3.10
- name: Set $PY environment variable
@@ -55,12 +54,15 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
pydantic-version: ["pydantic-v1", "pydantic-v2"]
fail-fast: false
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
@@ -73,7 +75,6 @@ jobs:
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- name: Install Pydantic v1
if: matrix.pydantic-version == 'pydantic-v1'
@@ -102,14 +103,16 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- name: Test
run: >
@@ -121,14 +124,16 @@ jobs:
runs-on: windows-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- name: Test
run: >
@@ -140,14 +145,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[kafka,test-core,cli]
- name: Test
run: >
@@ -179,14 +186,16 @@ jobs:
ALLOW_PLAINTEXT_LISTENER: "true"
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- run: mkdir coverage
- name: Test
@@ -209,14 +218,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[confluent,test-core,cli]
- name: Test
run: >
@@ -248,14 +259,16 @@ jobs:
ALLOW_PLAINTEXT_LISTENER: "true"
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- run: mkdir coverage
- name: Test
@@ -278,14 +291,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[rabbit,test-core,cli]
- name: Test
run: >
@@ -306,14 +321,16 @@ jobs:
- 5672:5672
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- run: mkdir coverage
- name: Test
@@ -336,14 +353,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[nats,test-core,cli]
- name: Test
run: >
@@ -364,14 +383,16 @@ jobs:
- 4222:4222
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- run: mkdir coverage
- name: Test
@@ -394,14 +415,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[redis,test-core,cli]
- name: Test
run: >
@@ -422,14 +445,16 @@ jobs:
- 6379:6379
steps:
- uses: actions/checkout@v4
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- name: Set up Python
uses: actions/setup-python@v5
with:
- python-version: "3.12"
+ python-version: "3.13"
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: |
- python -m pip install uv
uv pip install --system .[optionals,testing]
- run: mkdir coverage
- name: Test
@@ -460,10 +485,12 @@ jobs:
steps:
- uses: actions/checkout@v4
-
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- uses: actions/setup-python@v5
with:
- python-version: "3.8"
+ python-version: "3.12"
- name: Get coverage files
uses: actions/download-artifact@v4
@@ -473,7 +500,6 @@ jobs:
merge-multiple: true
- run: |
- python -m pip install uv
uv pip install --system coverage[toml]
- run: ls -la coverage
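
Every per-broker job in `pr_tests.yaml` receives the same two edits: the setup-uv action is added ahead of setup-python, and the pinned interpreter moves from 3.12 to 3.13 (the matrix job instead drops 3.8 and adds 3.13 alongside both Pydantic majors). A sketch of the shared job shape after the change; the extras group is the only per-job variation (e.g. `[kafka,test-core,cli]`, `[optionals,testing]`), and all values are taken from the hunks above:

```yaml
steps:
  - uses: actions/checkout@v4
  - uses: astral-sh/setup-uv@v3
    with:
      version: "latest"
  - name: Set up Python
    uses: actions/setup-python@v5
    with:
      python-version: "3.13"
  - name: Install Dependencies
    # skipped when the dependency cache already holds this combination
    if: steps.cache.outputs.cache-hit != 'true'
    run: |
      uv pip install --system .[optionals,testing]
```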
diff --git a/.github/workflows/publish_coverage.yaml b/.github/workflows/publish_coverage.yaml
index 766d5c9c94..755b0cf9ce 100644
--- a/.github/workflows/publish_coverage.yaml
+++ b/.github/workflows/publish_coverage.yaml
@@ -16,9 +16,11 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: "3.9"
+ - uses: astral-sh/setup-uv@v3
+ with:
+ version: "latest"
- run: |
- python -m pip install uv
uv pip install --system smokeshow
- uses: dawidd6/action-download-artifact@v6 # nosemgrep
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f2852886c6..e4cde3a9c5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -38,7 +38,6 @@ repos:
stages: [pre-commit, pre-merge-commit, manual]
entry: "scripts/lint-pre-commit.sh"
language: python
- language_version: python3.10
types: [python]
require_serial: true
verbose: true
@@ -49,7 +48,6 @@ repos:
name: Static analysis
entry: "scripts/static-pre-commit.sh"
language: python
- language_version: python3.10
types: [python]
require_serial: true
verbose: true
@@ -60,7 +58,6 @@ repos:
name: Build docs
entry: "scripts/build-docs-pre-commit.sh"
language: python
- language_version: python3.10
files: ^docs
require_serial: true
verbose: true
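
Dropping `language_version: python3.10` lets pre-commit resolve the interpreter itself rather than failing on machines without a `python3.10` binary. A sketch of the "Static analysis" hook after the change (the hook `id` is assumed, since it is not shown in the diff; the remaining fields appear in the hunk above):

```yaml
- id: static-analysis   # assumed id; not shown in the diff
  name: Static analysis
  entry: "scripts/static-pre-commit.sh"
  language: python      # interpreter now chosen by pre-commit itself
  types: [python]
  require_serial: true
  verbose: true
```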
diff --git a/.secrets.baseline b/.secrets.baseline
index f4836e2025..c2189a1f8f 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -153,7 +153,7 @@
"filename": "docs/docs/en/release.md",
"hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450",
"is_verified": false,
- "line_number": 1723,
+ "line_number": 1835,
"is_secret": false
}
],
@@ -178,5 +178,5 @@
}
]
},
- "generated_at": "2024-09-25T19:57:57Z"
+ "generated_at": "2024-11-08T12:39:15Z"
}
diff --git a/CITATION.cff b/CITATION.cff
index cfc7b23a6a..9e01da744e 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -10,7 +10,7 @@ type: software
authors:
- given-names: Nikita
family-names: Pastukhov
- email: diementros@yandex.com
+ email: nikita@pastukhov-dev.ru
- given-names: Davor
family-names: Runje
email: davor@airt.ai
diff --git a/README.md b/README.md
index bcde26a543..b0dca7da08 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,11 @@
---
+
+
+
+
+
@@ -50,6 +55,12 @@
+
+
+
+
+
+
---
diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md
index c4578bdd7f..d8cc349c03 100644
--- a/docs/docs/SUMMARY.md
+++ b/docs/docs/SUMMARY.md
@@ -44,6 +44,7 @@ search:
- [CLI](getting-started/cli/index.md)
- [ASGI](getting-started/asgi.md)
- [OpenTelemetry](getting-started/opentelemetry/index.md)
+ - [Prometheus](getting-started/prometheus/index.md)
- [Logging](getting-started/logging.md)
- [Config Management](getting-started/config/index.md)
- [Task Scheduling](scheduling.md)
@@ -95,6 +96,9 @@ search:
- [Publishing](nats/publishing/index.md)
- [RPC](nats/rpc.md)
- [Message Information](nats/message.md)
+ - [How-To](howto/nats/index.md)
+ - [DynaConf](howto/nats/dynaconf.md)
+ - [In-Progress](howto/nats/in-progress.md)
- [Redis](redis/index.md)
- [Pub/Sub](redis/pubsub/index.md)
- [Subscription](redis/pubsub/subscription.md)
@@ -115,6 +119,7 @@ search:
- [Reference - Code API](api/index.md)
- Public API
- faststream
+ - [AckPolicy](public_api/faststream/AckPolicy.md)
- [BaseMiddleware](public_api/faststream/BaseMiddleware.md)
- [Context](public_api/faststream/Context.md)
- [Depends](public_api/faststream/Depends.md)
@@ -201,6 +206,7 @@ search:
- [TestRedisBroker](public_api/faststream/redis/TestRedisBroker.md)
- All API
- faststream
+ - [AckPolicy](api/faststream/AckPolicy.md)
- [BaseMiddleware](api/faststream/BaseMiddleware.md)
- [Context](api/faststream/Context.md)
- [Depends](api/faststream/Depends.md)
@@ -290,18 +296,36 @@ search:
- [telemetry_attributes_provider_factory](api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md)
- parser
- [AsyncConfluentParser](api/faststream/confluent/parser/AsyncConfluentParser.md)
+ - prometheus
+ - [KafkaPrometheusMiddleware](api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md)
+ - middleware
+ - [KafkaPrometheusMiddleware](api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md)
+ - provider
+ - [BaseConfluentMetricsSettingsProvider](api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md)
+ - [BatchConfluentMetricsSettingsProvider](api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md)
+ - [ConfluentMetricsSettingsProvider](api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md)
+ - [settings_provider_factory](api/faststream/confluent/prometheus/provider/settings_provider_factory.md)
- publisher
+ - factory
+ - [create_publisher](api/faststream/confluent/publisher/factory/create_publisher.md)
+ - fake
+ - [KafkaFakePublisher](api/faststream/confluent/publisher/fake/KafkaFakePublisher.md)
- producer
- [AsyncConfluentFastProducer](api/faststream/confluent/publisher/producer/AsyncConfluentFastProducer.md)
- - publisher
- - [SpecificationBatchPublisher](api/faststream/confluent/publisher/publisher/SpecificationBatchPublisher.md)
- - [SpecificationDefaultPublisher](api/faststream/confluent/publisher/publisher/SpecificationDefaultPublisher.md)
- - [SpecificationPublisher](api/faststream/confluent/publisher/publisher/SpecificationPublisher.md)
+ - specified
+ - [SpecificationBatchPublisher](api/faststream/confluent/publisher/specified/SpecificationBatchPublisher.md)
+ - [SpecificationDefaultPublisher](api/faststream/confluent/publisher/specified/SpecificationDefaultPublisher.md)
+ - [SpecificationPublisher](api/faststream/confluent/publisher/specified/SpecificationPublisher.md)
+ - state
+ - [EmptyProducerState](api/faststream/confluent/publisher/state/EmptyProducerState.md)
+ - [ProducerState](api/faststream/confluent/publisher/state/ProducerState.md)
+ - [RealProducer](api/faststream/confluent/publisher/state/RealProducer.md)
- usecase
- [BatchPublisher](api/faststream/confluent/publisher/usecase/BatchPublisher.md)
- [DefaultPublisher](api/faststream/confluent/publisher/usecase/DefaultPublisher.md)
- [LogicPublisher](api/faststream/confluent/publisher/usecase/LogicPublisher.md)
- response
+ - [KafkaPublishCommand](api/faststream/confluent/response/KafkaPublishCommand.md)
- [KafkaResponse](api/faststream/confluent/response/KafkaResponse.md)
- router
- [KafkaPublisher](api/faststream/confluent/router/KafkaPublisher.md)
@@ -318,10 +342,10 @@ search:
- subscriber
- factory
- [create_subscriber](api/faststream/confluent/subscriber/factory/create_subscriber.md)
- - subscriber
- - [SpecificationBatchSubscriber](api/faststream/confluent/subscriber/subscriber/SpecificationBatchSubscriber.md)
- - [SpecificationDefaultSubscriber](api/faststream/confluent/subscriber/subscriber/SpecificationDefaultSubscriber.md)
- - [SpecificationSubscriber](api/faststream/confluent/subscriber/subscriber/SpecificationSubscriber.md)
+ - specified
+ - [SpecificationBatchSubscriber](api/faststream/confluent/subscriber/specified/SpecificationBatchSubscriber.md)
+ - [SpecificationDefaultSubscriber](api/faststream/confluent/subscriber/specified/SpecificationDefaultSubscriber.md)
+ - [SpecificationSubscriber](api/faststream/confluent/subscriber/specified/SpecificationSubscriber.md)
- usecase
- [BatchSubscriber](api/faststream/confluent/subscriber/usecase/BatchSubscriber.md)
- [DefaultSubscriber](api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md)
@@ -335,11 +359,11 @@ search:
- [AckMessage](api/faststream/exceptions/AckMessage.md)
- [ContextError](api/faststream/exceptions/ContextError.md)
- [FastStreamException](api/faststream/exceptions/FastStreamException.md)
+ - [FeatureNotSupportedException](api/faststream/exceptions/FeatureNotSupportedException.md)
- [HandlerException](api/faststream/exceptions/HandlerException.md)
- [IgnoredException](api/faststream/exceptions/IgnoredException.md)
- [IncorrectState](api/faststream/exceptions/IncorrectState.md)
- [NackMessage](api/faststream/exceptions/NackMessage.md)
- - [OperationForbiddenError](api/faststream/exceptions/OperationForbiddenError.md)
- [RejectMessage](api/faststream/exceptions/RejectMessage.md)
- [SetupError](api/faststream/exceptions/SetupError.md)
- [SkipMessage](api/faststream/exceptions/SkipMessage.md)
@@ -386,18 +410,36 @@ search:
- parser
- [AioKafkaBatchParser](api/faststream/kafka/parser/AioKafkaBatchParser.md)
- [AioKafkaParser](api/faststream/kafka/parser/AioKafkaParser.md)
+ - prometheus
+ - [KafkaPrometheusMiddleware](api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md)
+ - middleware
+ - [KafkaPrometheusMiddleware](api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md)
+ - provider
+ - [BaseKafkaMetricsSettingsProvider](api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md)
+ - [BatchKafkaMetricsSettingsProvider](api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md)
+ - [KafkaMetricsSettingsProvider](api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md)
+ - [settings_provider_factory](api/faststream/kafka/prometheus/provider/settings_provider_factory.md)
- publisher
+ - factory
+ - [create_publisher](api/faststream/kafka/publisher/factory/create_publisher.md)
+ - fake
+ - [KafkaFakePublisher](api/faststream/kafka/publisher/fake/KafkaFakePublisher.md)
- producer
- [AioKafkaFastProducer](api/faststream/kafka/publisher/producer/AioKafkaFastProducer.md)
- - publisher
- - [SpecificationBatchPublisher](api/faststream/kafka/publisher/publisher/SpecificationBatchPublisher.md)
- - [SpecificationDefaultPublisher](api/faststream/kafka/publisher/publisher/SpecificationDefaultPublisher.md)
- - [SpecificationPublisher](api/faststream/kafka/publisher/publisher/SpecificationPublisher.md)
+ - specified
+ - [SpecificationBatchPublisher](api/faststream/kafka/publisher/specified/SpecificationBatchPublisher.md)
+ - [SpecificationDefaultPublisher](api/faststream/kafka/publisher/specified/SpecificationDefaultPublisher.md)
+ - [SpecificationPublisher](api/faststream/kafka/publisher/specified/SpecificationPublisher.md)
+ - state
+ - [EmptyProducerState](api/faststream/kafka/publisher/state/EmptyProducerState.md)
+ - [ProducerState](api/faststream/kafka/publisher/state/ProducerState.md)
+ - [RealProducer](api/faststream/kafka/publisher/state/RealProducer.md)
- usecase
- [BatchPublisher](api/faststream/kafka/publisher/usecase/BatchPublisher.md)
- [DefaultPublisher](api/faststream/kafka/publisher/usecase/DefaultPublisher.md)
- [LogicPublisher](api/faststream/kafka/publisher/usecase/LogicPublisher.md)
- response
+ - [KafkaPublishCommand](api/faststream/kafka/response/KafkaPublishCommand.md)
- [KafkaResponse](api/faststream/kafka/response/KafkaResponse.md)
- router
- [KafkaPublisher](api/faststream/kafka/router/KafkaPublisher.md)
@@ -411,10 +453,10 @@ search:
- subscriber
- factory
- [create_subscriber](api/faststream/kafka/subscriber/factory/create_subscriber.md)
- - subscriber
- - [SpecificationBatchSubscriber](api/faststream/kafka/subscriber/subscriber/SpecificationBatchSubscriber.md)
- - [SpecificationDefaultSubscriber](api/faststream/kafka/subscriber/subscriber/SpecificationDefaultSubscriber.md)
- - [SpecificationSubscriber](api/faststream/kafka/subscriber/subscriber/SpecificationSubscriber.md)
+ - specified
+ - [SpecificationBatchSubscriber](api/faststream/kafka/subscriber/specified/SpecificationBatchSubscriber.md)
+ - [SpecificationDefaultSubscriber](api/faststream/kafka/subscriber/specified/SpecificationDefaultSubscriber.md)
+ - [SpecificationSubscriber](api/faststream/kafka/subscriber/specified/SpecificationSubscriber.md)
- usecase
- [BatchSubscriber](api/faststream/kafka/subscriber/usecase/BatchSubscriber.md)
- [DefaultSubscriber](api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md)
@@ -425,6 +467,7 @@ search:
- [build_message](api/faststream/kafka/testing/build_message.md)
- message
- [AckStatus](api/faststream/message/AckStatus.md)
+ - [SourceType](api/faststream/message/SourceType.md)
- [StreamMessage](api/faststream/message/StreamMessage.md)
- [decode_message](api/faststream/message/decode_message.md)
- [encode_message](api/faststream/message/encode_message.md)
@@ -432,22 +475,29 @@ search:
- message
- [AckStatus](api/faststream/message/message/AckStatus.md)
- [StreamMessage](api/faststream/message/message/StreamMessage.md)
+ - source_type
+ - [SourceType](api/faststream/message/source_type/SourceType.md)
- utils
- [decode_message](api/faststream/message/utils/decode_message.md)
- [encode_message](api/faststream/message/utils/encode_message.md)
- [gen_cor_id](api/faststream/message/utils/gen_cor_id.md)
- middlewares
+ - [AckPolicy](api/faststream/middlewares/AckPolicy.md)
+ - [AcknowledgementMiddleware](api/faststream/middlewares/AcknowledgementMiddleware.md)
- [BaseMiddleware](api/faststream/middlewares/BaseMiddleware.md)
- [ExceptionMiddleware](api/faststream/middlewares/ExceptionMiddleware.md)
+ - acknowledgement
+ - conf
+ - [AckPolicy](api/faststream/middlewares/acknowledgement/conf/AckPolicy.md)
+ - middleware
+ - [AcknowledgementMiddleware](api/faststream/middlewares/acknowledgement/middleware/AcknowledgementMiddleware.md)
- base
- [BaseMiddleware](api/faststream/middlewares/base/BaseMiddleware.md)
- exception
- - [BaseExceptionMiddleware](api/faststream/middlewares/exception/BaseExceptionMiddleware.md)
- [ExceptionMiddleware](api/faststream/middlewares/exception/ExceptionMiddleware.md)
- [ignore_handler](api/faststream/middlewares/exception/ignore_handler.md)
- logging
- [CriticalLogMiddleware](api/faststream/middlewares/logging/CriticalLogMiddleware.md)
- - [LoggingMiddleware](api/faststream/middlewares/logging/LoggingMiddleware.md)
- nats
- [AckPolicy](api/faststream/nats/AckPolicy.md)
- [ConsumerConfig](api/faststream/nats/ConsumerConfig.md)
@@ -480,6 +530,11 @@ search:
- [NatsParamsStorage](api/faststream/nats/broker/logging/NatsParamsStorage.md)
- registrator
- [NatsRegistrator](api/faststream/nats/broker/registrator/NatsRegistrator.md)
+ - state
+ - [BrokerState](api/faststream/nats/broker/state/BrokerState.md)
+ - [ConnectedState](api/faststream/nats/broker/state/ConnectedState.md)
+ - [ConnectionBrokenState](api/faststream/nats/broker/state/ConnectionBrokenState.md)
+ - [EmptyBrokerState](api/faststream/nats/broker/state/EmptyBrokerState.md)
- fastapi
- [Context](api/faststream/nats/fastapi/Context.md)
- [NatsRouter](api/faststream/nats/fastapi/NatsRouter.md)
@@ -495,6 +550,10 @@ search:
- [OSBucketDeclarer](api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md)
- object_builder
- [StreamBuilder](api/faststream/nats/helpers/object_builder/StreamBuilder.md)
+ - state
+ - [ConnectedState](api/faststream/nats/helpers/state/ConnectedState.md)
+ - [ConnectionState](api/faststream/nats/helpers/state/ConnectionState.md)
+ - [EmptyConnectionState](api/faststream/nats/helpers/state/EmptyConnectionState.md)
- message
- [NatsBatchMessage](api/faststream/nats/message/NatsBatchMessage.md)
- [NatsKvMessage](api/faststream/nats/message/NatsKvMessage.md)
@@ -516,15 +575,29 @@ search:
- [NatsBaseParser](api/faststream/nats/parser/NatsBaseParser.md)
- [NatsParser](api/faststream/nats/parser/NatsParser.md)
- [ObjParser](api/faststream/nats/parser/ObjParser.md)
+ - prometheus
+ - [NatsPrometheusMiddleware](api/faststream/nats/prometheus/NatsPrometheusMiddleware.md)
+ - middleware
+ - [NatsPrometheusMiddleware](api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md)
+ - provider
+ - [BaseNatsMetricsSettingsProvider](api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md)
+ - [BatchNatsMetricsSettingsProvider](api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md)
+ - [NatsMetricsSettingsProvider](api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md)
+ - [settings_provider_factory](api/faststream/nats/prometheus/provider/settings_provider_factory.md)
- publisher
+ - factory
+ - [create_publisher](api/faststream/nats/publisher/factory/create_publisher.md)
+ - fake
+ - [NatsFakePublisher](api/faststream/nats/publisher/fake/NatsFakePublisher.md)
- producer
- [NatsFastProducer](api/faststream/nats/publisher/producer/NatsFastProducer.md)
- [NatsJSFastProducer](api/faststream/nats/publisher/producer/NatsJSFastProducer.md)
- - publisher
- - [SpecificationPublisher](api/faststream/nats/publisher/publisher/SpecificationPublisher.md)
+ - specified
+ - [SpecificationPublisher](api/faststream/nats/publisher/specified/SpecificationPublisher.md)
- usecase
- [LogicPublisher](api/faststream/nats/publisher/usecase/LogicPublisher.md)
- response
+ - [NatsPublishCommand](api/faststream/nats/response/NatsPublishCommand.md)
- [NatsResponse](api/faststream/nats/response/NatsResponse.md)
- router
- [NatsPublisher](api/faststream/nats/router/NatsPublisher.md)
@@ -548,23 +621,27 @@ search:
- security
- [parse_security](api/faststream/nats/security/parse_security.md)
- subscriber
+ - adapters
+ - [UnsubscribeAdapter](api/faststream/nats/subscriber/adapters/UnsubscribeAdapter.md)
+ - [Unsubscriptable](api/faststream/nats/subscriber/adapters/Unsubscriptable.md)
+ - [Watchable](api/faststream/nats/subscriber/adapters/Watchable.md)
- factory
- [create_subscriber](api/faststream/nats/subscriber/factory/create_subscriber.md)
- - subscriber
- - [SpecificationBatchPullStreamSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationBatchPullStreamSubscriber.md)
- - [SpecificationConcurrentCoreSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationConcurrentCoreSubscriber.md)
- - [SpecificationConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPullStreamSubscriber.md)
- - [SpecificationConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPushStreamSubscriber.md)
- - [SpecificationCoreSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationCoreSubscriber.md)
- - [SpecificationKeyValueWatchSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationKeyValueWatchSubscriber.md)
- - [SpecificationObjStoreWatchSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationObjStoreWatchSubscriber.md)
- - [SpecificationPullStreamSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationPullStreamSubscriber.md)
- - [SpecificationStreamSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationStreamSubscriber.md)
- - [SpecificationSubscriber](api/faststream/nats/subscriber/subscriber/SpecificationSubscriber.md)
- - subscription
- - [UnsubscribeAdapter](api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md)
- - [Unsubscriptable](api/faststream/nats/subscriber/subscription/Unsubscriptable.md)
- - [Watchable](api/faststream/nats/subscriber/subscription/Watchable.md)
+ - specified
+ - [SpecificationBatchPullStreamSubscriber](api/faststream/nats/subscriber/specified/SpecificationBatchPullStreamSubscriber.md)
+ - [SpecificationConcurrentCoreSubscriber](api/faststream/nats/subscriber/specified/SpecificationConcurrentCoreSubscriber.md)
+ - [SpecificationConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/specified/SpecificationConcurrentPullStreamSubscriber.md)
+ - [SpecificationConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/specified/SpecificationConcurrentPushStreamSubscriber.md)
+ - [SpecificationCoreSubscriber](api/faststream/nats/subscriber/specified/SpecificationCoreSubscriber.md)
+ - [SpecificationKeyValueWatchSubscriber](api/faststream/nats/subscriber/specified/SpecificationKeyValueWatchSubscriber.md)
+ - [SpecificationObjStoreWatchSubscriber](api/faststream/nats/subscriber/specified/SpecificationObjStoreWatchSubscriber.md)
+ - [SpecificationPullStreamSubscriber](api/faststream/nats/subscriber/specified/SpecificationPullStreamSubscriber.md)
+ - [SpecificationPushStreamSubscriber](api/faststream/nats/subscriber/specified/SpecificationPushStreamSubscriber.md)
+ - [SpecificationSubscriber](api/faststream/nats/subscriber/specified/SpecificationSubscriber.md)
+ - state
+ - [ConnectedSubscriberState](api/faststream/nats/subscriber/state/ConnectedSubscriberState.md)
+ - [EmptySubscriberState](api/faststream/nats/subscriber/state/EmptySubscriberState.md)
+ - [SubscriberState](api/faststream/nats/subscriber/state/SubscriberState.md)
- usecase
- [BatchPullStreamSubscriber](api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md)
- [ConcurrentCoreSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md)
@@ -590,7 +667,6 @@ search:
- consts
- [MessageAction](api/faststream/opentelemetry/consts/MessageAction.md)
- middleware
- - [BaseTelemetryMiddleware](api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md)
- [TelemetryMiddleware](api/faststream/opentelemetry/middleware/TelemetryMiddleware.md)
- provider
- [TelemetrySettingsProvider](api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md)
@@ -605,6 +681,22 @@ search:
- [Context](api/faststream/params/params/Context.md)
- [Header](api/faststream/params/params/Header.md)
- [Path](api/faststream/params/params/Path.md)
+ - prometheus
+ - [ConsumeAttrs](api/faststream/prometheus/ConsumeAttrs.md)
+ - [MetricsSettingsProvider](api/faststream/prometheus/MetricsSettingsProvider.md)
+ - [PrometheusMiddleware](api/faststream/prometheus/PrometheusMiddleware.md)
+ - container
+ - [MetricsContainer](api/faststream/prometheus/container/MetricsContainer.md)
+ - manager
+ - [MetricsManager](api/faststream/prometheus/manager/MetricsManager.md)
+ - middleware
+ - [PrometheusMiddleware](api/faststream/prometheus/middleware/PrometheusMiddleware.md)
+ - provider
+ - [MetricsSettingsProvider](api/faststream/prometheus/provider/MetricsSettingsProvider.md)
+ - types
+ - [ConsumeAttrs](api/faststream/prometheus/types/ConsumeAttrs.md)
+ - [ProcessingStatus](api/faststream/prometheus/types/ProcessingStatus.md)
+ - [PublishingStatus](api/faststream/prometheus/types/PublishingStatus.md)
- rabbit
- [ExchangeType](api/faststream/rabbit/ExchangeType.md)
- [RabbitBroker](api/faststream/rabbit/RabbitBroker.md)
@@ -627,11 +719,15 @@ search:
- fastapi
- [Context](api/faststream/rabbit/fastapi/Context.md)
- [RabbitRouter](api/faststream/rabbit/fastapi/RabbitRouter.md)
- - router
- - [RabbitRouter](api/faststream/rabbit/fastapi/router/RabbitRouter.md)
+ - fastapi
+ - [RabbitRouter](api/faststream/rabbit/fastapi/fastapi/RabbitRouter.md)
- helpers
- declarer
- [RabbitDeclarer](api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md)
+ - state
+ - [ConnectedState](api/faststream/rabbit/helpers/state/ConnectedState.md)
+ - [ConnectionState](api/faststream/rabbit/helpers/state/ConnectionState.md)
+ - [EmptyConnectionState](api/faststream/rabbit/helpers/state/EmptyConnectionState.md)
- message
- [RabbitMessage](api/faststream/rabbit/message/RabbitMessage.md)
- opentelemetry
@@ -642,16 +738,33 @@ search:
- [RabbitTelemetrySettingsProvider](api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md)
- parser
- [AioPikaParser](api/faststream/rabbit/parser/AioPikaParser.md)
+ - prometheus
+ - [RabbitPrometheusMiddleware](api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md)
+ - middleware
+ - [RabbitPrometheusMiddleware](api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md)
+ - provider
+ - [RabbitMetricsSettingsProvider](api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md)
- publisher
+ - factory
+ - [create_publisher](api/faststream/rabbit/publisher/factory/create_publisher.md)
+ - fake
+ - [RabbitFakePublisher](api/faststream/rabbit/publisher/fake/RabbitFakePublisher.md)
+ - options
+ - [MessageOptions](api/faststream/rabbit/publisher/options/MessageOptions.md)
+ - [PublishOptions](api/faststream/rabbit/publisher/options/PublishOptions.md)
- producer
- [AioPikaFastProducer](api/faststream/rabbit/publisher/producer/AioPikaFastProducer.md)
- - publisher
- - [SpecificationPublisher](api/faststream/rabbit/publisher/publisher/SpecificationPublisher.md)
+ - [LockState](api/faststream/rabbit/publisher/producer/LockState.md)
+ - [LockUnset](api/faststream/rabbit/publisher/producer/LockUnset.md)
+ - [RealLock](api/faststream/rabbit/publisher/producer/RealLock.md)
+ - specified
+ - [SpecificationPublisher](api/faststream/rabbit/publisher/specified/SpecificationPublisher.md)
- usecase
- [LogicPublisher](api/faststream/rabbit/publisher/usecase/LogicPublisher.md)
- [PublishKwargs](api/faststream/rabbit/publisher/usecase/PublishKwargs.md)
- [RequestPublishKwargs](api/faststream/rabbit/publisher/usecase/RequestPublishKwargs.md)
- response
+ - [RabbitPublishCommand](api/faststream/rabbit/response/RabbitPublishCommand.md)
- [RabbitResponse](api/faststream/rabbit/response/RabbitResponse.md)
- router
- [RabbitPublisher](api/faststream/rabbit/router/RabbitPublisher.md)
@@ -675,8 +788,8 @@ search:
- subscriber
- factory
- [create_subscriber](api/faststream/rabbit/subscriber/factory/create_subscriber.md)
- - subscriber
- - [SpecificationSubscriber](api/faststream/rabbit/subscriber/subscriber/SpecificationSubscriber.md)
+ - specified
+ - [SpecificationSubscriber](api/faststream/rabbit/subscriber/specified/SpecificationSubscriber.md)
- usecase
- [LogicSubscriber](api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md)
- testing
@@ -711,6 +824,11 @@ search:
- [RedisRouter](api/faststream/redis/fastapi/RedisRouter.md)
- fastapi
- [RedisRouter](api/faststream/redis/fastapi/fastapi/RedisRouter.md)
+ - helpers
+ - state
+ - [ConnectedState](api/faststream/redis/helpers/state/ConnectedState.md)
+ - [ConnectionState](api/faststream/redis/helpers/state/ConnectionState.md)
+ - [EmptyConnectionState](api/faststream/redis/helpers/state/EmptyConnectionState.md)
- message
- [BatchListMessage](api/faststream/redis/message/BatchListMessage.md)
- [BatchStreamMessage](api/faststream/redis/message/BatchStreamMessage.md)
@@ -740,15 +858,28 @@ search:
- [RedisPubSubParser](api/faststream/redis/parser/RedisPubSubParser.md)
- [RedisStreamParser](api/faststream/redis/parser/RedisStreamParser.md)
- [SimpleParser](api/faststream/redis/parser/SimpleParser.md)
+ - prometheus
+ - [RedisPrometheusMiddleware](api/faststream/redis/prometheus/RedisPrometheusMiddleware.md)
+ - middleware
+ - [RedisPrometheusMiddleware](api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md)
+ - provider
+ - [BaseRedisMetricsSettingsProvider](api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md)
+ - [BatchRedisMetricsSettingsProvider](api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md)
+ - [RedisMetricsSettingsProvider](api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md)
+ - [settings_provider_factory](api/faststream/redis/prometheus/provider/settings_provider_factory.md)
- publisher
+ - factory
+ - [create_publisher](api/faststream/redis/publisher/factory/create_publisher.md)
+ - fake
+ - [RedisFakePublisher](api/faststream/redis/publisher/fake/RedisFakePublisher.md)
- producer
- [RedisFastProducer](api/faststream/redis/publisher/producer/RedisFastProducer.md)
- - publisher
- - [AsyncAPIChannelPublisher](api/faststream/redis/publisher/publisher/AsyncAPIChannelPublisher.md)
- - [AsyncAPIListBatchPublisher](api/faststream/redis/publisher/publisher/AsyncAPIListBatchPublisher.md)
- - [AsyncAPIListPublisher](api/faststream/redis/publisher/publisher/AsyncAPIListPublisher.md)
- - [AsyncAPIStreamPublisher](api/faststream/redis/publisher/publisher/AsyncAPIStreamPublisher.md)
- - [SpecificationPublisher](api/faststream/redis/publisher/publisher/SpecificationPublisher.md)
+ - specified
+ - [SpecificationChannelPublisher](api/faststream/redis/publisher/specified/SpecificationChannelPublisher.md)
+ - [SpecificationListBatchPublisher](api/faststream/redis/publisher/specified/SpecificationListBatchPublisher.md)
+ - [SpecificationListPublisher](api/faststream/redis/publisher/specified/SpecificationListPublisher.md)
+ - [SpecificationPublisher](api/faststream/redis/publisher/specified/SpecificationPublisher.md)
+ - [SpecificationStreamPublisher](api/faststream/redis/publisher/specified/SpecificationStreamPublisher.md)
- usecase
- [ChannelPublisher](api/faststream/redis/publisher/usecase/ChannelPublisher.md)
- [ListBatchPublisher](api/faststream/redis/publisher/usecase/ListBatchPublisher.md)
@@ -756,6 +887,8 @@ search:
- [LogicPublisher](api/faststream/redis/publisher/usecase/LogicPublisher.md)
- [StreamPublisher](api/faststream/redis/publisher/usecase/StreamPublisher.md)
- response
+ - [DestinationType](api/faststream/redis/response/DestinationType.md)
+ - [RedisPublishCommand](api/faststream/redis/response/RedisPublishCommand.md)
- [RedisResponse](api/faststream/redis/response/RedisResponse.md)
- router
- [RedisPublisher](api/faststream/redis/router/RedisPublisher.md)
@@ -768,7 +901,7 @@ search:
- list_sub
- [ListSub](api/faststream/redis/schemas/list_sub/ListSub.md)
- proto
- - [RedisAsyncAPIProtocol](api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md)
+ - [RedisSpecificationProtocol](api/faststream/redis/schemas/proto/RedisSpecificationProtocol.md)
- [validate_options](api/faststream/redis/schemas/proto/validate_options.md)
- pub_sub
- [PubSub](api/faststream/redis/schemas/pub_sub/PubSub.md)
@@ -779,19 +912,19 @@ search:
- subscriber
- factory
- [create_subscriber](api/faststream/redis/subscriber/factory/create_subscriber.md)
- - subscriber
- - [AsyncAPIChannelSubscriber](api/faststream/redis/subscriber/subscriber/AsyncAPIChannelSubscriber.md)
- - [AsyncAPIListBatchSubscriber](api/faststream/redis/subscriber/subscriber/AsyncAPIListBatchSubscriber.md)
- - [AsyncAPIListSubscriber](api/faststream/redis/subscriber/subscriber/AsyncAPIListSubscriber.md)
- - [AsyncAPIStreamBatchSubscriber](api/faststream/redis/subscriber/subscriber/AsyncAPIStreamBatchSubscriber.md)
- - [AsyncAPIStreamSubscriber](api/faststream/redis/subscriber/subscriber/AsyncAPIStreamSubscriber.md)
- - [SpecificationSubscriber](api/faststream/redis/subscriber/subscriber/SpecificationSubscriber.md)
+ - specified
+ - [SpecificationChannelSubscriber](api/faststream/redis/subscriber/specified/SpecificationChannelSubscriber.md)
+ - [SpecificationListBatchSubscriber](api/faststream/redis/subscriber/specified/SpecificationListBatchSubscriber.md)
+ - [SpecificationListSubscriber](api/faststream/redis/subscriber/specified/SpecificationListSubscriber.md)
+ - [SpecificationStreamBatchSubscriber](api/faststream/redis/subscriber/specified/SpecificationStreamBatchSubscriber.md)
+ - [SpecificationStreamSubscriber](api/faststream/redis/subscriber/specified/SpecificationStreamSubscriber.md)
+ - [SpecificationSubscriber](api/faststream/redis/subscriber/specified/SpecificationSubscriber.md)
- usecase
- [BatchListSubscriber](api/faststream/redis/subscriber/usecase/BatchListSubscriber.md)
- - [BatchStreamSubscriber](api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md)
- [ChannelSubscriber](api/faststream/redis/subscriber/usecase/ChannelSubscriber.md)
- [ListSubscriber](api/faststream/redis/subscriber/usecase/ListSubscriber.md)
- [LogicSubscriber](api/faststream/redis/subscriber/usecase/LogicSubscriber.md)
+ - [StreamBatchSubscriber](api/faststream/redis/subscriber/usecase/StreamBatchSubscriber.md)
- [StreamSubscriber](api/faststream/redis/subscriber/usecase/StreamSubscriber.md)
- testing
- [ChannelVisitor](api/faststream/redis/testing/ChannelVisitor.md)
@@ -802,9 +935,14 @@ search:
- [Visitor](api/faststream/redis/testing/Visitor.md)
- [build_message](api/faststream/redis/testing/build_message.md)
- response
+ - [PublishCommand](api/faststream/response/PublishCommand.md)
+ - [PublishType](api/faststream/response/PublishType.md)
- [Response](api/faststream/response/Response.md)
- [ensure_response](api/faststream/response/ensure_response.md)
+ - publish_type
+ - [PublishType](api/faststream/response/publish_type/PublishType.md)
- response
+ - [PublishCommand](api/faststream/response/response/PublishCommand.md)
- [Response](api/faststream/response/response/Response.md)
- utils
- [ensure_response](api/faststream/response/utils/ensure_response.md)
@@ -816,21 +954,14 @@ search:
- [SASLScram256](api/faststream/security/SASLScram256.md)
- [SASLScram512](api/faststream/security/SASLScram512.md)
- specification
+ - [AsyncAPI](api/faststream/specification/AsyncAPI.md)
+ - [Contact](api/faststream/specification/Contact.md)
+ - [ExternalDocs](api/faststream/specification/ExternalDocs.md)
+ - [License](api/faststream/specification/License.md)
+ - [Tag](api/faststream/specification/Tag.md)
- asyncapi
- [AsyncAPI](api/faststream/specification/asyncapi/AsyncAPI.md)
- - [AsyncAPIProto](api/faststream/specification/asyncapi/AsyncAPIProto.md)
- [get_asyncapi_html](api/faststream/specification/asyncapi/get_asyncapi_html.md)
- - base
- - [AsyncAPIProto](api/faststream/specification/asyncapi/base/AsyncAPIProto.md)
- - asyncapi
- - [AsyncAPIProto](api/faststream/specification/asyncapi/base/asyncapi/AsyncAPIProto.md)
- - schema
- - [BaseInfo](api/faststream/specification/asyncapi/base/schema/BaseInfo.md)
- - [BaseSchema](api/faststream/specification/asyncapi/base/schema/BaseSchema.md)
- - info
- - [BaseInfo](api/faststream/specification/asyncapi/base/schema/info/BaseInfo.md)
- - schema
- - [BaseSchema](api/faststream/specification/asyncapi/base/schema/schema/BaseSchema.md)
- factory
- [AsyncAPI](api/faststream/specification/asyncapi/factory/AsyncAPI.md)
- message
@@ -1041,10 +1172,20 @@ search:
- [Schema](api/faststream/specification/asyncapi/v3_0_0/schema/schema/Schema.md)
- servers
- [Server](api/faststream/specification/asyncapi/v3_0_0/schema/servers/Server.md)
- - proto
- - [SpecApplication](api/faststream/specification/proto/SpecApplication.md)
- - [SpecificationProto](api/faststream/specification/proto/SpecificationProto.md)
+ - base
+ - info
+ - [BaseInfo](api/faststream/specification/base/info/BaseInfo.md)
+ - proto
+ - [SpecificationEndpoint](api/faststream/specification/base/proto/SpecificationEndpoint.md)
+ - schema
+ - [BaseSchema](api/faststream/specification/base/schema/BaseSchema.md)
+ - specification
+ - [Specification](api/faststream/specification/base/specification/Specification.md)
- schema
+ - [Contact](api/faststream/specification/schema/Contact.md)
+ - [ExternalDocs](api/faststream/specification/schema/ExternalDocs.md)
+ - [License](api/faststream/specification/schema/License.md)
+ - [Tag](api/faststream/specification/schema/Tag.md)
- bindings
- [ChannelBinding](api/faststream/specification/schema/bindings/ChannelBinding.md)
- [OperationBinding](api/faststream/specification/schema/bindings/OperationBinding.md)
@@ -1088,8 +1229,6 @@ search:
- [Message](api/faststream/specification/schema/message/Message.md)
- operation
- [Operation](api/faststream/specification/schema/operation/Operation.md)
- - schema
- - [BaseSchema](api/faststream/specification/schema/schema/BaseSchema.md)
- security
- [OauthFlowObj](api/faststream/specification/schema/security/OauthFlowObj.md)
- [OauthFlows](api/faststream/specification/schema/security/OauthFlows.md)
diff --git a/docs/docs/assets/img/grafana-dashboard.png b/docs/docs/assets/img/grafana-dashboard.png
new file mode 100644
index 0000000000..c424ea8b66
Binary files /dev/null and b/docs/docs/assets/img/grafana-dashboard.png differ
diff --git a/docs/docs/assets/img/import-dashboard.png b/docs/docs/assets/img/import-dashboard.png
new file mode 100644
index 0000000000..1731ece99e
Binary files /dev/null and b/docs/docs/assets/img/import-dashboard.png differ
diff --git a/docs/docs/en/api/faststream/exceptions/OperationForbiddenError.md b/docs/docs/en/api/faststream/AckPolicy.md
similarity index 68%
rename from docs/docs/en/api/faststream/exceptions/OperationForbiddenError.md
rename to docs/docs/en/api/faststream/AckPolicy.md
index e34e86542b..4d7218c81b 100644
--- a/docs/docs/en/api/faststream/exceptions/OperationForbiddenError.md
+++ b/docs/docs/en/api/faststream/AckPolicy.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.exceptions.OperationForbiddenError
+::: faststream.AckPolicy
diff --git a/docs/docs/en/api/faststream/middlewares/exception/BaseExceptionMiddleware.md b/docs/docs/en/api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md
similarity index 63%
rename from docs/docs/en/api/faststream/middlewares/exception/BaseExceptionMiddleware.md
rename to docs/docs/en/api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md
index 54f8031f0a..e84e84acc3 100644
--- a/docs/docs/en/api/faststream/middlewares/exception/BaseExceptionMiddleware.md
+++ b/docs/docs/en/api/faststream/confluent/prometheus/KafkaPrometheusMiddleware.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.middlewares.exception.BaseExceptionMiddleware
+::: faststream.confluent.prometheus.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md
new file mode 100644
index 0000000000..6603893f74
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/prometheus/middleware/KafkaPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.prometheus.middleware.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md
new file mode 100644
index 0000000000..27c186c098
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/prometheus/provider/BaseConfluentMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.prometheus.provider.BaseConfluentMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md
new file mode 100644
index 0000000000..f784a64e9f
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/prometheus/provider/BatchConfluentMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.prometheus.provider.BatchConfluentMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md
new file mode 100644
index 0000000000..65f0a8348e
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/prometheus/provider/ConfluentMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.prometheus.provider.ConfluentMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationSubscriber.md b/docs/docs/en/api/faststream/confluent/prometheus/provider/settings_provider_factory.md
similarity index 60%
rename from docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationSubscriber.md
rename to docs/docs/en/api/faststream/confluent/prometheus/provider/settings_provider_factory.md
index de67fe6ca8..78358f46e3 100644
--- a/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationSubscriber.md
+++ b/docs/docs/en/api/faststream/confluent/prometheus/provider/settings_provider_factory.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.confluent.subscriber.subscriber.SpecificationSubscriber
+::: faststream.confluent.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md b/docs/docs/en/api/faststream/confluent/publisher/factory/create_publisher.md
similarity index 64%
rename from docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md
rename to docs/docs/en/api/faststream/confluent/publisher/factory/create_publisher.md
index c94cb1b731..60e9664052 100644
--- a/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md
+++ b/docs/docs/en/api/faststream/confluent/publisher/factory/create_publisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.nats.subscriber.subscription.Unsubscriptable
+::: faststream.confluent.publisher.factory.create_publisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/fake/KafkaFakePublisher.md b/docs/docs/en/api/faststream/confluent/publisher/fake/KafkaFakePublisher.md
new file mode 100644
index 0000000000..019fbf855f
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/publisher/fake/KafkaFakePublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.publisher.fake.KafkaFakePublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationBatchPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationBatchPublisher.md
similarity index 65%
rename from docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationBatchPublisher.md
rename to docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationBatchPublisher.md
index d666c31641..2879b0d6c3 100644
--- a/docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationBatchPublisher.md
+++ b/docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationBatchPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.confluent.publisher.publisher.SpecificationBatchPublisher
+::: faststream.confluent.publisher.specified.SpecificationBatchPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationDefaultPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationDefaultPublisher.md
similarity index 65%
rename from docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationDefaultPublisher.md
rename to docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationDefaultPublisher.md
index 2755e26b87..581ae19fd8 100644
--- a/docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationDefaultPublisher.md
+++ b/docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationDefaultPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.confluent.publisher.publisher.SpecificationDefaultPublisher
+::: faststream.confluent.publisher.specified.SpecificationDefaultPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationPublisher.md b/docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationPublisher.md
similarity index 64%
rename from docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationPublisher.md
rename to docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationPublisher.md
index d4f8b18d7c..55ed49caf5 100644
--- a/docs/docs/en/api/faststream/confluent/publisher/publisher/SpecificationPublisher.md
+++ b/docs/docs/en/api/faststream/confluent/publisher/specified/SpecificationPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.confluent.publisher.publisher.SpecificationPublisher
+::: faststream.confluent.publisher.specified.SpecificationPublisher
diff --git a/docs/docs/en/api/faststream/confluent/publisher/state/EmptyProducerState.md b/docs/docs/en/api/faststream/confluent/publisher/state/EmptyProducerState.md
new file mode 100644
index 0000000000..a72476a6d3
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/publisher/state/EmptyProducerState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.publisher.state.EmptyProducerState
diff --git a/docs/docs/en/api/faststream/confluent/publisher/state/ProducerState.md b/docs/docs/en/api/faststream/confluent/publisher/state/ProducerState.md
new file mode 100644
index 0000000000..5a5a35dddd
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/publisher/state/ProducerState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.publisher.state.ProducerState
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md b/docs/docs/en/api/faststream/confluent/publisher/state/RealProducer.md
similarity index 67%
rename from docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md
rename to docs/docs/en/api/faststream/confluent/publisher/state/RealProducer.md
index 67638258ea..52143d1596 100644
--- a/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md
+++ b/docs/docs/en/api/faststream/confluent/publisher/state/RealProducer.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.nats.subscriber.subscription.Watchable
+::: faststream.confluent.publisher.state.RealProducer
diff --git a/docs/docs/en/api/faststream/confluent/response/KafkaPublishCommand.md b/docs/docs/en/api/faststream/confluent/response/KafkaPublishCommand.md
new file mode 100644
index 0000000000..2a4efcf180
--- /dev/null
+++ b/docs/docs/en/api/faststream/confluent/response/KafkaPublishCommand.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.confluent.response.KafkaPublishCommand
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationBatchSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationBatchSubscriber.md
similarity index 59%
rename from docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationBatchSubscriber.md
rename to docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationBatchSubscriber.md
index 269bf4a3c4..ae9e9f42e8 100644
--- a/docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationBatchSubscriber.md
+++ b/docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationBatchSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.kafka.subscriber.subscriber.SpecificationBatchSubscriber
+::: faststream.confluent.subscriber.specified.SpecificationBatchSubscriber
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationDefaultSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationDefaultSubscriber.md
similarity index 58%
rename from docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationDefaultSubscriber.md
rename to docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationDefaultSubscriber.md
index 5e9fb682fd..d4dd2d304e 100644
--- a/docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationDefaultSubscriber.md
+++ b/docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationDefaultSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.kafka.subscriber.subscriber.SpecificationDefaultSubscriber
+::: faststream.confluent.subscriber.specified.SpecificationDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationCoreSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationSubscriber.md
similarity index 64%
rename from docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationCoreSubscriber.md
rename to docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationSubscriber.md
index 55256d5f70..8887278921 100644
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationCoreSubscriber.md
+++ b/docs/docs/en/api/faststream/confluent/subscriber/specified/SpecificationSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.nats.subscriber.subscriber.SpecificationCoreSubscriber
+::: faststream.confluent.subscriber.specified.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationDefaultSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationDefaultSubscriber.md
deleted file mode 100644
index 71c9c19ece..0000000000
--- a/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationDefaultSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.confluent.subscriber.subscriber.SpecificationDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/exceptions/FeatureNotSupportedException.md b/docs/docs/en/api/faststream/exceptions/FeatureNotSupportedException.md
new file mode 100644
index 0000000000..bbf1f32d2b
--- /dev/null
+++ b/docs/docs/en/api/faststream/exceptions/FeatureNotSupportedException.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.exceptions.FeatureNotSupportedException
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md
new file mode 100644
index 0000000000..c2ffd5356a
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/prometheus/KafkaPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.prometheus.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md b/docs/docs/en/api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md
new file mode 100644
index 0000000000..451b7080c0
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/prometheus/middleware/KafkaPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.prometheus.middleware.KafkaPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md
new file mode 100644
index 0000000000..0fd044f694
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/prometheus/provider/BaseKafkaMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.prometheus.provider.BaseKafkaMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md
new file mode 100644
index 0000000000..9bd01d5e71
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/prometheus/provider/BatchKafkaMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.prometheus.provider.BatchKafkaMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md
new file mode 100644
index 0000000000..ae7c490da8
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/prometheus/provider/KafkaMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.prometheus.provider.KafkaMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/kafka/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/kafka/prometheus/provider/settings_provider_factory.md
new file mode 100644
index 0000000000..1393fd9065
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/prometheus/provider/settings_provider_factory.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/kafka/publisher/factory/create_publisher.md b/docs/docs/en/api/faststream/kafka/publisher/factory/create_publisher.md
new file mode 100644
index 0000000000..7ec33758af
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/publisher/factory/create_publisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.publisher.factory.create_publisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/fake/KafkaFakePublisher.md b/docs/docs/en/api/faststream/kafka/publisher/fake/KafkaFakePublisher.md
new file mode 100644
index 0000000000..6bacca904e
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/publisher/fake/KafkaFakePublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.publisher.fake.KafkaFakePublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationBatchPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationBatchPublisher.md
similarity index 64%
rename from docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationBatchPublisher.md
rename to docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationBatchPublisher.md
index 456dd5e1f1..795766b030 100644
--- a/docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationBatchPublisher.md
+++ b/docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationBatchPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.kafka.publisher.publisher.SpecificationBatchPublisher
+::: faststream.kafka.publisher.specified.SpecificationBatchPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationDefaultPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationDefaultPublisher.md
similarity index 64%
rename from docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationDefaultPublisher.md
rename to docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationDefaultPublisher.md
index 3b6fa9b980..e191045545 100644
--- a/docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationDefaultPublisher.md
+++ b/docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationDefaultPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.kafka.publisher.publisher.SpecificationDefaultPublisher
+::: faststream.kafka.publisher.specified.SpecificationDefaultPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/publisher/SpecificationPublisher.md b/docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationPublisher.md
similarity index 63%
rename from docs/docs/en/api/faststream/redis/publisher/publisher/SpecificationPublisher.md
rename to docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationPublisher.md
index 755f5c6939..ed687d7e08 100644
--- a/docs/docs/en/api/faststream/redis/publisher/publisher/SpecificationPublisher.md
+++ b/docs/docs/en/api/faststream/kafka/publisher/specified/SpecificationPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.publisher.publisher.SpecificationPublisher
+::: faststream.kafka.publisher.specified.SpecificationPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/state/EmptyProducerState.md b/docs/docs/en/api/faststream/kafka/publisher/state/EmptyProducerState.md
new file mode 100644
index 0000000000..0152ee7c2f
--- /dev/null
+++ b/docs/docs/en/api/faststream/kafka/publisher/state/EmptyProducerState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.kafka.publisher.state.EmptyProducerState
diff --git a/docs/docs/en/api/faststream/specification/proto/SpecApplication.md b/docs/docs/en/api/faststream/kafka/publisher/state/ProducerState.md
similarity index 68%
rename from docs/docs/en/api/faststream/specification/proto/SpecApplication.md
rename to docs/docs/en/api/faststream/kafka/publisher/state/ProducerState.md
index 14e1bad727..c937179471 100644
--- a/docs/docs/en/api/faststream/specification/proto/SpecApplication.md
+++ b/docs/docs/en/api/faststream/kafka/publisher/state/ProducerState.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.specification.proto.SpecApplication
+::: faststream.kafka.publisher.state.ProducerState
diff --git a/docs/docs/en/api/faststream/rabbit/fastapi/router/RabbitRouter.md b/docs/docs/en/api/faststream/kafka/publisher/state/RealProducer.md
similarity index 68%
rename from docs/docs/en/api/faststream/rabbit/fastapi/router/RabbitRouter.md
rename to docs/docs/en/api/faststream/kafka/publisher/state/RealProducer.md
index 36dda03314..a576226b3c 100644
--- a/docs/docs/en/api/faststream/rabbit/fastapi/router/RabbitRouter.md
+++ b/docs/docs/en/api/faststream/kafka/publisher/state/RealProducer.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.rabbit.fastapi.router.RabbitRouter
+::: faststream.kafka.publisher.state.RealProducer
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/AsyncAPIProto.md b/docs/docs/en/api/faststream/kafka/response/KafkaPublishCommand.md
similarity index 67%
rename from docs/docs/en/api/faststream/specification/asyncapi/AsyncAPIProto.md
rename to docs/docs/en/api/faststream/kafka/response/KafkaPublishCommand.md
index 532ae1b999..4852098fcc 100644
--- a/docs/docs/en/api/faststream/specification/asyncapi/AsyncAPIProto.md
+++ b/docs/docs/en/api/faststream/kafka/response/KafkaPublishCommand.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.specification.asyncapi.AsyncAPIProto
+::: faststream.kafka.response.KafkaPublishCommand
diff --git a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIListBatchSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationBatchSubscriber.md
similarity index 64%
rename from docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIListBatchSubscriber.md
rename to docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationBatchSubscriber.md
index a1126ce719..9e0ce90401 100644
--- a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIListBatchSubscriber.md
+++ b/docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationBatchSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.subscriber.subscriber.AsyncAPIListBatchSubscriber
+::: faststream.kafka.subscriber.specified.SpecificationBatchSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIStreamBatchSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationDefaultSubscriber.md
similarity index 65%
rename from docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIStreamBatchSubscriber.md
rename to docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationDefaultSubscriber.md
index 5dd6ad995a..fe8ac61ac2 100644
--- a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIStreamBatchSubscriber.md
+++ b/docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationDefaultSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.subscriber.subscriber.AsyncAPIStreamBatchSubscriber
+::: faststream.kafka.subscriber.specified.SpecificationDefaultSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationSubscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationSubscriber.md
similarity index 63%
rename from docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationSubscriber.md
rename to docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationSubscriber.md
index f62b166e92..79dca87d97 100644
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationSubscriber.md
+++ b/docs/docs/en/api/faststream/kafka/subscriber/specified/SpecificationSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.nats.subscriber.subscriber.SpecificationSubscriber
+::: faststream.kafka.subscriber.specified.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/message/SourceType.md b/docs/docs/en/api/faststream/message/SourceType.md
new file mode 100644
index 0000000000..7df391eac3
--- /dev/null
+++ b/docs/docs/en/api/faststream/message/SourceType.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.message.SourceType
diff --git a/docs/docs/en/api/faststream/message/source_type/SourceType.md b/docs/docs/en/api/faststream/message/source_type/SourceType.md
new file mode 100644
index 0000000000..8a6fc990e4
--- /dev/null
+++ b/docs/docs/en/api/faststream/message/source_type/SourceType.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.message.source_type.SourceType
diff --git a/docs/docs/en/api/faststream/middlewares/AckPolicy.md b/docs/docs/en/api/faststream/middlewares/AckPolicy.md
new file mode 100644
index 0000000000..82d0033dfb
--- /dev/null
+++ b/docs/docs/en/api/faststream/middlewares/AckPolicy.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.middlewares.AckPolicy
diff --git a/docs/docs/en/api/faststream/middlewares/logging/LoggingMiddleware.md b/docs/docs/en/api/faststream/middlewares/AcknowledgementMiddleware.md
similarity index 67%
rename from docs/docs/en/api/faststream/middlewares/logging/LoggingMiddleware.md
rename to docs/docs/en/api/faststream/middlewares/AcknowledgementMiddleware.md
index 62e6dfa604..d3e7d6a763 100644
--- a/docs/docs/en/api/faststream/middlewares/logging/LoggingMiddleware.md
+++ b/docs/docs/en/api/faststream/middlewares/AcknowledgementMiddleware.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.middlewares.logging.LoggingMiddleware
+::: faststream.middlewares.AcknowledgementMiddleware
diff --git a/docs/docs/en/api/faststream/middlewares/acknowledgement/conf/AckPolicy.md b/docs/docs/en/api/faststream/middlewares/acknowledgement/conf/AckPolicy.md
new file mode 100644
index 0000000000..8a92ec0a54
--- /dev/null
+++ b/docs/docs/en/api/faststream/middlewares/acknowledgement/conf/AckPolicy.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.middlewares.acknowledgement.conf.AckPolicy
diff --git a/docs/docs/en/api/faststream/middlewares/acknowledgement/middleware/AcknowledgementMiddleware.md b/docs/docs/en/api/faststream/middlewares/acknowledgement/middleware/AcknowledgementMiddleware.md
new file mode 100644
index 0000000000..79b2956eb4
--- /dev/null
+++ b/docs/docs/en/api/faststream/middlewares/acknowledgement/middleware/AcknowledgementMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.middlewares.acknowledgement.middleware.AcknowledgementMiddleware
diff --git a/docs/docs/en/api/faststream/nats/broker/state/BrokerState.md b/docs/docs/en/api/faststream/nats/broker/state/BrokerState.md
new file mode 100644
index 0000000000..ed5dc00c35
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/broker/state/BrokerState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.broker.state.BrokerState
diff --git a/docs/docs/en/api/faststream/nats/broker/state/ConnectedState.md b/docs/docs/en/api/faststream/nats/broker/state/ConnectedState.md
new file mode 100644
index 0000000000..b7bb106798
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/broker/state/ConnectedState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.broker.state.ConnectedState
diff --git a/docs/docs/en/api/faststream/nats/broker/state/ConnectionBrokenState.md b/docs/docs/en/api/faststream/nats/broker/state/ConnectionBrokenState.md
new file mode 100644
index 0000000000..66df604330
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/broker/state/ConnectionBrokenState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.broker.state.ConnectionBrokenState
diff --git a/docs/docs/en/api/faststream/nats/broker/state/EmptyBrokerState.md b/docs/docs/en/api/faststream/nats/broker/state/EmptyBrokerState.md
new file mode 100644
index 0000000000..88bf83710d
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/broker/state/EmptyBrokerState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.broker.state.EmptyBrokerState
diff --git a/docs/docs/en/api/faststream/nats/helpers/state/ConnectedState.md b/docs/docs/en/api/faststream/nats/helpers/state/ConnectedState.md
new file mode 100644
index 0000000000..888302338b
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/helpers/state/ConnectedState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.helpers.state.ConnectedState
diff --git a/docs/docs/en/api/faststream/nats/helpers/state/ConnectionState.md b/docs/docs/en/api/faststream/nats/helpers/state/ConnectionState.md
new file mode 100644
index 0000000000..0d99fb56ed
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/helpers/state/ConnectionState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.helpers.state.ConnectionState
diff --git a/docs/docs/en/api/faststream/nats/helpers/state/EmptyConnectionState.md b/docs/docs/en/api/faststream/nats/helpers/state/EmptyConnectionState.md
new file mode 100644
index 0000000000..31a062d4ad
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/helpers/state/EmptyConnectionState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.helpers.state.EmptyConnectionState
diff --git a/docs/docs/en/api/faststream/nats/prometheus/NatsPrometheusMiddleware.md b/docs/docs/en/api/faststream/nats/prometheus/NatsPrometheusMiddleware.md
new file mode 100644
index 0000000000..d9b179b0c4
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/prometheus/NatsPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.prometheus.NatsPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md b/docs/docs/en/api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md
new file mode 100644
index 0000000000..7202731048
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/prometheus/middleware/NatsPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.prometheus.middleware.NatsPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md b/docs/docs/en/api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md
new file mode 100644
index 0000000000..80742833bc
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/prometheus/provider/BaseNatsMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.prometheus.provider.BaseNatsMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md b/docs/docs/en/api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md
new file mode 100644
index 0000000000..163ebb7bc6
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/prometheus/provider/BatchNatsMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.prometheus.provider.BatchNatsMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md b/docs/docs/en/api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md
new file mode 100644
index 0000000000..e5515a4cc5
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/prometheus/provider/NatsMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.prometheus.provider.NatsMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/nats/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/nats/prometheus/provider/settings_provider_factory.md
new file mode 100644
index 0000000000..aeaa7b26e0
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/prometheus/provider/settings_provider_factory.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/nats/publisher/factory/create_publisher.md b/docs/docs/en/api/faststream/nats/publisher/factory/create_publisher.md
new file mode 100644
index 0000000000..19b23b99a5
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/publisher/factory/create_publisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.publisher.factory.create_publisher
diff --git a/docs/docs/en/api/faststream/nats/publisher/fake/NatsFakePublisher.md b/docs/docs/en/api/faststream/nats/publisher/fake/NatsFakePublisher.md
new file mode 100644
index 0000000000..df23cc8045
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/publisher/fake/NatsFakePublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.publisher.fake.NatsFakePublisher
diff --git a/docs/docs/en/api/faststream/nats/publisher/publisher/SpecificationPublisher.md b/docs/docs/en/api/faststream/nats/publisher/specified/SpecificationPublisher.md
similarity index 63%
rename from docs/docs/en/api/faststream/nats/publisher/publisher/SpecificationPublisher.md
rename to docs/docs/en/api/faststream/nats/publisher/specified/SpecificationPublisher.md
index 59aeffa6ab..3f5eec9e22 100644
--- a/docs/docs/en/api/faststream/nats/publisher/publisher/SpecificationPublisher.md
+++ b/docs/docs/en/api/faststream/nats/publisher/specified/SpecificationPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.nats.publisher.publisher.SpecificationPublisher
+::: faststream.nats.publisher.specified.SpecificationPublisher
diff --git a/docs/docs/en/api/faststream/nats/response/NatsPublishCommand.md b/docs/docs/en/api/faststream/nats/response/NatsPublishCommand.md
new file mode 100644
index 0000000000..148119ba8a
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/response/NatsPublishCommand.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.response.NatsPublishCommand
diff --git a/docs/docs/en/api/faststream/nats/subscriber/adapters/UnsubscribeAdapter.md b/docs/docs/en/api/faststream/nats/subscriber/adapters/UnsubscribeAdapter.md
new file mode 100644
index 0000000000..9b00a89428
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/adapters/UnsubscribeAdapter.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.adapters.UnsubscribeAdapter
diff --git a/docs/docs/en/api/faststream/nats/subscriber/adapters/Unsubscriptable.md b/docs/docs/en/api/faststream/nats/subscriber/adapters/Unsubscriptable.md
new file mode 100644
index 0000000000..4c6c6b0abe
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/adapters/Unsubscriptable.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.adapters.Unsubscriptable
diff --git a/docs/docs/en/api/faststream/nats/subscriber/adapters/Watchable.md b/docs/docs/en/api/faststream/nats/subscriber/adapters/Watchable.md
new file mode 100644
index 0000000000..00dde78565
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/adapters/Watchable.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.adapters.Watchable
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationBatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationBatchPullStreamSubscriber.md
new file mode 100644
index 0000000000..d663201213
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationBatchPullStreamSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationBatchPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentCoreSubscriber.md
new file mode 100644
index 0000000000..24f1a256ce
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentCoreSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationConcurrentCoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentPullStreamSubscriber.md
new file mode 100644
index 0000000000..45d7106b98
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentPullStreamSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationConcurrentPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentPushStreamSubscriber.md
new file mode 100644
index 0000000000..4a5bebd382
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationConcurrentPushStreamSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationConcurrentPushStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIChannelSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationCoreSubscriber.md
similarity index 64%
rename from docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIChannelSubscriber.md
rename to docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationCoreSubscriber.md
index f14fc9956e..5415ec6203 100644
--- a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIChannelSubscriber.md
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationCoreSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.subscriber.subscriber.AsyncAPIChannelSubscriber
+::: faststream.nats.subscriber.specified.SpecificationCoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationKeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationKeyValueWatchSubscriber.md
new file mode 100644
index 0000000000..bd9e10e9d2
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationKeyValueWatchSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationKeyValueWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationObjStoreWatchSubscriber.md
new file mode 100644
index 0000000000..d2b5bfa27f
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationObjStoreWatchSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationObjStoreWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationPullStreamSubscriber.md
new file mode 100644
index 0000000000..c0867b195f
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationPullStreamSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationPushStreamSubscriber.md
new file mode 100644
index 0000000000..ef20892652
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationPushStreamSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.specified.SpecificationPushStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIStreamPublisher.md b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationSubscriber.md
similarity index 63%
rename from docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIStreamPublisher.md
rename to docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationSubscriber.md
index 53fc7fbd3a..613fdecd8a 100644
--- a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIStreamPublisher.md
+++ b/docs/docs/en/api/faststream/nats/subscriber/specified/SpecificationSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.publisher.publisher.AsyncAPIStreamPublisher
+::: faststream.nats.subscriber.specified.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/state/ConnectedSubscriberState.md b/docs/docs/en/api/faststream/nats/subscriber/state/ConnectedSubscriberState.md
new file mode 100644
index 0000000000..3398403cb2
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/state/ConnectedSubscriberState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.state.ConnectedSubscriberState
diff --git a/docs/docs/en/api/faststream/nats/subscriber/state/EmptySubscriberState.md b/docs/docs/en/api/faststream/nats/subscriber/state/EmptySubscriberState.md
new file mode 100644
index 0000000000..de80057014
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/state/EmptySubscriberState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.state.EmptySubscriberState
diff --git a/docs/docs/en/api/faststream/nats/subscriber/state/SubscriberState.md b/docs/docs/en/api/faststream/nats/subscriber/state/SubscriberState.md
new file mode 100644
index 0000000000..a61839436a
--- /dev/null
+++ b/docs/docs/en/api/faststream/nats/subscriber/state/SubscriberState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.nats.subscriber.state.SubscriberState
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationBatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationBatchPullStreamSubscriber.md
deleted file mode 100644
index 7329cf824b..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationBatchPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationBatchPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentCoreSubscriber.md
deleted file mode 100644
index 861a569292..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentCoreSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationConcurrentCoreSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPullStreamSubscriber.md
deleted file mode 100644
index 4437e07663..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationConcurrentPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPushStreamSubscriber.md
deleted file mode 100644
index 1381a591c1..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationConcurrentPushStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationConcurrentPushStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationKeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationKeyValueWatchSubscriber.md
deleted file mode 100644
index 1b46711a00..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationKeyValueWatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationKeyValueWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationObjStoreWatchSubscriber.md
deleted file mode 100644
index 91ce924e87..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationObjStoreWatchSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationObjStoreWatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationPullStreamSubscriber.md
deleted file mode 100644
index 1465f7cca0..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationPullStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscriber.SpecificationPullStreamSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md
deleted file mode 100644
index 455885671f..0000000000
--- a/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.nats.subscriber.subscription.UnsubscribeAdapter
diff --git a/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md
deleted file mode 100644
index 64a7b4a501..0000000000
--- a/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.opentelemetry.middleware.BaseTelemetryMiddleware
diff --git a/docs/docs/en/api/faststream/prometheus/ConsumeAttrs.md b/docs/docs/en/api/faststream/prometheus/ConsumeAttrs.md
new file mode 100644
index 0000000000..ad8e536b7a
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/ConsumeAttrs.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.ConsumeAttrs
diff --git a/docs/docs/en/api/faststream/prometheus/MetricsSettingsProvider.md b/docs/docs/en/api/faststream/prometheus/MetricsSettingsProvider.md
new file mode 100644
index 0000000000..0f7405e44d
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/MetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.MetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/prometheus/PrometheusMiddleware.md b/docs/docs/en/api/faststream/prometheus/PrometheusMiddleware.md
new file mode 100644
index 0000000000..c340a0cb23
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/PrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.PrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/prometheus/container/MetricsContainer.md b/docs/docs/en/api/faststream/prometheus/container/MetricsContainer.md
new file mode 100644
index 0000000000..009d88d263
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/container/MetricsContainer.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.container.MetricsContainer
diff --git a/docs/docs/en/api/faststream/prometheus/manager/MetricsManager.md b/docs/docs/en/api/faststream/prometheus/manager/MetricsManager.md
new file mode 100644
index 0000000000..b1a897c717
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/manager/MetricsManager.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.manager.MetricsManager
diff --git a/docs/docs/en/api/faststream/prometheus/middleware/PrometheusMiddleware.md b/docs/docs/en/api/faststream/prometheus/middleware/PrometheusMiddleware.md
new file mode 100644
index 0000000000..2902586e38
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/middleware/PrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.middleware.PrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/prometheus/provider/MetricsSettingsProvider.md b/docs/docs/en/api/faststream/prometheus/provider/MetricsSettingsProvider.md
new file mode 100644
index 0000000000..3511a21a5b
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/provider/MetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.provider.MetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/prometheus/types/ConsumeAttrs.md b/docs/docs/en/api/faststream/prometheus/types/ConsumeAttrs.md
new file mode 100644
index 0000000000..d9196cab8d
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/types/ConsumeAttrs.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.types.ConsumeAttrs
diff --git a/docs/docs/en/api/faststream/prometheus/types/ProcessingStatus.md b/docs/docs/en/api/faststream/prometheus/types/ProcessingStatus.md
new file mode 100644
index 0000000000..98b6710bcd
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/types/ProcessingStatus.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.types.ProcessingStatus
diff --git a/docs/docs/en/api/faststream/prometheus/types/PublishingStatus.md b/docs/docs/en/api/faststream/prometheus/types/PublishingStatus.md
new file mode 100644
index 0000000000..4e7435fbea
--- /dev/null
+++ b/docs/docs/en/api/faststream/prometheus/types/PublishingStatus.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.prometheus.types.PublishingStatus
diff --git a/docs/docs/en/api/faststream/rabbit/fastapi/fastapi/RabbitRouter.md b/docs/docs/en/api/faststream/rabbit/fastapi/fastapi/RabbitRouter.md
new file mode 100644
index 0000000000..d70c558254
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/fastapi/fastapi/RabbitRouter.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.fastapi.fastapi.RabbitRouter
diff --git a/docs/docs/en/api/faststream/rabbit/helpers/state/ConnectedState.md b/docs/docs/en/api/faststream/rabbit/helpers/state/ConnectedState.md
new file mode 100644
index 0000000000..db97303aa3
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/helpers/state/ConnectedState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.helpers.state.ConnectedState
diff --git a/docs/docs/en/api/faststream/rabbit/helpers/state/ConnectionState.md b/docs/docs/en/api/faststream/rabbit/helpers/state/ConnectionState.md
new file mode 100644
index 0000000000..36b3d4d4d1
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/helpers/state/ConnectionState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.helpers.state.ConnectionState
diff --git a/docs/docs/en/api/faststream/rabbit/helpers/state/EmptyConnectionState.md b/docs/docs/en/api/faststream/rabbit/helpers/state/EmptyConnectionState.md
new file mode 100644
index 0000000000..7b0af42897
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/helpers/state/EmptyConnectionState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.helpers.state.EmptyConnectionState
diff --git a/docs/docs/en/api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md b/docs/docs/en/api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md
new file mode 100644
index 0000000000..2c4308fabd
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/prometheus/RabbitPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.prometheus.RabbitPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md b/docs/docs/en/api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md
new file mode 100644
index 0000000000..45163c998a
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/prometheus/middleware/RabbitPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.prometheus.middleware.RabbitPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md b/docs/docs/en/api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md
new file mode 100644
index 0000000000..6d63301b34
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/prometheus/provider/RabbitMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.prometheus.provider.RabbitMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/factory/create_publisher.md b/docs/docs/en/api/faststream/rabbit/publisher/factory/create_publisher.md
new file mode 100644
index 0000000000..bac090fa43
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/factory/create_publisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.factory.create_publisher
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/fake/RabbitFakePublisher.md b/docs/docs/en/api/faststream/rabbit/publisher/fake/RabbitFakePublisher.md
new file mode 100644
index 0000000000..60879c8e3a
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/fake/RabbitFakePublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.fake.RabbitFakePublisher
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/options/MessageOptions.md b/docs/docs/en/api/faststream/rabbit/publisher/options/MessageOptions.md
new file mode 100644
index 0000000000..eaa454588a
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/options/MessageOptions.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.options.MessageOptions
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/options/PublishOptions.md b/docs/docs/en/api/faststream/rabbit/publisher/options/PublishOptions.md
new file mode 100644
index 0000000000..c80cc9e937
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/options/PublishOptions.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.options.PublishOptions
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/producer/LockState.md b/docs/docs/en/api/faststream/rabbit/publisher/producer/LockState.md
new file mode 100644
index 0000000000..4d7b37ba46
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/producer/LockState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.producer.LockState
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/producer/LockUnset.md b/docs/docs/en/api/faststream/rabbit/publisher/producer/LockUnset.md
new file mode 100644
index 0000000000..95df1a10e7
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/producer/LockUnset.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.producer.LockUnset
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/producer/RealLock.md b/docs/docs/en/api/faststream/rabbit/publisher/producer/RealLock.md
new file mode 100644
index 0000000000..570a279a0a
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/publisher/producer/RealLock.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.publisher.producer.RealLock
diff --git a/docs/docs/en/api/faststream/rabbit/publisher/publisher/SpecificationPublisher.md b/docs/docs/en/api/faststream/rabbit/publisher/specified/SpecificationPublisher.md
similarity index 63%
rename from docs/docs/en/api/faststream/rabbit/publisher/publisher/SpecificationPublisher.md
rename to docs/docs/en/api/faststream/rabbit/publisher/specified/SpecificationPublisher.md
index d19f4e84ef..0001c99fb7 100644
--- a/docs/docs/en/api/faststream/rabbit/publisher/publisher/SpecificationPublisher.md
+++ b/docs/docs/en/api/faststream/rabbit/publisher/specified/SpecificationPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.rabbit.publisher.publisher.SpecificationPublisher
+::: faststream.rabbit.publisher.specified.SpecificationPublisher
diff --git a/docs/docs/en/api/faststream/rabbit/response/RabbitPublishCommand.md b/docs/docs/en/api/faststream/rabbit/response/RabbitPublishCommand.md
new file mode 100644
index 0000000000..4c4bb224b6
--- /dev/null
+++ b/docs/docs/en/api/faststream/rabbit/response/RabbitPublishCommand.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.rabbit.response.RabbitPublishCommand
diff --git a/docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationSubscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/specified/SpecificationSubscriber.md
similarity index 63%
rename from docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationSubscriber.md
rename to docs/docs/en/api/faststream/rabbit/subscriber/specified/SpecificationSubscriber.md
index c8ee640280..928876011d 100644
--- a/docs/docs/en/api/faststream/kafka/subscriber/subscriber/SpecificationSubscriber.md
+++ b/docs/docs/en/api/faststream/rabbit/subscriber/specified/SpecificationSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.kafka.subscriber.subscriber.SpecificationSubscriber
+::: faststream.rabbit.subscriber.specified.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/rabbit/subscriber/subscriber/SpecificationSubscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/subscriber/SpecificationSubscriber.md
deleted file mode 100644
index 76c3fbe14f..0000000000
--- a/docs/docs/en/api/faststream/rabbit/subscriber/subscriber/SpecificationSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.rabbit.subscriber.subscriber.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/redis/helpers/state/ConnectedState.md b/docs/docs/en/api/faststream/redis/helpers/state/ConnectedState.md
new file mode 100644
index 0000000000..793fdb055e
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/helpers/state/ConnectedState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.helpers.state.ConnectedState
diff --git a/docs/docs/en/api/faststream/redis/helpers/state/ConnectionState.md b/docs/docs/en/api/faststream/redis/helpers/state/ConnectionState.md
new file mode 100644
index 0000000000..0a27d849dc
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/helpers/state/ConnectionState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.helpers.state.ConnectionState
diff --git a/docs/docs/en/api/faststream/redis/helpers/state/EmptyConnectionState.md b/docs/docs/en/api/faststream/redis/helpers/state/EmptyConnectionState.md
new file mode 100644
index 0000000000..70273722e0
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/helpers/state/EmptyConnectionState.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.helpers.state.EmptyConnectionState
diff --git a/docs/docs/en/api/faststream/redis/prometheus/RedisPrometheusMiddleware.md b/docs/docs/en/api/faststream/redis/prometheus/RedisPrometheusMiddleware.md
new file mode 100644
index 0000000000..01b23fe4f1
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/prometheus/RedisPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.prometheus.RedisPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md b/docs/docs/en/api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md
new file mode 100644
index 0000000000..c29cc91130
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/prometheus/middleware/RedisPrometheusMiddleware.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.prometheus.middleware.RedisPrometheusMiddleware
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md b/docs/docs/en/api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md
new file mode 100644
index 0000000000..243414331b
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/prometheus/provider/BaseRedisMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.prometheus.provider.BaseRedisMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md b/docs/docs/en/api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md
new file mode 100644
index 0000000000..33d1d2d3a1
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/prometheus/provider/BatchRedisMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.prometheus.provider.BatchRedisMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md b/docs/docs/en/api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md
new file mode 100644
index 0000000000..a7f5f3abe8
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/prometheus/provider/RedisMetricsSettingsProvider.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.prometheus.provider.RedisMetricsSettingsProvider
diff --git a/docs/docs/en/api/faststream/redis/prometheus/provider/settings_provider_factory.md b/docs/docs/en/api/faststream/redis/prometheus/provider/settings_provider_factory.md
new file mode 100644
index 0000000000..aa4812f1e2
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/prometheus/provider/settings_provider_factory.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.prometheus.provider.settings_provider_factory
diff --git a/docs/docs/en/api/faststream/redis/publisher/factory/create_publisher.md b/docs/docs/en/api/faststream/redis/publisher/factory/create_publisher.md
new file mode 100644
index 0000000000..e568f4120a
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/publisher/factory/create_publisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.publisher.factory.create_publisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/fake/RedisFakePublisher.md b/docs/docs/en/api/faststream/redis/publisher/fake/RedisFakePublisher.md
new file mode 100644
index 0000000000..eb00559657
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/publisher/fake/RedisFakePublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.publisher.fake.RedisFakePublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIListPublisher.md b/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIListPublisher.md
deleted file mode 100644
index 2aa117e912..0000000000
--- a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIListPublisher.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.publisher.publisher.AsyncAPIListPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationChannelPublisher.md b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationChannelPublisher.md
new file mode 100644
index 0000000000..93b88342e4
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationChannelPublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.publisher.specified.SpecificationChannelPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationListBatchPublisher.md b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationListBatchPublisher.md
new file mode 100644
index 0000000000..c3f9769199
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationListBatchPublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.publisher.specified.SpecificationListBatchPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIListBatchPublisher.md b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationListPublisher.md
similarity index 64%
rename from docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIListBatchPublisher.md
rename to docs/docs/en/api/faststream/redis/publisher/specified/SpecificationListPublisher.md
index 005a6c863f..a7ed630a72 100644
--- a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIListBatchPublisher.md
+++ b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationListPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.publisher.publisher.AsyncAPIListBatchPublisher
+::: faststream.redis.publisher.specified.SpecificationListPublisher
diff --git a/docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationPublisher.md b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationPublisher.md
similarity index 63%
rename from docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationPublisher.md
rename to docs/docs/en/api/faststream/redis/publisher/specified/SpecificationPublisher.md
index 59fc5e9a94..ceb47b19f6 100644
--- a/docs/docs/en/api/faststream/kafka/publisher/publisher/SpecificationPublisher.md
+++ b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationPublisher.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.kafka.publisher.publisher.SpecificationPublisher
+::: faststream.redis.publisher.specified.SpecificationPublisher
diff --git a/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationStreamPublisher.md b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationStreamPublisher.md
new file mode 100644
index 0000000000..5800dc96da
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/publisher/specified/SpecificationStreamPublisher.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.publisher.specified.SpecificationStreamPublisher
diff --git a/docs/docs/en/api/faststream/redis/response/DestinationType.md b/docs/docs/en/api/faststream/redis/response/DestinationType.md
new file mode 100644
index 0000000000..4eda1ad154
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/response/DestinationType.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.response.DestinationType
diff --git a/docs/docs/en/api/faststream/redis/response/RedisPublishCommand.md b/docs/docs/en/api/faststream/redis/response/RedisPublishCommand.md
new file mode 100644
index 0000000000..14e21c799e
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/response/RedisPublishCommand.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.response.RedisPublishCommand
diff --git a/docs/docs/en/api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md b/docs/docs/en/api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md
deleted file mode 100644
index 7a9d46c451..0000000000
--- a/docs/docs/en/api/faststream/redis/schemas/proto/RedisAsyncAPIProtocol.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.schemas.proto.RedisAsyncAPIProtocol
diff --git a/docs/docs/en/api/faststream/redis/schemas/proto/RedisSpecificationProtocol.md b/docs/docs/en/api/faststream/redis/schemas/proto/RedisSpecificationProtocol.md
new file mode 100644
index 0000000000..de3ab92ace
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/schemas/proto/RedisSpecificationProtocol.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.schemas.proto.RedisSpecificationProtocol
diff --git a/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationChannelSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationChannelSubscriber.md
new file mode 100644
index 0000000000..538babd05f
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationChannelSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.subscriber.specified.SpecificationChannelSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationListBatchSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationListBatchSubscriber.md
new file mode 100644
index 0000000000..60e7fa385d
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationListBatchSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.subscriber.specified.SpecificationListBatchSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationListSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationListSubscriber.md
new file mode 100644
index 0000000000..988ffccb3c
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationListSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.subscriber.specified.SpecificationListSubscriber
diff --git a/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationBatchSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationStreamBatchSubscriber.md
similarity index 58%
rename from docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationBatchSubscriber.md
rename to docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationStreamBatchSubscriber.md
index c41a863145..76a6aff457 100644
--- a/docs/docs/en/api/faststream/confluent/subscriber/subscriber/SpecificationBatchSubscriber.md
+++ b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationStreamBatchSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.confluent.subscriber.subscriber.SpecificationBatchSubscriber
+::: faststream.redis.subscriber.specified.SpecificationStreamBatchSubscriber
diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationStreamSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationStreamSubscriber.md
similarity index 64%
rename from docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationStreamSubscriber.md
rename to docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationStreamSubscriber.md
index bf9aea4bd4..f1bfe8a520 100644
--- a/docs/docs/en/api/faststream/nats/subscriber/subscriber/SpecificationStreamSubscriber.md
+++ b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationStreamSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.nats.subscriber.subscriber.SpecificationStreamSubscriber
+::: faststream.redis.subscriber.specified.SpecificationStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIChannelPublisher.md b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationSubscriber.md
similarity index 63%
rename from docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIChannelPublisher.md
rename to docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationSubscriber.md
index c802c5471f..90a2845dc9 100644
--- a/docs/docs/en/api/faststream/redis/publisher/publisher/AsyncAPIChannelPublisher.md
+++ b/docs/docs/en/api/faststream/redis/subscriber/specified/SpecificationSubscriber.md
@@ -8,4 +8,4 @@ search:
boost: 0.5
---
-::: faststream.redis.publisher.publisher.AsyncAPIChannelPublisher
+::: faststream.redis.subscriber.specified.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIListSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIListSubscriber.md
deleted file mode 100644
index d1c36ae1a1..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIListSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.subscriber.AsyncAPIListSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIStreamSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIStreamSubscriber.md
deleted file mode 100644
index e4c474fde0..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/subscriber/AsyncAPIStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.subscriber.AsyncAPIStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/subscriber/SpecificationSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/subscriber/SpecificationSubscriber.md
deleted file mode 100644
index 1187a36173..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/subscriber/SpecificationSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.subscriber.SpecificationSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md
deleted file mode 100644
index 0f8e4f2e1b..0000000000
--- a/docs/docs/en/api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.redis.subscriber.usecase.BatchStreamSubscriber
diff --git a/docs/docs/en/api/faststream/redis/subscriber/usecase/StreamBatchSubscriber.md b/docs/docs/en/api/faststream/redis/subscriber/usecase/StreamBatchSubscriber.md
new file mode 100644
index 0000000000..3500cc21e2
--- /dev/null
+++ b/docs/docs/en/api/faststream/redis/subscriber/usecase/StreamBatchSubscriber.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.redis.subscriber.usecase.StreamBatchSubscriber
diff --git a/docs/docs/en/api/faststream/response/PublishCommand.md b/docs/docs/en/api/faststream/response/PublishCommand.md
new file mode 100644
index 0000000000..8ca17ac376
--- /dev/null
+++ b/docs/docs/en/api/faststream/response/PublishCommand.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.response.PublishCommand
diff --git a/docs/docs/en/api/faststream/response/PublishType.md b/docs/docs/en/api/faststream/response/PublishType.md
new file mode 100644
index 0000000000..57d3cbddd7
--- /dev/null
+++ b/docs/docs/en/api/faststream/response/PublishType.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.response.PublishType
diff --git a/docs/docs/en/api/faststream/response/publish_type/PublishType.md b/docs/docs/en/api/faststream/response/publish_type/PublishType.md
new file mode 100644
index 0000000000..2ac2fcd51c
--- /dev/null
+++ b/docs/docs/en/api/faststream/response/publish_type/PublishType.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.response.publish_type.PublishType
diff --git a/docs/docs/en/api/faststream/response/response/PublishCommand.md b/docs/docs/en/api/faststream/response/response/PublishCommand.md
new file mode 100644
index 0000000000..b247a7e5d8
--- /dev/null
+++ b/docs/docs/en/api/faststream/response/response/PublishCommand.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.response.response.PublishCommand
diff --git a/docs/docs/en/api/faststream/specification/AsyncAPI.md b/docs/docs/en/api/faststream/specification/AsyncAPI.md
new file mode 100644
index 0000000000..4b23e3fa4a
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/AsyncAPI.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.AsyncAPI
diff --git a/docs/docs/en/api/faststream/specification/Contact.md b/docs/docs/en/api/faststream/specification/Contact.md
new file mode 100644
index 0000000000..aa8ac012ea
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/Contact.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.Contact
diff --git a/docs/docs/en/api/faststream/specification/ExternalDocs.md b/docs/docs/en/api/faststream/specification/ExternalDocs.md
new file mode 100644
index 0000000000..52e0432c94
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/ExternalDocs.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.ExternalDocs
diff --git a/docs/docs/en/api/faststream/specification/License.md b/docs/docs/en/api/faststream/specification/License.md
new file mode 100644
index 0000000000..ac2365f82b
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/License.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.License
diff --git a/docs/docs/en/api/faststream/specification/Tag.md b/docs/docs/en/api/faststream/specification/Tag.md
new file mode 100644
index 0000000000..ae4f1202a1
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/Tag.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.Tag
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/base/AsyncAPIProto.md b/docs/docs/en/api/faststream/specification/asyncapi/base/AsyncAPIProto.md
deleted file mode 100644
index 21e1a6f41d..0000000000
--- a/docs/docs/en/api/faststream/specification/asyncapi/base/AsyncAPIProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.asyncapi.base.AsyncAPIProto
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/base/asyncapi/AsyncAPIProto.md b/docs/docs/en/api/faststream/specification/asyncapi/base/asyncapi/AsyncAPIProto.md
deleted file mode 100644
index d246c776c2..0000000000
--- a/docs/docs/en/api/faststream/specification/asyncapi/base/asyncapi/AsyncAPIProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.asyncapi.base.asyncapi.AsyncAPIProto
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/BaseInfo.md b/docs/docs/en/api/faststream/specification/asyncapi/base/schema/BaseInfo.md
deleted file mode 100644
index 24ca320131..0000000000
--- a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/BaseInfo.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.asyncapi.base.schema.BaseInfo
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/BaseSchema.md b/docs/docs/en/api/faststream/specification/asyncapi/base/schema/BaseSchema.md
deleted file mode 100644
index 553072fb35..0000000000
--- a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/BaseSchema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.asyncapi.base.schema.BaseSchema
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/info/BaseInfo.md b/docs/docs/en/api/faststream/specification/asyncapi/base/schema/info/BaseInfo.md
deleted file mode 100644
index b3ba410f70..0000000000
--- a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/info/BaseInfo.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.asyncapi.base.schema.info.BaseInfo
diff --git a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/schema/BaseSchema.md b/docs/docs/en/api/faststream/specification/asyncapi/base/schema/schema/BaseSchema.md
deleted file mode 100644
index b41b1ab894..0000000000
--- a/docs/docs/en/api/faststream/specification/asyncapi/base/schema/schema/BaseSchema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.asyncapi.base.schema.schema.BaseSchema
diff --git a/docs/docs/en/api/faststream/specification/base/info/BaseInfo.md b/docs/docs/en/api/faststream/specification/base/info/BaseInfo.md
new file mode 100644
index 0000000000..0925aac556
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/base/info/BaseInfo.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.base.info.BaseInfo
diff --git a/docs/docs/en/api/faststream/specification/base/proto/SpecificationEndpoint.md b/docs/docs/en/api/faststream/specification/base/proto/SpecificationEndpoint.md
new file mode 100644
index 0000000000..a6a2658fc4
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/base/proto/SpecificationEndpoint.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.base.proto.SpecificationEndpoint
diff --git a/docs/docs/en/api/faststream/specification/base/schema/BaseSchema.md b/docs/docs/en/api/faststream/specification/base/schema/BaseSchema.md
new file mode 100644
index 0000000000..27512fe2b3
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/base/schema/BaseSchema.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.base.schema.BaseSchema
diff --git a/docs/docs/en/api/faststream/specification/base/specification/Specification.md b/docs/docs/en/api/faststream/specification/base/specification/Specification.md
new file mode 100644
index 0000000000..0b3f07b9f7
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/base/specification/Specification.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.base.specification.Specification
diff --git a/docs/docs/en/api/faststream/specification/proto/SpecificationProto.md b/docs/docs/en/api/faststream/specification/proto/SpecificationProto.md
deleted file mode 100644
index 6258cfcd19..0000000000
--- a/docs/docs/en/api/faststream/specification/proto/SpecificationProto.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.proto.SpecificationProto
diff --git a/docs/docs/en/api/faststream/specification/schema/Contact.md b/docs/docs/en/api/faststream/specification/schema/Contact.md
new file mode 100644
index 0000000000..5c95b1d99b
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/schema/Contact.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.schema.Contact
diff --git a/docs/docs/en/api/faststream/specification/schema/ExternalDocs.md b/docs/docs/en/api/faststream/specification/schema/ExternalDocs.md
new file mode 100644
index 0000000000..242418f578
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/schema/ExternalDocs.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.schema.ExternalDocs
diff --git a/docs/docs/en/api/faststream/specification/schema/License.md b/docs/docs/en/api/faststream/specification/schema/License.md
new file mode 100644
index 0000000000..4e321c7aab
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/schema/License.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.schema.License
diff --git a/docs/docs/en/api/faststream/specification/schema/Tag.md b/docs/docs/en/api/faststream/specification/schema/Tag.md
new file mode 100644
index 0000000000..03071c314f
--- /dev/null
+++ b/docs/docs/en/api/faststream/specification/schema/Tag.md
@@ -0,0 +1,11 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 0.5
+---
+
+::: faststream.specification.schema.Tag
diff --git a/docs/docs/en/api/faststream/specification/schema/schema/BaseSchema.md b/docs/docs/en/api/faststream/specification/schema/schema/BaseSchema.md
deleted file mode 100644
index 0b0a288e80..0000000000
--- a/docs/docs/en/api/faststream/specification/schema/schema/BaseSchema.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-# 0.5 - API
-# 2 - Release
-# 3 - Contributing
-# 5 - Template Page
-# 10 - Default
-search:
- boost: 0.5
----
-
-::: faststream.specification.schema.schema.BaseSchema
diff --git a/docs/docs/en/faststream.md b/docs/docs/en/faststream.md
index 6cc5040cf4..6f64dfd8a6 100644
--- a/docs/docs/en/faststream.md
+++ b/docs/docs/en/faststream.md
@@ -12,6 +12,11 @@ search:
---
+
+
+
+
+
@@ -53,6 +58,12 @@ search:
+
+
+
+
+
+
---
diff --git a/docs/docs/en/getting-started/dependencies/index.md b/docs/docs/en/getting-started/dependencies/index.md
index 8d88ab81d2..6f2fae0c54 100644
--- a/docs/docs/en/getting-started/dependencies/index.md
+++ b/docs/docs/en/getting-started/dependencies/index.md
@@ -21,7 +21,7 @@ By default, it applies to all event handlers, unless you disabled the same optio
!!! warning
Setting the `apply_types=False` flag not only disables type casting but also `Depends` and `Context`.
- If you want to disable only type casting, use `validate=False` instead.
+ If you want to disable only type casting, use `serializer=None` instead.
This flag can be useful if you are using **FastStream** within another framework and you need to use its native dependency system.
diff --git a/docs/docs/en/getting-started/integrations/fastapi/index.md b/docs/docs/en/getting-started/integrations/fastapi/index.md
index 9ece2eab5e..c020dfcb5d 100644
--- a/docs/docs/en/getting-started/integrations/fastapi/index.md
+++ b/docs/docs/en/getting-started/integrations/fastapi/index.md
@@ -27,7 +27,7 @@ Just import a **StreamRouter** you need and declare the message handler in the s
{! includes/getting_started/integrations/fastapi/1.md !}
!!! warning
- If you are using **fastapi < 0.102.2** version, you should setup lifespan manually `#!python FastAPI(lifespan=router.lifespan_context)`
+ If you are using **fastapi < 0.112.2** version, you should setup lifespan manually `#!python FastAPI(lifespan=router.lifespan_context)`
When processing a message from a broker, the entire message body is placed simultaneously in both the `body` and `path` request parameters. You can access them in any way convenient for you. The message header is placed in `headers`.
diff --git a/docs/docs/en/getting-started/integrations/frameworks/index.md b/docs/docs/en/getting-started/integrations/frameworks/index.md
index fcb09ce7f2..d6fe094465 100644
--- a/docs/docs/en/getting-started/integrations/frameworks/index.md
+++ b/docs/docs/en/getting-started/integrations/frameworks/index.md
@@ -9,6 +9,7 @@ search:
# template variables
fastapi_plugin: If you want to use **FastStream** in conjunction with **FastAPI**, perhaps you should use a special [plugin](../fastapi/index.md){.internal-link}
no_hook: However, even if such a hook is not provided, you can do it yourself.
+and_not_only_http: And not only HTTP frameworks.
---
# INTEGRATIONS
diff --git a/docs/docs/en/getting-started/opentelemetry/index.md b/docs/docs/en/getting-started/opentelemetry/index.md
index 3a4c163f96..dc8fab945e 100644
--- a/docs/docs/en/getting-started/opentelemetry/index.md
+++ b/docs/docs/en/getting-started/opentelemetry/index.md
@@ -122,6 +122,8 @@ An example includes:
* Three `FastStream` services
* Exporting traces to `Grafana Tempo` via `gRPC`
* Visualization of traces via `Grafana`
+* Collecting and exporting metrics using `Prometheus`
+* `Grafana dashboard` for metrics
* Examples with custom spans
* Configured `docker-compose` with the entire infrastructure
diff --git a/docs/docs/en/getting-started/prometheus/index.md b/docs/docs/en/getting-started/prometheus/index.md
new file mode 100644
index 0000000000..d7d2f26568
--- /dev/null
+++ b/docs/docs/en/getting-started/prometheus/index.md
@@ -0,0 +1,96 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 10
+---
+
+# Prometheus
+
+[**Prometheus**](https://prometheus.io/){.external-link target="_blank"} is an open-source monitoring and alerting toolkit originally built at SoundCloud.
+With a focus on reliability, robustness, and easy scalability, **Prometheus** allows users to collect metrics,
+scrape data from various sources, store them efficiently, and query them in real-time. Its flexible data model,
+powerful query language, and seamless integration with [**Grafana**](https://grafana.com/){.external-link target="_blank"} make it a popular choice for monitoring the health
+and performance of systems and applications.
+
+### FastStream Metrics
+
+To add metrics to your broker, you need to:
+
+1. Install `FastStream` with `prometheus-client`
+
+ ```shell
+ pip install faststream[prometheus]
+ ```
+
+2. Add `PrometheusMiddleware` to your broker
+
+{!> includes/getting_started/prometheus/1.md !}
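+
+For example, here is a minimal sketch using the **Redis** broker (each broker provides its own middleware variant, e.g. `RedisPrometheusMiddleware` from `faststream.redis.prometheus`; the include above shows the full broker-specific snippets):
+
+```python
+from prometheus_client import CollectorRegistry
+
+from faststream import FastStream
+from faststream.redis import RedisBroker
+from faststream.redis.prometheus import RedisPrometheusMiddleware
+
+# The registry collects the middleware's metrics and can be reused
+# later to serve the `/metrics` endpoint.
+registry = CollectorRegistry()
+
+broker = RedisBroker(
+    middlewares=(
+        RedisPrometheusMiddleware(registry=registry),
+    )
+)
+app = FastStream(broker)
+```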
+
+### Exposing the `/metrics` endpoint
+The way Prometheus works requires the service to expose an HTTP endpoint that it can scrape.
+By convention, this is a GET endpoint, and its path is usually `/metrics`.
+
+FastStream's built-in **ASGI** support allows you to expose endpoints in your application.
+
+A convenient way to serve this endpoint is to use `make_asgi_app` from `prometheus_client`,
+passing in the registry that was passed to `PrometheusMiddleware`.
+
+{!> includes/getting_started/prometheus/2.md !}
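+
+A minimal sketch continuing the **Redis** example above:
+
+```python
+from prometheus_client import CollectorRegistry, make_asgi_app
+
+from faststream.asgi import AsgiFastStream
+from faststream.redis import RedisBroker
+from faststream.redis.prometheus import RedisPrometheusMiddleware
+
+registry = CollectorRegistry()
+
+broker = RedisBroker(
+    middlewares=(RedisPrometheusMiddleware(registry=registry),)
+)
+
+# Serve the same registry as a regular ASGI app on `/metrics`.
+app = AsgiFastStream(
+    broker,
+    asgi_routes=[
+        ("/metrics", make_asgi_app(registry)),
+    ],
+)
+```
+
+Once the application is running, a `GET` request to `/metrics` returns the exported metrics in the Prometheus text format.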
+
+---
+
+### Exported metrics
+
+{% set received_messages_total_description = 'The metric is incremented each time the application receives a message.
+This is necessary to count messages that the application has received but has not yet started processing.' %}
+{% set received_messages_size_bytes_description = 'The metric is filled with the sizes of received messages. When a message is received, the size of its body in bytes is calculated and written to the metric.
+It is useful for analyzing the sizes of incoming messages, especially when the application receives messages of unexpected sizes.' %}
+{% set received_messages_in_process_description = 'The metric is incremented when message processing starts and decremented when it ends.
+It counts the number of messages the application is currently processing.
+Such a metric helps answer the question: _`is there a need to scale the service?`_' %}
+{% set received_processed_messages_total_description = 'The metric is incremented after a message is processed, regardless of whether the processing ended with a success or an error.
+This metric allows you to analyze the number of processed messages and their statuses.' %}
+{% set received_processed_messages_duration_seconds_description = 'The metric is filled with the message processing time, regardless of whether the processing ended with a success or an error.
+Timestamps are recorded immediately before and after the processing.
+Then the metric is filled with their difference (in seconds).' %}
+{% set received_processed_messages_exceptions_total_description = 'The metric is incremented if any exception occurs while processing a message (except `AckMessage`, `NackMessage`, `RejectMessage` and `SkipMessage`).
+It can be used to draw conclusions about how many and what kind of exceptions occurred while processing messages.' %}
+{% set published_messages_total_description = 'The metric is incremented when messages are sent, regardless of whether the sending was successful or not.' %}
+{% set published_messages_duration_seconds_description = 'The metric is filled with the time it took to send the message, regardless of whether the sending succeeded or failed.
+Timestamps are recorded immediately before and after sending.
+Then the metric is filled with their difference (in seconds).' %}
+{% set published_messages_exceptions_total_description = 'The metric is incremented if any exception occurs while sending a message.
+It can be used to draw conclusions about how many and what kind of exceptions occurred while sending messages.' %}
+
+
+| Metric | Type | Description | Labels |
+|--------------------------------------------------|---------------|----------------------------------------------------------------|-------------------------------------------------------|
+| **received_messages_total** | **Counter** | {{ received_messages_total_description }} | `app_name`, `broker`, `handler` |
+| **received_messages_size_bytes** | **Histogram** | {{ received_messages_size_bytes_description }} | `app_name`, `broker`, `handler` |
+| **received_messages_in_process** | **Gauge** | {{ received_messages_in_process_description }} | `app_name`, `broker`, `handler` |
+| **received_processed_messages_total** | **Counter** | {{ received_processed_messages_total_description }} | `app_name`, `broker`, `handler`, `status` |
+| **received_processed_messages_duration_seconds** | **Histogram** | {{ received_processed_messages_duration_seconds_description }} | `app_name`, `broker`, `handler` |
+| **received_processed_messages_exceptions_total** | **Counter** | {{ received_processed_messages_exceptions_total_description }} | `app_name`, `broker`, `handler`, `exception_type` |
+| **published_messages_total** | **Counter** | {{ published_messages_total_description }} | `app_name`, `broker`, `destination`, `status` |
+| **published_messages_duration_seconds** | **Histogram** | {{ published_messages_duration_seconds_description }} | `app_name`, `broker`, `destination` |
+| **published_messages_exceptions_total** | **Counter** | {{ published_messages_exceptions_total_description }} | `app_name`, `broker`, `destination`, `exception_type` |
+
+### Labels
+
+| Label | Description | Values |
+|-----------------------------------|-----------------------------------------------------------------|---------------------------------------------------|
+| app_name                          | The name of the application, which the user can set themselves  | `faststream` by default                           |
+| broker | Broker name | `kafka`, `rabbit`, `nats`, `redis` |
+| handler | Where the message came from | |
+| status (while receiving) | Message processing status | `acked`, `nacked`, `rejected`, `skipped`, `error` |
+| exception_type (while receiving) | Exception type when processing message | |
+| status (while publishing) | Message publishing status | `success`, `error` |
+| destination | Where the message is sent | |
+| exception_type (while publishing) | Exception type when publishing message | |
+
+### Grafana dashboard
+
+You can import the [**Grafana dashboard**](https://grafana.com/grafana/dashboards/22130-faststream-metrics/){.external-link target="_blank"} to visualize the metrics collected by middleware.
+
+Enter the dashboard **URL** `https://grafana.com/grafana/dashboards/22130-faststream-metrics/` (or just the **ID**, `22130`), and click on **Load**.
+
+![HTML-page](../../../assets/img/import-dashboard.png){ .on-glb loading=lazy }
+`Import dashboard`
+
+Here is an [example](https://github.com/draincoder/faststream-monitoring){.external-link target="_blank"} application with configured **metrics**, **Prometheus**, and **Grafana**.
+
+![HTML-page](../../../assets/img/grafana-dashboard.png){ .on-glb loading=lazy }
+`Grafana dashboard`
diff --git a/docs/docs/en/getting-started/subscription/index.md b/docs/docs/en/getting-started/subscription/index.md
index e1ae7ecef2..2604674830 100644
--- a/docs/docs/en/getting-started/subscription/index.md
+++ b/docs/docs/en/getting-started/subscription/index.md
@@ -41,7 +41,7 @@ This way **FastStream** still consumes `#!python json.loads` result, but without
!!! warning
Setting the `apply_types=False` flag not only disables type casting but also `Depends` and `Context`.
- If you want to disable only type casting, use `validate=False` instead.
+ If you want to disable only type casting, use `serializer=None` instead.
## Multiple Subscriptions
diff --git a/docs/docs/en/howto/nats/in-progress.md b/docs/docs/en/howto/nats/in-progress.md
new file mode 100644
index 0000000000..82a402ad0b
--- /dev/null
+++ b/docs/docs/en/howto/nats/in-progress.md
@@ -0,0 +1,39 @@
+---
+# 0.5 - API
+# 2 - Release
+# 3 - Contributing
+# 5 - Template Page
+# 10 - Default
+search:
+ boost: 10
+---
+
+# In-Progress sender
+
+**NATS JetStream** uses the at-least-once delivery principle: a message will be redelivered until it is acknowledged (ACK), even if your handler simply takes a long time to process it. To prevent such redeliveries, you can periodically tell the server that the message is still being processed.
+
+??? example "Full Example"
+ ```python linenums="1"
+ import asyncio
+
+ from faststream import Depends, FastStream
+ from faststream.nats import NatsBroker, NatsMessage
+
+ broker = NatsBroker()
+ app = FastStream(broker)
+
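+    # Dependency: periodically notifies the server that the message is still in progress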
+ async def progress_sender(message: NatsMessage):
+ async def in_progress_task():
+ while True:
+ await asyncio.sleep(10.0)
+ await message.in_progress()
+
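+        # Run the notifier for the lifetime of the handler, then cancel it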
+ task = asyncio.create_task(in_progress_task())
+ yield
+ task.cancel()
+
+ @broker.subscriber("test", dependencies=[Depends(progress_sender)])
+ async def handler():
+ await asyncio.sleep(20.0)
+
+ ```
diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md
index cb9a207e09..7a8dc7cf4a 100644
--- a/docs/docs/en/release.md
+++ b/docs/docs/en/release.md
@@ -12,6 +12,117 @@ hide:
---
# Release Notes
+## 0.5.29
+
+### What's Changed
+
+* feat: add explicit message source enum by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1866](https://github.com/airtai/faststream/pull/1866){.external-link target="_blank"}
+* Change uv manual installation to setup-uv in CI by [@pavelepanov](https://github.com/pavelepanov){.external-link target="_blank"} in [#1871](https://github.com/airtai/faststream/pull/1871){.external-link target="_blank"}
+* refactor: make Task and Concurrent mixins broker-agnostic by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1873](https://github.com/airtai/faststream/pull/1873){.external-link target="_blank"}
+* Add support for environment variables in faststream run command by [@ulbwa](https://github.com/ulbwa){.external-link target="_blank"} in [#1876](https://github.com/airtai/faststream/pull/1876){.external-link target="_blank"}
+* fastapi example update by [@xodiumx](https://github.com/xodiumx){.external-link target="_blank"} in [#1875](https://github.com/airtai/faststream/pull/1875){.external-link target="_blank"}
+* Do not import `fake_context` if not needed by [@sobolevn](https://github.com/sobolevn){.external-link target="_blank"} in [#1877](https://github.com/airtai/faststream/pull/1877){.external-link target="_blank"}
+* build: add warning about manual lifespan_context by [@vectorvp](https://github.com/vectorvp){.external-link target="_blank"} in [#1878](https://github.com/airtai/faststream/pull/1878){.external-link target="_blank"}
+* Add trending badge by [@davorrunje](https://github.com/davorrunje){.external-link target="_blank"} in [#1882](https://github.com/airtai/faststream/pull/1882){.external-link target="_blank"}
+* feat: add class method to create a baggage instance from headers by [@vectorvp](https://github.com/vectorvp){.external-link target="_blank"} in [#1885](https://github.com/airtai/faststream/pull/1885){.external-link target="_blank"}
+* ops: update docker compose commands to compose V2 in scripts by [@vectorvp](https://github.com/vectorvp){.external-link target="_blank"} in [#1889](https://github.com/airtai/faststream/pull/1889){.external-link target="_blank"}
+
+### New Contributors
+* [@pavelepanov](https://github.com/pavelepanov){.external-link target="_blank"} made their first contribution in [#1871](https://github.com/airtai/faststream/pull/1871){.external-link target="_blank"}
+* [@xodiumx](https://github.com/xodiumx){.external-link target="_blank"} made their first contribution in [#1875](https://github.com/airtai/faststream/pull/1875){.external-link target="_blank"}
+* [@sobolevn](https://github.com/sobolevn){.external-link target="_blank"} made their first contribution in [#1877](https://github.com/airtai/faststream/pull/1877){.external-link target="_blank"}
+* [@vectorvp](https://github.com/vectorvp){.external-link target="_blank"} made their first contribution in [#1878](https://github.com/airtai/faststream/pull/1878){.external-link target="_blank"}
+
+**Full Changelog**: [#0.5.28...0.5.29](https://github.com/airtai/faststream/compare/0.5.28...0.5.29){.external-link target="_blank"}
+
+## 0.5.28
+
+### What's Changed
+
+A lot of time has passed since the [**0.5.7 OpenTelemetry** release](https://github.com/airtai/faststream/releases/tag/0.5.7), and now we have completed the **Observability** features we planned! **FastStream** supports **Prometheus** metrics natively!
+
+Special thanks to @roma-frolov and @draincoder (again) for it!
+
+To collect **Prometheus** metrics for your **FastStream** application, you just need to install the special distribution
+
+```cmd
+pip install faststream[prometheus]
+```
+
+And use **PrometheusMiddleware**. Also, it could be helpful to use our [**ASGI** support](https://faststream.airt.ai/latest/getting-started/asgi/) to serve the metrics endpoint in the same app.
+
+```python
+from prometheus_client import CollectorRegistry, make_asgi_app
+from faststream.asgi import AsgiFastStream
+from faststream.nats import NatsBroker
+from faststream.nats.prometheus import NatsPrometheusMiddleware
+
+registry = CollectorRegistry()
+
+broker = NatsBroker(
+ middlewares=(
+ NatsPrometheusMiddleware(registry=registry),
+ )
+)
+
+app = AsgiFastStream(
+ broker,
+ asgi_routes=[
+ ("/metrics", make_asgi_app(registry)),
+ ]
+)
+```
+
+Moreover, we have a ready-to-use [**Grafana** dashboard](https://grafana.com/grafana/dashboards/22130-faststream-metrics/) you can just import and use!
+
+To find more information about **Prometheus** support, just visit [our documentation](https://faststream.airt.ai/latest/getting-started/prometheus/).
+
+### All changes
+
+* docs: Correct minimum FastAPI version for lifespan handling by [@tim-hutchinson](https://github.com/tim-hutchinson){.external-link target="_blank"} in [#1853](https://github.com/airtai/faststream/pull/1853){.external-link target="_blank"}
+* add aiogram example by [@IvanKirpichnikov](https://github.com/IvanKirpichnikov){.external-link target="_blank"} in [#1858](https://github.com/airtai/faststream/pull/1858){.external-link target="_blank"}
+* Feature: Prometheus Middleware by [@roma-frolov](https://github.com/roma-frolov){.external-link target="_blank"} in [#1791](https://github.com/airtai/faststream/pull/1791){.external-link target="_blank"}
+* Add in-progress tutorial to how-to section by [@sheldygg](https://github.com/sheldygg){.external-link target="_blank"} in [#1859](https://github.com/airtai/faststream/pull/1859){.external-link target="_blank"}
+* docs: Add info about Grafana dashboard by [@draincoder](https://github.com/draincoder){.external-link target="_blank"} in [#1863](https://github.com/airtai/faststream/pull/1863){.external-link target="_blank"}
+
+### New Contributors
+
+* [@tim-hutchinson](https://github.com/tim-hutchinson){.external-link target="_blank"} made their first contribution in [#1853](https://github.com/airtai/faststream/pull/1853){.external-link target="_blank"}
+
+**Full Changelog**: [#0.5.27...0.5.28](https://github.com/airtai/faststream/compare/0.5.27...0.5.28){.external-link target="_blank"}
+
+## 0.5.27
+
+### What's Changed
+
+* fix: anyio major version parser by [@dotX12](https://github.com/dotX12){.external-link target="_blank"} in [#1850](https://github.com/airtai/faststream/pull/1850){.external-link target="_blank"}
+
+### New Contributors
+* [@dotX12](https://github.com/dotX12){.external-link target="_blank"} made their first contribution in [#1850](https://github.com/airtai/faststream/pull/1850){.external-link target="_blank"}
+
+**Full Changelog**: [#0.5.26...0.5.27](https://github.com/airtai/faststream/compare/0.5.26...0.5.27){.external-link target="_blank"}
+
+## 0.5.26
+
+### What's Changed
+
+This is the official **Python 3.13** support release! Now, **FastStream** works on (and is tested against) **Python 3.8 - 3.13**!
+
+Warning: **Python 3.8** has been EOL since the **3.13** release, and we plan to drop its support in **FastStream 0.6.0**.
+
+Also, the current release contains small bugfixes related to the **CLI** and the **AsyncAPI** schema.
+
+* fix: asgi docs by [@Sehat1137](https://github.com/Sehat1137){.external-link target="_blank"} in [#1828](https://github.com/airtai/faststream/pull/1828){.external-link target="_blank"}
+* docs: add link to RU TG community by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1831](https://github.com/airtai/faststream/pull/1831){.external-link target="_blank"}
+* docs: add dynaconf NATS HowTo example by [@sheldygg](https://github.com/sheldygg){.external-link target="_blank"} in [#1832](https://github.com/airtai/faststream/pull/1832){.external-link target="_blank"}
+* Fix AsyncAPI 2.6.0 operation label by [@KrySeyt](https://github.com/KrySeyt){.external-link target="_blank"} in [#1835](https://github.com/airtai/faststream/pull/1835){.external-link target="_blank"}
+* fix: correct CLI factory behavior by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1838](https://github.com/airtai/faststream/pull/1838){.external-link target="_blank"}
+* Autocommit precommit changes by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1840](https://github.com/airtai/faststream/pull/1840){.external-link target="_blank"}
+* Add devcontainers supporting all the brokers by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1839](https://github.com/airtai/faststream/pull/1839){.external-link target="_blank"}
+* Replace async Event with bool by [@Olegt0rr](https://github.com/Olegt0rr){.external-link target="_blank"} in [#1846](https://github.com/airtai/faststream/pull/1846){.external-link target="_blank"}
+* Add support for Python 3.13 by [@davorrunje](https://github.com/davorrunje){.external-link target="_blank"} in [#1845](https://github.com/airtai/faststream/pull/1845){.external-link target="_blank"}
+
+**Full Changelog**: [#0.5.25...0.5.26](https://github.com/airtai/faststream/compare/0.5.25...0.5.26){.external-link target="_blank"}
## 0.5.25
diff --git a/docs/docs/navigation_template.txt b/docs/docs/navigation_template.txt
index 4cd45d0874..1a1420d7ca 100644
--- a/docs/docs/navigation_template.txt
+++ b/docs/docs/navigation_template.txt
@@ -44,6 +44,7 @@ search:
- [CLI](getting-started/cli/index.md)
- [ASGI](getting-started/asgi.md)
- [OpenTelemetry](getting-started/opentelemetry/index.md)
+ - [Prometheus](getting-started/prometheus/index.md)
- [Logging](getting-started/logging.md)
- [Config Management](getting-started/config/index.md)
- [Task Scheduling](scheduling.md)
@@ -97,6 +98,7 @@ search:
- [Message Information](nats/message.md)
- [How-To](howto/nats/index.md)
- [DynaConf](howto/nats/dynaconf.md)
+      - [In-Progress](howto/nats/in-progress.md)
- [Redis](redis/index.md)
- [Pub/Sub](redis/pubsub/index.md)
- [Subscription](redis/pubsub/subscription.md)
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py
index 52c427af6c..1dc0c0b9e9 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/basic.py
@@ -1,5 +1,5 @@
from faststream import FastStream
-from faststream.kafka import KafkaBroker, KafkaMessage
+from faststream.kafka import KafkaBroker
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py
index fa879f9c41..d177e86909 100644
--- a/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py
+++ b/docs/docs_src/getting_started/asyncapi/asyncapi_customization/custom_info.py
@@ -6,9 +6,7 @@
broker = KafkaBroker("localhost:9092")
description="""# Title of the description
This description supports **Markdown** syntax"""
-app = FastStream(
- broker,
-)
+app = FastStream(broker)
docs_obj = AsyncAPI(
broker,
title="My App",
diff --git a/docs/docs_src/getting_started/context/confluent/cast.py b/docs/docs_src/getting_started/context/confluent/cast.py
index 3d0b14c343..77000f7b5b 100644
--- a/docs/docs_src/getting_started/context/confluent/cast.py
+++ b/docs/docs_src/getting_started/context/confluent/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.confluent import KafkaBroker
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-topic")
async def handle(
diff --git a/docs/docs_src/getting_started/context/confluent/custom_local_context.py b/docs/docs_src/getting_started/context/confluent/custom_local_context.py
index e10da7f3fa..5c23081e2d 100644
--- a/docs/docs_src/getting_started/context/confluent/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/confluent/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: KafkaMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/confluent/manual_local_context.py b/docs/docs_src/getting_started/context/confluent/manual_local_context.py
index c4264548d0..d419bda9a2 100644
--- a/docs/docs_src/getting_started/context/confluent/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/confluent/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types, ContextRepo
from faststream.confluent import KafkaBroker
from faststream.confluent.annotations import KafkaMessage
@@ -10,16 +10,17 @@
async def handle(
msg: str,
message: KafkaMessage,
+ context: ContextRepo,
):
tag = context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: KafkaMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/kafka/cast.py b/docs/docs_src/getting_started/context/kafka/cast.py
index 1ef06d3595..00db482531 100644
--- a/docs/docs_src/getting_started/context/kafka/cast.py
+++ b/docs/docs_src/getting_started/context/kafka/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.kafka import KafkaBroker
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-topic")
async def handle(
diff --git a/docs/docs_src/getting_started/context/kafka/custom_local_context.py b/docs/docs_src/getting_started/context/kafka/custom_local_context.py
index e20a5a6567..e137319775 100644
--- a/docs/docs_src/getting_started/context/kafka/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/kafka/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: KafkaMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/kafka/manual_local_context.py b/docs/docs_src/getting_started/context/kafka/manual_local_context.py
index 3e39cff046..4e69f6600a 100644
--- a/docs/docs_src/getting_started/context/kafka/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/kafka/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types, ContextRepo
from faststream.kafka import KafkaBroker
from faststream.kafka.annotations import KafkaMessage
@@ -10,16 +10,17 @@
async def handle(
msg: str,
message: KafkaMessage,
+ context: ContextRepo,
):
tag = context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: KafkaMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/nats/cast.py b/docs/docs_src/getting_started/context/nats/cast.py
index 0733561043..128cb19dd8 100644
--- a/docs/docs_src/getting_started/context/nats/cast.py
+++ b/docs/docs_src/getting_started/context/nats/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.nats import NatsBroker
broker = NatsBroker("nats://localhost:4222")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-subject")
async def handle(
diff --git a/docs/docs_src/getting_started/context/nats/custom_local_context.py b/docs/docs_src/getting_started/context/nats/custom_local_context.py
index 510ec251e4..484bb9f5f8 100644
--- a/docs/docs_src/getting_started/context/nats/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/nats/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: NatsMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/nats/manual_local_context.py b/docs/docs_src/getting_started/context/nats/manual_local_context.py
index 72a3519daf..fac68e4394 100644
--- a/docs/docs_src/getting_started/context/nats/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/nats/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types
from faststream.nats import NatsBroker
from faststream.nats.annotations import NatsMessage
@@ -11,15 +11,15 @@ async def handle(
msg: str,
message: NatsMessage,
):
- tag = context.set_local("correlation_id", message.correlation_id)
+ tag = app.context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: NatsMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/nested.py b/docs/docs_src/getting_started/context/nested.py
index 6eac7ca816..362112850d 100644
--- a/docs/docs_src/getting_started/context/nested.py
+++ b/docs/docs_src/getting_started/context/nested.py
@@ -11,6 +11,6 @@ async def handler(body):
nested_func(body)
-@apply_types
+@apply_types(context__=broker.context)
def nested_func(body, logger=Context()):
logger.info(body)
diff --git a/docs/docs_src/getting_started/context/rabbit/cast.py b/docs/docs_src/getting_started/context/rabbit/cast.py
index 24cf1bf72e..47ce2b4525 100644
--- a/docs/docs_src/getting_started/context/rabbit/cast.py
+++ b/docs/docs_src/getting_started/context/rabbit/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.rabbit import RabbitBroker
broker = RabbitBroker("amqp://guest:guest@localhost:5672/")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-queue")
async def handle(
diff --git a/docs/docs_src/getting_started/context/rabbit/custom_local_context.py b/docs/docs_src/getting_started/context/rabbit/custom_local_context.py
index 6ee9866967..9a3f922073 100644
--- a/docs/docs_src/getting_started/context/rabbit/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/rabbit/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: RabbitMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/rabbit/manual_local_context.py b/docs/docs_src/getting_started/context/rabbit/manual_local_context.py
index 426abe88bb..c6859ff184 100644
--- a/docs/docs_src/getting_started/context/rabbit/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/rabbit/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types
from faststream.rabbit import RabbitBroker
from faststream.rabbit.annotations import RabbitMessage
@@ -11,15 +11,15 @@ async def handle(
msg: str,
message: RabbitMessage,
):
- tag = context.set_local("correlation_id", message.correlation_id)
+ tag = app.context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: RabbitMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/context/redis/cast.py b/docs/docs_src/getting_started/context/redis/cast.py
index fbd5eaeb3b..203daafb30 100644
--- a/docs/docs_src/getting_started/context/redis/cast.py
+++ b/docs/docs_src/getting_started/context/redis/cast.py
@@ -1,9 +1,9 @@
-from faststream import Context, FastStream, context
+from faststream import Context, FastStream
from faststream.redis import RedisBroker
broker = RedisBroker("redis://localhost:6379")
app = FastStream(broker)
-context.set_global("secret", "1")
+app.context.set_global("secret", "1")
@broker.subscriber("test-channel")
async def handle(
diff --git a/docs/docs_src/getting_started/context/redis/custom_local_context.py b/docs/docs_src/getting_started/context/redis/custom_local_context.py
index 4feb1eb438..9e06b3ea93 100644
--- a/docs/docs_src/getting_started/context/redis/custom_local_context.py
+++ b/docs/docs_src/getting_started/context/redis/custom_local_context.py
@@ -16,7 +16,7 @@ async def handle(
call()
-@apply_types
+@apply_types(context__=app.context)
def call(
message: RedisMessage,
correlation_id=Context(),
diff --git a/docs/docs_src/getting_started/context/redis/manual_local_context.py b/docs/docs_src/getting_started/context/redis/manual_local_context.py
index f52af02782..74a5ced413 100644
--- a/docs/docs_src/getting_started/context/redis/manual_local_context.py
+++ b/docs/docs_src/getting_started/context/redis/manual_local_context.py
@@ -1,4 +1,4 @@
-from faststream import Context, FastStream, apply_types, context
+from faststream import Context, FastStream, apply_types
from faststream.redis import RedisBroker
from faststream.redis.annotations import RedisMessage
@@ -11,15 +11,15 @@ async def handle(
msg: str,
message: RedisMessage,
):
- tag = context.set_local("correlation_id", message.correlation_id)
+ tag = app.context.set_local("correlation_id", message.correlation_id)
call(tag)
-@apply_types
+@apply_types(context__=app.context)
def call(
tag,
message: RedisMessage,
correlation_id=Context(),
):
assert correlation_id == message.correlation_id
- context.reset_local("correlation_id", tag)
+ app.context.reset_local("correlation_id", tag)
diff --git a/docs/docs_src/getting_started/lifespan/multiple.py b/docs/docs_src/getting_started/lifespan/multiple.py
index f0280d4da4..d1d6fd75f6 100644
--- a/docs/docs_src/getting_started/lifespan/multiple.py
+++ b/docs/docs_src/getting_started/lifespan/multiple.py
@@ -1,6 +1,8 @@
+from unittest.mock import AsyncMock
+
from faststream import Context, ContextRepo, FastStream
-app = FastStream()
+app = FastStream(AsyncMock())
@app.on_startup
diff --git a/faststream/specification/asyncapi/base/__init__.py b/docs/docs_src/getting_started/prometheus/__init__.py
similarity index 100%
rename from faststream/specification/asyncapi/base/__init__.py
rename to docs/docs_src/getting_started/prometheus/__init__.py
diff --git a/docs/docs_src/getting_started/prometheus/confluent.py b/docs/docs_src/getting_started/prometheus/confluent.py
new file mode 100644
index 0000000000..2d89e8bee6
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/confluent.py
@@ -0,0 +1,13 @@
+from faststream import FastStream
+from faststream.confluent import KafkaBroker
+from faststream.confluent.prometheus import KafkaPrometheusMiddleware
+from prometheus_client import CollectorRegistry
+
+registry = CollectorRegistry()
+
+broker = KafkaBroker(
+ middlewares=(
+ KafkaPrometheusMiddleware(registry=registry),
+ )
+)
+app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/prometheus/confluent_asgi.py b/docs/docs_src/getting_started/prometheus/confluent_asgi.py
new file mode 100644
index 0000000000..2574a49cef
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/confluent_asgi.py
@@ -0,0 +1,18 @@
+from faststream.asgi import AsgiFastStream
+from faststream.confluent import KafkaBroker
+from faststream.confluent.prometheus import KafkaPrometheusMiddleware
+from prometheus_client import CollectorRegistry, make_asgi_app
+
+registry = CollectorRegistry()
+
+broker = KafkaBroker(
+ middlewares=(
+ KafkaPrometheusMiddleware(registry=registry),
+ )
+)
+app = AsgiFastStream(
+ broker,
+ asgi_routes=[
+ ("/metrics", make_asgi_app(registry)),
+ ]
+)
diff --git a/docs/docs_src/getting_started/prometheus/kafka.py b/docs/docs_src/getting_started/prometheus/kafka.py
new file mode 100644
index 0000000000..f6d1224e66
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/kafka.py
@@ -0,0 +1,13 @@
+from faststream import FastStream
+from faststream.kafka import KafkaBroker
+from faststream.kafka.prometheus import KafkaPrometheusMiddleware
+from prometheus_client import CollectorRegistry
+
+registry = CollectorRegistry()
+
+broker = KafkaBroker(
+ middlewares=(
+ KafkaPrometheusMiddleware(registry=registry),
+ )
+)
+app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/prometheus/kafka_asgi.py b/docs/docs_src/getting_started/prometheus/kafka_asgi.py
new file mode 100644
index 0000000000..ddf79040d9
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/kafka_asgi.py
@@ -0,0 +1,18 @@
+from faststream.asgi import AsgiFastStream
+from faststream.kafka import KafkaBroker
+from faststream.kafka.prometheus import KafkaPrometheusMiddleware
+from prometheus_client import CollectorRegistry, make_asgi_app
+
+registry = CollectorRegistry()
+
+broker = KafkaBroker(
+ middlewares=(
+ KafkaPrometheusMiddleware(registry=registry),
+ )
+)
+app = AsgiFastStream(
+ broker,
+ asgi_routes=[
+ ("/metrics", make_asgi_app(registry)),
+ ]
+)
diff --git a/docs/docs_src/getting_started/prometheus/nats.py b/docs/docs_src/getting_started/prometheus/nats.py
new file mode 100644
index 0000000000..0894078881
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/nats.py
@@ -0,0 +1,13 @@
+from faststream import FastStream
+from faststream.nats import NatsBroker
+from faststream.nats.prometheus import NatsPrometheusMiddleware
+from prometheus_client import CollectorRegistry
+
+registry = CollectorRegistry()
+
+broker = NatsBroker(
+ middlewares=(
+ NatsPrometheusMiddleware(registry=registry),
+ )
+)
+app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/prometheus/nats_asgi.py b/docs/docs_src/getting_started/prometheus/nats_asgi.py
new file mode 100644
index 0000000000..c273a36f36
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/nats_asgi.py
@@ -0,0 +1,18 @@
+from faststream.asgi import AsgiFastStream
+from faststream.nats import NatsBroker
+from faststream.nats.prometheus import NatsPrometheusMiddleware
+from prometheus_client import CollectorRegistry, make_asgi_app
+
+registry = CollectorRegistry()
+
+broker = NatsBroker(
+ middlewares=(
+ NatsPrometheusMiddleware(registry=registry),
+ )
+)
+app = AsgiFastStream(
+ broker,
+ asgi_routes=[
+ ("/metrics", make_asgi_app(registry)),
+ ]
+)
diff --git a/docs/docs_src/getting_started/prometheus/rabbit.py b/docs/docs_src/getting_started/prometheus/rabbit.py
new file mode 100644
index 0000000000..a0fb683b7f
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/rabbit.py
@@ -0,0 +1,13 @@
+from faststream import FastStream
+from faststream.rabbit import RabbitBroker
+from faststream.rabbit.prometheus import RabbitPrometheusMiddleware
+from prometheus_client import CollectorRegistry
+
+registry = CollectorRegistry()
+
+broker = RabbitBroker(
+ middlewares=(
+ RabbitPrometheusMiddleware(registry=registry),
+ )
+)
+app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/prometheus/rabbit_asgi.py b/docs/docs_src/getting_started/prometheus/rabbit_asgi.py
new file mode 100644
index 0000000000..40bc990fcc
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/rabbit_asgi.py
@@ -0,0 +1,18 @@
+from faststream.asgi import AsgiFastStream
+from faststream.rabbit import RabbitBroker
+from faststream.rabbit.prometheus import RabbitPrometheusMiddleware
+from prometheus_client import CollectorRegistry, make_asgi_app
+
+registry = CollectorRegistry()
+
+broker = RabbitBroker(
+ middlewares=(
+ RabbitPrometheusMiddleware(registry=registry),
+ )
+)
+app = AsgiFastStream(
+ broker,
+ asgi_routes=[
+ ("/metrics", make_asgi_app(registry)),
+ ]
+)
diff --git a/docs/docs_src/getting_started/prometheus/redis.py b/docs/docs_src/getting_started/prometheus/redis.py
new file mode 100644
index 0000000000..98fc2b70c0
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/redis.py
@@ -0,0 +1,13 @@
+from faststream import FastStream
+from faststream.redis import RedisBroker
+from faststream.redis.prometheus import RedisPrometheusMiddleware
+from prometheus_client import CollectorRegistry
+
+registry = CollectorRegistry()
+
+broker = RedisBroker(
+ middlewares=(
+ RedisPrometheusMiddleware(registry=registry),
+ )
+)
+app = FastStream(broker)
diff --git a/docs/docs_src/getting_started/prometheus/redis_asgi.py b/docs/docs_src/getting_started/prometheus/redis_asgi.py
new file mode 100644
index 0000000000..2c3b095f1b
--- /dev/null
+++ b/docs/docs_src/getting_started/prometheus/redis_asgi.py
@@ -0,0 +1,18 @@
+from faststream.asgi import AsgiFastStream
+from faststream.redis import RedisBroker
+from faststream.redis.prometheus import RedisPrometheusMiddleware
+from prometheus_client import CollectorRegistry, make_asgi_app
+
+registry = CollectorRegistry()
+
+broker = RedisBroker(
+ middlewares=(
+ RedisPrometheusMiddleware(registry=registry),
+ )
+)
+app = AsgiFastStream(
+ broker,
+ asgi_routes=[
+ ("/metrics", make_asgi_app(registry)),
+ ]
+)
diff --git a/docs/docs_src/getting_started/subscription/confluent/real_testing.py b/docs/docs_src/getting_started/subscription/confluent/real_testing.py
index 43973935b9..fcbd09f7e4 100644
--- a/docs/docs_src/getting_started/subscription/confluent/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/confluent/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.confluent import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/confluent/testing.py b/docs/docs_src/getting_started/subscription/confluent/testing.py
index 57ed6acaaa..dfb2bf964d 100644
--- a/docs/docs_src/getting_started/subscription/confluent/testing.py
+++ b/docs/docs_src/getting_started/subscription/confluent/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.confluent import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/kafka/real_testing.py b/docs/docs_src/getting_started/subscription/kafka/real_testing.py
index 0cf374b233..5eb6fd7817 100644
--- a/docs/docs_src/getting_started/subscription/kafka/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/kafka/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.kafka import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/kafka/testing.py b/docs/docs_src/getting_started/subscription/kafka/testing.py
index e1f6241276..cf834ff802 100644
--- a/docs/docs_src/getting_started/subscription/kafka/testing.py
+++ b/docs/docs_src/getting_started/subscription/kafka/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.kafka import TestKafkaBroker
diff --git a/docs/docs_src/getting_started/subscription/nats/real_testing.py b/docs/docs_src/getting_started/subscription/nats/real_testing.py
index 5e9d6e4567..c14123218c 100644
--- a/docs/docs_src/getting_started/subscription/nats/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/nats/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.nats import TestNatsBroker
diff --git a/docs/docs_src/getting_started/subscription/nats/testing.py b/docs/docs_src/getting_started/subscription/nats/testing.py
index 0f7560e043..4d66a744c0 100644
--- a/docs/docs_src/getting_started/subscription/nats/testing.py
+++ b/docs/docs_src/getting_started/subscription/nats/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.nats import TestNatsBroker
diff --git a/docs/docs_src/getting_started/subscription/rabbit/real_testing.py b/docs/docs_src/getting_started/subscription/rabbit/real_testing.py
index 900b6046e7..7cf61a2df5 100644
--- a/docs/docs_src/getting_started/subscription/rabbit/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/rabbit/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.rabbit import TestRabbitBroker
diff --git a/docs/docs_src/getting_started/subscription/rabbit/testing.py b/docs/docs_src/getting_started/subscription/rabbit/testing.py
index 78425924da..f49be05c7a 100644
--- a/docs/docs_src/getting_started/subscription/rabbit/testing.py
+++ b/docs/docs_src/getting_started/subscription/rabbit/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.rabbit import TestRabbitBroker
diff --git a/docs/docs_src/getting_started/subscription/redis/real_testing.py b/docs/docs_src/getting_started/subscription/redis/real_testing.py
index b2c05c203e..6514d66902 100644
--- a/docs/docs_src/getting_started/subscription/redis/real_testing.py
+++ b/docs/docs_src/getting_started/subscription/redis/real_testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.redis import TestRedisBroker
diff --git a/docs/docs_src/getting_started/subscription/redis/testing.py b/docs/docs_src/getting_started/subscription/redis/testing.py
index 4934366f75..bb38ffd5fe 100644
--- a/docs/docs_src/getting_started/subscription/redis/testing.py
+++ b/docs/docs_src/getting_started/subscription/redis/testing.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream.redis import TestRedisBroker
diff --git a/docs/docs_src/index/confluent/test.py b/docs/docs_src/index/confluent/test.py
index 1cc613d157..b569184a81 100644
--- a/docs/docs_src/index/confluent/test.py
+++ b/docs/docs_src/index/confluent/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.confluent import TestKafkaBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestKafkaBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-topic")
diff --git a/docs/docs_src/index/kafka/test.py b/docs/docs_src/index/kafka/test.py
index bfd740312c..67b57e6f12 100644
--- a/docs/docs_src/index/kafka/test.py
+++ b/docs/docs_src/index/kafka/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.kafka import TestKafkaBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestKafkaBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-topic")
diff --git a/docs/docs_src/index/nats/test.py b/docs/docs_src/index/nats/test.py
index 85b2e6de76..ca2e71e7b9 100644
--- a/docs/docs_src/index/nats/test.py
+++ b/docs/docs_src/index/nats/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.nats import TestNatsBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestNatsBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-subject")
diff --git a/docs/docs_src/index/rabbit/test.py b/docs/docs_src/index/rabbit/test.py
index a193db35b2..7b67df49dc 100644
--- a/docs/docs_src/index/rabbit/test.py
+++ b/docs/docs_src/index/rabbit/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.rabbit import TestRabbitBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestRabbitBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-queue")
diff --git a/docs/docs_src/index/redis/test.py b/docs/docs_src/index/redis/test.py
index 9a14ba4190..411e032edb 100644
--- a/docs/docs_src/index/redis/test.py
+++ b/docs/docs_src/index/redis/test.py
@@ -1,7 +1,7 @@
from .pydantic import broker
import pytest
-import pydantic
+from fast_depends.exceptions import ValidationError
from faststream.redis import TestRedisBroker
@@ -16,5 +16,5 @@ async def test_correct():
@pytest.mark.asyncio
async def test_invalid():
async with TestRedisBroker(broker) as br:
- with pytest.raises(pydantic.ValidationError):
+ with pytest.raises(ValidationError):
await br.publish("wrong message", "in-channel")
diff --git a/docs/docs_src/integrations/no_http_frameworks_integrations/__init__.py b/docs/docs_src/integrations/no_http_frameworks_integrations/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/docs/docs_src/integrations/no_http_frameworks_integrations/aiogram.py b/docs/docs_src/integrations/no_http_frameworks_integrations/aiogram.py
new file mode 100644
index 0000000000..94d8cc7aae
--- /dev/null
+++ b/docs/docs_src/integrations/no_http_frameworks_integrations/aiogram.py
@@ -0,0 +1,34 @@
+import asyncio
+
+from aiogram import Bot, Dispatcher
+from aiogram.types import Message
+
+from faststream.nats import NatsBroker
+
+bot = Bot("")
+dispatcher = Dispatcher()
+broker = NatsBroker()
+
+@broker.subscriber("echo")
+async def echo_faststream_handler(data: dict[str, int]) -> None:
+ await bot.copy_message(**data)
+
+
+@dispatcher.message()
+async def echo_aiogram_handler(event: Message) -> None:
+ await broker.publish(
+ subject="echo",
+ message={
+ "chat_id": event.chat.id,
+ "message_id": event.message_id,
+ "from_chat_id": event.chat.id,
+ },
+ )
+
+
+async def main() -> None:
+ async with broker:
+ await broker.start()
+ await dispatcher.start_polling(bot)
+
+asyncio.run(main())
diff --git a/docs/includes/getting_started/integrations/http/1.md b/docs/includes/getting_started/integrations/http/1.md
index 6c6d58e0c0..d124345ce4 100644
--- a/docs/includes/getting_started/integrations/http/1.md
+++ b/docs/includes/getting_started/integrations/http/1.md
@@ -42,3 +42,10 @@
```python linenums="1" hl_lines="5 7 10-12 32-36"
{!> docs_src/integrations/http_frameworks_integrations/tornado.py !}
```
+
+{{ and_not_only_http }}
+
+=== "Aiogram"
+ ```python linenums="1" hl_lines="6 10 12-14 30-31"
+ {!> docs_src/integrations/no_http_frameworks_integrations/aiogram.py !}
+ ```
diff --git a/docs/includes/getting_started/prometheus/1.md b/docs/includes/getting_started/prometheus/1.md
new file mode 100644
index 0000000000..16ad860ff8
--- /dev/null
+++ b/docs/includes/getting_started/prometheus/1.md
@@ -0,0 +1,24 @@
+=== "AIOKafka"
+ ```python linenums="1" hl_lines="6 10"
+ {!> docs_src/getting_started/prometheus/kafka.py!}
+ ```
+
+=== "Confluent"
+ ```python linenums="1" hl_lines="6 10"
+ {!> docs_src/getting_started/prometheus/confluent.py!}
+ ```
+
+=== "RabbitMQ"
+ ```python linenums="1" hl_lines="6 10"
+ {!> docs_src/getting_started/prometheus/rabbit.py!}
+ ```
+
+=== "NATS"
+ ```python linenums="1" hl_lines="6 10"
+ {!> docs_src/getting_started/prometheus/nats.py!}
+ ```
+
+=== "Redis"
+ ```python linenums="1" hl_lines="6 10"
+ {!> docs_src/getting_started/prometheus/redis.py!}
+ ```
diff --git a/docs/includes/getting_started/prometheus/2.md b/docs/includes/getting_started/prometheus/2.md
new file mode 100644
index 0000000000..483e5437f4
--- /dev/null
+++ b/docs/includes/getting_started/prometheus/2.md
@@ -0,0 +1,24 @@
+=== "AIOKafka"
+ ```python linenums="1" hl_lines="6 10 13 16"
+ {!> docs_src/getting_started/prometheus/kafka_asgi.py!}
+ ```
+
+=== "Confluent"
+ ```python linenums="1" hl_lines="6 10 13 16"
+ {!> docs_src/getting_started/prometheus/confluent_asgi.py!}
+ ```
+
+=== "RabbitMQ"
+ ```python linenums="1" hl_lines="6 10 13 16"
+ {!> docs_src/getting_started/prometheus/rabbit_asgi.py!}
+ ```
+
+=== "NATS"
+ ```python linenums="1" hl_lines="6 10 13 16"
+ {!> docs_src/getting_started/prometheus/nats_asgi.py!}
+ ```
+
+=== "Redis"
+ ```python linenums="1" hl_lines="6 10 13 16"
+ {!> docs_src/getting_started/prometheus/redis_asgi.py!}
+ ```
diff --git a/examples/e10_middlewares.py b/examples/e10_middlewares.py
index 03a0519d79..31a2a257c9 100644
--- a/examples/e10_middlewares.py
+++ b/examples/e10_middlewares.py
@@ -25,7 +25,7 @@ async def subscriber_middleware(
msg: RabbitMessage,
) -> Any:
print(f"call handler middleware with body: {msg}")
- msg._decoded_body = "fake message"
+ msg.body = b"fake message"
result = await call_next(msg)
print("handler middleware out")
return result
diff --git a/examples/fastapi_integration/app.py b/examples/fastapi_integration/app.py
index e9f7ccc579..b53f4becfa 100644
--- a/examples/fastapi_integration/app.py
+++ b/examples/fastapi_integration/app.py
@@ -3,7 +3,7 @@
from faststream.rabbit.fastapi import Logger, RabbitRouter
router = RabbitRouter("amqp://guest:guest@localhost:5672/")
-app = FastAPI(lifespan=router.lifespan_context)
+app = FastAPI()
publisher = router.publisher("response-q")
diff --git a/faststream/__about__.py b/faststream/__about__.py
index 70de450a72..b817eed0eb 100644
--- a/faststream/__about__.py
+++ b/faststream/__about__.py
@@ -1,5 +1,5 @@
"""Simple and fast framework to create message brokers based microservices."""
-__version__ = "0.6.0a0"
+__version__ = "0.5.30"
SERVICE_NAME = f"faststream-{__version__}"
diff --git a/faststream/__init__.py b/faststream/__init__.py
index b4241ff458..cad7e628bf 100644
--- a/faststream/__init__.py
+++ b/faststream/__init__.py
@@ -1,11 +1,10 @@
"""A Python framework for building services interacting with Apache Kafka, RabbitMQ, NATS and Redis."""
-from faststream._internal.context import context
from faststream._internal.testing.app import TestApp
from faststream._internal.utils import apply_types
from faststream.annotations import ContextRepo, Logger
from faststream.app import FastStream
-from faststream.middlewares import BaseMiddleware, ExceptionMiddleware
+from faststream.middlewares import AckPolicy, BaseMiddleware, ExceptionMiddleware
from faststream.params import (
Context,
Depends,
@@ -17,6 +16,7 @@
__all__ = (
# middlewares
+ "AckPolicy",
"BaseMiddleware",
# params
"Context",
@@ -35,6 +35,4 @@
"TestApp",
# utils
"apply_types",
- # context
- "context",
)
diff --git a/faststream/_internal/_compat.py b/faststream/_internal/_compat.py
index 7cd4842ca5..baccfc866f 100644
--- a/faststream/_internal/_compat.py
+++ b/faststream/_internal/_compat.py
@@ -1,5 +1,4 @@
import json
-import os
import sys
import warnings
from collections.abc import Iterable, Mapping
@@ -13,14 +12,13 @@
Union,
)
-from fast_depends._compat import ( # type: ignore[attr-defined]
- PYDANTIC_V2,
- PYDANTIC_VERSION,
-)
from pydantic import BaseModel
+from pydantic.version import VERSION as PYDANTIC_VERSION
from faststream._internal.basic_types import AnyDict
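+# detect the installed pydantic major version from its published version string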
+PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
+
IS_WINDOWS = (
sys.platform == "win32" or sys.platform == "cygwin" or sys.platform == "msys"
)
@@ -29,10 +27,6 @@
ModelVar = TypeVar("ModelVar", bound=BaseModel)
-def is_test_env() -> bool:
- return bool(os.getenv("PYTEST_CURRENT_TEST"))
-
-
json_dumps: Callable[..., bytes]
orjson: Any
ujson: Any
@@ -92,9 +86,14 @@ def json_dumps(*a: Any, **kw: Any) -> bytes:
with_info_plain_validator_function,
)
else:
- from pydantic._internal._annotated_handlers import ( # type: ignore[no-redef]
- GetJsonSchemaHandler,
- )
+ if PYDANTIC_VERSION >= "2.10":
+ from pydantic.annotated_handlers import (
+ GetJsonSchemaHandler,
+ )
+ else:
+ from pydantic._internal._annotated_handlers import ( # type: ignore[no-redef]
+ GetJsonSchemaHandler,
+ )
from pydantic_core.core_schema import (
general_plain_validator_function as with_info_plain_validator_function,
)
@@ -176,7 +175,7 @@ def with_info_plain_validator_function( # type: ignore[misc]
return {}
-anyio_major, *_ = map(int, get_version("anyio").split("."))
+anyio_major = int(get_version("anyio").split(".")[0])
ANYIO_V3 = anyio_major == 3
diff --git a/faststream/_internal/application.py b/faststream/_internal/application.py
index 15c389b6e8..85e0ba43d0 100644
--- a/faststream/_internal/application.py
+++ b/faststream/_internal/application.py
@@ -10,11 +10,14 @@
TypeVar,
)
+from fast_depends import Provider
from typing_extensions import ParamSpec
-from faststream._internal.context import context
+from faststream._internal.constants import EMPTY
+from faststream._internal.context import ContextRepo
from faststream._internal.log import logger
-from faststream._internal.setup.state import EmptyState
+from faststream._internal.state import DIState
+from faststream._internal.state.broker import OuterBrokerState
from faststream._internal.utils import apply_types
from faststream._internal.utils.functions import (
drop_response_type,
@@ -23,6 +26,8 @@
)
if TYPE_CHECKING:
+ from fast_depends.library.serializer import SerializerProto
+
from faststream._internal.basic_types import (
AnyCallable,
AsyncFunc,
@@ -68,59 +73,94 @@ async def catch_startup_validation_error() -> AsyncIterator[None]:
class StartAbleApplication:
def __init__(
self,
- broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
+ ) -> None:
+ self._init_setupable_(
+ broker,
+ provider=provider,
+ serializer=serializer,
+ )
+
+ def _init_setupable_( # noqa: PLW3201
+ self,
+ broker: "BrokerUsecase[Any, Any]",
+ /,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
) -> None:
- self._state = EmptyState()
+ self.context = ContextRepo()
+ self.provider = provider or Provider()
+
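+ # EMPTY sentinel means the serializer was not specified: fall back to pydantic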
+ if serializer is EMPTY:
+ from fast_depends.pydantic.serializer import PydanticSerializer
+
+ serializer = PydanticSerializer()
+
+ self._state = DIState(
+ use_fastdepends=True,
+ get_dependent=None,
+ call_decorators=(),
+ serializer=serializer,
+ provider=self.provider,
+ context=self.context,
+ )
self.broker = broker
+ self._setup()
+
def _setup(self) -> None:
- if self.broker is not None:
- self.broker._setup(self._state)
+ self.broker._setup(OuterBrokerState(di_state=self._state))
async def _start_broker(self) -> None:
- if self.broker is not None:
- await self.broker.connect()
- self._setup()
- await self.broker.start()
+ await self.broker.start()
class Application(StartAbleApplication):
def __init__(
self,
- *,
- broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ broker: "BrokerUsecase[Any, Any]",
+ /,
logger: Optional["LoggerProto"] = logger,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
lifespan: Optional["Lifespan"] = None,
on_startup: Sequence["AnyCallable"] = (),
after_startup: Sequence["AnyCallable"] = (),
on_shutdown: Sequence["AnyCallable"] = (),
after_shutdown: Sequence["AnyCallable"] = (),
) -> None:
- super().__init__(broker)
+ super().__init__(
+ broker,
+ provider=provider,
+ serializer=serializer,
+ )
- context.set_global("app", self)
+ self.context.set_global("app", self)
self.logger = logger
- self.context = context
self._on_startup_calling: list[AsyncFunc] = [
- apply_types(to_async(x)) for x in on_startup
+ apply_types(to_async(x), context__=self.context) for x in on_startup
]
self._after_startup_calling: list[AsyncFunc] = [
- apply_types(to_async(x)) for x in after_startup
+ apply_types(to_async(x), context__=self.context) for x in after_startup
]
self._on_shutdown_calling: list[AsyncFunc] = [
- apply_types(to_async(x)) for x in on_shutdown
+ apply_types(to_async(x), context__=self.context) for x in on_shutdown
]
self._after_shutdown_calling: list[AsyncFunc] = [
- apply_types(to_async(x)) for x in after_shutdown
+ apply_types(to_async(x), context__=self.context) for x in after_shutdown
]
if lifespan is not None:
self.lifespan_context = apply_types(
func=lifespan,
wrap_model=drop_response_type,
+ context__=self.context,
)
else:
self.lifespan_context = fake_context
@@ -198,8 +238,7 @@ async def _shutdown(self, log_level: int = logging.INFO) -> None:
async def stop(self) -> None:
"""Executes shutdown hooks and stop broker."""
async with self._shutdown_hooks_context():
- if self.broker is not None:
- await self.broker.close()
+ await self.broker.close()
@asynccontextmanager
async def _shutdown_hooks_context(self) -> AsyncIterator[None]:
@@ -235,13 +274,6 @@ def _log(self, level: int, message: str) -> None:
if self.logger is not None:
self.logger.log(level, message)
- def set_broker(self, broker: "BrokerUsecase[Any, Any]") -> None:
- """Set already existed App object broker.
-
- Useful then you create/init broker in `on_startup` hook.
- """
- self.broker = broker
-
# Hooks
def on_startup(
@@ -252,7 +284,9 @@ def on_startup(
This hook also takes an extra CLI options as a kwargs.
"""
- self._on_startup_calling.append(apply_types(to_async(func)))
+ self._on_startup_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
return func
def on_shutdown(
@@ -260,7 +294,9 @@ def on_shutdown(
func: Callable[P_HookParams, T_HookReturn],
) -> Callable[P_HookParams, T_HookReturn]:
"""Add hook running BEFORE broker disconnected."""
- self._on_shutdown_calling.append(apply_types(to_async(func)))
+ self._on_shutdown_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
return func
def after_startup(
@@ -268,7 +304,9 @@ def after_startup(
func: Callable[P_HookParams, T_HookReturn],
) -> Callable[P_HookParams, T_HookReturn]:
"""Add hook running AFTER broker connected."""
- self._after_startup_calling.append(apply_types(to_async(func)))
+ self._after_startup_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
return func
def after_shutdown(
@@ -276,5 +314,7 @@ def after_shutdown(
func: Callable[P_HookParams, T_HookReturn],
) -> Callable[P_HookParams, T_HookReturn]:
"""Add hook running AFTER broker disconnected."""
- self._after_shutdown_calling.append(apply_types(to_async(func)))
+ self._after_shutdown_calling.append(
+ apply_types(to_async(func), context__=self.context)
+ )
return func
diff --git a/faststream/_internal/broker/abc_broker.py b/faststream/_internal/broker/abc_broker.py
index 20aae90d1a..f92b8c2358 100644
--- a/faststream/_internal/broker/abc_broker.py
+++ b/faststream/_internal/broker/abc_broker.py
@@ -7,10 +7,11 @@
Optional,
)
+from faststream._internal.state import BrokerState, Pointer
from faststream._internal.types import BrokerMiddleware, CustomCallable, MsgType
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.publisher.proto import PublisherProto
from faststream._internal.subscriber.proto import SubscriberProto
@@ -24,11 +25,12 @@ def __init__(
self,
*,
prefix: str,
- dependencies: Iterable["Depends"],
+ dependencies: Iterable["Dependant"],
middlewares: Iterable["BrokerMiddleware[MsgType]"],
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
include_in_schema: Optional[bool],
+ state: "BrokerState",
) -> None:
self.prefix = prefix
self.include_in_schema = include_in_schema
@@ -37,16 +39,18 @@ def __init__(
self._publishers = []
self._dependencies = dependencies
- self._middlewares = middlewares
+ self.middlewares = middlewares
self._parser = parser
self._decoder = decoder
+ self._state = Pointer(state)
+
def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
"""Append BrokerMiddleware to the end of middlewares list.
Current middleware will be used as a most inner of already existed ones.
"""
- self._middlewares = (*self._middlewares, middleware)
+ self.middlewares = (*self.middlewares, middleware)
for sub in self._subscribers:
sub.add_middleware(middleware)
@@ -58,30 +62,50 @@ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
def subscriber(
self,
subscriber: "SubscriberProto[MsgType]",
+ is_running: bool = False,
) -> "SubscriberProto[MsgType]":
subscriber.add_prefix(self.prefix)
- self._subscribers.append(subscriber)
+ if not is_running:
+ self._subscribers.append(subscriber)
return subscriber
@abstractmethod
def publisher(
self,
publisher: "PublisherProto[MsgType]",
+ is_running: bool = False,
) -> "PublisherProto[MsgType]":
publisher.add_prefix(self.prefix)
- self._publishers.append(publisher)
+
+ if not is_running:
+ self._publishers.append(publisher)
+
return publisher
+ def setup_publisher(
+ self,
+ publisher: "PublisherProto[MsgType]",
+ **kwargs: Any,
+ ) -> None:
+ """Setup the Publisher to prepare it to starting."""
+ publisher._setup(**kwargs, state=self._state)
+
+ def _setup(self, state: Optional["BrokerState"]) -> None:
+ if state is not None:
+ self._state.set(state)
+
def include_router(
self,
router: "ABCBroker[Any]",
*,
prefix: str = "",
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
middlewares: Iterable["BrokerMiddleware[MsgType]"] = (),
include_in_schema: Optional[bool] = None,
) -> None:
"""Includes a router in the current object."""
+ router._setup(self._state.get())
+
for h in router._subscribers:
h.add_prefix(f"{self.prefix}{prefix}")
@@ -91,7 +115,7 @@ def include_router(
h.include_in_schema = include_in_schema
h._broker_middlewares = (
- *self._middlewares,
+ *self.middlewares,
*middlewares,
*h._broker_middlewares,
)
@@ -111,7 +135,7 @@ def include_router(
p.include_in_schema = include_in_schema
p._broker_middlewares = (
- *self._middlewares,
+ *self.middlewares,
*middlewares,
*p._broker_middlewares,
)
@@ -126,6 +150,8 @@ def include_routers(
self.include_router(r)
def _solve_include_in_schema(self, include_in_schema: bool) -> bool:
- if self.include_in_schema is None or self.include_in_schema:
- return include_in_schema
- return self.include_in_schema
+ # explicit `is False` check so a `None` value still falls through
+ if self.include_in_schema is False:
+ return False
+
+ return include_in_schema
diff --git a/faststream/_internal/broker/broker.py b/faststream/_internal/broker/broker.py
index 2cdf23cbe7..831295ae76 100644
--- a/faststream/_internal/broker/broker.py
+++ b/faststream/_internal/broker/broker.py
@@ -1,6 +1,5 @@
from abc import abstractmethod
from collections.abc import Iterable, Sequence
-from functools import partial
from typing import (
TYPE_CHECKING,
Annotated,
@@ -12,19 +11,22 @@
cast,
)
+from fast_depends import Provider
from typing_extensions import Doc, Self
-from faststream._internal._compat import is_test_env
-from faststream._internal.setup import (
- EmptyState,
- FastDependsData,
+from faststream._internal.constants import EMPTY
+from faststream._internal.context.repository import ContextRepo
+from faststream._internal.state import (
+ DIState,
LoggerState,
SetupAble,
- SetupState,
)
-from faststream._internal.setup.state import BaseState
+from faststream._internal.state.broker import (
+ BrokerState,
+ InitialBrokerState,
+)
+from faststream._internal.state.producer import ProducerUnset
from faststream._internal.subscriber.proto import SubscriberProto
-from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import (
AsyncCustomCallable,
BrokerMiddleware,
@@ -33,16 +35,16 @@
MsgType,
)
from faststream._internal.utils.functions import to_async
-from faststream.exceptions import NOT_CONNECTED_YET
-from faststream.middlewares.logging import CriticalLogMiddleware
from faststream.specification.proto import ServerSpecification
from .abc_broker import ABCBroker
+from .pub_base import BrokerPublishMixin
if TYPE_CHECKING:
from types import TracebackType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from faststream._internal.basic_types import AnyDict, Decorator
from faststream._internal.publisher.proto import (
@@ -57,14 +59,14 @@ class BrokerUsecase(
ABCBroker[MsgType],
SetupAble,
ServerSpecification,
+ BrokerPublishMixin[MsgType],
Generic[MsgType, ConnectionType],
):
"""A class representing a broker async use case."""
url: Union[str, Sequence[str]]
_connection: Optional[ConnectionType]
- _producer: Optional["ProducerProto"]
- _state: BaseState
+ _producer: "ProducerProto"
def __init__(
self,
@@ -78,7 +80,7 @@ def __init__(
Doc("Custom parser object."),
],
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
],
middlewares: Annotated[
@@ -98,10 +100,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
],
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ],
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -139,6 +138,20 @@ def __init__(
],
**connection_kwargs: Any,
) -> None:
+ state = InitialBrokerState(
+ di_state=DIState(
+ use_fastdepends=apply_types,
+ get_dependent=_get_dependant,
+ call_decorators=_call_decorators,
+ serializer=serializer,
+ provider=Provider(),
+ context=ContextRepo(),
+ ),
+ logger_state=logger_state,
+ graceful_timeout=graceful_timeout,
+ producer=ProducerUnset(),
+ )
+
super().__init__(
middlewares=middlewares,
dependencies=dependencies,
@@ -153,31 +166,13 @@ def __init__(
# Broker is a root router
include_in_schema=True,
prefix="",
+ state=state,
)
self.running = False
- self.graceful_timeout = graceful_timeout
self._connection_kwargs = connection_kwargs
self._connection = None
- self._producer = None
-
- # TODO: remove useless middleware filter
- if not is_test_env():
- self._middlewares = (
- CriticalLogMiddleware(logger_state),
- *self._middlewares,
- )
-
- self._state = EmptyState(
- depends_params=FastDependsData(
- apply_types=apply_types,
- is_validate=validate,
- get_dependent=_get_dependant,
- call_decorators=_call_decorators,
- ),
- logger_state=logger_state,
- )
# AsyncAPI information
self.url = specification_url
@@ -187,6 +182,18 @@ def __init__(
self.tags = tags
self.security = security
+ @property
+ def _producer(self) -> "ProducerProto":
+ return self._state.get().producer
+
+ @property
+ def context(self) -> "ContextRepo":
+ return self._state.get().di_state.context
+
+ @property
+ def provider(self) -> Provider:
+ return self._state.get().di_state.provider
+
async def __aenter__(self) -> "Self":
await self.connect()
return self
@@ -203,12 +210,24 @@ async def __aexit__(
async def start(self) -> None:
"""Start the broker async use case."""
# TODO: filter by already running handlers after TestClient refactor
- for handler in self._subscribers:
- self._state.logger_state.log(
- f"`{handler.call_name}` waiting for messages",
- extra=handler.get_log_context(None),
+ state = self._state.get()
+
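+ # register every subscriber's log context before the logger state is built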
+ for subscriber in self._subscribers:
+ log_context = subscriber.get_log_context(None)
+ log_context.pop("message_id", None)
+ state.logger_state.params_storage.setup_log_contest(log_context)
+
+ state._setup_logger_state()
+
+ for subscriber in self._subscribers:
+ state.logger_state.log(
+ f"`{subscriber.call_name}` waiting for messages",
+ extra=subscriber.get_log_context(None),
)
- await handler.start()
+ await subscriber.start()
+
+ if not self.running:
+ self.running = True
async def connect(self, **kwargs: Any) -> ConnectionType:
"""Connect to a remote server."""
@@ -224,33 +243,42 @@ async def _connect(self) -> ConnectionType:
"""Connect to a resource."""
raise NotImplementedError
- def _setup(self, state: Optional[BaseState] = None) -> None:
- """Prepare all Broker entities to startup."""
- if not self._state:
+ def _setup(self, state: Optional["BrokerState"] = None) -> None:
+ """Prepare all Broker entities to startup.
+
+ Method should be idempotent because it can be called twice.
+ """
+ broker_state = self._state.get()
+ current_di_state = broker_state.di_state
+ broker_serializer = current_di_state.serializer
+
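+ # a parent container (e.g. the FastStream app) shared its DI state: inherit it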
+ if state is not None:
+ di_state = state.di_state
+
+ if broker_serializer is EMPTY:
+ broker_serializer = di_state.serializer
+
+ current_di_state.update(
+ serializer=broker_serializer,
+ provider=di_state.provider,
+ context=di_state.context,
+ )
+
+ else:
# Fallback to default state if there no
# parent container like FastStream object
- default_state = self._state.copy_to_state(SetupState)
+ if broker_serializer is EMPTY:
+ from fast_depends.pydantic import PydanticSerializer
- if state:
- self._state = state.copy_with_params(
- depends_params=default_state.depends_params,
- logger_state=default_state.logger_state,
- )
- else:
- self._state = default_state
+ broker_serializer = PydanticSerializer()
- if not self.running:
- self.running = True
-
- for h in self._subscribers:
- log_context = h.get_log_context(None)
- log_context.pop("message_id", None)
- self._state.logger_state.params_storage.setup_log_contest(log_context)
+ current_di_state.update(
+ serializer=broker_serializer,
+ )
- self._state._setup()
+ broker_state._setup()
- # TODO: why we can't move it to running?
- # TODO: can we setup subscriber in running broker automatically?
+ # TODO: move setup to object creation
for h in self._subscribers:
self.setup_subscriber(h)
@@ -265,45 +293,22 @@ def setup_subscriber(
"""Setup the Subscriber to prepare it to starting."""
data = self._subscriber_setup_extra.copy()
data.update(kwargs)
- subscriber._setup(**data)
-
- def setup_publisher(
- self,
- publisher: "PublisherProto[MsgType]",
- **kwargs: Any,
- ) -> None:
- """Setup the Publisher to prepare it to starting."""
- data = self._publisher_setup_extra.copy()
- data.update(kwargs)
- publisher._setup(**data)
+ subscriber._setup(**data, state=self._state)
@property
def _subscriber_setup_extra(self) -> "AnyDict":
return {
- "logger": self._state.logger_state.logger.logger,
- "producer": self._producer,
- "graceful_timeout": self.graceful_timeout,
"extra_context": {
"broker": self,
- "logger": self._state.logger_state.logger.logger,
},
# broker options
"broker_parser": self._parser,
"broker_decoder": self._decoder,
- # dependant args
- "state": self._state,
- }
-
- @property
- def _publisher_setup_extra(self) -> "AnyDict":
- return {
- "producer": self._producer,
}
def publisher(self, *args: Any, **kwargs: Any) -> "PublisherProto[MsgType]":
- pub = super().publisher(*args, **kwargs)
- if self.running:
- self.setup_publisher(pub)
+ pub = super().publisher(*args, **kwargs, is_running=self.running)
+ self.setup_publisher(pub)
return pub
async def close(
@@ -318,53 +323,6 @@ async def close(
self.running = False
- async def publish(
- self,
- msg: Any,
- *,
- producer: Optional["ProducerProto"],
- correlation_id: Optional[str] = None,
- **kwargs: Any,
- ) -> Optional[Any]:
- """Publish message directly."""
- assert producer, NOT_CONNECTED_YET # nosec B101
-
- publish = producer.publish
-
- for m in self._middlewares:
- publish = partial(m(None).publish_scope, publish)
-
- return await publish(msg, correlation_id=correlation_id, **kwargs)
-
- async def request(
- self,
- msg: Any,
- *,
- producer: Optional["ProducerProto"],
- correlation_id: Optional[str] = None,
- **kwargs: Any,
- ) -> Any:
- """Publish message directly."""
- assert producer, NOT_CONNECTED_YET # nosec B101
-
- request = producer.request
- for m in self._middlewares:
- request = partial(m(None).publish_scope, request)
-
- published_msg = await request(
- msg,
- correlation_id=correlation_id,
- **kwargs,
- )
-
- message: Any = await process_msg(
- msg=published_msg,
- middlewares=self._middlewares,
- parser=producer._parser,
- decoder=producer._decoder,
- )
- return message
-
@abstractmethod
async def ping(self, timeout: Optional[float]) -> bool:
"""Check connection alive."""
diff --git a/faststream/_internal/broker/pub_base.py b/faststream/_internal/broker/pub_base.py
new file mode 100644
index 0000000000..31cfb476fd
--- /dev/null
+++ b/faststream/_internal/broker/pub_base.py
@@ -0,0 +1,97 @@
+from abc import abstractmethod
+from collections.abc import Iterable
+from functools import partial
+from typing import TYPE_CHECKING, Any, Generic
+
+from faststream._internal.subscriber.utils import process_msg
+from faststream._internal.types import MsgType
+from faststream.message.source_type import SourceType
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.context import ContextRepo
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.response import PublishCommand
+
+
+class BrokerPublishMixin(Generic[MsgType]):
+ middlewares: Iterable["BrokerMiddleware[MsgType]"]
+ context: "ContextRepo"
+
+ @abstractmethod
+ async def publish(
+ self,
+ message: "SendableMessage",
+ queue: str,
+ /,
+ ) -> Any:
+ raise NotImplementedError
+
+ async def _basic_publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ producer: "ProducerProto",
+ ) -> Any:
+ publish = producer.publish
+ context = self.context # cache the property lookup
+
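+ # wrap the raw producer call in every broker middleware's publish_scope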
+ for m in self.middlewares:
+ publish = partial(m(None, context=context).publish_scope, publish)
+
+ return await publish(cmd)
+
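+ # batch publishing is broker-specific, so the base implementation just raises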
+ async def publish_batch(
+ self,
+ *messages: "SendableMessage",
+ queue: str,
+ ) -> Any:
+ raise NotImplementedError
+
+ async def _basic_publish_batch(
+ self,
+ cmd: "PublishCommand",
+ *,
+ producer: "ProducerProto",
+ ) -> Any:
+ publish = producer.publish_batch
+ context = self.context # cache the property lookup
+
+ for m in self.middlewares:
+ publish = partial(m(None, context=context).publish_scope, publish)
+
+ return await publish(cmd)
+
+ @abstractmethod
+ async def request(
+ self,
+ message: "SendableMessage",
+ queue: str,
+ /,
+ timeout: float = 0.5,
+ ) -> Any:
+ raise NotImplementedError
+
+ async def _basic_request(
+ self,
+ cmd: "PublishCommand",
+ *,
+ producer: "ProducerProto",
+ ) -> Any:
+ request = producer.request
+ context = self.context # cache the property lookup
+
+ for m in self.middlewares:
+ request = partial(m(None, context=context).publish_scope, request)
+
+ published_msg = await request(cmd)
+
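+ # feed the raw reply through the same middleware/parser chain as an inbound message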
+ response_msg: Any = await process_msg(
+ msg=published_msg,
+ middlewares=(m(published_msg, context=context) for m in self.middlewares),
+ parser=producer._parser,
+ decoder=producer._decoder,
+ source_type=SourceType.RESPONSE,
+ )
+ return response_msg
diff --git a/faststream/_internal/broker/router.py b/faststream/_internal/broker/router.py
index 35d2b2779a..c2fe269e1d 100644
--- a/faststream/_internal/broker/router.py
+++ b/faststream/_internal/broker/router.py
@@ -6,6 +6,7 @@
Optional,
)
+from faststream._internal.state.broker import EmptyBrokerState
from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
@@ -15,7 +16,7 @@
from .abc_broker import ABCBroker
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
@@ -23,6 +24,8 @@
class ArgsContainer:
"""Class to store any arguments."""
+ __slots__ = ("args", "kwargs")
+
args: Iterable[Any]
kwargs: "AnyDict"
@@ -38,6 +41,8 @@ def __init__(
class SubscriberRoute(ArgsContainer):
"""A generic class to represent a broker route."""
+ __slots__ = ("args", "call", "kwargs", "publishers")
+
call: Callable[..., Any]
publishers: Iterable[Any]
@@ -64,7 +69,7 @@ def __init__(
handlers: Iterable[SubscriberRoute],
# base options
prefix: str,
- dependencies: Iterable["Depends"],
+ dependencies: Iterable["Dependant"],
middlewares: Iterable["BrokerMiddleware[MsgType]"],
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
@@ -77,6 +82,7 @@ def __init__(
parser=parser,
decoder=decoder,
include_in_schema=include_in_schema,
+ state=EmptyBrokerState("You should include the router in a broker."),
)
for h in handlers:
diff --git a/faststream/_internal/cli/docs/app.py b/faststream/_internal/cli/docs/app.py
index c8f17f5629..d7c7b5951d 100644
--- a/faststream/_internal/cli/docs/app.py
+++ b/faststream/_internal/cli/docs/app.py
@@ -138,7 +138,7 @@ def gen(
_, asyncapi_obj = import_from_string(asyncapi, is_factory=is_factory)
- assert isinstance(asyncapi_obj, Specification)
+ assert isinstance(asyncapi_obj, Specification) # nosec B101
raw_schema = asyncapi_obj.schema
@@ -173,7 +173,7 @@ def _parse_and_serve(
if ":" in docs:
_, docs_obj = import_from_string(docs, is_factory=is_factory)
- assert isinstance(docs_obj, Specification)
+ assert isinstance(docs_obj, Specification) # nosec B101
raw_schema = docs_obj
diff --git a/faststream/_internal/cli/main.py b/faststream/_internal/cli/main.py
index 9ed0f516ef..95fb8037c6 100644
--- a/faststream/_internal/cli/main.py
+++ b/faststream/_internal/cli/main.py
@@ -66,6 +66,7 @@ def run(
"--workers",
show_default=False,
help="Run [workers] applications with process spawning.",
+ envvar="FASTSTREAM_WORKERS",
),
log_level: LogLevels = typer.Option(
LogLevels.notset,
@@ -73,6 +74,7 @@ def run(
"--log-level",
case_sensitive=False,
help="Set selected level for FastStream and brokers logger objects.",
+ envvar="FASTSTREAM_LOG_LEVEL",
),
reload: bool = typer.Option(
False,
@@ -96,12 +98,12 @@ def run(
"Look for APP in the specified directory, by adding this to the PYTHONPATH."
" Defaults to the current working directory."
),
+ envvar="FASTSTREAM_APP_DIR",
),
is_factory: bool = typer.Option(
False,
"-f",
"--factory",
- is_flag=True,
help="Treat APP as an application factory.",
),
) -> None:
@@ -162,7 +164,11 @@ def run(
_run(*args)
else:
- _run(*args)
+ _run_imported_app(
+ app_obj,
+ extra_options=extra,
+ log_level=casted_log_level,
+ )
def _run(
@@ -171,11 +177,24 @@ def _run(
extra_options: dict[str, "SettingField"],
is_factory: bool,
log_level: int = logging.NOTSET,
- app_level: int = logging.INFO,
+ app_level: int = logging.INFO, # option for reloader only
) -> None:
"""Runs the specified application."""
_, app_obj = import_from_string(app, is_factory=is_factory)
+ _run_imported_app(
+ app_obj,
+ extra_options=extra_options,
+ log_level=log_level,
+ app_level=app_level,
+ )
+
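+# shared launch path for an already-imported Application object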
+def _run_imported_app(
+ app_obj: "Application",
+ extra_options: dict[str, "SettingField"],
+ log_level: int = logging.NOTSET,
+ app_level: int = logging.INFO, # option for reloader only
+) -> None:
if not isinstance(app_obj, Application):
msg = f'Imported object "{app_obj}" must be "Application" type.'
raise typer.BadParameter(
@@ -220,13 +239,11 @@ def publish(
),
rpc: bool = typer.Option(
False,
- is_flag=True,
help="Enable RPC mode and system output.",
),
is_factory: bool = typer.Option(
False,
"--factory",
- is_flag=True,
help="Treat APP as an application factory.",
),
) -> None:
@@ -245,7 +262,7 @@ def publish(
try:
_, app_obj = import_from_string(app, is_factory=is_factory)
- assert isinstance(app_obj, FastStream), app_obj
+ assert isinstance(app_obj, FastStream), app_obj # nosec B101
if not app_obj.broker:
msg = "Broker instance not found in the app."
diff --git a/faststream/_internal/cli/utils/logs.py b/faststream/_internal/cli/utils/logs.py
index f1656686a4..1c1f98936c 100644
--- a/faststream/_internal/cli/utils/logs.py
+++ b/faststream/_internal/cli/utils/logs.py
@@ -1,11 +1,10 @@
import logging
from collections import defaultdict
from enum import Enum
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING, Union
if TYPE_CHECKING:
- from faststream._internal.basic_types import LoggerProto
- from faststream.app import FastStream
+ from faststream._internal.application import Application
class LogLevels(str, Enum):
@@ -64,14 +63,9 @@ def get_log_level(level: Union[LogLevels, str, int]) -> int:
return None
-def set_log_level(level: int, app: "FastStream") -> None:
+def set_log_level(level: int, app: "Application") -> None:
"""Sets the log level for an application."""
if app.logger and getattr(app.logger, "setLevel", None):
app.logger.setLevel(level) # type: ignore[attr-defined]
- if app.broker:
- broker_logger: Optional[LoggerProto] = (
- app.broker._state.logger_state.logger.logger
- )
- if broker_logger is not None and getattr(broker_logger, "setLevel", None):
- broker_logger.setLevel(level) # type: ignore[attr-defined]
+ app.broker._state.get().logger_state.set_level(level)
diff --git a/faststream/_internal/constants.py b/faststream/_internal/constants.py
index 108e318f67..c81916ed95 100644
--- a/faststream/_internal/constants.py
+++ b/faststream/_internal/constants.py
@@ -7,8 +7,8 @@
class ContentTypes(str, Enum):
"""A class to represent content types."""
- text = "text/plain"
- json = "application/json"
+ TEXT = "text/plain"
+ JSON = "application/json"
class EmptyPlaceholder:
diff --git a/faststream/_internal/context/__init__.py b/faststream/_internal/context/__init__.py
index 89152e682f..f0a0b1d1cb 100644
--- a/faststream/_internal/context/__init__.py
+++ b/faststream/_internal/context/__init__.py
@@ -1,8 +1,7 @@
from .context_type import Context
-from .repository import ContextRepo, context
+from .repository import ContextRepo
__all__ = (
"Context",
"ContextRepo",
- "context",
)
diff --git a/faststream/_internal/context/context_type.py b/faststream/_internal/context/context_type.py
index c2eb815b6e..b50e859066 100644
--- a/faststream/_internal/context/context_type.py
+++ b/faststream/_internal/context/context_type.py
@@ -67,6 +67,7 @@ def use(self, /, **kwargs: Any) -> AnyDict:
name=name,
default=self.default,
initial=self.initial,
+ context=kwargs["context__"],
)
):
kwargs[self.param_name] = v
diff --git a/faststream/_internal/context/repository.py b/faststream/_internal/context/repository.py
index 9990ad9bd0..eab1763088 100644
--- a/faststream/_internal/context/repository.py
+++ b/faststream/_internal/context/repository.py
@@ -7,8 +7,6 @@
from faststream._internal.constants import EMPTY
from faststream.exceptions import ContextError
-__all__ = ("ContextRepo", "context")
-
class ContextRepo:
"""A class to represent a context repository."""
@@ -171,6 +169,3 @@ def resolve(self, argument: str) -> Any:
def clear(self) -> None:
self._global_context = {"context": self}
self._scope_context.clear()
-
-
-context = ContextRepo()
diff --git a/faststream/_internal/context/resolve.py b/faststream/_internal/context/resolve.py
index bca91de33a..854229175e 100644
--- a/faststream/_internal/context/resolve.py
+++ b/faststream/_internal/context/resolve.py
@@ -1,14 +1,16 @@
-from typing import Any, Callable, Optional
+from typing import TYPE_CHECKING, Any, Callable, Optional
from faststream._internal.constants import EMPTY
-from .repository import context
+if TYPE_CHECKING:
+ from .repository import ContextRepo
def resolve_context_by_name(
name: str,
default: Any,
initial: Optional[Callable[..., Any]],
+ context: "ContextRepo",
) -> Any:
value: Any = EMPTY
diff --git a/faststream/_internal/fastapi/_compat.py b/faststream/_internal/fastapi/_compat.py
index 0393b62489..2359c114e6 100644
--- a/faststream/_internal/fastapi/_compat.py
+++ b/faststream/_internal/fastapi/_compat.py
@@ -12,7 +12,10 @@
from fastapi.dependencies.models import Dependant
from fastapi.requests import Request
-major, minor, patch, *_ = map(int, FASTAPI_VERSION.split("."))
+major, minor, patch, *_ = FASTAPI_VERSION.split(".")
+major = int(major)
+minor = int(minor)
+patch = int(patch)
FASTAPI_V2 = major > 0 or minor > 100
FASTAPI_V106 = major > 0 or minor >= 106
FASTAPI_v102_3 = major > 0 or minor > 112 or (minor == 112 and patch > 2)
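
The rewritten version parsing is not just stylistic: for a PEP 440 post-release such as `0.112.2.post1`, starred unpacking forces `map(int, ...)` to evaluate every component, so it raises before anything is assigned. Converting only the first three parts is robust:

```python
FASTAPI_VERSION = "0.112.2.post1"  # illustrative post-release version

try:
    # Old approach: int("post1") raises during unpacking.
    major, minor, patch, *_ = map(int, FASTAPI_VERSION.split("."))
except ValueError:
    pass

# New approach: only the leading numeric components are converted.
major, minor, patch, *_ = FASTAPI_VERSION.split(".")
major, minor, patch = int(major), int(minor), int(patch)
assert (major, minor, patch) == (0, 112, 2)
```
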
diff --git a/faststream/_internal/fastapi/context.py b/faststream/_internal/fastapi/context.py
index 0a76e31a19..78d8dd26a7 100644
--- a/faststream/_internal/fastapi/context.py
+++ b/faststream/_internal/fastapi/context.py
@@ -15,14 +15,18 @@ def Context( # noqa: N802
initial: Optional[Callable[..., Any]] = None,
) -> Any:
"""Get access to objects of the Context."""
- return params.Depends(
- lambda: resolve_context_by_name(
+
+ def solve_context(
+ context: Annotated[Any, params.Header(alias="context__")],
+ ) -> Any:
+ return resolve_context_by_name(
name=name,
default=default,
initial=initial,
- ),
- use_cache=True,
- )
+ context=context,
+ )
+
+ return params.Depends(solve_context, use_cache=True)
Logger = Annotated[logging.Logger, Context("logger")]
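
`Context()` is still a plain `Depends` on the handler side; the difference is that it now receives the `ContextRepo` through the reserved `context__` header injected by the wrapped route (see `route.py` below) instead of the removed global. Handler usage is unchanged; a sketch assuming the documented `faststream.rabbit.fastapi` re-exports:

```python
import logging
from typing import Annotated

from faststream.rabbit.fastapi import Context, RabbitRouter

router = RabbitRouter("amqp://guest:guest@localhost:5672/")
Logger = Annotated[logging.Logger, Context("logger")]


@router.subscriber("test-queue")
async def handler(body: str, logger: Logger) -> None:
    logger.info("received: %s", body)
```
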
diff --git a/faststream/_internal/fastapi/get_dependant.py b/faststream/_internal/fastapi/get_dependant.py
index e21a62954a..2db1b140d9 100644
--- a/faststream/_internal/fastapi/get_dependant.py
+++ b/faststream/_internal/fastapi/get_dependant.py
@@ -1,6 +1,7 @@
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any, Callable, cast
+from fast_depends.library.serializer import OptionItem
from fastapi.dependencies.utils import get_dependant, get_parameterless_sub_dependant
from faststream._internal._compat import PYDANTIC_V2
@@ -120,6 +121,9 @@ def _patch_fastapi_dependent(dependant: "Dependant") -> "Dependant":
)
dependant.custom_fields = {} # type: ignore[attr-defined]
- dependant.flat_params = params_unique # type: ignore[attr-defined]
+ dependant.flat_params = [
+ OptionItem(field_name=name, field_type=type_, default_value=default)
+ for name, (type_, default) in params_unique.items()
+ ] # type: ignore[attr-defined]
return dependant
diff --git a/faststream/_internal/fastapi/route.py b/faststream/_internal/fastapi/route.py
index dffe9517aa..a132daf3f9 100644
--- a/faststream/_internal/fastapi/route.py
+++ b/faststream/_internal/fastapi/route.py
@@ -35,6 +35,7 @@
from fastapi.types import IncEx
from faststream._internal.basic_types import AnyDict
+ from faststream._internal.state import DIState
from faststream.message import StreamMessage as NativeMessage
@@ -75,6 +76,7 @@ def wrap_callable_to_fastapi_compatible(
response_model_exclude_unset: bool,
response_model_exclude_defaults: bool,
response_model_exclude_none: bool,
+ state: "DIState",
) -> Callable[["NativeMessage[Any]"], Awaitable[Any]]:
__magic_attr = "__faststream_consumer__"
@@ -100,6 +102,7 @@ def wrap_callable_to_fastapi_compatible(
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
+ state=state,
)
setattr(parsed_callable, __magic_attr, True)
@@ -117,6 +120,7 @@ def build_faststream_to_fastapi_parser(
response_model_exclude_unset: bool,
response_model_exclude_defaults: bool,
response_model_exclude_none: bool,
+ state: "DIState",
) -> Callable[["NativeMessage[Any]"], Awaitable[Any]]:
"""Creates a session for handling requests."""
assert dependent.call # nosec B101
@@ -158,14 +162,14 @@ async def parsed_consumer(message: "NativeMessage[Any]") -> Any:
stream_message = StreamMessage(
body=fastapi_body,
- headers=message.headers,
+ headers={"context__": state.context, **message.headers},
path={**path, **message.path},
)
else:
stream_message = StreamMessage(
body={},
- headers={},
+ headers={"context__": state.context},
path={},
)
diff --git a/faststream/_internal/fastapi/router.py b/faststream/_internal/fastapi/router.py
index a0d4a1ee1e..29452792b1 100644
--- a/faststream/_internal/fastapi/router.py
+++ b/faststream/_internal/fastapi/router.py
@@ -1,4 +1,5 @@
import json
+import warnings
from abc import abstractmethod
from collections.abc import AsyncIterator, Awaitable, Iterable, Mapping, Sequence
from contextlib import asynccontextmanager
@@ -25,12 +26,10 @@
from faststream._internal.application import StartAbleApplication
from faststream._internal.broker.router import BrokerRouter
-from faststream._internal.context.repository import context
from faststream._internal.fastapi.get_dependant import get_fastapi_dependant
from faststream._internal.fastapi.route import (
wrap_callable_to_fastapi_compatible,
)
-from faststream._internal.setup import EmptyState
from faststream._internal.types import (
MsgType,
P_HandlerParams,
@@ -69,7 +68,7 @@ async def __aexit__(
if not exc_type and (
background := cast(
Optional[BackgroundTasks],
- getattr(context.get_local("message"), "background", None),
+ getattr(self.context.get_local("message"), "background", None),
)
):
await background()
@@ -130,7 +129,7 @@ def __init__(
self.broker_class
), "You should specify `broker_class` at your implementation"
- self.broker = self.broker_class(
+ broker = self.broker_class(
*connection_args,
middlewares=(
*middlewares,
@@ -143,6 +142,11 @@ def __init__(
**connection_kwars,
)
+ self._init_setupable_(
+ broker,
+ provider=None,
+ )
+
self.setup_state = setup_state
# Specification information
@@ -160,8 +164,6 @@ def __init__(
self.schema = None
- self._state = EmptyState()
-
super().__init__(
prefix=prefix,
tags=tags,
@@ -194,6 +196,8 @@ def __init__(
self._after_startup_hooks = []
self._on_shutdown_hooks = []
+ self._lifespan_started = False
+
def _get_dependencies_overides_provider(self) -> Optional[Any]:
"""Dependency provider WeakRef resolver."""
if self.dependency_overrides_provider is not None:
@@ -231,6 +235,7 @@ def wrapper(
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
provider_factory=self._get_dependencies_overides_provider,
+ state=self._state,
)
return wrapper
@@ -314,22 +319,24 @@ async def start_broker_lifespan(
self.weak_dependencies_provider.add(app)
async with lifespan_context(app) as maybe_context:
- if maybe_context is None:
- context: AnyDict = {}
- else:
- context = dict(maybe_context)
+ lifespan_extra = {"broker": self.broker, **(maybe_context or {})}
- context.update({"broker": self.broker})
- await self._start_broker()
+ if not self._lifespan_started:
+ await self._start_broker()
+ self._lifespan_started = True
+ else:
+ warnings.warn(
+ "Specifying 'lifespan_context' manually is no longer necessary with FastAPI >= 0.112.2.",
+ category=RuntimeWarning,
+ stacklevel=2,
+ )
for h in self._after_startup_hooks:
- h_context = await h(app)
- if h_context: # pragma: no branch
- context.update(h_context)
+ lifespan_extra.update(await h(app) or {})
try:
if self.setup_state:
- yield context
+ yield lifespan_extra
else:
# NOTE: old asgi compatibility
yield None
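
The `_lifespan_started` flag prevents a double broker start when users keep the pre-0.112.2 pattern of wiring the router lifespan manually; on newer FastAPI the router's lifespan is composed by `include_router`, so the second entry only warns. A sketch of the now-redundant pattern:

```python
from fastapi import FastAPI

# Manual wiring (needed before FastAPI 0.112.2). With newer FastAPI this
# lifespan also runs via include_router, so the guard emits a RuntimeWarning
# instead of starting the broker twice.
app = FastAPI(lifespan=router.lifespan_context)
app.include_router(router)
```
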
diff --git a/faststream/_internal/log/logging.py b/faststream/_internal/log/logging.py
index 4156b24270..2fa346ca8a 100644
--- a/faststream/_internal/log/logging.py
+++ b/faststream/_internal/log/logging.py
@@ -2,10 +2,14 @@
import sys
from collections.abc import Mapping
from logging import LogRecord
+from typing import TYPE_CHECKING
-from faststream._internal.context.repository import context
from faststream._internal.log.formatter import ColourizedFormatter
+if TYPE_CHECKING:
+ from faststream._internal.context.repository import ContextRepo
+
+
logger = logging.getLogger("faststream")
logger.setLevel(logging.INFO)
logger.propagate = False
@@ -24,15 +28,17 @@ def __init__(
self,
default_context: Mapping[str, str],
message_id_ln: int,
+ context: "ContextRepo",
name: str = "",
) -> None:
self.default_context = default_context
self.message_id_ln = message_id_ln
+ self.context = context
super().__init__(name)
def filter(self, record: LogRecord) -> bool:
if is_suitable := super().filter(record):
- log_context: Mapping[str, str] = context.get_local(
+ log_context: Mapping[str, str] = self.context.get_local(
"log_context",
self.default_context,
)
@@ -51,11 +57,13 @@ def get_broker_logger(
default_context: Mapping[str, str],
message_id_ln: int,
fmt: str,
+ context: "ContextRepo",
+ log_level: int,
) -> logging.Logger:
logger = logging.getLogger(f"faststream.access.{name}")
- logger.setLevel(logging.INFO)
+ logger.setLevel(log_level)
logger.propagate = False
- logger.addFilter(ExtendedFilter(default_context, message_id_ln))
+ logger.addFilter(ExtendedFilter(default_context, message_id_ln, context=context))
handler = logging.StreamHandler(stream=sys.stdout)
handler.setFormatter(
ColourizedFormatter(
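
`get_broker_logger` likewise takes its `ContextRepo` and initial level explicitly now. A hypothetical call, with all argument values purely illustrative:

```python
import logging

from faststream._internal.context import ContextRepo
from faststream._internal.log.logging import get_broker_logger

logger = get_broker_logger(
    name="rabbit",
    default_context={"queue": "", "exchange": ""},
    message_id_ln=10,
    fmt="%(asctime)s %(levelname)s - %(message)s",
    context=ContextRepo(),
    log_level=logging.INFO,
)
```
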
diff --git a/faststream/_internal/proto.py b/faststream/_internal/proto.py
index b75266d087..615dec872b 100644
--- a/faststream/_internal/proto.py
+++ b/faststream/_internal/proto.py
@@ -1,10 +1,8 @@
from abc import abstractmethod
from typing import Any, Optional, Protocol, TypeVar, Union, overload
-from .setup import SetupAble
-
-class Endpoint(SetupAble, Protocol):
+class Endpoint(Protocol):
@abstractmethod
def add_prefix(self, prefix: str) -> None: ...
diff --git a/faststream/_internal/publisher/fake.py b/faststream/_internal/publisher/fake.py
index 3fb3b1e074..e1d498d86c 100644
--- a/faststream/_internal/publisher/fake.py
+++ b/faststream/_internal/publisher/fake.py
@@ -1,50 +1,60 @@
+from abc import abstractmethod
from collections.abc import Iterable
from functools import partial
-from itertools import chain
from typing import TYPE_CHECKING, Any, Optional
+from faststream._internal.basic_types import SendableMessage
from faststream._internal.publisher.proto import BasePublisherProto
if TYPE_CHECKING:
- from faststream._internal.basic_types import AnyDict, AsyncFunc, SendableMessage
+ from faststream._internal.basic_types import AsyncFunc
+ from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.types import PublisherMiddleware
+ from faststream.response.response import PublishCommand
class FakePublisher(BasePublisherProto):
- """Publisher Interface implementation to use as RPC or REPLY TO publisher."""
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
def __init__(
self,
- method: "AsyncFunc",
*,
- publish_kwargs: "AnyDict",
- middlewares: Iterable["PublisherMiddleware"] = (),
+ producer: "ProducerProto",
) -> None:
"""Initialize an object."""
- self.method = method
- self.publish_kwargs = publish_kwargs
- self.middlewares = middlewares
+ self._producer = producer
- async def publish(
+ @abstractmethod
+ def patch_command(self, cmd: "PublishCommand") -> "PublishCommand":
+ raise NotImplementedError
+
+ async def _publish(
self,
- message: "SendableMessage",
+ cmd: "PublishCommand",
*,
- correlation_id: Optional[str] = None,
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
- **kwargs: Any,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
) -> Any:
- """Publish a message."""
- publish_kwargs = {
- "correlation_id": correlation_id,
- **self.publish_kwargs,
- **kwargs,
- }
-
- call: AsyncFunc = self.method
- for m in chain(_extra_middlewares, self.middlewares):
+ """This method should be called in subscriber flow only."""
+ cmd = self.patch_command(cmd)
+
+ call: AsyncFunc = self._producer.publish
+ for m in _extra_middlewares:
call = partial(m, call)
- return await call(message, **publish_kwargs)
+ return await call(cmd)
+
+ async def publish(
+ self,
+ message: SendableMessage,
+ /,
+ *,
+ correlation_id: Optional[str] = None,
+ ) -> Optional[Any]:
+ msg = (
+ f"`{self.__class__.__name__}` can be used only to publish "
+ "a response for `reply-to` or `RPC` messages."
+ )
+ raise NotImplementedError(msg)
async def request(
self,
@@ -52,12 +62,9 @@ async def request(
/,
*,
correlation_id: Optional[str] = None,
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> Any:
msg = (
- "`FakePublisher` can be used only to publish "
+ f"`{self.__class__.__name__}` can be used only to publish "
"a response for `reply-to` or `RPC` messages."
)
- raise NotImplementedError(
- msg,
- )
+ raise NotImplementedError(msg)
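
`FakePublisher` is now an abstract base: concrete brokers subclass it and implement `patch_command` to point the outgoing command at the RPC caller's `reply-to` destination. A minimal sketch of such a subclass (the `destination` field name is an assumption here):

```python
from faststream._internal.publisher.fake import FakePublisher
from faststream._internal.publisher.proto import ProducerProto
from faststream.response.response import PublishCommand


class ReplyToFakePublisher(FakePublisher):
    """Hypothetical broker-specific reply publisher."""

    def __init__(self, *, producer: ProducerProto, reply_to: str) -> None:
        super().__init__(producer=producer)
        self._reply_to = reply_to

    def patch_command(self, cmd: PublishCommand) -> PublishCommand:
        # Redirect the response to the queue the RPC caller awaits on.
        cmd.destination = self._reply_to  # assumed command field
        return cmd
```
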
diff --git a/faststream/_internal/publisher/proto.py b/faststream/_internal/publisher/proto.py
index 206fd96545..31d66c0268 100644
--- a/faststream/_internal/publisher/proto.py
+++ b/faststream/_internal/publisher/proto.py
@@ -6,11 +6,11 @@
from faststream._internal.proto import Endpoint
from faststream._internal.types import MsgType
-from faststream.specification.proto.endpoint import EndpointSpecification
-from faststream.specification.schema.publisher import PublisherSpec
+from faststream.response.response import PublishCommand
if TYPE_CHECKING:
from faststream._internal.basic_types import SendableMessage
+ from faststream._internal.state import BrokerState, Pointer
from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
@@ -18,6 +18,7 @@
PublisherMiddleware,
T_HandlerReturn,
)
+ from faststream.response.response import PublishCommand
class ProducerProto(Protocol):
@@ -25,27 +26,26 @@ class ProducerProto(Protocol):
_decoder: "AsyncCallable"
@abstractmethod
- async def publish(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- ) -> Optional[Any]:
+ async def publish(self, cmd: "PublishCommand") -> Optional[Any]:
"""Publishes a message asynchronously."""
...
@abstractmethod
- async def request(
- self,
- message: "SendableMessage",
- /,
- *,
- correlation_id: Optional[str] = None,
- ) -> Any:
+ async def request(self, cmd: "PublishCommand") -> Any:
"""Publishes a message synchronously."""
...
+ @abstractmethod
+ async def publish_batch(self, cmd: "PublishCommand") -> None:
+ """Publishes a messages batch asynchronously."""
+ ...
+
+
+class ProducerFactory(Protocol):
+ def __call__(
+ self, parser: "AsyncCallable", decoder: "AsyncCallable"
+ ) -> ProducerProto: ...
+
class BasePublisherProto(Protocol):
@abstractmethod
@@ -55,9 +55,24 @@ async def publish(
/,
*,
correlation_id: Optional[str] = None,
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> Optional[Any]:
- """Publishes a message asynchronously."""
+ """Public method to publish a message.
+
+ Should only be called by the user via `broker.publisher(...).publish(...)`.
+ """
+ ...
+
+ @abstractmethod
+ async def _publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """Private method to publish a message.
+
+ Should be called inside the `publish` method or as a step of the `consume` scope.
+ """
...
@abstractmethod
@@ -67,20 +82,16 @@ async def request(
/,
*,
correlation_id: Optional[str] = None,
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> Optional[Any]:
"""Publishes a message synchronously."""
...
class PublisherProto(
- EndpointSpecification[PublisherSpec],
Endpoint,
BasePublisherProto,
Generic[MsgType],
):
- schema_: Any
-
_broker_middlewares: Iterable["BrokerMiddleware[MsgType]"]
_middlewares: Iterable["PublisherMiddleware"]
_producer: Optional["ProducerProto"]
@@ -88,18 +99,13 @@ class PublisherProto(
@abstractmethod
def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ...
- @staticmethod
- @abstractmethod
- def create() -> "PublisherProto[MsgType]":
- """Abstract factory to create a real Publisher."""
- ...
-
@override
@abstractmethod
def _setup( # type: ignore[override]
self,
*,
producer: Optional["ProducerProto"],
+ state: "Pointer[BrokerState]",
) -> None: ...
@abstractmethod
diff --git a/faststream/_internal/publisher/specified.py b/faststream/_internal/publisher/specified.py
new file mode 100644
index 0000000000..256b6ca39e
--- /dev/null
+++ b/faststream/_internal/publisher/specified.py
@@ -0,0 +1,95 @@
+from inspect import Parameter, unwrap
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
+
+from fast_depends.core import build_call_model
+from fast_depends.pydantic._compat import create_model, get_config_base
+
+from faststream._internal.types import (
+ MsgType,
+ P_HandlerParams,
+ T_HandlerReturn,
+)
+from faststream.specification.asyncapi.message import get_model_schema
+from faststream.specification.asyncapi.utils import to_camelcase
+from faststream.specification.proto import EndpointSpecification
+from faststream.specification.schema import PublisherSpec
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyCallable, AnyDict
+ from faststream._internal.state import BrokerState, Pointer
+ from faststream._internal.subscriber.call_wrapper.call import HandlerCallWrapper
+
+
+class SpecificationPublisher(EndpointSpecification[PublisherSpec]):
+ """A base class for publishers in an asynchronous API."""
+
+ _state: "Pointer[BrokerState]" # should be set in next parent
+
+ def __init__(
+ self,
+ *,
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ self.calls: list[AnyCallable] = []
+
+ self.title_ = title_
+ self.description_ = description_
+ self.include_in_schema = include_in_schema
+ self.schema_ = schema_
+
+ def __call__(
+ self,
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ ],
+ ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
+ func = super().__call__(func)
+ self.calls.append(func._original_call)
+ return func
+
+ def get_payloads(self) -> list[tuple["AnyDict", str]]:
+ payloads: list[tuple[AnyDict, str]] = []
+
+ if self.schema_:
+ body = get_model_schema(
+ call=create_model(
+ "",
+ __config__=get_config_base(),
+ response__=(self.schema_, ...),
+ ),
+ prefix=f"{self.name}:Message",
+ )
+
+ if body: # pragma: no branch
+ payloads.append((body, ""))
+
+ else:
+ di_state = self._state.get().di_state
+
+ for call in self.calls:
+ call_model = build_call_model(
+ call,
+ dependency_provider=di_state.provider,
+ serializer_cls=di_state.serializer,
+ )
+
+ response_type = next(
+ iter(call_model.serializer.response_option.values())
+ ).field_type
+ if response_type is not None and response_type is not Parameter.empty:
+ body = get_model_schema(
+ create_model(
+ "",
+ __config__=get_config_base(),
+ response__=(response_type, ...),
+ ),
+ prefix=f"{self.name}:Message",
+ )
+ if body:
+ payloads.append((body, to_camelcase(unwrap(call).__name__)))
+
+ return payloads
diff --git a/faststream/_internal/publisher/usecase.py b/faststream/_internal/publisher/usecase.py
index dbff727a29..46ebf0f7da 100644
--- a/faststream/_internal/publisher/usecase.py
+++ b/faststream/_internal/publisher/usecase.py
@@ -1,113 +1,84 @@
-from collections.abc import Iterable
-from inspect import unwrap
+from collections.abc import Awaitable, Iterable
+from functools import partial
+from itertools import chain
from typing import (
TYPE_CHECKING,
- Annotated,
Any,
Callable,
Optional,
+ Union,
)
from unittest.mock import MagicMock
-from fast_depends._compat import create_model, get_config_base
-from fast_depends.core import CallModel, build_call_model
-from typing_extensions import Doc, override
+from typing_extensions import override
from faststream._internal.publisher.proto import PublisherProto
-from faststream._internal.subscriber.call_wrapper.call import HandlerCallWrapper
+from faststream._internal.state import BrokerState, EmptyBrokerState, Pointer
+from faststream._internal.state.producer import ProducerUnset
+from faststream._internal.subscriber.call_wrapper.call import (
+ HandlerCallWrapper,
+ ensure_call_wrapper,
+)
+from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import (
MsgType,
P_HandlerParams,
T_HandlerReturn,
)
-from faststream.specification.asyncapi.message import get_response_schema
-from faststream.specification.asyncapi.utils import to_camelcase
+from faststream.message.source_type import SourceType
if TYPE_CHECKING:
- from faststream._internal.basic_types import AnyDict
from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.types import (
BrokerMiddleware,
PublisherMiddleware,
)
+ from faststream.response.response import PublishCommand
-class PublisherUsecase(
- PublisherProto[MsgType],
-):
+class PublisherUsecase(PublisherProto[MsgType]):
"""A base class for publishers in an asynchronous API."""
- mock: Optional[MagicMock]
- calls: list[Callable[..., Any]]
-
def __init__(
self,
*,
- broker_middlewares: Annotated[
- Iterable["BrokerMiddleware[MsgType]"],
- Doc("Top-level middlewares to use in direct `.publish` call."),
- ],
- middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Publisher middlewares."),
- ],
- # AsyncAPI args
- schema_: Annotated[
- Optional[Any],
- Doc(
- "AsyncAPI publishing message type"
- "Should be any python-native object annotation or `pydantic.BaseModel`.",
- ),
- ],
- title_: Annotated[
- Optional[str],
- Doc("AsyncAPI object title."),
- ],
- description_: Annotated[
- Optional[str],
- Doc("AsyncAPI object description."),
- ],
- include_in_schema: Annotated[
- bool,
- Doc("Whetever to include operation in AsyncAPI schema or not."),
- ],
+ broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ middlewares: Iterable["PublisherMiddleware"],
) -> None:
- self.calls = []
- self._middlewares = middlewares
+ self.middlewares = middlewares
self._broker_middlewares = broker_middlewares
- self._producer = None
+
+ self.__producer: Optional[ProducerProto] = ProducerUnset()
self._fake_handler = False
- self.mock = None
+ self.mock: Optional[MagicMock] = None
- # AsyncAPI
- self.title_ = title_
- self.description_ = description_
- self.include_in_schema = include_in_schema
- self.schema_ = schema_
+ self._state: Pointer[BrokerState] = Pointer(
+ EmptyBrokerState("You should include publisher to any broker.")
+ )
def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
self._broker_middlewares = (*self._broker_middlewares, middleware)
+ @property
+ def _producer(self) -> "ProducerProto":
+ return self.__producer or self._state.get().producer
+
@override
def _setup( # type: ignore[override]
self,
*,
- producer: Optional["ProducerProto"],
+ state: "Pointer[BrokerState]",
+ producer: Optional["ProducerProto"] = None,
) -> None:
- self._producer = producer
+ self._state = state
+ self.__producer = producer
def set_test(
self,
*,
- mock: Annotated[
- MagicMock,
- Doc("Mock object to check in tests."),
- ],
- with_fake: Annotated[
- bool,
- Doc("Whetevet publisher's fake subscriber created or not."),
- ],
+ mock: MagicMock,
+ with_fake: bool,
) -> None:
"""Turn publisher to testing mode."""
self.mock = mock
@@ -120,50 +91,89 @@ def reset_test(self) -> None:
def __call__(
self,
- func: Callable[P_HandlerParams, T_HandlerReturn],
+ func: Union[
+ Callable[P_HandlerParams, T_HandlerReturn],
+ HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn],
+ ],
) -> HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]:
"""Decorate user's function by current publisher."""
- handler_call = HandlerCallWrapper[
- MsgType,
- P_HandlerParams,
- T_HandlerReturn,
- ](func)
- handler_call._publishers.append(self)
- self.calls.append(handler_call._original_call)
- return handler_call
-
- def get_payloads(self) -> list[tuple["AnyDict", str]]:
- payloads: list[tuple[AnyDict, str]] = []
-
- if self.schema_:
- params = {"response__": (self.schema_, ...)}
-
- call_model: CallModel[Any, Any] = CallModel(
- call=lambda: None,
- model=create_model("Fake"),
- response_model=create_model( # type: ignore[call-overload]
- "",
- __config__=get_config_base(),
- **params,
- ),
- params=params,
- )
-
- body = get_response_schema(
- call_model,
- prefix=f"{self.name}:Message",
- )
- if body: # pragma: no branch
- payloads.append((body, ""))
-
- else:
- for call in self.calls:
- call_model = build_call_model(call)
- body = get_response_schema(
- call_model,
- prefix=f"{self.name}:Message",
+ handler: HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn] = (
+ ensure_call_wrapper(func)
+ )
+ handler._publishers.append(self)
+ return handler
+
+ async def _basic_publish(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> Any:
+ pub: Callable[..., Awaitable[Any]] = self._producer.publish
+
+ context = self._state.get().di_state.context
+
+ for pub_m in chain(
+ (
+ _extra_middlewares
+ or (
+ m(None, context=context).publish_scope
+ for m in self._broker_middlewares
+ )
+ ),
+ self.middlewares,
+ ):
+ pub = partial(pub_m, pub)
+
+ await pub(cmd)
+
+ async def _basic_request(
+ self,
+ cmd: "PublishCommand",
+ ) -> Optional[Any]:
+ request = self._producer.request
+
+ context = self._state.get().di_state.context
+
+ for pub_m in chain(
+ (m(None, context=context).publish_scope for m in self._broker_middlewares),
+ self.middlewares,
+ ):
+ request = partial(pub_m, request)
+
+ published_msg = await request(cmd)
+
+ response_msg: Any = await process_msg(
+ msg=published_msg,
+ middlewares=(
+ m(published_msg, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._producer._parser,
+ decoder=self._producer._decoder,
+ source_type=SourceType.RESPONSE,
+ )
+ return response_msg
+
+ async def _basic_publish_batch(
+ self,
+ cmd: "PublishCommand",
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> Optional[Any]:
+ pub = self._producer.publish_batch
+
+ context = self._state.get().di_state.context
+
+ for pub_m in chain(
+ (
+ _extra_middlewares
+ or (
+ m(None, context=context).publish_scope
+ for m in self._broker_middlewares
)
- if body:
- payloads.append((body, to_camelcase(unwrap(call).__name__)))
+ ),
+ self.middlewares,
+ ):
+ pub = partial(pub_m, pub)
- return payloads
+ await pub(cmd)
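
The `partial`-chaining in `_basic_publish`/`_basic_request` composes middlewares so that the last one wrapped is the first to run. The pattern in isolation, independent of FastStream types:

```python
import asyncio
from functools import partial
from itertools import chain


async def producer_publish(cmd: str) -> str:
    return f"sent:{cmd}"


async def broker_mw(call, cmd: str) -> str:
    return await call(cmd.upper())


async def publisher_mw(call, cmd: str) -> str:
    return await call(cmd + "!")


pub = producer_publish
for mw in chain((broker_mw,), (publisher_mw,)):
    pub = partial(mw, pub)

# publisher_mw runs first, then broker_mw, then the producer:
assert asyncio.run(pub("hi")) == "sent:HI!"
```
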
diff --git a/faststream/_internal/setup/fast_depends.py b/faststream/_internal/setup/fast_depends.py
deleted file mode 100644
index aa3a664540..0000000000
--- a/faststream/_internal/setup/fast_depends.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from collections.abc import Sequence
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Callable, Optional
-
-if TYPE_CHECKING:
- from faststream._internal.basic_types import Decorator
-
-
-@dataclass
-class FastDependsData:
- apply_types: bool
- is_validate: bool
- get_dependent: Optional[Callable[..., Any]]
- call_decorators: Sequence["Decorator"]
diff --git a/faststream/_internal/setup/logger.py b/faststream/_internal/setup/logger.py
deleted file mode 100644
index 45d660c687..0000000000
--- a/faststream/_internal/setup/logger.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import warnings
-from dataclasses import dataclass, field
-from typing import Optional, Protocol
-
-from faststream._internal.basic_types import AnyDict, LoggerProto
-from faststream._internal.constants import EMPTY
-from faststream.exceptions import IncorrectState
-
-from .proto import SetupAble
-
-__all__ = (
- "DefaultLoggerStorage",
- "LoggerParamsStorage",
- "LoggerState",
- "make_logger_state",
-)
-
-
-def make_logger_state(
- logger: Optional["LoggerProto"],
- log_level: int,
- log_fmt: Optional[str],
- default_storag_cls: type["DefaultLoggerStorage"],
-) -> "LoggerState":
- if logger is not EMPTY and log_fmt:
- warnings.warn(
- message="You can't set custom `logger` with `log_fmt` both.",
- category=RuntimeWarning,
- stacklevel=1,
- )
-
- if logger is EMPTY:
- storage = default_storag_cls(log_fmt)
- elif logger is None:
- storage = _EmptyLoggerStorage()
- else:
- storage = _ManualLoggerStorage(logger)
-
- return LoggerState(
- log_level=log_level,
- params_storage=storage,
- )
-
-
-class _LoggerObject(Protocol):
- logger: Optional["LoggerProto"]
-
- def log(
- self,
- message: str,
- log_level: int,
- extra: Optional["AnyDict"] = None,
- exc_info: Optional[Exception] = None,
- ) -> None: ...
-
-
-class _NotSetLoggerObject(_LoggerObject):
- def __init__(self) -> None:
- self.logger = None
-
- def log(
- self,
- message: str,
- log_level: int,
- extra: Optional["AnyDict"] = None,
- exc_info: Optional[Exception] = None,
- ) -> None:
- msg = "Logger object was not set up."
- raise IncorrectState(msg)
-
-
-class _EmptyLoggerObject(_LoggerObject):
- def __init__(self) -> None:
- self.logger = None
-
- def log(
- self,
- message: str,
- log_level: int,
- extra: Optional["AnyDict"] = None,
- exc_info: Optional[Exception] = None,
- ) -> None:
- pass
-
-
-class _RealLoggerObject(_LoggerObject):
- def __init__(self, logger: "LoggerProto") -> None:
- self.logger = logger
-
- def log(
- self,
- message: str,
- log_level: int,
- extra: Optional["AnyDict"] = None,
- exc_info: Optional[Exception] = None,
- ) -> None:
- self.logger.log(
- log_level,
- message,
- extra=extra,
- exc_info=exc_info,
- )
-
-
-class LoggerParamsStorage(Protocol):
- def setup_log_contest(self, params: "AnyDict") -> None: ...
-
- def get_logger(self) -> Optional["LoggerProto"]: ...
-
-
-class _EmptyLoggerStorage(LoggerParamsStorage):
- def setup_log_contest(self, params: AnyDict) -> None:
- pass
-
- def get_logger(self) -> None:
- return None
-
-
-class _ManualLoggerStorage(LoggerParamsStorage):
- def __init__(self, logger: "LoggerProto") -> None:
- self.__logger = logger
-
- def setup_log_contest(self, params: AnyDict) -> None:
- pass
-
- def get_logger(self) -> LoggerProto:
- return self.__logger
-
-
-class DefaultLoggerStorage(LoggerParamsStorage):
- def __init__(self, log_fmt: Optional[str]) -> None:
- self._log_fmt = log_fmt
-
-
-@dataclass
-class LoggerState(SetupAble):
- log_level: int
- params_storage: LoggerParamsStorage
-
- logger: _LoggerObject = field(default=_NotSetLoggerObject(), init=False)
-
- def log(
- self,
- message: str,
- log_level: Optional[int] = None,
- extra: Optional["AnyDict"] = None,
- exc_info: Optional[Exception] = None,
- ) -> None:
- self.logger.log(
- log_level=(log_level or self.log_level),
- message=message,
- extra=extra,
- exc_info=exc_info,
- )
-
- def _setup(self) -> None:
- if logger := self.params_storage.get_logger():
- self.logger = _RealLoggerObject(logger)
- else:
- self.logger = _EmptyLoggerObject()
diff --git a/faststream/_internal/setup/state.py b/faststream/_internal/setup/state.py
deleted file mode 100644
index 907d8a3b56..0000000000
--- a/faststream/_internal/setup/state.py
+++ /dev/null
@@ -1,99 +0,0 @@
-from abc import abstractmethod, abstractproperty
-from typing import Optional
-
-from faststream.exceptions import IncorrectState
-
-from .fast_depends import FastDependsData
-from .logger import LoggerState
-from .proto import SetupAble
-
-
-class BaseState(SetupAble):
- _depends_params: FastDependsData
- _logger_params: LoggerState
-
- @abstractproperty
- def depends_params(self) -> FastDependsData:
- raise NotImplementedError
-
- @abstractproperty
- def logger_state(self) -> LoggerState:
- raise NotImplementedError
-
- @abstractmethod
- def __bool__(self) -> bool:
- raise NotImplementedError
-
- def _setup(self) -> None:
- self.logger_state._setup()
-
- def copy_with_params(
- self,
- *,
- depends_params: Optional[FastDependsData] = None,
- logger_state: Optional[LoggerState] = None,
- ) -> "SetupState":
- return self.__class__(
- logger_state=logger_state or self._logger_params,
- depends_params=depends_params or self._depends_params,
- )
-
- def copy_to_state(self, state_cls: type["SetupState"]) -> "SetupState":
- return state_cls(
- depends_params=self._depends_params,
- logger_state=self._logger_params,
- )
-
-
-class SetupState(BaseState):
- """State after broker._setup() called."""
-
- def __init__(
- self,
- *,
- logger_state: LoggerState,
- depends_params: FastDependsData,
- ) -> None:
- self._depends_params = depends_params
- self._logger_params = logger_state
-
- @property
- def depends_params(self) -> FastDependsData:
- return self._depends_params
-
- @property
- def logger_state(self) -> LoggerState:
- return self._logger_params
-
- def __bool__(self) -> bool:
- return True
-
-
-class EmptyState(BaseState):
- """Initial state for App, broker, etc."""
-
- def __init__(
- self,
- *,
- logger_state: Optional[LoggerState] = None,
- depends_params: Optional[FastDependsData] = None,
- ) -> None:
- self._depends_params = depends_params
- self._logger_params = logger_state
-
- @property
- def depends_params(self) -> FastDependsData:
- if not self._depends_params:
- raise IncorrectState
-
- return self._depends_params
-
- @property
- def logger_state(self) -> LoggerState:
- if not self._logger_params:
- raise IncorrectState
-
- return self._logger_params
-
- def __bool__(self) -> bool:
- return False
diff --git a/faststream/_internal/setup/__init__.py b/faststream/_internal/state/__init__.py
similarity index 53%
rename from faststream/_internal/setup/__init__.py
rename to faststream/_internal/state/__init__.py
index bd5d749560..f65fc1cb63 100644
--- a/faststream/_internal/setup/__init__.py
+++ b/faststream/_internal/state/__init__.py
@@ -1,17 +1,19 @@
-from .fast_depends import FastDependsData
+from .broker import BrokerState, EmptyBrokerState
+from .fast_depends import DIState
from .logger import LoggerParamsStorage, LoggerState
+from .pointer import Pointer
from .proto import SetupAble
-from .state import EmptyState, SetupState
__all__ = (
- "EmptyState",
+ # state
+ "BrokerState",
# FastDepend
- "FastDependsData",
+ "DIState",
+ "EmptyBrokerState",
"LoggerParamsStorage",
# logging
"LoggerState",
+ "Pointer",
# proto
"SetupAble",
- # state
- "SetupState",
)
diff --git a/faststream/_internal/state/broker.py b/faststream/_internal/state/broker.py
new file mode 100644
index 0000000000..374b0e8c3b
--- /dev/null
+++ b/faststream/_internal/state/broker.py
@@ -0,0 +1,90 @@
+from typing import TYPE_CHECKING, Optional, Protocol
+
+from faststream.exceptions import IncorrectState
+
+from .producer import ProducerUnset
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+
+ from .fast_depends import DIState
+ from .logger import LoggerState
+
+
+class BrokerState(Protocol):
+ di_state: "DIState"
+ logger_state: "LoggerState"
+ producer: "ProducerProto"
+
+ # Persistent variables
+ graceful_timeout: Optional[float]
+
+ def _setup(self) -> None: ...
+
+ def _setup_logger_state(self) -> None: ...
+
+ def __bool__(self) -> bool: ...
+
+
+class _EmptyBrokerState(BrokerState):
+ def __init__(self, error_msg: str) -> None:
+ self.error_msg = error_msg
+ self.producer = ProducerUnset()
+
+ @property
+ def logger_state(self) -> "LoggerState":
+ raise IncorrectState(self.error_msg)
+
+ @property
+ def graceful_timeout(self) -> Optional[float]:
+ raise IncorrectState(self.error_msg)
+
+ def _setup(self) -> None:
+ pass
+
+ def _setup_logger_state(self) -> None:
+ pass
+
+ def __bool__(self) -> bool:
+ return False
+
+
+class EmptyBrokerState(_EmptyBrokerState):
+ @property
+ def di_state(self) -> "DIState":
+ raise IncorrectState(self.error_msg)
+
+
+class OuterBrokerState(_EmptyBrokerState):
+ def __init__(self, *, di_state: "DIState") -> None:
+ self.di_state = di_state
+
+ def __bool__(self) -> bool:
+ return True
+
+
+class InitialBrokerState(BrokerState):
+ def __init__(
+ self,
+ *,
+ di_state: "DIState",
+ logger_state: "LoggerState",
+ graceful_timeout: Optional[float],
+ producer: "ProducerProto",
+ ) -> None:
+ self.di_state = di_state
+ self.logger_state = logger_state
+
+ self.graceful_timeout = graceful_timeout
+ self.producer = producer
+
+ self.setupped = False
+
+ def _setup(self) -> None:
+ self.setupped = True
+
+ def _setup_logger_state(self) -> None:
+ self.logger_state._setup(context=self.di_state.context)
+
+ def __bool__(self) -> bool:
+ return self.setupped
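
The state machine is intentionally simple: publishers and subscribers hold a `Pointer[BrokerState]` that starts as a falsy `EmptyBrokerState` and is later swapped for an `InitialBrokerState` whose `_setup()` flips it truthy. A sketch (constructor arguments elided):

```python
from faststream._internal.state import EmptyBrokerState, Pointer

state = Pointer(EmptyBrokerState("You should include publisher to any broker."))
assert not state.get()  # unset state is falsy and raises on attribute access

# Later, during broker initialization (arguments elided):
# state.set(InitialBrokerState(di_state=..., logger_state=..., graceful_timeout=None, producer=...))
# state.get()._setup()  # marks the state as set up, making it truthy
```
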
diff --git a/faststream/_internal/state/fast_depends.py b/faststream/_internal/state/fast_depends.py
new file mode 100644
index 0000000000..a5e7a098ad
--- /dev/null
+++ b/faststream/_internal/state/fast_depends.py
@@ -0,0 +1,38 @@
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+from faststream._internal.constants import EMPTY
+
+if TYPE_CHECKING:
+ from fast_depends import Provider
+ from fast_depends.library.serializer import SerializerProto
+
+ from faststream._internal.basic_types import Decorator
+ from faststream._internal.context import ContextRepo
+
+
+@dataclass
+class DIState:
+ use_fastdepends: bool
+ get_dependent: Optional[Callable[..., Any]]
+ call_decorators: Sequence["Decorator"]
+ provider: "Provider"
+ serializer: Optional["SerializerProto"]
+ context: "ContextRepo"
+
+ def update(
+ self,
+ *,
+ provider: "Provider" = EMPTY,
+ serializer: Optional["SerializerProto"] = EMPTY,
+ context: "ContextRepo" = EMPTY,
+ ) -> None:
+ if provider is not EMPTY:
+ self.provider = provider
+
+ if serializer is not EMPTY:
+ self.serializer = serializer
+
+ if context is not EMPTY:
+ self.context = context
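
`DIState.update` uses the `EMPTY` sentinel instead of `None` as its "not provided" marker, so a caller can explicitly set a field to `None` (e.g. dropping the serializer). The idiom in isolation:

```python
from dataclasses import dataclass
from typing import Any, Optional

EMPTY: Any = object()  # stand-in for faststream._internal.constants.EMPTY


@dataclass
class Config:
    serializer: Optional[str] = "json"

    def update(self, *, serializer: Optional[str] = EMPTY) -> None:
        if serializer is not EMPTY:
            self.serializer = serializer


cfg = Config()
cfg.update()                 # nothing passed -> value kept
assert cfg.serializer == "json"
cfg.update(serializer=None)  # None is a real value, not "missing"
assert cfg.serializer is None
```
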
diff --git a/faststream/_internal/state/logger/__init__.py b/faststream/_internal/state/logger/__init__.py
new file mode 100644
index 0000000000..466e24c689
--- /dev/null
+++ b/faststream/_internal/state/logger/__init__.py
@@ -0,0 +1,9 @@
+from .params_storage import DefaultLoggerStorage, LoggerParamsStorage
+from .state import LoggerState, make_logger_state
+
+__all__ = (
+ "DefaultLoggerStorage",
+ "LoggerParamsStorage",
+ "LoggerState",
+ "make_logger_state",
+)
diff --git a/faststream/_internal/state/logger/logger_proxy.py b/faststream/_internal/state/logger/logger_proxy.py
new file mode 100644
index 0000000000..690a42c6dd
--- /dev/null
+++ b/faststream/_internal/state/logger/logger_proxy.py
@@ -0,0 +1,99 @@
+from collections.abc import Mapping
+from typing import Any, Optional
+
+from faststream._internal.basic_types import LoggerProto
+from faststream.exceptions import IncorrectState
+
+
+class LoggerObject(LoggerProto):
+ logger: Optional["LoggerProto"]
+
+ def __bool__(self) -> bool: ...
+
+
+class NotSetLoggerObject(LoggerObject):
+ """Default logger proxy for state.
+
+ Raises an error if the user tries to log something before the state is set up.
+ """
+
+ def __init__(self) -> None:
+ self.logger = None
+
+ def __bool__(self) -> bool:
+ return False
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}()"
+
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None:
+ err_msg = "Logger object not set. Please, call `_setup_logger_state` of parent broker state."
+ raise IncorrectState(err_msg)
+
+
+class EmptyLoggerObject(LoggerObject):
+ """Empty logger proxy for state.
+
+ Used when the user sets `logger=None`.
+ """
+
+ def __init__(self) -> None:
+ self.logger = None
+
+ def __bool__(self) -> bool:
+ return True
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}()"
+
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None:
+ pass
+
+
+class RealLoggerObject(LoggerObject):
+ """Empty logger proxy for state.
+
+ Will be used if user setup custom `logger` (.params_storage.ManualLoggerStorage)
+ or in default logger case (.params_storage.DefaultLoggerStorage).
+ """
+
+ def __init__(self, logger: "LoggerProto") -> None:
+ self.logger = logger
+
+ def __bool__(self) -> bool:
+ return True
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(logger={self.logger})"
+
+ def log(
+ self,
+ level: int,
+ msg: Any,
+ /,
+ *,
+ exc_info: Any = None,
+ extra: Optional[Mapping[str, Any]] = None,
+ ) -> None:
+ self.logger.log(
+ level,
+ msg,
+ extra=extra,
+ exc_info=exc_info,
+ )
diff --git a/faststream/_internal/state/logger/params_storage.py b/faststream/_internal/state/logger/params_storage.py
new file mode 100644
index 0000000000..ee12344a7a
--- /dev/null
+++ b/faststream/_internal/state/logger/params_storage.py
@@ -0,0 +1,73 @@
+import warnings
+from abc import abstractmethod
+from typing import TYPE_CHECKING, Optional, Protocol
+
+from faststream._internal.constants import EMPTY
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
+
+
+def make_logger_storage(
+ logger: Optional["LoggerProto"],
+ log_fmt: Optional[str],
+ default_storage_cls: type["LoggerParamsStorage"],
+) -> "LoggerParamsStorage":
+ if logger is EMPTY:
+ return default_storage_cls(log_fmt)
+
+ if log_fmt:
+ warnings.warn(
+ message="You can't set custom `logger` with `log_fmt` both.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ return EmptyLoggerStorage() if logger is None else ManualLoggerStorage(logger)
+
+
+class LoggerParamsStorage(Protocol):
+ def setup_log_contest(self, params: "AnyDict") -> None: ...
+
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]: ...
+
+ def set_level(self, level: int) -> None: ...
+
+
+class EmptyLoggerStorage(LoggerParamsStorage):
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ pass
+
+ def get_logger(self, *, context: "ContextRepo") -> None:
+ return None
+
+ def set_level(self, level: int) -> None:
+ pass
+
+
+class ManualLoggerStorage(LoggerParamsStorage):
+ def __init__(self, logger: "LoggerProto") -> None:
+ self.__logger = logger
+
+ def setup_log_contest(self, params: "AnyDict") -> None:
+ pass
+
+ def get_logger(self, *, context: "ContextRepo") -> "LoggerProto":
+ return self.__logger
+
+ def set_level(self, level: int) -> None:
+ if getattr(self.__logger, "setLevel", None):
+ self.__logger.setLevel(level) # type: ignore[attr-defined]
+
+
+class DefaultLoggerStorage(LoggerParamsStorage):
+ def __init__(self, log_fmt: Optional[str]) -> None:
+ self._log_fmt = log_fmt
+
+ @abstractmethod
+ def get_logger(self, *, context: "ContextRepo") -> "LoggerProto":
+ raise NotImplementedError
+
+ def set_level(self, level: int) -> None:
+ raise NotImplementedError
diff --git a/faststream/_internal/state/logger/state.py b/faststream/_internal/state/logger/state.py
new file mode 100644
index 0000000000..2fc29707b8
--- /dev/null
+++ b/faststream/_internal/state/logger/state.py
@@ -0,0 +1,72 @@
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.state.proto import SetupAble
+
+from .logger_proxy import (
+ EmptyLoggerObject,
+ LoggerObject,
+ NotSetLoggerObject,
+ RealLoggerObject,
+)
+from .params_storage import LoggerParamsStorage, make_logger_storage
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
+
+
+def make_logger_state(
+ logger: Optional["LoggerProto"],
+ log_level: int,
+ log_fmt: Optional[str],
+ default_storage_cls: type["LoggerParamsStorage"],
+) -> "LoggerState":
+ storage = make_logger_storage(
+ logger=logger,
+ log_fmt=log_fmt,
+ default_storage_cls=default_storage_cls,
+ )
+
+ return LoggerState(
+ log_level=log_level,
+ storage=storage,
+ )
+
+
+class LoggerState(SetupAble):
+ def __init__(
+ self,
+ log_level: int,
+ storage: LoggerParamsStorage,
+ ) -> None:
+ self.log_level = log_level
+ self.params_storage = storage
+
+ self.logger: LoggerObject = NotSetLoggerObject()
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(log_level={self.log_level}, logger={self.logger})"
+
+ def set_level(self, level: int) -> None:
+ self.params_storage.set_level(level)
+
+ def log(
+ self,
+ message: str,
+ log_level: Optional[int] = None,
+ extra: Optional["AnyDict"] = None,
+ exc_info: Optional[Exception] = None,
+ ) -> None:
+ self.logger.log(
+ (log_level or self.log_level),
+ message,
+ extra=extra,
+ exc_info=exc_info,
+ )
+
+ def _setup(self, *, context: "ContextRepo") -> None:
+ if not self.logger:
+ if logger := self.params_storage.get_logger(context=context):
+ self.logger = RealLoggerObject(logger)
+ else:
+ self.logger = EmptyLoggerObject()
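
End to end, the logger state starts with `NotSetLoggerObject` (raising on use) and becomes usable once `_setup(context=...)` resolves a logger from the storage. A sketch with a hypothetical minimal storage:

```python
import logging

from faststream._internal.constants import EMPTY
from faststream._internal.context import ContextRepo
from faststream._internal.state.logger import DefaultLoggerStorage, make_logger_state


class PlainStorage(DefaultLoggerStorage):
    """Hypothetical storage that always hands back a stdlib logger."""

    def get_logger(self, *, context: ContextRepo) -> logging.Logger:
        return logging.getLogger("faststream.sketch")

    def set_level(self, level: int) -> None:
        logging.getLogger("faststream.sketch").setLevel(level)


state = make_logger_state(
    logger=EMPTY,  # EMPTY selects the default storage class
    log_level=logging.INFO,
    log_fmt=None,
    default_storage_cls=PlainStorage,
)
state._setup(context=ContextRepo())
state.log("broker ready")  # proxied to the resolved logger at INFO
```
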
diff --git a/faststream/_internal/state/pointer.py b/faststream/_internal/state/pointer.py
new file mode 100644
index 0000000000..dbe927d5f9
--- /dev/null
+++ b/faststream/_internal/state/pointer.py
@@ -0,0 +1,26 @@
+from typing import TYPE_CHECKING, Generic, TypeVar
+
+from typing_extensions import Self
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+
+T = TypeVar("T")
+
+
+class Pointer(Generic[T]):
+ __slots__ = ("__value",)
+
+ def __init__(self, value: T) -> None:
+ self.__value = value
+
+ def set(self, new_value: T) -> "Self":
+ self.__value = new_value
+ return self
+
+ def get(self) -> T:
+ return self.__value
+
+ def patch_value(self, **kwargs: "AnyDict") -> None:
+ for k, v in kwargs.items():
+ setattr(self.__value, k, v)
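
`Pointer` is a tiny shared mutable cell: everything that captured the pointer at wiring time observes later `set()` calls, which is how endpoints created before broker setup see the final state:

```python
from faststream._internal.state.pointer import Pointer

ptr: Pointer[str] = Pointer("initial")
alias = ptr  # both names reference the same cell

ptr.set("configured")
assert alias.get() == "configured"
```
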
diff --git a/faststream/_internal/state/producer.py b/faststream/_internal/state/producer.py
new file mode 100644
index 0000000000..d65cd4f205
--- /dev/null
+++ b/faststream/_internal/state/producer.py
@@ -0,0 +1,29 @@
+from typing import TYPE_CHECKING, Any, Optional
+
+from faststream._internal.publisher.proto import ProducerProto
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from faststream._internal.types import AsyncCallable
+ from faststream.response import PublishCommand
+
+
+class ProducerUnset(ProducerProto):
+ msg = "Producer is unset yet. You should set producer in broker initial method."
+
+ @property
+ def _decoder(self) -> "AsyncCallable":
+ raise IncorrectState(self.msg)
+
+ @property
+ def _parser(self) -> "AsyncCallable":
+ raise IncorrectState(self.msg)
+
+ async def publish(self, cmd: "PublishCommand") -> Optional[Any]:
+ raise IncorrectState(self.msg)
+
+ async def request(self, cmd: "PublishCommand") -> Any:
+ raise IncorrectState(self.msg)
+
+ async def publish_batch(self, cmd: "PublishCommand") -> None:
+ raise IncorrectState(self.msg)
diff --git a/faststream/_internal/setup/proto.py b/faststream/_internal/state/proto.py
similarity index 100%
rename from faststream/_internal/setup/proto.py
rename to faststream/_internal/state/proto.py
diff --git a/faststream/_internal/subscriber/acknowledgement_watcher.py b/faststream/_internal/subscriber/acknowledgement_watcher.py
deleted file mode 100644
index c86e59baf6..0000000000
--- a/faststream/_internal/subscriber/acknowledgement_watcher.py
+++ /dev/null
@@ -1,220 +0,0 @@
-import logging
-from abc import ABC, abstractmethod
-from collections import (
- Counter,
- Counter as CounterType,
-)
-from typing import TYPE_CHECKING, Any, Optional, Union
-
-from faststream.exceptions import (
- AckMessage,
- HandlerException,
- NackMessage,
- RejectMessage,
- SkipMessage,
-)
-
-if TYPE_CHECKING:
- from types import TracebackType
-
- from faststream._internal.basic_types import LoggerProto
- from faststream._internal.types import MsgType
- from faststream.message import StreamMessage
-
-
-class BaseWatcher(ABC):
- """A base class for a watcher."""
-
- max_tries: int
-
- def __init__(
- self,
- max_tries: int = 0,
- logger: Optional["LoggerProto"] = None,
- ) -> None:
- self.logger = logger
- self.max_tries = max_tries
-
- @abstractmethod
- def add(self, message_id: str) -> None:
- """Add a message."""
- raise NotImplementedError
-
- @abstractmethod
- def is_max(self, message_id: str) -> bool:
- """Check if the given message ID is the maximum attempt."""
- raise NotImplementedError
-
- @abstractmethod
- def remove(self, message_id: str) -> None:
- """Remove a message."""
- raise NotImplementedError
-
-
-class EndlessWatcher(BaseWatcher):
- """A class to watch and track messages."""
-
- def add(self, message_id: str) -> None:
- """Add a message to the list."""
-
- def is_max(self, message_id: str) -> bool:
- """Check if the given message ID is the maximum attempt."""
- return False
-
- def remove(self, message_id: str) -> None:
- """Remove a message."""
-
-
-class OneTryWatcher(BaseWatcher):
- """A class to watch and track messages."""
-
- def add(self, message_id: str) -> None:
- """Add a message."""
-
- def is_max(self, message_id: str) -> bool:
- """Check if the given message ID is the maximum attempt."""
- return True
-
- def remove(self, message_id: str) -> None:
- """Remove a message."""
-
-
-class CounterWatcher(BaseWatcher):
- """A class to watch and track the count of messages."""
-
- memory: CounterType[str]
-
- def __init__(
- self,
- max_tries: int = 3,
- logger: Optional["LoggerProto"] = None,
- ) -> None:
- super().__init__(logger=logger, max_tries=max_tries)
- self.memory = Counter()
-
- def add(self, message_id: str) -> None:
- """Check if the given message ID is the maximum attempt."""
- self.memory[message_id] += 1
-
- def is_max(self, message_id: str) -> bool:
- """Check if the number of tries for a message has exceeded the maximum allowed tries."""
- is_max = self.memory[message_id] > self.max_tries
- if self.logger is not None:
- if is_max:
- self.logger.log(
- logging.ERROR,
- f"Already retried {self.max_tries} times. Skipped.",
- )
- else:
- self.logger.log(
- logging.ERROR,
- "Error is occurred. Pushing back to queue.",
- )
- return is_max
-
- def remove(self, message_id: str) -> None:
- """Remove a message from memory."""
- self.memory[message_id] = 0
- self.memory += Counter()
-
-
-class WatcherContext:
- """A class representing a context for a watcher."""
-
- def __init__(
- self,
- message: "StreamMessage[MsgType]",
- watcher: BaseWatcher,
- logger: Optional["LoggerProto"] = None,
- **extra_options: Any,
- ) -> None:
- self.watcher = watcher
- self.message = message
- self.extra_options = extra_options
- self.logger = logger
-
- async def __aenter__(self) -> None:
- self.watcher.add(self.message.message_id)
-
- async def __aexit__(
- self,
- exc_type: Optional[type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional["TracebackType"],
- ) -> bool:
- """Exit the asynchronous context manager."""
- if not exc_type:
- await self.__ack()
-
- elif isinstance(exc_val, HandlerException):
- if isinstance(exc_val, SkipMessage):
- self.watcher.remove(self.message.message_id)
-
- elif isinstance(exc_val, AckMessage):
- await self.__ack(**exc_val.extra_options)
-
- elif isinstance(exc_val, NackMessage):
- await self.__nack(**exc_val.extra_options)
-
- elif isinstance(exc_val, RejectMessage): # pragma: no branch
- await self.__reject(**exc_val.extra_options)
-
- # Exception was processed and suppressed
- return True
-
- elif self.watcher.is_max(self.message.message_id):
- await self.__reject()
-
- else:
- await self.__nack()
-
- # Exception was not processed
- return False
-
- async def __ack(self, **exc_extra_options: Any) -> None:
- try:
- await self.message.ack(**self.extra_options, **exc_extra_options)
- except Exception as er:
- if self.logger is not None:
- self.logger.log(logging.ERROR, er, exc_info=er)
- else:
- self.watcher.remove(self.message.message_id)
-
- async def __nack(self, **exc_extra_options: Any) -> None:
- try:
- await self.message.nack(**self.extra_options, **exc_extra_options)
- except Exception as er:
- if self.logger is not None:
- self.logger.log(logging.ERROR, er, exc_info=er)
-
- async def __reject(self, **exc_extra_options: Any) -> None:
- try:
- await self.message.reject(**self.extra_options, **exc_extra_options)
- except Exception as er:
- if self.logger is not None:
- self.logger.log(logging.ERROR, er, exc_info=er)
- else:
- self.watcher.remove(self.message.message_id)
-
-
-def get_watcher(
- logger: Optional["LoggerProto"],
- try_number: Union[bool, int],
-) -> BaseWatcher:
- """Get a watcher object based on the provided parameters.
-
- Args:
- logger: Optional logger object for logging messages.
- try_number: Optional parameter to specify the type of watcher.
- - If set to True, an EndlessWatcher object will be returned.
- - If set to False, a OneTryWatcher object will be returned.
- - If set to an integer, a CounterWatcher object with the specified maximum number of tries will be returned.
- """
- watcher: Optional[BaseWatcher]
- if try_number is True:
- watcher = EndlessWatcher()
- elif try_number is False:
- watcher = OneTryWatcher()
- else:
- watcher = CounterWatcher(logger=logger, max_tries=try_number)
- return watcher
diff --git a/faststream/_internal/subscriber/call_item.py b/faststream/_internal/subscriber/call_item.py
index 6a20e3ac7c..48814e9ea0 100644
--- a/faststream/_internal/subscriber/call_item.py
+++ b/faststream/_internal/subscriber/call_item.py
@@ -5,7 +5,6 @@
from typing import (
TYPE_CHECKING,
Any,
- Callable,
Generic,
Optional,
cast,
@@ -13,14 +12,15 @@
from typing_extensions import override
-from faststream._internal.setup import SetupAble
+from faststream._internal.state import SetupAble
from faststream._internal.types import MsgType
from faststream.exceptions import IgnoredException, SetupError
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AsyncFuncAny, Decorator
+ from faststream._internal.state import BrokerState, Pointer
from faststream._internal.subscriber.call_wrapper.call import HandlerCallWrapper
from faststream._internal.types import (
AsyncCallable,
@@ -54,7 +54,7 @@ def __init__(
item_parser: Optional["CustomCallable"],
item_decoder: Optional["CustomCallable"],
item_middlewares: Iterable["SubscriberMiddleware[StreamMessage[MsgType]]"],
- dependencies: Iterable["Depends"],
+ dependencies: Iterable["Dependant"],
) -> None:
self.handler = handler
self.filter = filter
@@ -75,30 +75,28 @@ def _setup( # type: ignore[override]
*,
parser: "AsyncCallable",
decoder: "AsyncCallable",
- broker_dependencies: Iterable["Depends"],
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
+ state: "Pointer[BrokerState]",
+ broker_dependencies: Iterable["Dependant"],
_call_decorators: Iterable["Decorator"],
) -> None:
if self.dependant is None:
+ di_state = state.get().di_state
+
self.item_parser = parser
self.item_decoder = decoder
dependencies = (*broker_dependencies, *self.dependencies)
dependant = self.handler.set_wrapped(
- apply_types=apply_types,
- is_validate=is_validate,
dependencies=dependencies,
- _get_dependant=_get_dependant,
- _call_decorators=_call_decorators,
+ _call_decorators=(*_call_decorators, *di_state.call_decorators),
+ state=di_state,
)
- if _get_dependant is None:
+ if di_state.get_dependent is None:
self.dependant = dependant
else:
- self.dependant = _get_dependant(
+ self.dependant = di_state.get_dependent(
self.handler._original_call,
dependencies,
)
@@ -138,9 +136,8 @@ async def is_suitable(
cache.get(parser) or await parser(msg),
)
- message._decoded_body = cache[decoder] = cache.get(decoder) or await decoder(
- message,
- )
+ # NOTE: the final decoder will be the one set by the successfully filtered handler
+ message.set_decoder(decoder)
if await self.filter(message):
return message
diff --git a/faststream/_internal/subscriber/call_wrapper/call.py b/faststream/_internal/subscriber/call_wrapper/call.py
index 208821ad38..14d081b52f 100644
--- a/faststream/_internal/subscriber/call_wrapper/call.py
+++ b/faststream/_internal/subscriber/call_wrapper/call.py
@@ -11,8 +11,8 @@
from unittest.mock import MagicMock
import anyio
+from fast_depends import inject
from fast_depends.core import CallModel, build_call_model
-from fast_depends.use import _InjectWrapper, inject
from faststream._internal.types import (
MsgType,
@@ -23,13 +23,27 @@
from faststream.exceptions import SetupError
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.use import InjectWrapper
from faststream._internal.basic_types import Decorator
from faststream._internal.publisher.proto import PublisherProto
+ from faststream._internal.state.fast_depends import DIState
from faststream.message import StreamMessage
+def ensure_call_wrapper(
+ call: Union[
+ "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
+ Callable[P_HandlerParams, T_HandlerReturn],
+ ],
+) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
+ if isinstance(call, HandlerCallWrapper):
+ return call
+
+ return HandlerCallWrapper(call)
+
+
class HandlerCallWrapper(Generic[MsgType, P_HandlerParams, T_HandlerReturn]):
"""A generic class to wrap handler calls."""
@@ -50,31 +64,18 @@ class HandlerCallWrapper(Generic[MsgType, P_HandlerParams, T_HandlerReturn]):
"mock",
)
- def __new__(
- cls,
- call: Union[
- "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
- Callable[P_HandlerParams, T_HandlerReturn],
- ],
- ) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
- """Create a new instance of the class."""
- if isinstance(call, cls):
- return call
- return super().__new__(cls)
-
def __init__(
self,
call: Callable[P_HandlerParams, T_HandlerReturn],
) -> None:
"""Initialize a handler."""
- if not isinstance(call, HandlerCallWrapper):
- self._original_call = call
- self._wrapped_call = None
- self._publishers = []
+ self._original_call = call
+ self._wrapped_call = None
+ self._publishers = []
- self.mock = None
- self.future = None
- self.is_test = False
+ self.mock = None
+ self.future = None
+ self.is_test = False
def __call__(
self,
@@ -84,7 +85,7 @@ def __call__(
"""Calls the object as a function."""
return self._original_call(*args, **kwargs)
- def call_wrapped(
+ async def call_wrapped(
self,
message: "StreamMessage[MsgType]",
) -> Awaitable[Any]:
@@ -92,8 +93,8 @@ def call_wrapped(
assert self._wrapped_call, "You should use `set_wrapped` first" # nosec B101
if self.is_test:
assert self.mock # nosec B101
- self.mock(message._decoded_body)
- return self._wrapped_call(message)
+ self.mock(await message.decode())
+ return await self._wrapped_call(message)
async def wait_call(self, timeout: Optional[float] = None) -> None:
"""Waits for a call with an optional timeout."""
@@ -144,12 +145,10 @@ def refresh(self, with_mock: bool = False) -> None:
def set_wrapped(
self,
*,
- apply_types: bool,
- is_validate: bool,
- dependencies: Iterable["Depends"],
- _get_dependant: Optional[Callable[..., Any]],
+ dependencies: Iterable["Dependant"],
_call_decorators: Iterable["Decorator"],
- ) -> Optional["CallModel[..., Any]"]:
+ state: "DIState",
+ ) -> Optional["CallModel"]:
call = self._original_call
for decor in _call_decorators:
call = decor(call)
@@ -157,16 +156,20 @@ def set_wrapped(
f: Callable[..., Awaitable[Any]] = to_async(call)
- dependent: Optional[CallModel[..., Any]] = None
- if _get_dependant is None:
+ dependent: Optional[CallModel] = None
+ if state.get_dependent is None:
dependent = build_call_model(
f,
- cast=is_validate,
- extra_dependencies=dependencies, # type: ignore[arg-type]
+ extra_dependencies=dependencies,
+ dependency_provider=state.provider,
+ serializer_cls=state.serializer,
)
- if apply_types:
- wrapper: _InjectWrapper[Any, Any] = inject(func=None)
+ if state.use_fastdepends:
+ wrapper: InjectWrapper[Any, Any] = inject(
+ func=None,
+ context__=state.context,
+ )
f = wrapper(func=f, model=dependent)
f = _wrap_decode_message(
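
Swapping the `__new__` override for a module-level `ensure_call_wrapper` keeps wrapping explicit and idempotent: when `__new__` returns an existing instance, Python still re-runs `__init__` on it, which is exactly what the deleted `isinstance` branches had to guard against. A toy demonstration of the factory shape (`Wrapper` is a stand-in for `HandlerCallWrapper`):

```python
from typing import Callable, Union

class Wrapper:
    """Simplified stand-in for HandlerCallWrapper."""

    def __init__(self, call: Callable[..., int]) -> None:
        self._original_call = call

def ensure_wrapper(call: Union[Wrapper, Callable[..., int]]) -> Wrapper:
    # Idempotent: wrapping an already-wrapped callable is a no-op.
    return call if isinstance(call, Wrapper) else Wrapper(call)

def handler() -> int:
    return 42

w1 = ensure_wrapper(handler)
w2 = ensure_wrapper(w1)
assert w1 is w2  # no re-wrapping, and __init__ runs exactly once
```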
diff --git a/faststream/_internal/subscriber/call_wrapper/proto.py b/faststream/_internal/subscriber/call_wrapper/proto.py
index 161905351a..160b054e6b 100644
--- a/faststream/_internal/subscriber/call_wrapper/proto.py
+++ b/faststream/_internal/subscriber/call_wrapper/proto.py
@@ -19,7 +19,7 @@
)
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from .call import HandlerCallWrapper
@@ -36,7 +36,7 @@ def __call__(
parser: Optional["CustomCallable"] = None,
decoder: Optional["CustomCallable"] = None,
middlewares: Iterable["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
) -> Callable[
[Callable[P_HandlerParams, T_HandlerReturn]],
"HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
@@ -54,7 +54,7 @@ def __call__(
parser: Optional["CustomCallable"] = None,
decoder: Optional["CustomCallable"] = None,
middlewares: Iterable["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]": ...
def __call__(
@@ -69,7 +69,7 @@ def __call__(
parser: Optional["CustomCallable"] = None,
decoder: Optional["CustomCallable"] = None,
middlewares: Iterable["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
) -> Union[
"HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
Callable[
diff --git a/faststream/_internal/subscriber/mixins.py b/faststream/_internal/subscriber/mixins.py
new file mode 100644
index 0000000000..412f8f2c79
--- /dev/null
+++ b/faststream/_internal/subscriber/mixins.py
@@ -0,0 +1,74 @@
+import asyncio
+from collections.abc import Coroutine
+from typing import TYPE_CHECKING, Any
+
+import anyio
+
+from .usecase import SubscriberUsecase
+
+if TYPE_CHECKING:
+ from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
+ from nats.aio.msg import Msg
+
+
+class TasksMixin(SubscriberUsecase[Any]):
+ def __init__(self, **kwargs: Any) -> None:
+ super().__init__(**kwargs)
+ self.tasks: list[asyncio.Task[Any]] = []
+
+ def add_task(self, coro: Coroutine[Any, Any, Any]) -> None:
+ self.tasks.append(asyncio.create_task(coro))
+
+ async def close(self) -> None:
+ """Clean up handler subscription, cancel consume task in graceful mode."""
+ await super().close()
+
+ for task in self.tasks:
+ if not task.done():
+ task.cancel()
+
+ self.tasks = []
+
+
+class ConcurrentMixin(TasksMixin):
+ send_stream: "MemoryObjectSendStream[Msg]"
+ receive_stream: "MemoryObjectReceiveStream[Msg]"
+
+ def __init__(
+ self,
+ *,
+ max_workers: int,
+ **kwargs: Any,
+ ) -> None:
+ self.max_workers = max_workers
+
+ self.send_stream, self.receive_stream = anyio.create_memory_object_stream(
+ max_buffer_size=max_workers
+ )
+ self.limiter = anyio.Semaphore(max_workers)
+
+ super().__init__(**kwargs)
+
+ def start_consume_task(self) -> None:
+ self.add_task(self._serve_consume_queue())
+
+ async def _serve_consume_queue(
+ self,
+ ) -> None:
+ """Endless task consuming messages from in-memory queue.
+
+ Suitable to batch messages by amount, timestamps, etc and call `consume` for this batches.
+ """
+ async with anyio.create_task_group() as tg:
+ async for msg in self.receive_stream:
+ tg.start_soon(self._consume_msg, msg)
+
+ async def _consume_msg(self, msg: "Msg") -> None:
+ """Proxy method to call `self.consume` with semaphore block."""
+ async with self.limiter:
+ await self.consume(msg)
+
+ async def _put_msg(self, msg: "Msg") -> None:
+ """Proxy method to put msg into in-memory queue with semaphore block."""
+ async with self.limiter:
+ await self.send_stream.send(msg)
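
`ConcurrentMixin` pairs a bounded `anyio` memory object stream with a semaphore so at most `max_workers` handlers run at once; the same semaphore also throttles `_put_msg` on the producer side. A self-contained sketch of that pattern with illustrative numbers:

```python
import anyio

MAX_WORKERS = 3

async def consume(msg: int) -> None:
    await anyio.sleep(0.1)  # pretend to do real work
    print("handled", msg)

async def main() -> None:
    send, receive = anyio.create_memory_object_stream(max_buffer_size=MAX_WORKERS)
    limiter = anyio.Semaphore(MAX_WORKERS)

    async def guarded_consume(msg: int) -> None:
        async with limiter:  # at most MAX_WORKERS handlers in flight
            await consume(msg)

    async def serve_queue() -> None:
        # Endless loop: fan queued messages out onto the task group.
        async with anyio.create_task_group() as tg:
            async for msg in receive:
                tg.start_soon(guarded_consume, msg)

    async with anyio.create_task_group() as tg:
        tg.start_soon(serve_queue)
        for i in range(10):
            await send.send(i)  # blocks once the buffer holds MAX_WORKERS items
        send.close()  # ends the `async for` once the queue drains

anyio.run(main)
```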
diff --git a/faststream/_internal/subscriber/proto.py b/faststream/_internal/subscriber/proto.py
index a9feaf54cd..2f7152066e 100644
--- a/faststream/_internal/subscriber/proto.py
+++ b/faststream/_internal/subscriber/proto.py
@@ -1,24 +1,22 @@
from abc import abstractmethod
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Any, Callable, Optional
+from typing import TYPE_CHECKING, Any, Optional
from typing_extensions import Self, override
from faststream._internal.proto import Endpoint
from faststream._internal.subscriber.call_wrapper.proto import WrapperProto
from faststream._internal.types import MsgType
-from faststream.specification.proto.endpoint import EndpointSpecification
-from faststream.specification.schema.subscriber import SubscriberSpec
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream._internal.basic_types import AnyDict, Decorator, LoggerProto
+ from faststream._internal.basic_types import AnyDict
from faststream._internal.publisher.proto import (
BasePublisherProto,
ProducerProto,
)
- from faststream._internal.subscriber.call_item import HandlerItem
+ from faststream._internal.state import BrokerState, Pointer
from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
@@ -28,16 +26,17 @@
from faststream.message import StreamMessage
from faststream.response import Response
+ from .call_item import HandlerItem
+
class SubscriberProto(
- EndpointSpecification[SubscriberSpec],
Endpoint,
WrapperProto[MsgType],
):
calls: list["HandlerItem[MsgType]"]
running: bool
- _broker_dependencies: Iterable["Depends"]
+ _broker_dependencies: Iterable["Dependant"]
_broker_middlewares: Iterable["BrokerMiddleware[MsgType]"]
_producer: Optional["ProducerProto"]
@@ -56,17 +55,12 @@ def get_log_context(
def _setup( # type: ignore[override]
self,
*,
- logger: Optional["LoggerProto"],
- graceful_timeout: Optional[float],
+ extra_context: "AnyDict",
+ # broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
- producer: Optional["ProducerProto"],
- extra_context: "AnyDict",
- # FastDepends options
- apply_types: bool,
- is_validate: bool,
- _get_dependant: Optional[Callable[..., Any]],
- _call_decorators: Iterable["Decorator"],
+ # dependant args
+ state: "Pointer[BrokerState]",
) -> None: ...
@abstractmethod
@@ -106,5 +100,5 @@ def add_call(
parser_: "CustomCallable",
decoder_: "CustomCallable",
middlewares_: Iterable["SubscriberMiddleware[Any]"],
- dependencies_: Iterable["Depends"],
+ dependencies_: Iterable["Dependant"],
) -> Self: ...
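
Passing `Pointer[BrokerState]` means the subscriber holds a reference to a mutable cell rather than a snapshot of scalar options, so anything the broker sets during late setup is visible at consume time. A minimal generic pointer of the kind this API assumes (the real one lives in `faststream._internal.state` and may differ):

```python
from typing import Generic, TypeVar

T = TypeVar("T")

class Pointer(Generic[T]):
    """Mutable cell: every holder always sees the latest value."""

    def __init__(self, value: T) -> None:
        self._value = value

    def get(self) -> T:
        return self._value

    def set(self, value: T) -> None:
        self._value = value

state = Pointer({"graceful_timeout": None})
ref = state  # handed to a subscriber at registration time
state.set({"graceful_timeout": 30.0})  # broker finishes its setup later
assert ref.get()["graceful_timeout"] == 30.0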
diff --git a/faststream/_internal/subscriber/specified.py b/faststream/_internal/subscriber/specified.py
new file mode 100644
index 0000000000..e419b388de
--- /dev/null
+++ b/faststream/_internal/subscriber/specified.py
@@ -0,0 +1,85 @@
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
+
+from faststream.exceptions import SetupError
+from faststream.specification.asyncapi.message import parse_handler_params
+from faststream.specification.asyncapi.utils import to_camelcase
+from faststream.specification.proto import EndpointSpecification
+from faststream.specification.schema import SubscriberSpec
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import (
+ MsgType,
+ )
+
+ from .call_item import HandlerItem
+
+
+class SpecificationSubscriber(
+ EndpointSpecification[SubscriberSpec],
+):
+ calls: list["HandlerItem[MsgType]"]
+
+ def __init__(
+ self,
+ *args: Any,
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ **kwargs: Any,
+ ) -> None:
+ self.calls = []
+
+ self.title_ = title_
+ self.description_ = description_
+ self.include_in_schema = include_in_schema
+
+ # Call next base class parent init
+ super().__init__(*args, **kwargs)
+
+ @property
+ def call_name(self) -> str:
+ """Returns the name of the handler call."""
+ if not self.calls:
+ return "Subscriber"
+
+ return to_camelcase(self.calls[0].call_name)
+
+ def get_default_description(self) -> Optional[str]:
+ """Returns the description of the handler."""
+ if not self.calls:
+ return None
+
+ return self.calls[0].description
+
+ def get_payloads(self) -> list[tuple["AnyDict", str]]:
+ """Get the payloads of the handler."""
+ payloads: list[tuple[AnyDict, str]] = []
+
+ for h in self.calls:
+ if h.dependant is None:
+ msg = "You should setup `Handler` at first."
+ raise SetupError(msg)
+
+ body = parse_handler_params(
+ h.dependant,
+ prefix=f"{self.title_ or self.call_name}:Message",
+ )
+
+ payloads.append((body, to_camelcase(h.call_name)))
+
+ if not self.calls:
+ payloads.append(
+ (
+ {
+ "title": f"{self.title_ or self.call_name}:Message:Payload",
+ },
+ to_camelcase(self.call_name),
+ ),
+ )
+
+ return payloads
diff --git a/faststream/_internal/subscriber/usecase.py b/faststream/_internal/subscriber/usecase.py
index 06c27a8142..3a8aa1227d 100644
--- a/faststream/_internal/subscriber/usecase.py
+++ b/faststream/_internal/subscriber/usecase.py
@@ -7,20 +7,20 @@
Any,
Callable,
Optional,
- Union,
overload,
)
from typing_extensions import Self, override
-from faststream._internal.context.repository import context
from faststream._internal.subscriber.call_item import HandlerItem
-from faststream._internal.subscriber.call_wrapper.call import HandlerCallWrapper
+from faststream._internal.subscriber.call_wrapper.call import (
+ HandlerCallWrapper,
+ ensure_call_wrapper,
+)
from faststream._internal.subscriber.proto import SubscriberProto
from faststream._internal.subscriber.utils import (
MultiLock,
default_filter,
- get_watcher_context,
resolve_custom_func,
)
from faststream._internal.types import (
@@ -30,19 +30,19 @@
)
from faststream._internal.utils.functions import sync_fake_context, to_async
from faststream.exceptions import SetupError, StopConsume, SubscriberNotFound
+from faststream.middlewares import AckPolicy, AcknowledgementMiddleware
+from faststream.middlewares.logging import CriticalLogMiddleware
from faststream.response import ensure_response
-from faststream.specification.asyncapi.message import parse_handler_params
-from faststream.specification.asyncapi.utils import to_camelcase
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream._internal.basic_types import AnyDict, Decorator, LoggerProto
+ from faststream._internal.basic_types import AnyDict, Decorator
+ from faststream._internal.context.repository import ContextRepo
from faststream._internal.publisher.proto import (
BasePublisherProto,
- ProducerProto,
)
- from faststream._internal.setup import SetupState
+ from faststream._internal.state import BrokerState, Pointer
from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
@@ -69,7 +69,7 @@ def __init__(
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
middlewares: Iterable["SubscriberMiddleware[Any]"],
- dependencies: Iterable["Depends"],
+ dependencies: Iterable["Dependant"],
) -> None:
self.parser = parser
self.decoder = decoder
@@ -85,24 +85,19 @@ class SubscriberUsecase(SubscriberProto[MsgType]):
extra_context: "AnyDict"
graceful_timeout: Optional[float]
- _broker_dependencies: Iterable["Depends"]
+ _broker_dependencies: Iterable["Dependant"]
_call_options: Optional["_CallOptions"]
_call_decorators: Iterable["Decorator"]
def __init__(
self,
*,
- no_ack: bool,
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- # AsyncAPI information
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
+ ack_policy: AckPolicy,
) -> None:
"""Initialize a new instance of the class."""
self.calls = []
@@ -110,12 +105,11 @@ def __init__(
self._parser = default_parser
self._decoder = default_decoder
self._no_reply = no_reply
- # Watcher args
- self._no_ack = no_ack
- self._retry = retry
+ self.ack_policy = ack_policy
self._call_options = None
self._call_decorators = ()
+
self.running = False
self.lock = sync_fake_context()
@@ -124,16 +118,9 @@ def __init__(
self._broker_middlewares = broker_middlewares
# register in setup later
- self._producer = None
- self.graceful_timeout = None
self.extra_context = {}
self.extra_watcher_options = {}
- # AsyncAPI
- self.title_ = title_
- self.description_ = description_
- self.include_in_schema = include_in_schema
-
def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
self._broker_middlewares = (*self._broker_middlewares, middleware)
@@ -141,24 +128,18 @@ def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None:
def _setup( # type: ignore[override]
self,
*,
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- state: "SetupState",
+ state: "Pointer[BrokerState]",
) -> None:
- self.lock = MultiLock()
+ # TODO: add EmptyBrokerState to init
+ self._state = state
- self._producer = producer
- self.graceful_timeout = graceful_timeout
self.extra_context = extra_context
- self.watcher = get_watcher_context(logger, self._no_ack, self._retry)
-
for call in self.calls:
if parser := call.item_parser or broker_parser:
async_parser = resolve_custom_func(to_async(parser), self._parser)
@@ -176,14 +157,9 @@ def _setup( # type: ignore[override]
call._setup(
parser=async_parser,
decoder=async_decoder,
- apply_types=state.depends_params.apply_types,
- is_validate=state.depends_params.is_validate,
- _get_dependant=state.depends_params.get_dependent,
- _call_decorators=(
- *self._call_decorators,
- *state.depends_params.call_decorators,
- ),
+ state=state,
broker_dependencies=self._broker_dependencies,
+ _call_decorators=self._call_decorators,
)
call.handler.refresh(with_mock=False)
@@ -191,6 +167,8 @@ def _setup( # type: ignore[override]
@abstractmethod
async def start(self) -> None:
"""Start the handler."""
+ self.lock = MultiLock()
+
self.running = True
@abstractmethod
@@ -201,7 +179,7 @@ async def close(self) -> None:
"""
self.running = False
if isinstance(self.lock, MultiLock):
- await self.lock.wait_release(self.graceful_timeout)
+ await self.lock.wait_release(self._state.get().graceful_timeout)
def add_call(
self,
@@ -209,7 +187,7 @@ def add_call(
parser_: Optional["CustomCallable"],
decoder_: Optional["CustomCallable"],
middlewares_: Iterable["SubscriberMiddleware[Any]"],
- dependencies_: Iterable["Depends"],
+ dependencies_: Iterable["Dependant"],
) -> Self:
self._call_options = _CallOptions(
parser=parser_,
@@ -228,7 +206,7 @@ def __call__(
parser: Optional["CustomCallable"] = None,
decoder: Optional["CustomCallable"] = None,
middlewares: Iterable["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
) -> Callable[
[Callable[P_HandlerParams, T_HandlerReturn]],
"HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]",
@@ -243,7 +221,7 @@ def __call__(
parser: Optional["CustomCallable"] = None,
decoder: Optional["CustomCallable"] = None,
middlewares: Iterable["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]": ...
def __call__(
@@ -254,7 +232,7 @@ def __call__(
parser: Optional["CustomCallable"] = None,
decoder: Optional["CustomCallable"] = None,
middlewares: Iterable["SubscriberMiddleware[Any]"] = (),
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
) -> Any:
if (options := self._call_options) is None:
msg = (
@@ -271,8 +249,8 @@ def __call__(
def real_wrapper(
func: Callable[P_HandlerParams, T_HandlerReturn],
) -> "HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn]":
- handler = HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn](
- func,
+ handler: HandlerCallWrapper[MsgType, P_HandlerParams, T_HandlerReturn] = (
+ ensure_call_wrapper(func)
)
self.calls.append(
HandlerItem[MsgType](
@@ -308,7 +286,7 @@ async def consume(self, msg: MsgType) -> Any:
# Stop handler at `exit()` call
await self.close()
- if app := context.get("app"):
+ if app := self._state.get().di_state.context.get("app"):
app.exit()
except Exception: # nosec B110
@@ -317,19 +295,22 @@ async def consume(self, msg: MsgType) -> Any:
async def process_message(self, msg: MsgType) -> "Response":
"""Execute all message processing stages."""
+ broker_state = self._state.get()
+ context: ContextRepo = broker_state.di_state.context
+ logger_state = broker_state.logger_state
+
async with AsyncExitStack() as stack:
stack.enter_context(self.lock)
# Enter context before middlewares
+ stack.enter_context(context.scope("logger", logger_state.logger.logger))
for k, v in self.extra_context.items():
stack.enter_context(context.scope(k, v))
- stack.enter_context(context.scope("handler_", self))
-
# enter all middlewares
middlewares: list[BaseMiddleware] = []
- for base_m in self._broker_middlewares:
- middleware = base_m(msg)
+ for base_m in self.__build__middlewares_stack():
+ middleware = base_m(msg, context=context)
middlewares.append(middleware)
await middleware.__aenter__()
@@ -343,15 +324,6 @@ async def process_message(self, msg: MsgType) -> "Response":
break
if message is not None:
- # Acknowledgement scope
- # TODO: move it to scope enter at `retry` option deprecation
- await stack.enter_async_context(
- self.watcher(
- message,
- **self.extra_watcher_options,
- ),
- )
-
stack.enter_context(
context.scope("log_context", self.get_log_context(message)),
)
@@ -376,10 +348,8 @@ async def process_message(self, msg: MsgType) -> "Response":
self.__get_response_publisher(message),
h.handler._publishers,
):
- await p.publish(
- result_msg.body,
- **result_msg.as_publish_kwargs(),
- # publisher middlewares
+ await p._publish(
+ result_msg.as_publish_command(),
_extra_middlewares=(m.publish_scope for m in middlewares),
)
@@ -391,6 +361,7 @@ async def process_message(self, msg: MsgType) -> "Response":
for m in middlewares:
stack.push_async_exit(m.__aexit__)
+ # Re-raise so tests can catch it
if parsing_error:
raise parsing_error
@@ -400,6 +371,28 @@ async def process_message(self, msg: MsgType) -> "Response":
# An error was raised and processed by some middleware
return ensure_response(None)
+ def __build__middlewares_stack(self) -> tuple["BaseMiddleware", ...]:
+ logger_state = self._state.get().logger_state
+
+ if self.ack_policy is AckPolicy.DO_NOTHING:
+ broker_middlewares = (
+ CriticalLogMiddleware(logger_state),
+ *self._broker_middlewares,
+ )
+
+ else:
+ broker_middlewares = (
+ AcknowledgementMiddleware(
+ logger=logger_state,
+ ack_policy=self.ack_policy,
+ extra_options=self.extra_watcher_options,
+ ),
+ CriticalLogMiddleware(logger_state),
+ *self._broker_middlewares,
+ )
+
+ return broker_middlewares
+
def __get_response_publisher(
self,
message: "StreamMessage[MsgType]",
@@ -417,48 +410,3 @@ def get_log_context(
return {
"message_id": getattr(message, "message_id", ""),
}
-
- # AsyncAPI methods
-
- @property
- def call_name(self) -> str:
- """Returns the name of the handler call."""
- if not self.calls:
- return "Subscriber"
-
- return to_camelcase(self.calls[0].call_name)
-
- def get_default_description(self) -> Optional[str]:
- """Returns the description of the handler."""
- if not self.calls: # pragma: no cover
- return None
-
- return self.calls[0].description
-
- def get_payloads(self) -> list[tuple["AnyDict", str]]:
- """Get the payloads of the handler."""
- payloads: list[tuple[AnyDict, str]] = []
-
- for h in self.calls:
- if h.dependant is None:
- msg = "You should setup `Handler` at first."
- raise SetupError(msg)
-
- body = parse_handler_params(
- h.dependant,
- prefix=f"{self.title_ or self.call_name}:Message",
- )
-
- payloads.append((body, to_camelcase(h.call_name)))
-
- if not self.calls:
- payloads.append(
- (
- {
- "title": f"{self.title_ or self.call_name}:Message:Payload",
- },
- to_camelcase(self.call_name),
- ),
- )
-
- return payloads
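
`__build__middlewares_stack` turns acknowledgement from a special watcher context into an ordinary middleware: it is prepended only when the policy asks for it, and it sits outermost so it observes the final outcome of every inner stage. A toy version of that conditional stacking (the classes are stand-ins, not the real `AcknowledgementMiddleware`/`CriticalLogMiddleware`):

```python
from enum import Enum

class AckPolicy(Enum):
    ACK = "ack"
    DO_NOTHING = "do_nothing"

class CriticalLog:  # stand-in middlewares
    name = "log"

class Acknowledgement:
    name = "ack"

def build_stack(policy: AckPolicy, user_middlewares: tuple) -> tuple:
    base = (CriticalLog(), *user_middlewares)
    if policy is AckPolicy.DO_NOTHING:
        return base
    # Ack wraps everything else, so it sees the final result of each message.
    return (Acknowledgement(), *base)

print([m.name for m in build_stack(AckPolicy.ACK, ())])         # ['ack', 'log']
print([m.name for m in build_stack(AckPolicy.DO_NOTHING, ())])  # ['log']
```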
diff --git a/faststream/_internal/subscriber/utils.py b/faststream/_internal/subscriber/utils.py
index f3099b6490..213a52c414 100644
--- a/faststream/_internal/subscriber/utils.py
+++ b/faststream/_internal/subscriber/utils.py
@@ -1,7 +1,7 @@
import asyncio
import inspect
from collections.abc import Awaitable, Iterable
-from contextlib import AbstractAsyncContextManager, AsyncExitStack, suppress
+from contextlib import AsyncExitStack, suppress
from functools import partial
from typing import (
TYPE_CHECKING,
@@ -15,49 +15,48 @@
import anyio
from typing_extensions import Literal, Self, overload
-from faststream._internal.subscriber.acknowledgement_watcher import (
- WatcherContext,
- get_watcher,
-)
from faststream._internal.types import MsgType
-from faststream._internal.utils.functions import fake_context, return_input, to_async
+from faststream._internal.utils.functions import return_input, to_async
+from faststream.message.source_type import SourceType
if TYPE_CHECKING:
from types import TracebackType
- from faststream._internal.basic_types import LoggerProto
from faststream._internal.types import (
AsyncCallable,
- BrokerMiddleware,
CustomCallable,
SyncCallable,
)
from faststream.message import StreamMessage
+ from faststream.middlewares import BaseMiddleware
@overload
async def process_msg(
msg: Literal[None],
- middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ middlewares: Iterable["BaseMiddleware"],
parser: Callable[[MsgType], Awaitable["StreamMessage[MsgType]"]],
decoder: Callable[["StreamMessage[MsgType]"], "Any"],
+ source_type: SourceType = SourceType.CONSUME,
) -> None: ...
@overload
async def process_msg(
msg: MsgType,
- middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ middlewares: Iterable["BaseMiddleware"],
parser: Callable[[MsgType], Awaitable["StreamMessage[MsgType]"]],
decoder: Callable[["StreamMessage[MsgType]"], "Any"],
+ source_type: SourceType = SourceType.CONSUME,
) -> "StreamMessage[MsgType]": ...
async def process_msg(
msg: Optional[MsgType],
- middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ middlewares: Iterable["BaseMiddleware"],
parser: Callable[[MsgType], Awaitable["StreamMessage[MsgType]"]],
decoder: Callable[["StreamMessage[MsgType]"], "Any"],
+ source_type: SourceType = SourceType.CONSUME,
) -> Optional["StreamMessage[MsgType]"]:
if msg is None:
return None
@@ -69,12 +68,12 @@ async def process_msg(
] = return_input
for m in middlewares:
- mid = m(msg)
- await stack.enter_async_context(mid)
- return_msg = partial(mid.consume_scope, return_msg)
+ await stack.enter_async_context(m)
+ return_msg = partial(m.consume_scope, return_msg)
parsed_msg = await parser(msg)
- parsed_msg._decoded_body = await decoder(parsed_msg)
+ parsed_msg._source_type = source_type
+ parsed_msg.set_decoder(decoder)
return await return_msg(parsed_msg)
msg = "unreachable"
@@ -86,24 +85,6 @@ async def default_filter(msg: "StreamMessage[Any]") -> bool:
return not msg.processed
-def get_watcher_context(
- logger: Optional["LoggerProto"],
- no_ack: bool,
- retry: Union[bool, int],
- **extra_options: Any,
-) -> Callable[..., "AbstractAsyncContextManager[None]"]:
- """Create Acknowledgement scope."""
- if no_ack:
- return fake_context
-
- return partial(
- WatcherContext,
- watcher=get_watcher(logger, retry),
- logger=logger,
- **extra_options,
- )
-
-
class MultiLock:
"""A class representing a multi lock."""
diff --git a/faststream/_internal/testing/broker.py b/faststream/_internal/testing/broker.py
index 7e33fd6ab5..ac57846c80 100644
--- a/faststream/_internal/testing/broker.py
+++ b/faststream/_internal/testing/broker.py
@@ -1,6 +1,6 @@
import warnings
from abc import abstractmethod
-from collections.abc import AsyncGenerator, Generator
+from collections.abc import AsyncGenerator, Generator, Iterator
from contextlib import asynccontextmanager, contextmanager
from functools import partial
from typing import (
@@ -14,6 +14,8 @@
from unittest.mock import MagicMock
from faststream._internal.broker.broker import BrokerUsecase
+from faststream._internal.state.logger.logger_proxy import RealLoggerObject
+from faststream._internal.subscriber.utils import MultiLock
from faststream._internal.testing.app import TestApp
from faststream._internal.testing.ast import is_contains_context_name
from faststream._internal.utils.functions import sync_fake_context
@@ -67,8 +69,13 @@ async def __aenter__(self) -> Broker:
self._ctx = self._create_ctx()
return await self._ctx.__aenter__()
- async def __aexit__(self, *args: object) -> None:
- await self._ctx.__aexit__(*args)
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> None:
+ await self._ctx.__aexit__(exc_type, exc_val, exc_tb)
@asynccontextmanager
async def _create_ctx(self) -> AsyncGenerator[Broker, None]:
@@ -87,6 +94,24 @@ async def _create_ctx(self) -> AsyncGenerator[Broker, None]:
finally:
self._fake_close(self.broker)
+ @contextmanager
+ def _patch_producer(self, broker: Broker) -> Iterator[None]:
+ raise NotImplementedError
+
+ @contextmanager
+ def _patch_logger(self, broker: Broker) -> Iterator[None]:
+ state = broker._state.get()
+ state._setup_logger_state()
+
+ logger_state = state.logger_state
+ old_log_object = logger_state.logger
+
+ logger_state.logger = RealLoggerObject(MagicMock())
+ try:
+ yield
+ finally:
+ logger_state.logger = old_log_object
+
@contextmanager
def _patch_broker(self, broker: Broker) -> Generator[None, None, None]:
with (
@@ -109,11 +134,8 @@ def _patch_broker(self, broker: Broker) -> Generator[None, None, None]:
"_connection",
new=None,
),
- mock.patch.object(
- broker,
- "_producer",
- new=None,
- ),
+ self._patch_producer(broker),
+ self._patch_logger(broker),
mock.patch.object(
broker,
"ping",
@@ -159,6 +181,7 @@ async def publisher_response_subscriber(msg: Any) -> None:
for subscriber in broker._subscribers:
subscriber.running = True
+ subscriber.lock = MultiLock()
def _fake_close(
self,
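
`_patch_logger` follows the classic save/replace/restore shape around a `MagicMock`, so tests can assert on log calls and the real logger always comes back, even on failure. The same idea as a standalone context manager, with a `SimpleNamespace` standing in for the broker's logger state:

```python
from contextlib import contextmanager
from types import SimpleNamespace
from typing import Iterator
from unittest.mock import MagicMock

@contextmanager
def patch_logger(state: SimpleNamespace) -> Iterator[MagicMock]:
    old = state.logger
    mock = MagicMock()
    state.logger = mock
    try:
        yield mock  # tests can assert on calls made through the mock
    finally:
        state.logger = old  # always restored, even if the test raises

state = SimpleNamespace(logger=print)
with patch_logger(state) as mock:
    state.logger("hello")
    mock.assert_called_once_with("hello")
assert state.logger is print
```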
diff --git a/faststream/_internal/types.py b/faststream/_internal/types.py
index f7c47c9461..ea1ecd3dbf 100644
--- a/faststream/_internal/types.py
+++ b/faststream/_internal/types.py
@@ -10,9 +10,11 @@
from typing_extensions import ParamSpec, TypeAlias
-from faststream._internal.basic_types import AsyncFunc, AsyncFuncAny
+from faststream._internal.basic_types import AsyncFuncAny
+from faststream._internal.context.repository import ContextRepo
from faststream.message import StreamMessage
from faststream.middlewares import BaseMiddleware
+from faststream.response.response import PublishCommand
MsgType = TypeVar("MsgType")
StreamMsg = TypeVar("StreamMsg", bound=StreamMessage[Any])
@@ -30,14 +32,11 @@
[Any],
Any,
]
-AsyncCallable: TypeAlias = Callable[
- [Any],
- Awaitable[Any],
-]
+AsyncCallable: TypeAlias = AsyncFuncAny
AsyncCustomCallable: TypeAlias = Union[
- AsyncCallable,
+ AsyncFuncAny,
Callable[
- [Any, AsyncCallable],
+ [Any, AsyncFuncAny],
Awaitable[Any],
],
]
@@ -64,7 +63,18 @@
]
-BrokerMiddleware: TypeAlias = Callable[[Optional[MsgType]], BaseMiddleware]
+class BrokerMiddleware(Protocol[MsgType]):
+ """Middleware builder interface."""
+
+ def __call__(
+ self,
+ msg: Optional[MsgType],
+ /,
+ *,
+ context: ContextRepo,
+ ) -> BaseMiddleware: ...
+
+
SubscriberMiddleware: TypeAlias = Callable[
[AsyncFuncAny, MsgType],
MsgType,
@@ -76,7 +86,6 @@ class PublisherMiddleware(Protocol):
def __call__(
self,
- call_next: AsyncFunc,
- *__args: Any,
- **__kwargs: Any,
+ call_next: Callable[[PublishCommand], Awaitable[PublishCommand]],
+ msg: PublishCommand,
) -> Any: ...
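
Promoting `BrokerMiddleware` from a bare `Callable` alias to a `Protocol` makes the new keyword-only `context` parameter type-checkable. A minimal conforming factory under that shape; `ContextRepo` here is an empty stand-in:

```python
from typing import Any, Optional, Protocol

class ContextRepo:
    """Stand-in for faststream's context repository."""

class Middleware:
    def __init__(self, msg: Optional[Any], /, *, context: ContextRepo) -> None:
        self.msg = msg
        self.context = context

class BrokerMiddleware(Protocol):
    def __call__(
        self,
        msg: Optional[Any],
        /,  # the raw message is positional-only
        *,
        context: ContextRepo,  # context must be passed by keyword
    ) -> Middleware: ...

def build(factory: BrokerMiddleware, msg: Any, ctx: ContextRepo) -> Middleware:
    return factory(msg, context=ctx)

mw = build(Middleware, "raw-msg", ContextRepo())
assert mw.msg == "raw-msg"
```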
diff --git a/faststream/_internal/utils/functions.py b/faststream/_internal/utils/functions.py
index 6f24aad0b6..be90e6f0a2 100644
--- a/faststream/_internal/utils/functions.py
+++ b/faststream/_internal/utils/functions.py
@@ -5,13 +5,19 @@
Any,
Callable,
Optional,
+ TypeVar,
Union,
overload,
)
import anyio
from fast_depends.core import CallModel
-from fast_depends.utils import run_async as call_or_await
+from fast_depends.utils import (
+ is_coroutine_callable,
+ run_async as call_or_await,
+ run_in_threadpool,
+)
+from typing_extensions import ParamSpec
from faststream._internal.basic_types import F_Return, F_Spec
@@ -23,6 +29,9 @@
"to_async",
)
+P = ParamSpec("P")
+T = TypeVar("T")
+
@overload
def to_async(
@@ -43,11 +52,13 @@ def to_async(
],
) -> Callable[F_Spec, Awaitable[F_Return]]:
"""Converts a synchronous function to an asynchronous function."""
+ if is_coroutine_callable(func):
+ return func
@wraps(func)
async def to_async_wrapper(*args: F_Spec.args, **kwargs: F_Spec.kwargs) -> F_Return:
"""Wraps a function to make it asynchronous."""
- return await call_or_await(func, *args, **kwargs)
+ return await run_in_threadpool(func, *args, **kwargs)
return to_async_wrapper
@@ -57,7 +68,7 @@ def timeout_scope(
raise_timeout: bool = False,
) -> AbstractContextManager[anyio.CancelScope]:
scope: Callable[[Optional[float]], AbstractContextManager[anyio.CancelScope]]
- scope = anyio.fail_after if raise_timeout else anyio.move_on_after # type: ignore[assignment]
+ scope = anyio.fail_after if raise_timeout else anyio.move_on_after
return scope(timeout)
@@ -72,10 +83,8 @@ def sync_fake_context(*args: Any, **kwargs: Any) -> Iterator[None]:
yield None
-def drop_response_type(
- model: CallModel[F_Spec, F_Return],
-) -> CallModel[F_Spec, F_Return]:
- model.response_model = None
+def drop_response_type(model: CallModel) -> CallModel:
+ model.serializer.response_callback = None
return model
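
With `is_coroutine_callable`, `to_async` now short-circuits for coroutine functions and moves truly blocking callables into a worker thread via `run_in_threadpool` instead of calling them inline. A sketch of the same dispatch using `inspect` and `anyio.to_thread` directly (FastStream itself goes through the `fast_depends.utils` helpers):

```python
import inspect
from functools import wraps
from typing import Any, Awaitable, Callable

import anyio
from anyio import to_thread

def to_async(func: Callable[..., Any]) -> Callable[..., Awaitable[Any]]:
    if inspect.iscoroutinefunction(func):
        return func  # already async: return unchanged, no wrapper at all

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        # Run blocking code in a worker thread so the event loop stays responsive.
        return await to_thread.run_sync(lambda: func(*args, **kwargs))

    return wrapper

def blocking() -> str:
    return "ran in a worker thread"

async def main() -> None:
    print(await to_async(blocking)())

anyio.run(main)
```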
diff --git a/faststream/app.py b/faststream/app.py
index c2e7ba2a10..6e31e12b62 100644
--- a/faststream/app.py
+++ b/faststream/app.py
@@ -15,14 +15,14 @@
from faststream._internal.basic_types import Lifespan, LoggerProto
from faststream._internal.broker.broker import BrokerUsecase
from faststream._internal.cli.supervisors.utils import set_exit
+from faststream._internal.constants import EMPTY
from faststream._internal.log import logger
from faststream.asgi.app import AsgiFastStream
-P_HookParams = ParamSpec("P_HookParams")
-T_HookReturn = TypeVar("T_HookReturn")
-
-
if TYPE_CHECKING:
+ from fast_depends import Provider
+ from fast_depends.library.serializer import SerializerProto
+
from faststream._internal.basic_types import (
AnyCallable,
Lifespan,
@@ -32,16 +32,21 @@
from faststream._internal.broker.broker import BrokerUsecase
from faststream.asgi.types import ASGIApp
+P_HookParams = ParamSpec("P_HookParams")
+T_HookReturn = TypeVar("T_HookReturn")
+
class FastStream(Application):
"""A class representing a FastStream application."""
def __init__(
self,
- broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ broker: "BrokerUsecase[Any, Any]",
/,
# regular broker args
logger: Optional["LoggerProto"] = logger,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
lifespan: Optional["Lifespan"] = None,
on_startup: Sequence["AnyCallable"] = (),
after_startup: Sequence["AnyCallable"] = (),
@@ -49,31 +54,35 @@ def __init__(
after_shutdown: Sequence["AnyCallable"] = (),
) -> None:
super().__init__(
- broker=broker,
+ broker,
logger=logger,
+ provider=provider,
+ serializer=serializer,
lifespan=lifespan,
on_startup=on_startup,
after_startup=after_startup,
on_shutdown=on_shutdown,
after_shutdown=after_shutdown,
)
- self._should_exit = anyio.Event()
+ self._should_exit = False
async def run(
self,
log_level: int = logging.INFO,
run_extra_options: Optional[dict[str, "SettingField"]] = None,
+ sleep_time: float = 0.1,
) -> None:
"""Run FastStream Application."""
- assert self.broker, "You should setup a broker" # nosec B101
-
set_exit(lambda *_: self.exit(), sync=False)
async with self.lifespan_context(**(run_extra_options or {})):
try:
async with anyio.create_task_group() as tg:
tg.start_soon(self._startup, log_level, run_extra_options)
- await self._should_exit.wait()
+
+ while not self._should_exit: # noqa: ASYNC110 (requested by creator)
+ await anyio.sleep(sleep_time)
+
await self._shutdown(log_level)
tg.cancel_scope.cancel()
except ExceptionGroup as e:
@@ -82,7 +91,7 @@ async def run(
def exit(self) -> None:
"""Stop application manually."""
- self._should_exit.set()
+ self._should_exit = True
def as_asgi(
self,
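
The `# noqa: ASYNC110 (requested by creator)` line above trades an `anyio.Event` for a plain boolean polled in a loop, presumably so `exit()` stays a synchronous call that is safe from signal handlers and needs no event loop at construction time (my reading; the diff does not state the rationale). A condensed, runnable sketch of the loop's shape; the real `run` also performs shutdown hooks before cancelling:

```python
import anyio

class App:
    def __init__(self) -> None:
        self._should_exit = False  # plain flag: no event loop needed at construction

    def exit(self) -> None:
        self._should_exit = True   # synchronous and thread-agnostic

    async def run(self, sleep_time: float = 0.1) -> None:
        async with anyio.create_task_group() as tg:
            tg.start_soon(self._serve)
            while not self._should_exit:  # bounded busy-wait instead of Event.wait()
                await anyio.sleep(sleep_time)
            tg.cancel_scope.cancel()

    async def _serve(self) -> None:
        while True:
            await anyio.sleep(1)  # stand-in for broker work

async def main() -> None:
    app = App()
    async with anyio.create_task_group() as tg:
        tg.start_soon(app.run)
        await anyio.sleep(0.3)
        app.exit()

anyio.run(main)
```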
diff --git a/faststream/asgi/app.py b/faststream/asgi/app.py
index 7258bccef2..b5293ea508 100644
--- a/faststream/asgi/app.py
+++ b/faststream/asgi/app.py
@@ -15,6 +15,7 @@
from faststream._internal._compat import HAS_TYPER, ExceptionGroup
from faststream._internal.application import Application
+from faststream._internal.constants import EMPTY
from faststream._internal.log import logger
from faststream.asgi.response import AsgiResponse
from faststream.asgi.websocket import WebSocketClose
@@ -24,6 +25,8 @@
from types import FrameType
from anyio.abc import TaskStatus
+ from fast_depends import Provider
+ from fast_depends.library.serializer import SerializerProto
from faststream._internal.basic_types import (
AnyCallable,
@@ -75,11 +78,13 @@ class AsgiFastStream(Application):
def __init__(
self,
- broker: Optional["BrokerUsecase[Any, Any]"] = None,
+ broker: "BrokerUsecase[Any, Any]",
/,
asgi_routes: Sequence[tuple[str, "ASGIApp"]] = (),
# regular broker args
logger: Optional["LoggerProto"] = logger,
+ provider: Optional["Provider"] = None,
+ serializer: Optional["SerializerProto"] = EMPTY,
lifespan: Optional["Lifespan"] = None,
# hooks
on_startup: Sequence["AnyCallable"] = (),
@@ -88,8 +93,10 @@ def __init__(
after_shutdown: Sequence["AnyCallable"] = (),
) -> None:
super().__init__(
- broker=broker,
+ broker,
logger=logger,
+ provider=provider,
+ serializer=serializer,
lifespan=lifespan,
on_startup=on_startup,
after_startup=after_startup,
diff --git a/faststream/confluent/annotations.py b/faststream/confluent/annotations.py
index e3c1f82af9..3ee4a9bf6f 100644
--- a/faststream/confluent/annotations.py
+++ b/faststream/confluent/annotations.py
@@ -5,7 +5,6 @@
from faststream.confluent.broker import KafkaBroker as KB
from faststream.confluent.message import KafkaMessage as KM
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
-from faststream.params import NoCast
__all__ = (
"ContextRepo",
@@ -13,7 +12,6 @@
"KafkaMessage",
"KafkaProducer",
"Logger",
- "NoCast",
)
KafkaMessage = Annotated[KM, Context("message")]
diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py
index 89ef3e5ed8..53b487b031 100644
--- a/faststream/confluent/broker/broker.py
+++ b/faststream/confluent/broker/broker.py
@@ -23,23 +23,25 @@
from faststream.confluent.client import AsyncConfluentConsumer, AsyncConfluentProducer
from faststream.confluent.config import ConfluentFastConfig
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
+from faststream.confluent.response import KafkaPublishCommand
from faststream.confluent.schemas.params import ConsumerConnectionParams
from faststream.confluent.security import parse_security
-from faststream.exceptions import NOT_CONNECTED_YET
from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
from .logging import make_kafka_logger_state
from .registrator import KafkaRegistrator
if TYPE_CHECKING:
+ import asyncio
from types import TracebackType
from confluent_kafka import Message
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from faststream._internal.basic_types import (
AnyDict,
- AsyncFunc,
Decorator,
LoggerProto,
SendableMessage,
@@ -49,6 +51,7 @@
CustomCallable,
)
from faststream.confluent.config import ConfluentConfig
+ from faststream.confluent.message import KafkaMessage
from faststream.security import BaseSecurity
from faststream.specification.schema.extra import Tag, TagDict
@@ -66,7 +69,7 @@ class KafkaBroker(
],
):
url: list[str]
- _producer: Optional[AsyncConfluentFastProducer]
+ _producer: AsyncConfluentFastProducer
def __init__(
self,
@@ -265,7 +268,7 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
@@ -322,10 +325,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -396,12 +396,19 @@ def __init__(
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
)
self.client_id = client_id
- self._producer = None
+
self.config = ConfluentFastConfig(config)
+ self._state.patch_value(
+ producer=AsyncConfluentFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
+
async def close(
self,
exc_type: Optional[type[BaseException]] = None,
@@ -410,9 +417,7 @@ async def close(
) -> None:
await super().close(exc_type, exc_val, exc_tb)
- if self._producer is not None: # pragma: no branch
- await self._producer.stop()
- self._producer = None
+ await self._producer.disconnect()
self._connection = None
@@ -445,17 +450,13 @@ async def _connect( # type: ignore[override]
config=self.config,
)
- self._producer = AsyncConfluentFastProducer(
- producer=native_producer,
- parser=self._parser,
- decoder=self._decoder,
- )
+ self._producer.connect(native_producer)
connection_kwargs, _ = filter_by_dict(ConsumerConnectionParams, kwargs)
return partial(
AsyncConfluentConsumer,
**connection_kwargs,
- logger=self._state.logger_state,
+ logger=self._state.get().logger_state,
config=self.config,
)
@@ -475,64 +476,90 @@ def _subscriber_setup_extra(self) -> "AnyDict":
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
+ message: Annotated[
+ "SendableMessage",
+ Doc("Message body to send."),
+ ],
+ topic: Annotated[
+ str,
+ Doc("Topic where the message will be published."),
+ ],
+ *,
+ key: Union[bytes, str, None] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- reply_to: str = "",
- no_confirm: bool = False,
- # extra options to be compatible with test client
- **kwargs: Any,
- ) -> None:
- correlation_id = correlation_id or gen_cor_id()
+ headers: Annotated[
+ Optional[dict[str, str]],
+ Doc("Message headers to store metainformation."),
+ ] = None,
+ correlation_id: Annotated[
+ Optional[str],
+ Doc(
+ "Manual message **correlation_id** setter. "
+ "**correlation_id** is a useful option to trace messages.",
+ ),
+ ] = None,
+ reply_to: Annotated[
+ str,
+ Doc("Reply message topic name to send response."),
+ ] = "",
+ no_confirm: Annotated[
+ bool,
+ Doc("Do not wait for Kafka publish confirmation."),
+ ] = False,
+ ) -> "asyncio.Future":
+ """Publish message directly.
+
+ This method allows you to publish a message in a way that is not documented in the AsyncAPI schema. You can use it
+ from other framework applications or to publish occasional messages.
- await super().publish(
+ For regular use, please use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead.
+ """
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
reply_to=reply_to,
no_confirm=no_confirm,
- **kwargs,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
@override
async def request( # type: ignore[override]
self,
message: "SendableMessage",
topic: str,
- key: Optional[bytes] = None,
+ *,
+ key: Union[bytes, str, None] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
timeout: float = 0.5,
- ) -> Optional[Any]:
- correlation_id = correlation_id or gen_cor_id()
-
- return await super().request(
+ ) -> "KafkaMessage":
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
timeout=timeout,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
)
+ msg: KafkaMessage = await super()._basic_request(cmd, producer=self._producer)
+ return msg
+
async def publish_batch(
self,
- *msgs: "SendableMessage",
+ *messages: "SendableMessage",
topic: str,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
@@ -541,38 +568,33 @@ async def publish_batch(
correlation_id: Optional[str] = None,
no_confirm: bool = False,
) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
- for m in self._middlewares:
- call = partial(m(None).publish_scope, call)
-
- await call(
- *msgs,
+ cmd = KafkaPublishCommand(
+ *messages,
topic=topic,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
reply_to=reply_to,
- correlation_id=correlation_id,
no_confirm=no_confirm,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish_batch(cmd, producer=self._producer)
+
@override
async def ping(self, timeout: Optional[float]) -> bool:
sleep_time = (timeout or 10) / 10
with anyio.move_on_after(timeout) as cancel_scope:
- if self._producer is None:
+ if not self._producer:
return False
while True:
if cancel_scope.cancel_called:
return False
- if await self._producer._producer.ping(timeout=timeout):
+ if await self._producer.ping(timeout=timeout):
return True
await anyio.sleep(sleep_time)
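
Every publish path above now funnels into a command object handed to `_basic_publish`, so middlewares and the producer receive one typed value instead of loose kwargs. A hedged sketch of the pattern; this `PublishCommand` is a simplified stand-in, not FastStream's real `faststream.response.PublishCommand`:

```python
import asyncio
from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Optional

class PublishType(Enum):
    PUBLISH = "publish"
    REQUEST = "request"

@dataclass
class PublishCommand:
    """All publish options travel together instead of as loose kwargs."""

    body: Any
    destination: str
    correlation_id: Optional[str] = None
    headers: dict = field(default_factory=dict)
    publish_type: PublishType = PublishType.PUBLISH

async def basic_publish(cmd: PublishCommand) -> None:
    # Middlewares and the producer all receive the same command object.
    print(f"-> {cmd.destination}: {cmd.body!r} ({cmd.publish_type.value})")

asyncio.run(basic_publish(PublishCommand("hi", destination="test-topic")))
```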
diff --git a/faststream/confluent/broker/logging.py b/faststream/confluent/broker/logging.py
index 276c571373..b4523d2b40 100644
--- a/faststream/confluent/broker/logging.py
+++ b/faststream/confluent/broker/logging.py
@@ -1,14 +1,16 @@
+import logging
from functools import partial
from typing import TYPE_CHECKING, Optional
from faststream._internal.log.logging import get_broker_logger
-from faststream._internal.setup.logger import (
+from faststream._internal.state.logger import (
DefaultLoggerStorage,
make_logger_state,
)
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
class KafkaParamsStorage(DefaultLoggerStorage):
@@ -21,6 +23,11 @@ def __init__(
self._max_topic_len = 4
self._max_group_len = 0
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
def setup_log_contest(self, params: "AnyDict") -> None:
self._max_topic_len = max(
(
@@ -35,7 +42,7 @@ def setup_log_contest(self, params: "AnyDict") -> None:
),
)
- def get_logger(self) -> Optional["LoggerProto"]:
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
message_id_ln = 10
# TODO: generate unique logger names to not share between brokers
@@ -58,10 +65,12 @@ def get_logger(self) -> Optional["LoggerProto"]:
f"%(message_id)-{message_id_ln}s ",
"- %(message)s",
)),
+ context=context,
+ log_level=self.logger_log_level,
)
make_kafka_logger_state = partial(
make_logger_state,
- default_storag_cls=KafkaParamsStorage,
+ default_storage_cls=KafkaParamsStorage,
)
diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py
index 823238e587..3bcc604719 100644
--- a/faststream/confluent/broker/registrator.py
+++ b/faststream/confluent/broker/registrator.py
@@ -13,13 +13,15 @@
from typing_extensions import Doc, override
from faststream._internal.broker.abc_broker import ABCBroker
-from faststream.confluent.publisher.publisher import SpecificationPublisher
+from faststream._internal.constants import EMPTY
+from faststream.confluent.publisher.factory import create_publisher
from faststream.confluent.subscriber.factory import create_subscriber
from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from confluent_kafka import Message
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.types import (
CustomCallable,
@@ -27,12 +29,12 @@
SubscriberMiddleware,
)
from faststream.confluent.message import KafkaMessage
- from faststream.confluent.publisher.publisher import (
+ from faststream.confluent.publisher.specified import (
SpecificationBatchPublisher,
SpecificationDefaultPublisher,
)
from faststream.confluent.schemas import TopicPartition
- from faststream.confluent.subscriber.subscriber import (
+ from faststream.confluent.subscriber.specified import (
SpecificationBatchSubscriber,
SpecificationDefaultSubscriber,
)
@@ -279,8 +281,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -294,14 +296,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -550,8 +548,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -565,14 +563,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -821,8 +815,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -836,14 +830,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1095,8 +1085,8 @@ def subscriber(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -1110,14 +1100,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1174,10 +1160,9 @@ def subscriber(
},
is_manual=not auto_commit,
# subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# Specification
title_=title,
@@ -1508,7 +1493,7 @@ def publisher(
Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`.
"""
- publisher = SpecificationPublisher.create(
+ publisher = create_publisher(
# batch flag
batch=batch,
# default args
@@ -1519,7 +1504,7 @@ def publisher(
headers=headers,
reply_to=reply_to,
# publisher-specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# Specification
title_=title,
@@ -1530,4 +1515,5 @@ def publisher(
if batch:
return cast("SpecificationBatchPublisher", super().publisher(publisher))
+
return cast("SpecificationDefaultPublisher", super().publisher(publisher))
diff --git a/faststream/confluent/client.py b/faststream/confluent/client.py
index 74a3bed571..7b7b0b8fe2 100644
--- a/faststream/confluent/client.py
+++ b/faststream/confluent/client.py
@@ -26,7 +26,7 @@
from typing_extensions import NotRequired, TypedDict
from faststream._internal.basic_types import AnyDict, LoggerProto
- from faststream._internal.setup.logger import LoggerState
+ from faststream._internal.state.logger import LoggerState
class _SendKwargs(TypedDict):
value: Optional[Union[str, bytes]]
@@ -134,7 +134,7 @@ async def send(
timestamp_ms: Optional[int] = None,
headers: Optional[list[tuple[str, Union[str, bytes]]]] = None,
no_confirm: bool = False,
- ) -> None:
+ ) -> "asyncio.Future":
"""Sends a single message to a Kafka topic."""
kwargs: _SendKwargs = {
"value": value,
@@ -164,6 +164,7 @@ def ack_callback(err: Any, msg: Optional[Message]) -> None:
if not no_confirm:
await result_future
+ return result_future
def create_batch(self) -> "BatchBuilder":
"""Creates a batch for sending multiple messages."""
diff --git a/faststream/confluent/fastapi/__init__.py b/faststream/confluent/fastapi/__init__.py
index dc7cb73000..21354fcf98 100644
--- a/faststream/confluent/fastapi/__init__.py
+++ b/faststream/confluent/fastapi/__init__.py
@@ -2,10 +2,11 @@
from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.confluent.broker import KafkaBroker as KB
-from faststream.confluent.fastapi.fastapi import KafkaRouter
from faststream.confluent.message import KafkaMessage as KM
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
+from .fastapi import KafkaRouter
+
__all__ = (
"Context",
"ContextRepo",
diff --git a/faststream/confluent/fastapi/fastapi.py b/faststream/confluent/fastapi/fastapi.py
index 0f9388beb8..197aa380af 100644
--- a/faststream/confluent/fastapi/fastapi.py
+++ b/faststream/confluent/fastapi/fastapi.py
@@ -25,6 +25,7 @@
from faststream._internal.constants import EMPTY
from faststream._internal.fastapi.router import StreamRouter
from faststream.confluent.broker.broker import KafkaBroker as KB
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from enum import Enum
@@ -42,17 +43,17 @@
)
from faststream.confluent.config import ConfluentConfig
from faststream.confluent.message import KafkaMessage
- from faststream.confluent.publisher.publisher import (
+ from faststream.confluent.publisher.specified import (
SpecificationBatchPublisher,
SpecificationDefaultPublisher,
)
from faststream.confluent.schemas import TopicPartition
- from faststream.confluent.subscriber.subscriber import (
+ from faststream.confluent.subscriber.specified import (
SpecificationBatchSubscriber,
SpecificationDefaultSubscriber,
)
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
Partition = TypeVar("Partition")
@@ -833,14 +834,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1607,14 +1604,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -2004,14 +1997,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -2187,8 +2176,7 @@ def subscriber(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
diff --git a/faststream/confluent/message.py b/faststream/confluent/message.py
index bdf0d5cb13..14ea16efc2 100644
--- a/faststream/confluent/message.py
+++ b/faststream/confluent/message.py
@@ -66,7 +66,7 @@ async def ack(self) -> None:
"""Acknowledge the Kafka message."""
if self.is_manual and not self.committed:
await self.consumer.commit()
- await super().ack()
+ await super().ack()
async def nack(self) -> None:
"""Reject the Kafka message."""
@@ -81,4 +81,4 @@ async def nack(self) -> None:
partition=raw_message.partition(),
offset=raw_message.offset(),
)
- await super().nack()
+ await super().nack()
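
The dedent here is behavioral: `super().ack()` and `super().nack()` now run unconditionally, while the Kafka commit/seek itself stays guarded. The resulting control flow of `ack`, paraphrased from the hunks above:

async def ack(self) -> None:
    if self.is_manual and not self.committed:
        await self.consumer.commit()  # commit the offset only for manual consumers
    await super().ack()               # always mark the message acknowledged
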
diff --git a/faststream/confluent/opentelemetry/provider.py b/faststream/confluent/opentelemetry/provider.py
index 8ebeea51d7..07031b5449 100644
--- a/faststream/confluent/opentelemetry/provider.py
+++ b/faststream/confluent/opentelemetry/provider.py
@@ -11,6 +11,7 @@
from confluent_kafka import Message
from faststream._internal.basic_types import AnyDict
+ from faststream.confluent.response import KafkaPublishCommand
from faststream.message import StreamMessage
@@ -20,29 +21,29 @@ class BaseConfluentTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType])
def __init__(self) -> None:
self.messaging_system = "kafka"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "KafkaPublishCommand",
) -> "AnyDict":
attrs = {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["topic"],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
- if (partition := kwargs.get("partition")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = partition
+ if cmd.partition is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = cmd.partition
- if (key := kwargs.get("key")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key
+ if cmd.key is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = cmd.key
return attrs
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "KafkaPublishCommand",
) -> str:
- return cast(str, kwargs["topic"])
+ return cmd.destination
class ConfluentTelemetrySettingsProvider(
diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py
index 85063b53b2..5a04cc2248 100644
--- a/faststream/confluent/parser.py
+++ b/faststream/confluent/parser.py
@@ -1,23 +1,30 @@
from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Union
-from faststream._internal.context.repository import context
-from faststream.confluent.message import FAKE_CONSUMER, KafkaMessage
from faststream.message import decode_message
+from .message import FAKE_CONSUMER, KafkaMessage
+
if TYPE_CHECKING:
from confluent_kafka import Message
from faststream._internal.basic_types import DecodedMessage
- from faststream.confluent.subscriber.usecase import LogicSubscriber
- from faststream.message import StreamMessage
+
+ from .message import ConsumerProtocol, StreamMessage
class AsyncConfluentParser:
"""A class to parse Kafka messages."""
- @staticmethod
+ def __init__(self, is_manual: bool = False) -> None:
+ self.is_manual = is_manual
+ self._consumer: ConsumerProtocol = FAKE_CONSUMER
+
+ def _setup(self, consumer: "ConsumerProtocol") -> None:
+ self._consumer = consumer
+
async def parse_message(
+ self,
message: "Message",
) -> KafkaMessage:
"""Parses a Kafka message."""
@@ -27,8 +34,6 @@ async def parse_message(
offset = message.offset()
_, timestamp = message.timestamp()
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
-
return KafkaMessage(
body=body,
headers=headers,
@@ -37,12 +42,12 @@ async def parse_message(
message_id=f"{offset}-{timestamp}",
correlation_id=headers.get("correlation_id"),
raw_message=message,
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
- is_manual=getattr(handler, "is_manual", True),
+ consumer=self._consumer,
+ is_manual=self.is_manual,
)
- @staticmethod
async def parse_message_batch(
+ self,
message: tuple["Message", ...],
) -> KafkaMessage:
"""Parses a batch of messages from a Kafka consumer."""
@@ -60,8 +65,6 @@ async def parse_message_batch(
_, first_timestamp = first.timestamp()
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
-
return KafkaMessage(
body=body,
headers=headers,
@@ -71,24 +74,23 @@ async def parse_message_batch(
message_id=f"{first.offset()}-{last.offset()}-{first_timestamp}",
correlation_id=headers.get("correlation_id"),
raw_message=message,
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
- is_manual=getattr(handler, "is_manual", True),
+ consumer=self._consumer,
+ is_manual=self.is_manual,
)
- @staticmethod
async def decode_message(
+ self,
msg: "StreamMessage[Message]",
) -> "DecodedMessage":
"""Decodes a message."""
return decode_message(msg)
- @classmethod
async def decode_message_batch(
- cls,
+ self,
msg: "StreamMessage[tuple[Message, ...]]",
) -> "DecodedMessage":
"""Decode a batch of messages."""
- return [decode_message(await cls.parse_message(m)) for m in msg.raw_message]
+ return [decode_message(await self.parse_message(m)) for m in msg.raw_message]
def _parse_msg_headers(
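
The parser is no longer a bag of static methods: each subscriber owns an `AsyncConfluentParser` instance and injects the real consumer through `_setup()` once the broker starts, replacing the old `context.get_local("handler_")` lookup. A wiring sketch:

from faststream.confluent.parser import AsyncConfluentParser

# One parser per subscriber; `is_manual` is now fixed at construction time.
parser = AsyncConfluentParser(is_manual=True)

# Messages parsed before the broker starts are bound to FAKE_CONSUMER,
# so ack()/nack() stay safe no-ops until the subscriber calls:
# parser._setup(real_consumer)
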
diff --git a/faststream/confluent/prometheus/__init__.py b/faststream/confluent/prometheus/__init__.py
new file mode 100644
index 0000000000..7498fa5ddc
--- /dev/null
+++ b/faststream/confluent/prometheus/__init__.py
@@ -0,0 +1,3 @@
+from faststream.confluent.prometheus.middleware import KafkaPrometheusMiddleware
+
+__all__ = ("KafkaPrometheusMiddleware",)
diff --git a/faststream/confluent/prometheus/middleware.py b/faststream/confluent/prometheus/middleware.py
new file mode 100644
index 0000000000..d294522330
--- /dev/null
+++ b/faststream/confluent/prometheus/middleware.py
@@ -0,0 +1,27 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.constants import EMPTY
+from faststream.confluent.prometheus.provider import settings_provider_factory
+from faststream.prometheus.middleware import PrometheusMiddleware
+
+if TYPE_CHECKING:
+ from prometheus_client import CollectorRegistry
+
+
+class KafkaPrometheusMiddleware(PrometheusMiddleware):
+ def __init__(
+ self,
+ *,
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ super().__init__(
+ settings_provider_factory=settings_provider_factory,
+ registry=registry,
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
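
The new middleware plugs in like the other Prometheus integrations: build it with a registry and pass it to the broker. A usage sketch, assuming `KafkaBroker` is re-exported from `faststream.confluent` and accepts a `middlewares` argument as elsewhere in FastStream (the app name is illustrative):

from prometheus_client import CollectorRegistry

from faststream.confluent import KafkaBroker
from faststream.confluent.prometheus import KafkaPrometheusMiddleware

registry = CollectorRegistry()

broker = KafkaBroker(
    "localhost:9092",
    middlewares=(
        KafkaPrometheusMiddleware(
            registry=registry,
            app_name="payments",  # illustrative name, not from this diff
        ),
    ),
)
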
diff --git a/faststream/confluent/prometheus/provider.py b/faststream/confluent/prometheus/provider.py
new file mode 100644
index 0000000000..e9e91a4587
--- /dev/null
+++ b/faststream/confluent/prometheus/provider.py
@@ -0,0 +1,64 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union, cast
+
+from faststream.message.message import MsgType, StreamMessage
+from faststream.prometheus import (
+ ConsumeAttrs,
+ MetricsSettingsProvider,
+)
+
+if TYPE_CHECKING:
+ from confluent_kafka import Message
+
+ from faststream.confluent.response import KafkaPublishCommand
+
+
+class BaseConfluentMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
+ __slots__ = ("messaging_system",)
+
+ def __init__(self) -> None:
+ self.messaging_system = "kafka"
+
+ def get_publish_destination_name_from_cmd(
+ self,
+ cmd: "KafkaPublishCommand",
+ ) -> str:
+ return cmd.destination
+
+
+class ConfluentMetricsSettingsProvider(BaseConfluentMetricsSettingsProvider["Message"]):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[Message]",
+ ) -> ConsumeAttrs:
+ return {
+ "destination_name": cast(str, msg.raw_message.topic()),
+ "message_size": len(msg.body),
+ "messages_count": 1,
+ }
+
+
+class BatchConfluentMetricsSettingsProvider(
+ BaseConfluentMetricsSettingsProvider[tuple["Message", ...]]
+):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[tuple[Message, ...]]",
+ ) -> ConsumeAttrs:
+ raw_message = msg.raw_message[0]
+ return {
+ "destination_name": cast(str, raw_message.topic()),
+ "message_size": len(bytearray().join(cast(Sequence[bytes], msg.body))),
+ "messages_count": len(msg.raw_message),
+ }
+
+
+def settings_provider_factory(
+ msg: Union["Message", Sequence["Message"], None],
+) -> Union[
+ ConfluentMetricsSettingsProvider,
+ BatchConfluentMetricsSettingsProvider,
+]:
+ if isinstance(msg, Sequence):
+ return BatchConfluentMetricsSettingsProvider()
+ return ConfluentMetricsSettingsProvider()
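
The factory dispatches on the runtime shape of the consumed message, so batch subscribers (tuples of messages) and single-message subscribers get matching metric attributes:

from faststream.confluent.prometheus.provider import (
    BatchConfluentMetricsSettingsProvider,
    ConfluentMetricsSettingsProvider,
    settings_provider_factory,
)

# A single confluent Message is not a Sequence, so it takes the default branch;
# tuples (including the empty one) select the batch provider.
assert isinstance(settings_provider_factory(None), ConfluentMetricsSettingsProvider)
assert isinstance(settings_provider_factory(()), BatchConfluentMetricsSettingsProvider)
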
diff --git a/faststream/confluent/publisher/factory.py b/faststream/confluent/publisher/factory.py
new file mode 100644
index 0000000000..284536604d
--- /dev/null
+++ b/faststream/confluent/publisher/factory.py
@@ -0,0 +1,139 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Literal,
+ Optional,
+ Union,
+ overload,
+)
+
+from faststream.exceptions import SetupError
+
+from .specified import SpecificationBatchPublisher, SpecificationDefaultPublisher
+
+if TYPE_CHECKING:
+ from confluent_kafka import Message as ConfluentMsg
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[True],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationBatchPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[False],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationDefaultPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable[
+ "BrokerMiddleware[Union[tuple[ConfluentMsg, ...], ConfluentMsg]]"
+ ],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]: ...
+
+
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable[
+ "BrokerMiddleware[Union[tuple[ConfluentMsg, ...], ConfluentMsg]]"
+ ],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]:
+ if batch:
+ if key:
+ msg = "You can't setup `key` with batch publisher"
+ raise SetupError(msg)
+
+ return SpecificationBatchPublisher(
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ return SpecificationDefaultPublisher(
+ key=key,
+ # basic args
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
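
The three overloads narrow the return type through the `batch` literal, while the runtime guard rejects `key` for batch publishers, since one key cannot apply to a whole batch. A sketch of the guard (the factory has no defaults, so the shared keywords are spelled out once):

from faststream.confluent.publisher.factory import create_publisher
from faststream.exceptions import SetupError

common = dict(
    topic="logs",
    partition=None,
    headers=None,
    reply_to="",
    broker_middlewares=(),
    middlewares=(),
    schema_=None,
    title_=None,
    description_=None,
    include_in_schema=True,
)

try:
    create_publisher(batch=True, key=b"k", **common)  # key + batch is rejected
except SetupError:
    pass

publisher = create_publisher(batch=False, key=b"k", **common)  # fine for single messages
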
diff --git a/faststream/confluent/publisher/fake.py b/faststream/confluent/publisher/fake.py
new file mode 100644
index 0000000000..82d97ab682
--- /dev/null
+++ b/faststream/confluent/publisher/fake.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.confluent.response import KafkaPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class KafkaFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ topic: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.topic = topic
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "KafkaPublishCommand"]
+ ) -> "KafkaPublishCommand":
+ real_cmd = KafkaPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.topic
+ return real_cmd
diff --git a/faststream/confluent/publisher/producer.py b/faststream/confluent/publisher/producer.py
index a4d9d9cf29..8c6144586b 100644
--- a/faststream/confluent/publisher/producer.py
+++ b/faststream/confluent/publisher/producer.py
@@ -5,13 +5,17 @@
from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.subscriber.utils import resolve_custom_func
from faststream.confluent.parser import AsyncConfluentParser
-from faststream.exceptions import OperationForbiddenError
+from faststream.exceptions import FeatureNotSupportedException
from faststream.message import encode_message
+from .state import EmptyProducerState, ProducerState, RealProducer
+
if TYPE_CHECKING:
- from faststream._internal.basic_types import SendableMessage
+ import asyncio
+
from faststream._internal.types import CustomCallable
from faststream.confluent.client import AsyncConfluentProducer
+ from faststream.confluent.response import KafkaPublishCommand
class AsyncConfluentFastProducer(ProducerProto):
@@ -19,82 +23,62 @@ class AsyncConfluentFastProducer(ProducerProto):
def __init__(
self,
- producer: "AsyncConfluentProducer",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._producer = producer
+ self._producer: ProducerState = EmptyProducerState()
# NOTE: register default parser to be compatible with request
- default = AsyncConfluentParser
+ default = AsyncConfluentParser()
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ def connect(self, producer: "AsyncConfluentProducer") -> None:
+ self._producer = RealProducer(producer)
+
+ async def disconnect(self) -> None:
+ await self._producer.stop()
+ self._producer = EmptyProducerState()
+
+ def __bool__(self) -> bool:
+ return bool(self._producer)
+
+ async def ping(self, timeout: float) -> bool:
+ return await self._producer.ping(timeout=timeout)
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- *,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- correlation_id: str = "",
- reply_to: str = "",
- no_confirm: bool = False,
- ) -> None:
+ cmd: "KafkaPublishCommand",
+ ) -> "asyncio.Future":
"""Publish a message to a topic."""
- message, content_type = encode_message(message)
+ message, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- await self._producer.send(
- topic=topic,
+ return await self._producer.producer.send(
+ topic=cmd.destination,
value=message,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
headers=[(i, (j or "").encode()) for i, j in headers_to_send.items()],
- no_confirm=no_confirm,
+ no_confirm=cmd.no_confirm,
)
- async def stop(self) -> None:
- await self._producer.stop()
-
async def publish_batch(
self,
- *msgs: "SendableMessage",
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- correlation_id: str = "",
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a batch of messages to a topic."""
- batch = self._producer.create_batch()
+ batch = self._producer.producer.create_batch()
- headers_to_send = {"correlation_id": correlation_id, **(headers or {})}
+ headers_to_send = cmd.headers_to_publish()
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- for msg in msgs:
+ for msg in cmd.batch_bodies:
message, content_type = encode_message(msg)
if content_type:
@@ -108,20 +92,21 @@ async def publish_batch(
batch.append(
key=None,
value=message,
- timestamp=timestamp_ms,
+ timestamp=cmd.timestamp_ms,
headers=[(i, j.encode()) for i, j in final_headers.items()],
)
- await self._producer.send_batch(
+ await self._producer.producer.send_batch(
batch,
- topic,
- partition=partition,
- no_confirm=no_confirm,
+ cmd.destination,
+ partition=cmd.partition,
+ no_confirm=cmd.no_confirm,
)
@override
- async def request(self, *args: Any, **kwargs: Any) -> Optional[Any]:
+ async def request(
+ self,
+ cmd: "KafkaPublishCommand",
+ ) -> Any:
msg = "Kafka doesn't support `request` method without test client."
- raise OperationForbiddenError(
- msg,
- )
+ raise FeatureNotSupportedException(msg)
diff --git a/faststream/confluent/publisher/specified.py b/faststream/confluent/publisher/specified.py
new file mode 100644
index 0000000000..fec0faf183
--- /dev/null
+++ b/faststream/confluent/publisher/specified.py
@@ -0,0 +1,58 @@
+from typing import (
+ TYPE_CHECKING,
+)
+
+from faststream._internal.types import MsgType
+from faststream.confluent.publisher.usecase import (
+ BatchPublisher,
+ DefaultPublisher,
+ LogicPublisher,
+)
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema.bindings import ChannelBinding, kafka
+from faststream.specification.schema.channel import Channel
+from faststream.specification.schema.message import CorrelationId, Message
+from faststream.specification.schema.operation import Operation
+
+if TYPE_CHECKING:
+ from confluent_kafka import Message as ConfluentMsg
+
+
+class SpecificationPublisher(LogicPublisher[MsgType]):
+ """A class representing a publisher."""
+
+ def get_name(self) -> str:
+ return f"{self.topic}:Publisher"
+
+ def get_schema(self) -> dict[str, Channel]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: Channel(
+ description=self.description,
+ publish=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ correlationId=CorrelationId(
+ location="$message.header#/correlation_id",
+ ),
+ ),
+ ),
+ bindings=ChannelBinding(kafka=kafka.ChannelBinding(topic=self.topic)),
+ ),
+ }
+
+
+class SpecificationBatchPublisher(
+ BatchPublisher,
+ SpecificationPublisher[tuple["ConfluentMsg", ...]],
+):
+ pass
+
+
+class SpecificationDefaultPublisher(
+ DefaultPublisher,
+ SpecificationPublisher["ConfluentMsg"],
+):
+ pass
diff --git a/faststream/confluent/publisher/state.py b/faststream/confluent/publisher/state.py
new file mode 100644
index 0000000000..13f658903a
--- /dev/null
+++ b/faststream/confluent/publisher/state.py
@@ -0,0 +1,50 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from faststream.confluent.client import AsyncConfluentProducer
+
+
+class ProducerState(Protocol):
+ producer: "AsyncConfluentProducer"
+
+ def __bool__(self) -> bool: ...
+
+ async def ping(self, timeout: float) -> bool: ...
+
+ async def stop(self) -> None: ...
+
+
+class EmptyProducerState(ProducerState):
+ __slots__ = ()
+
+ @property
+ def producer(self) -> "AsyncConfluentProducer":
+ msg = "You can't use producer here, please connect broker first."
+ raise IncorrectState(msg)
+
+ async def ping(self, timeout: float) -> bool:
+ return False
+
+ def __bool__(self) -> bool:
+ return False
+
+ async def stop(self) -> None:
+ pass
+
+
+class RealProducer(ProducerState):
+ __slots__ = ("producer",)
+
+ def __init__(self, producer: "AsyncConfluentProducer") -> None:
+ self.producer = producer
+
+ def __bool__(self) -> bool:
+ return True
+
+ async def stop(self) -> None:
+ await self.producer.stop()
+
+ async def ping(self, timeout: float) -> bool:
+ return await self.producer.ping(timeout=timeout)
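
`ProducerState` is a small state-object pattern: the empty state is falsy, answers `ping()` with `False`, and raises on real use, which is what lets `AsyncConfluentFastProducer` drop its `Optional` producer field. A sketch:

from faststream.confluent.publisher.state import EmptyProducerState
from faststream.exceptions import IncorrectState

state = EmptyProducerState()
assert not bool(state)  # falsy until connect() swaps in RealProducer

try:
    state.producer       # touching the client too early raises
except IncorrectState:
    pass
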
diff --git a/faststream/confluent/publisher/usecase.py b/faststream/confluent/publisher/usecase.py
index 6b7fcf101c..d6b7132155 100644
--- a/faststream/confluent/publisher/usecase.py
+++ b/faststream/confluent/publisher/usecase.py
@@ -1,35 +1,34 @@
-from collections.abc import Awaitable, Iterable
-from functools import partial
-from itertools import chain
+from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
Any,
- Callable,
Optional,
Union,
- cast,
)
from confluent_kafka import Message
from typing_extensions import override
from faststream._internal.publisher.usecase import PublisherUsecase
-from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import MsgType
-from faststream.exceptions import NOT_CONNECTED_YET
+from faststream.confluent.response import KafkaPublishCommand
from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream._internal.basic_types import AnyDict, AsyncFunc, SendableMessage
+ import asyncio
+
+ from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.confluent.message import KafkaMessage
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[MsgType]):
"""A class to publish messages to a Kafka topic."""
- _producer: Optional["AsyncConfluentFastProducer"]
+ _producer: "AsyncConfluentFastProducer"
def __init__(
self,
@@ -60,9 +59,7 @@ def __init__(
self.topic = topic
self.partition = partition
self.reply_to = reply_to
- self.headers = headers
-
- self._producer = None
+ self.headers = headers or {}
def add_prefix(self, prefix: str) -> None:
self.topic = f"{prefix}{self.topic}"
@@ -79,41 +76,20 @@ async def request(
headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
timeout: float = 0.5,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> "KafkaMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "key": key,
- # basic args
- "timeout": timeout,
- "timestamp_ms": timestamp_ms,
- "topic": topic or self.topic,
- "partition": partition or self.partition,
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- }
-
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(message, **kwargs)
-
- msg: KafkaMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
+ cmd = KafkaPublishCommand(
+ message,
+ topic=topic or self.topic,
+ key=key,
+ partition=partition or self.partition,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ timeout=timeout,
+ _publish_type=PublishType.REQUEST,
)
+
+ msg: KafkaMessage = await self._basic_request(cmd)
return msg
@@ -121,7 +97,7 @@ class DefaultPublisher(LogicPublisher[Message]):
def __init__(
self,
*,
- key: Optional[bytes],
+ key: Union[bytes, str, None],
topic: str,
partition: Optional[int],
headers: Optional[dict[str, str]],
@@ -165,35 +141,39 @@ async def publish(
correlation_id: Optional[str] = None,
reply_to: str = "",
no_confirm: bool = False,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ ) -> "asyncio.Future":
+ cmd = KafkaPublishCommand(
+ message,
+ topic=topic or self.topic,
+ key=key or self.key,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
+ )
+ return await self._basic_publish(cmd, _extra_middlewares=())
- kwargs: AnyDict = {
- "key": key or self.key,
- # basic args
- "no_confirm": no_confirm,
- "topic": topic or self.topic,
- "partition": partition or self.partition,
- "timestamp_ms": timestamp_ms,
- "headers": headers or self.headers,
- "reply_to": reply_to or self.reply_to,
- "correlation_id": correlation_id or gen_cor_id(),
- }
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd)
- call: Callable[..., Awaitable[None]] = self._producer.publish
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ cmd.partition = cmd.partition or self.partition
+ cmd.key = cmd.key or self.key
- return await call(message, **kwargs)
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -207,11 +187,9 @@ async def request(
headers: Optional[dict[str, str]] = None,
correlation_id: Optional[str] = None,
timeout: float = 0.5,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> "KafkaMessage":
return await super().request(
- message=message,
+ message,
topic=topic,
key=key or self.key,
partition=partition,
@@ -219,7 +197,6 @@ async def request(
headers=headers,
correlation_id=correlation_id,
timeout=timeout,
- _extra_middlewares=_extra_middlewares,
)
@@ -227,8 +204,7 @@ class BatchPublisher(LogicPublisher[tuple[Message, ...]]):
@override
async def publish(
self,
- message: Union["SendableMessage", Iterable["SendableMessage"]],
- *extra_messages: "SendableMessage",
+ *messages: "SendableMessage",
topic: str = "",
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
@@ -236,36 +212,36 @@ async def publish(
correlation_id: Optional[str] = None,
reply_to: str = "",
no_confirm: bool = False,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ cmd = KafkaPublishCommand(
+ *messages,
+ key=None,
+ topic=topic or self.topic,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
+ )
- msgs: Iterable[SendableMessage]
- if extra_messages:
- msgs = (cast("SendableMessage", message), *extra_messages)
- else:
- msgs = cast(Iterable["SendableMessage"], message)
+ return await self._basic_publish_batch(cmd, _extra_middlewares=())
- kwargs: AnyDict = {
- "topic": topic or self.topic,
- "no_confirm": no_confirm,
- "partition": partition or self.partition,
- "timestamp_ms": timestamp_ms,
- "headers": headers or self.headers,
- "reply_to": reply_to or self.reply_to,
- "correlation_id": correlation_id or gen_cor_id(),
- }
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd, batch=True)
- call: AsyncFunc = self._producer.publish_batch
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ cmd.partition = cmd.partition or self.partition
- await call(*msgs, **kwargs)
+ await self._basic_publish_batch(cmd, _extra_middlewares=_extra_middlewares)
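
A user-facing change worth noting: `BatchPublisher.publish` drops the old `message, *extra_messages` split for a plain `*messages` varargs signature, so a pre-built list must now be unpacked explicitly. A sketch, assuming a declared batch publisher `pub`:

async def send(pub) -> None:
    # Before: both pub.publish([1, 2, 3]) and pub.publish(1, 2, 3) were accepted.
    # After: varargs only, so unpack an existing list explicitly.
    await pub.publish(1, 2, 3, topic="logs")
    await pub.publish(*[1, 2, 3], topic="logs")
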
diff --git a/faststream/confluent/response.py b/faststream/confluent/response.py
index 3d04d6ab35..3473e291bc 100644
--- a/faststream/confluent/response.py
+++ b/faststream/confluent/response.py
@@ -1,8 +1,11 @@
-from typing import TYPE_CHECKING, Optional
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import override
-from faststream.response import Response
+from faststream._internal.constants import EMPTY
+from faststream.response.publish_type import PublishType
+from faststream.response.response import PublishCommand, Response
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, SendableMessage
@@ -34,3 +37,101 @@ def as_publish_kwargs(self) -> "AnyDict":
"timestamp_ms": self.timestamp_ms,
"key": self.key,
}
+
+ @override
+ def as_publish_command(self) -> "KafkaPublishCommand":
+ return KafkaPublishCommand(
+ self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ # Kafka specific
+ topic="",
+ key=self.key,
+ timestamp_ms=self.timestamp_ms,
+ )
+
+
+class KafkaPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ message: "SendableMessage",
+ /,
+ *messages: "SendableMessage",
+ topic: str,
+ _publish_type: PublishType,
+ key: Union[bytes, str, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: bool = False,
+ timeout: float = 0.5,
+ ) -> None:
+ super().__init__(
+ message,
+ destination=topic,
+ reply_to=reply_to,
+ correlation_id=correlation_id,
+ headers=headers,
+ _publish_type=_publish_type,
+ )
+ self.extra_bodies = messages
+
+ self.key = key
+ self.partition = partition
+ self.timestamp_ms = timestamp_ms
+ self.no_confirm = no_confirm
+
+ # request option
+ self.timeout = timeout
+
+ @property
+ def batch_bodies(self) -> tuple["SendableMessage", ...]:
+ if self.body is EMPTY:
+ return self.extra_bodies
+ return (self.body, *self.extra_bodies)
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ batch: bool = False,
+ ) -> "KafkaPublishCommand":
+ if isinstance(cmd, KafkaPublishCommand):
+ # NOTE: This should probably return a copy.
+ return cmd
+
+ body, extra_bodies = cmd.body, []
+ if batch:
+ if body is None:
+ body = EMPTY
+
+ if isinstance(body, Sequence) and not isinstance(body, str):
+ if body:
+ body, extra_bodies = body[0], body[1:]
+ else:
+ body = EMPTY
+
+ return cls(
+ body,
+ *extra_bodies,
+ topic=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
+
+ def headers_to_publish(self) -> dict[str, str]:
+ headers = {}
+
+ if self.correlation_id:
+ headers["correlation_id"] = self.correlation_id
+
+ if self.reply_to:
+ headers["reply_to"] = self.reply_to
+
+ return headers | self.headers
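
`KafkaPublishCommand` threads every publish option through one object: `batch_bodies` re-joins the head body with the varargs tail, and `headers_to_publish` folds `correlation_id`/`reply_to` into the header map with user headers taking precedence. A quick illustration:

from faststream.confluent.response import KafkaPublishCommand
from faststream.response.publish_type import PublishType

cmd = KafkaPublishCommand(
    1, 2, 3,
    topic="logs",
    correlation_id="abc-123",  # illustrative id
    _publish_type=PublishType.PUBLISH,
)

assert cmd.batch_bodies == (1, 2, 3)
# Assuming the base command normalizes headers=None to an empty dict:
assert cmd.headers_to_publish() == {"correlation_id": "abc-123"}
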
diff --git a/faststream/confluent/router.py b/faststream/confluent/router.py
index 2d7f599d18..a1039fc72f 100644
--- a/faststream/confluent/router.py
+++ b/faststream/confluent/router.py
@@ -16,11 +16,13 @@
BrokerRouter,
SubscriberRoute,
)
+from faststream._internal.constants import EMPTY
from faststream.confluent.broker.registrator import KafkaRegistrator
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from confluent_kafka import Message
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import (
@@ -365,8 +367,8 @@ def __init__(
] = None,
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -380,14 +382,10 @@ def __init__(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -445,8 +443,7 @@ def __init__(
description=description,
include_in_schema=include_in_schema,
# FastDepends args
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
)
@@ -473,9 +470,9 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers.",
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
diff --git a/faststream/confluent/subscriber/factory.py b/faststream/confluent/subscriber/factory.py
index 336e02c159..b6edea0456 100644
--- a/faststream/confluent/subscriber/factory.py
+++ b/faststream/confluent/subscriber/factory.py
@@ -1,3 +1,4 @@
+import warnings
from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
@@ -7,14 +8,16 @@
overload,
)
-from faststream.confluent.subscriber.subscriber import (
+from faststream._internal.constants import EMPTY
+from faststream.confluent.subscriber.specified import (
SpecificationBatchSubscriber,
SpecificationDefaultSubscriber,
)
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from confluent_kafka import Message as ConfluentMsg
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
from faststream._internal.types import BrokerMiddleware
@@ -33,10 +36,9 @@ def create_subscriber(
connection_data: "AnyDict",
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[tuple[ConfluentMsg, ...]]"],
# Specification args
title_: Optional[str],
@@ -57,10 +59,9 @@ def create_subscriber(
connection_data: "AnyDict",
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"],
# Specification args
title_: Optional[str],
@@ -81,10 +82,9 @@ def create_subscriber(
connection_data: "AnyDict",
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable[
"BrokerMiddleware[Union[ConfluentMsg, tuple[ConfluentMsg, ...]]]"
],
@@ -109,10 +109,9 @@ def create_subscriber(
connection_data: "AnyDict",
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable[
"BrokerMiddleware[Union[ConfluentMsg, tuple[ConfluentMsg, ...]]]"
],
@@ -124,6 +123,11 @@ def create_subscriber(
"SpecificationDefaultSubscriber",
"SpecificationBatchSubscriber",
]:
+ _validate_input_for_misconfigure(ack_policy=ack_policy, is_manual=is_manual)
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
+
if batch:
return SpecificationBatchSubscriber(
*topics,
@@ -133,9 +137,8 @@ def create_subscriber(
group_id=group_id,
connection_data=connection_data,
is_manual=is_manual,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
@@ -149,12 +152,24 @@ def create_subscriber(
group_id=group_id,
connection_data=connection_data,
is_manual=is_manual,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
+
+
+def _validate_input_for_misconfigure(
+ *,
+ ack_policy: "AckPolicy",
+ is_manual: bool,
+) -> None:
+ if ack_policy is not EMPTY and not is_manual:
+ warnings.warn(
+ "You can't use acknowledgement policy with `is_manual=False` subscriber",
+ RuntimeWarning,
+ stacklevel=4,
+ )
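
Keeping `EMPTY` as the default lets the factory distinguish "policy explicitly set" from "fall back to `REJECT_ON_ERROR`", and the helper warns when an explicit policy meets an auto-commit consumer:

import warnings

from faststream._internal.constants import EMPTY
from faststream.confluent.subscriber.factory import _validate_input_for_misconfigure
from faststream.middlewares import AckPolicy

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _validate_input_for_misconfigure(
        ack_policy=AckPolicy.REJECT_ON_ERROR,  # explicit policy...
        is_manual=False,                       # ...on an auto-commit consumer
    )

assert caught and issubclass(caught[0].category, RuntimeWarning)

# EMPTY means "not set": no warning, and the factory falls back to REJECT_ON_ERROR.
_validate_input_for_misconfigure(ack_policy=EMPTY, is_manual=False)
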
diff --git a/faststream/confluent/subscriber/subscriber.py b/faststream/confluent/subscriber/specified.py
similarity index 100%
rename from faststream/confluent/subscriber/subscriber.py
rename to faststream/confluent/subscriber/specified.py
diff --git a/faststream/confluent/subscriber/usecase.py b/faststream/confluent/subscriber/usecase.py
index 1f88912fdc..adb321dd4a 100644
--- a/faststream/confluent/subscriber/usecase.py
+++ b/faststream/confluent/subscriber/usecase.py
@@ -12,19 +12,19 @@
from confluent_kafka import KafkaException, Message
from typing_extensions import override
-from faststream._internal.publisher.fake import FakePublisher
from faststream._internal.subscriber.usecase import SubscriberUsecase
from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import MsgType
from faststream.confluent.parser import AsyncConfluentParser
+from faststream.confluent.publisher.fake import KafkaFakePublisher
from faststream.confluent.schemas import TopicPartition
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream._internal.basic_types import AnyDict, LoggerProto
- from faststream._internal.publisher.proto import ProducerProto
- from faststream._internal.setup import SetupState
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
@@ -32,6 +32,7 @@
)
from faststream.confluent.client import AsyncConfluentConsumer
from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
class LogicSubscriber(ABC, SubscriberUsecase[MsgType]):
@@ -42,6 +43,7 @@ class LogicSubscriber(ABC, SubscriberUsecase[MsgType]):
builder: Optional[Callable[..., "AsyncConfluentConsumer"]]
consumer: Optional["AsyncConfluentConsumer"]
+ parser: AsyncConfluentParser
task: Optional["asyncio.Task[None]"]
client_id: Optional[str]
@@ -54,14 +56,12 @@ def __init__(
# Kafka information
group_id: Optional[str],
connection_data: "AnyDict",
- is_manual: bool,
# Subscriber args
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
# AsyncAPI args
title_: Optional[str],
@@ -72,9 +72,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -88,7 +87,6 @@ def __init__(
self.group_id = group_id
self.topics = topics
self.partitions = partitions
- self.is_manual = is_manual
self.consumer = None
self.task = None
@@ -104,24 +102,18 @@ def _setup( # type: ignore[override]
*,
client_id: Optional[str],
builder: Callable[..., "AsyncConfluentConsumer"],
- # basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
+ # basic args
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- state: "SetupState",
+ state: "BrokerState",
) -> None:
self.client_id = client_id
self.builder = builder
super()._setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
@@ -140,6 +132,7 @@ async def start(self) -> None:
client_id=self.client_id,
**self.__connection_data,
)
+ self.parser._setup(consumer)
await consumer.start()
await super().start()
@@ -172,9 +165,13 @@ async def get_one(
raw_message = await self.consumer.getone(timeout=timeout)
+ context = self._state.get().di_state.context
+
return await process_msg(
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -182,16 +179,11 @@ async def get_one(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence[FakePublisher]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "topic": message.reply_to,
- },
+ KafkaFakePublisher(
+ self._state.get().producer,
+ topic=message.reply_to,
),
)
@@ -262,30 +254,29 @@ def __init__(
connection_data: "AnyDict",
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Message]"],
# AsyncAPI args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> None:
+ self.parser = AsyncConfluentParser(is_manual=is_manual)
+
super().__init__(
*topics,
partitions=partitions,
polling_interval=polling_interval,
group_id=group_id,
connection_data=connection_data,
- is_manual=is_manual,
# subscriber args
- default_parser=AsyncConfluentParser.parse_message,
- default_decoder=AsyncConfluentParser.decode_message,
+ default_parser=self.parser.parse_message,
+ default_decoder=self.parser.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -326,10 +317,9 @@ def __init__(
connection_data: "AnyDict",
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[tuple[Message, ...]]"],
# AsyncAPI args
title_: Optional[str],
@@ -338,20 +328,20 @@ def __init__(
) -> None:
self.max_records = max_records
+ self.parser = AsyncConfluentParser(is_manual=is_manual)
+
super().__init__(
*topics,
partitions=partitions,
polling_interval=polling_interval,
group_id=group_id,
connection_data=connection_data,
- is_manual=is_manual,
# subscriber args
- default_parser=AsyncConfluentParser.parse_message_batch,
- default_decoder=AsyncConfluentParser.decode_message_batch,
+ default_parser=self.parser.parse_message_batch,
+ default_decoder=self.parser.decode_message_batch,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
diff --git a/faststream/confluent/testing.py b/faststream/confluent/testing.py
index d9c5026298..92676d6e7a 100644
--- a/faststream/confluent/testing.py
+++ b/faststream/confluent/testing.py
@@ -1,4 +1,5 @@
-from collections.abc import Generator, Iterable
+from collections.abc import Generator, Iterable, Iterator
+from contextlib import contextmanager
from datetime import datetime, timezone
from typing import (
TYPE_CHECKING,
@@ -16,31 +17,38 @@
from faststream.confluent.broker import KafkaBroker
from faststream.confluent.parser import AsyncConfluentParser
from faststream.confluent.publisher.producer import AsyncConfluentFastProducer
-from faststream.confluent.publisher.publisher import SpecificationBatchPublisher
+from faststream.confluent.publisher.specified import SpecificationBatchPublisher
from faststream.confluent.schemas import TopicPartition
-from faststream.confluent.subscriber.subscriber import SpecificationBatchSubscriber
+from faststream.confluent.subscriber.usecase import BatchSubscriber
from faststream.exceptions import SubscriberNotFound
from faststream.message import encode_message, gen_cor_id
if TYPE_CHECKING:
from faststream._internal.basic_types import SendableMessage
- from faststream._internal.setup.logger import LoggerState
- from faststream.confluent.publisher.publisher import SpecificationPublisher
+ from faststream.confluent.publisher.specified import SpecificationPublisher
+ from faststream.confluent.response import KafkaPublishCommand
from faststream.confluent.subscriber.usecase import LogicSubscriber
+
__all__ = ("TestKafkaBroker",)
class TestKafkaBroker(TestBroker[KafkaBroker]):
"""A class to test Kafka brokers."""
+ @contextmanager
+ def _patch_producer(self, broker: KafkaBroker) -> Iterator[None]:
+ old_producer = broker._state.get().producer
+ broker._state.patch_value(producer=FakeProducer(broker))
+ try:
+ yield
+ finally:
+ broker._state.patch_value(producer=old_producer)
+
@staticmethod
async def _fake_connect( # type: ignore[override]
broker: KafkaBroker,
*args: Any,
**kwargs: Any,
) -> Callable[..., AsyncMock]:
- broker._producer = FakeProducer(broker)
return _fake_connection
@staticmethod
@@ -93,126 +101,103 @@ class FakeProducer(AsyncConfluentFastProducer):
def __init__(self, broker: KafkaBroker) -> None:
self.broker = broker
- default = AsyncConfluentParser
+ default = AsyncConfluentParser()
self._parser = resolve_custom_func(broker._parser, default.parse_message)
self._decoder = resolve_custom_func(broker._decoder, default.decode_message)
- def _setup(self, logger_stater: "LoggerState") -> None:
- pass
+ def __bool__(self) -> bool:
+ return True
+
+ async def ping(self, timeout: float) -> bool:
+ return True
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- no_confirm: bool = False,
- reply_to: str = "",
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a message to the Kafka broker."""
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id or gen_cor_id(),
- reply_to=reply_to,
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
for handler in _find_handler(
self.broker._subscribers,
- topic,
- partition,
+ cmd.destination,
+ cmd.partition,
):
msg_to_send = (
- [incoming]
- if isinstance(handler, SpecificationBatchSubscriber)
- else incoming
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
)
- await self._execute_handler(msg_to_send, topic, handler)
+ await self._execute_handler(msg_to_send, cmd.destination, handler)
async def publish_batch(
self,
- *msgs: "SendableMessage",
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- correlation_id: Optional[str] = None,
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a batch of messages to the Kafka broker."""
for handler in _find_handler(
self.broker._subscribers,
- topic,
- partition,
+ cmd.destination,
+ cmd.partition,
):
messages = (
build_message(
message=message,
- topic=topic,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id or gen_cor_id(),
- reply_to=reply_to,
+ topic=cmd.destination,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
- for message in msgs
+ for message in cmd.batch_bodies
)
- if isinstance(handler, SpecificationBatchSubscriber):
- await self._execute_handler(list(messages), topic, handler)
+ if isinstance(handler, BatchSubscriber):
+ await self._execute_handler(list(messages), cmd.destination, handler)
else:
for m in messages:
- await self._execute_handler(m, topic, handler)
+ await self._execute_handler(m, cmd.destination, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- timeout: Optional[float] = 0.5,
+ cmd: "KafkaPublishCommand",
) -> "MockConfluentMessage":
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id or gen_cor_id(),
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
for handler in _find_handler(
self.broker._subscribers,
- topic,
- partition,
+ cmd.destination,
+ cmd.partition,
):
msg_to_send = (
- [incoming]
- if isinstance(handler, SpecificationBatchSubscriber)
- else incoming
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
)
- with anyio.fail_after(timeout):
- return await self._execute_handler(msg_to_send, topic, handler)
+ with anyio.fail_after(cmd.timeout):
+ return await self._execute_handler(
+ msg_to_send, cmd.destination, handler
+ )
raise SubscriberNotFound
@@ -286,7 +271,7 @@ def build_message(
message: "SendableMessage",
topic: str,
*,
- correlation_id: str,
+ correlation_id: Optional[str] = None,
partition: Optional[int] = None,
timestamp_ms: Optional[int] = None,
key: Optional[bytes] = None,
@@ -298,7 +283,7 @@ def build_message(
k = key or b""
headers = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
+ "correlation_id": correlation_id or gen_cor_id(),
"reply_to": reply_to,
**(headers or {}),
}
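
With the producer moved into broker state, `TestKafkaBroker` swaps it through `_patch_producer`/`patch_value` instead of assigning `broker._producer`; the public testing API is unchanged. A minimal sketch, assuming an asyncio-enabled pytest setup and the usual `faststream.confluent` re-exports:

import pytest

from faststream.confluent import KafkaBroker, TestKafkaBroker

broker = KafkaBroker()

@broker.subscriber("test-topic")
async def handler(body: str) -> None: ...

@pytest.mark.asyncio
async def test_publish() -> None:
    async with TestKafkaBroker(broker) as br:
        await br.publish("hi", topic="test-topic")  # routed by FakeProducer
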
diff --git a/faststream/exceptions.py b/faststream/exceptions.py
index 4b009adde2..6d51e76cb3 100644
--- a/faststream/exceptions.py
+++ b/faststream/exceptions.py
@@ -63,7 +63,7 @@ class NackMessage(HandlerException):
signature.
Args:
- extra_options (Any): Additional parameters that will be passed to `message.nack(**extra_options)` method.
+ kwargs (Any): Additional parameters that will be passed to the `message.nack(**kwargs)` method.
"""
def __init__(self, **kwargs: Any) -> None:
@@ -82,7 +82,7 @@ class RejectMessage(HandlerException):
signature.
Args:
- extra_options (Any): Additional parameters that will be passed to `message.reject(**extra_options)` method.
+ kwargs (Any): Additional parameters that will be passed to the `message.reject(**kwargs)` method.
"""
def __init__(self, **kwargs: Any) -> None:
@@ -115,7 +115,7 @@ def __str__(self) -> str:
)
-class OperationForbiddenError(FastStreamException, NotImplementedError):
+class FeatureNotSupportedException(FastStreamException, NotImplementedError): # noqa: N818
"""Raises at planned NotImplemented operation call."""
diff --git a/faststream/kafka/annotations.py b/faststream/kafka/annotations.py
index 1f5c70d524..607be0ea8e 100644
--- a/faststream/kafka/annotations.py
+++ b/faststream/kafka/annotations.py
@@ -7,7 +7,6 @@
from faststream.kafka.broker import KafkaBroker as KB
from faststream.kafka.message import KafkaMessage as KM
from faststream.kafka.publisher.producer import AioKafkaFastProducer
-from faststream.params import NoCast
__all__ = (
"ContextRepo",
@@ -15,7 +14,6 @@
"KafkaMessage",
"KafkaProducer",
"Logger",
- "NoCast",
)
Consumer = Annotated[AIOKafkaConsumer, Context("handler_.consumer")]
diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py
index 3a5d3c2e3f..bec2ed99c9 100644
--- a/faststream/kafka/broker/broker.py
+++ b/faststream/kafka/broker/broker.py
@@ -24,9 +24,11 @@
from faststream._internal.utils.data import filter_by_dict
from faststream.exceptions import NOT_CONNECTED_YET
from faststream.kafka.publisher.producer import AioKafkaFastProducer
+from faststream.kafka.response import KafkaPublishCommand
from faststream.kafka.schemas.params import ConsumerConnectionParams
from faststream.kafka.security import parse_security
from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
from .logging import make_kafka_logger_state
from .registrator import KafkaRegistrator
@@ -34,17 +36,17 @@
Partition = TypeVar("Partition")
if TYPE_CHECKING:
- from asyncio import AbstractEventLoop
+ import asyncio
from types import TracebackType
from aiokafka import ConsumerRecord
from aiokafka.abc import AbstractTokenProvider
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from typing_extensions import TypedDict, Unpack
from faststream._internal.basic_types import (
AnyDict,
- AsyncFunc,
Decorator,
LoggerProto,
SendableMessage,
@@ -53,6 +55,7 @@
BrokerMiddleware,
CustomCallable,
)
+ from faststream.kafka.message import KafkaMessage
from faststream.security import BaseSecurity
from faststream.specification.schema.extra import Tag, TagDict
@@ -92,7 +95,7 @@ class KafkaInitKwargs(TypedDict, total=False):
Optional[AbstractTokenProvider],
Doc("OAuthBearer token provider instance."),
]
- loop: Optional[AbstractEventLoop]
+ loop: Optional[asyncio.AbstractEventLoop]
client_id: Annotated[
Optional[str],
Doc(
@@ -235,7 +238,7 @@ class KafkaBroker(
],
):
url: list[str]
- _producer: Optional["AioKafkaFastProducer"]
+ _producer: "AioKafkaFastProducer"
def __init__(
self,
@@ -289,7 +292,7 @@ def __init__(
Optional["AbstractTokenProvider"],
Doc("OAuthBearer token provider instance."),
] = None,
- loop: Optional["AbstractEventLoop"] = None,
+ loop: Optional["asyncio.AbstractEventLoop"] = None,
client_id: Annotated[
Optional[str],
Doc(
@@ -438,7 +441,7 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
@@ -495,10 +498,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -576,11 +576,16 @@ def __init__(
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
)
self.client_id = client_id
- self._producer = None
+ self._state.patch_value(
+ producer=AioKafkaFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
async def close(
self,
@@ -590,9 +595,7 @@ async def close(
) -> None:
await super().close(exc_type, exc_val, exc_tb)
- if self._producer is not None: # pragma: no branch
- await self._producer.stop()
- self._producer = None
+ await self._producer.disconnect()
self._connection = None
@@ -635,12 +638,7 @@ async def _connect( # type: ignore[override]
client_id=client_id,
)
- await producer.start()
- self._producer = AioKafkaFastProducer(
- producer=producer,
- parser=self._parser,
- decoder=self._decoder,
- )
+ await self._producer.connect(producer)
connection_kwargs, _ = filter_by_dict(ConsumerConnectionParams, kwargs)
return partial(aiokafka.AIOKafkaConsumer, **connection_kwargs)
@@ -722,9 +720,7 @@ async def publish( # type: ignore[override]
bool,
Doc("Do not wait for Kafka publish confirmation."),
] = False,
- # extra options to be compatible with test client
- **kwargs: Any,
- ) -> None:
+ ) -> "asyncio.Future":
"""Publish message directly.
This method allows you to publish message in not AsyncAPI-documented way. You can use it in another frameworks
@@ -732,21 +728,19 @@ async def publish( # type: ignore[override]
Please, use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead in a regular way.
"""
- correlation_id = correlation_id or gen_cor_id()
-
- await super().publish(
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
reply_to=reply_to,
no_confirm=no_confirm,
- **kwargs,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
@override
async def request( # type: ignore[override]
@@ -807,31 +801,32 @@ async def request( # type: ignore[override]
float,
Doc("Timeout to send RPC request."),
] = 0.5,
- ) -> Optional[Any]:
- correlation_id = correlation_id or gen_cor_id()
-
- return await super().request(
+ ) -> "KafkaMessage":
+ cmd = KafkaPublishCommand(
message,
- producer=self._producer,
topic=topic,
key=key,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
- correlation_id=correlation_id,
timeout=timeout,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
)
+ msg: KafkaMessage = await super()._basic_request(cmd, producer=self._producer)
+ return msg
+
async def publish_batch(
self,
- *msgs: Annotated[
+ *messages: Annotated[
"SendableMessage",
Doc("Messages bodies to send."),
],
topic: Annotated[
str,
Doc("Topic where the message will be published."),
- ],
+ ] = "",
partition: Annotated[
Optional[int],
Doc(
@@ -869,40 +864,36 @@ async def publish_batch(
bool,
Doc("Do not wait for Kafka publish confirmation."),
] = False,
- ) -> None:
+ ) -> "asyncio.Future":
assert self._producer, NOT_CONNECTED_YET # nosec B101
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
-
- for m in self._middlewares:
- call = partial(m(None).publish_scope, call)
-
- await call(
- *msgs,
+ cmd = KafkaPublishCommand(
+ *messages,
topic=topic,
partition=partition,
timestamp_ms=timestamp_ms,
headers=headers,
reply_to=reply_to,
- correlation_id=correlation_id,
no_confirm=no_confirm,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish_batch(cmd, producer=self._producer)
+
@override
async def ping(self, timeout: Optional[float]) -> bool:
sleep_time = (timeout or 10) / 10
with anyio.move_on_after(timeout) as cancel_scope:
- if self._producer is None:
+ if not self._producer:
return False
while True:
if cancel_scope.cancel_called:
return False
- if not self._producer._producer._closed:
+ if not self._producer.closed:
return True
await anyio.sleep(sleep_time)
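With this hunk, `KafkaBroker.publish()` returns the underlying `asyncio.Future` instead of `None`, so callers can fire-and-forget with `no_confirm=True` and await delivery later. A minimal sketch, assuming a local broker (the bootstrap server and topic name are illustrative):

```python
import asyncio

from faststream.kafka import KafkaBroker


async def main() -> None:
    broker = KafkaBroker("localhost:9092")  # illustrative bootstrap server
    async with broker:
        # returns as soon as the record is enqueued; the future
        # resolves once Kafka confirms delivery
        future = await broker.publish("hi", topic="test-topic", no_confirm=True)
        await future


asyncio.run(main())
```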
diff --git a/faststream/kafka/broker/logging.py b/faststream/kafka/broker/logging.py
index c39b71a604..72a1420325 100644
--- a/faststream/kafka/broker/logging.py
+++ b/faststream/kafka/broker/logging.py
@@ -1,14 +1,16 @@
+import logging
from functools import partial
from typing import TYPE_CHECKING, Optional
from faststream._internal.log.logging import get_broker_logger
-from faststream._internal.setup.logger import (
+from faststream._internal.state.logger import (
DefaultLoggerStorage,
make_logger_state,
)
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
class KafkaParamsStorage(DefaultLoggerStorage):
@@ -21,6 +23,11 @@ def __init__(
self._max_topic_len = 4
self._max_group_len = 0
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
def setup_log_contest(self, params: "AnyDict") -> None:
self._max_topic_len = max(
(
@@ -35,7 +42,7 @@ def setup_log_contest(self, params: "AnyDict") -> None:
),
)
- def get_logger(self) -> Optional["LoggerProto"]:
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
message_id_ln = 10
# TODO: generate unique logger names to not share between brokers
@@ -58,10 +65,12 @@ def get_logger(self) -> Optional["LoggerProto"]:
f"%(message_id)-{message_id_ln}s ",
"- %(message)s",
)),
+ context=context,
+ log_level=self.logger_log_level,
)
make_kafka_logger_state = partial(
make_logger_state,
- default_storag_cls=KafkaParamsStorage,
+ default_storage_cls=KafkaParamsStorage,
)
diff --git a/faststream/kafka/broker/registrator.py b/faststream/kafka/broker/registrator.py
index ce805e6066..3523c2bed7 100644
--- a/faststream/kafka/broker/registrator.py
+++ b/faststream/kafka/broker/registrator.py
@@ -16,14 +16,16 @@
from typing_extensions import Doc, override
from faststream._internal.broker.abc_broker import ABCBroker
-from faststream.kafka.publisher.publisher import SpecificationPublisher
+from faststream._internal.constants import EMPTY
+from faststream.kafka.publisher.factory import create_publisher
from faststream.kafka.subscriber.factory import create_subscriber
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from aiokafka import TopicPartition
from aiokafka.abc import ConsumerRebalanceListener
from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.types import (
CustomCallable,
@@ -31,11 +33,11 @@
SubscriberMiddleware,
)
from faststream.kafka.message import KafkaMessage
- from faststream.kafka.publisher.publisher import (
+ from faststream.kafka.publisher.specified import (
SpecificationBatchPublisher,
SpecificationDefaultPublisher,
)
- from faststream.kafka.subscriber.subscriber import (
+ from faststream.kafka.subscriber.specified import (
SpecificationBatchSubscriber,
SpecificationDefaultSubscriber,
)
@@ -381,8 +383,8 @@ def subscriber(
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -396,14 +398,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -751,8 +749,8 @@ def subscriber(
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -766,14 +764,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1121,8 +1115,8 @@ def subscriber(
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -1136,14 +1130,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1494,8 +1484,8 @@ def subscriber(
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -1509,14 +1499,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1576,10 +1562,9 @@ def subscriber(
partitions=partitions,
is_manual=not auto_commit,
# subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# Specification
title_=title,
@@ -1911,7 +1896,7 @@ def publisher(
Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`.
"""
- publisher = SpecificationPublisher.create(
+ publisher = create_publisher(
# batch flag
batch=batch,
# default args
@@ -1922,7 +1907,7 @@ def publisher(
headers=headers,
reply_to=reply_to,
# publisher-specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# Specification
title_=title,
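The `subscriber()` overloads above all swap the boolean `retry`/`no_ack` pair for a single `ack_policy` enum; per the factory later in this diff, `EMPTY` resolves to `AckPolicy.REJECT_ON_ERROR`. A hedged migration sketch (topic, group, and handler are invented):

```python
from faststream.kafka import KafkaBroker
from faststream.middlewares import AckPolicy

broker = KafkaBroker()


# before: @broker.subscriber("events", auto_commit=False, no_ack=True, retry=True)
@broker.subscriber(
    "events",
    group_id="workers",  # manual commit mode still requires a group_id
    auto_commit=False,
    ack_policy=AckPolicy.REJECT_ON_ERROR,
)
async def handle(body: str) -> None:
    ...
```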
diff --git a/faststream/kafka/fastapi/__init__.py b/faststream/kafka/fastapi/__init__.py
index e2a8447ef7..9fda6d07d3 100644
--- a/faststream/kafka/fastapi/__init__.py
+++ b/faststream/kafka/fastapi/__init__.py
@@ -2,10 +2,11 @@
from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.kafka.broker import KafkaBroker as KB
-from faststream.kafka.fastapi.fastapi import KafkaRouter
from faststream.kafka.message import KafkaMessage as KM
from faststream.kafka.publisher.producer import AioKafkaFastProducer
+from .fastapi import KafkaRouter
+
__all__ = (
"Context",
"ContextRepo",
diff --git a/faststream/kafka/fastapi/fastapi.py b/faststream/kafka/fastapi/fastapi.py
index 5e536cc46f..46a07fb7a7 100644
--- a/faststream/kafka/fastapi/fastapi.py
+++ b/faststream/kafka/fastapi/fastapi.py
@@ -28,6 +28,7 @@
from faststream._internal.constants import EMPTY
from faststream._internal.fastapi.router import StreamRouter
from faststream.kafka.broker.broker import KafkaBroker as KB
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from asyncio import AbstractEventLoop
@@ -48,16 +49,16 @@
SubscriberMiddleware,
)
from faststream.kafka.message import KafkaMessage
- from faststream.kafka.publisher.publisher import (
+ from faststream.kafka.publisher.specified import (
SpecificationBatchPublisher,
SpecificationDefaultPublisher,
)
- from faststream.kafka.subscriber.subscriber import (
+ from faststream.kafka.subscriber.specified import (
SpecificationBatchSubscriber,
SpecificationDefaultSubscriber,
)
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
Partition = TypeVar("Partition")
@@ -944,14 +945,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1434,14 +1431,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -1924,14 +1917,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -2417,14 +2406,10 @@ def subscriber(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -2607,8 +2592,7 @@ def subscriber(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
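The FastAPI integration mirrors the same signature change; a minimal sketch, assuming the standard router setup (names invented):

```python
from faststream.kafka.fastapi import KafkaRouter
from faststream.middlewares import AckPolicy

router = KafkaRouter("localhost:9092")


@router.subscriber(
    "orders",
    group_id="api-consumers",
    auto_commit=False,
    ack_policy=AckPolicy.REJECT_ON_ERROR,  # replaces retry/no_ack here as well
)
async def consume(body: dict) -> None:
    ...
```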
diff --git a/faststream/kafka/message.py b/faststream/kafka/message.py
index aa0eeefae9..e00c541795 100644
--- a/faststream/kafka/message.py
+++ b/faststream/kafka/message.py
@@ -77,7 +77,7 @@ async def nack(self) -> None:
partition=topic_partition,
offset=raw_message.offset,
)
- await super().nack()
+ await super().nack()
class KafkaAckableMessage(KafkaMessage):
@@ -85,4 +85,4 @@ async def ack(self) -> None:
"""Acknowledge the Kafka message."""
if not self.committed:
await self.consumer.commit()
- await super().ack()
+ await super().ack()
diff --git a/faststream/kafka/opentelemetry/provider.py b/faststream/kafka/opentelemetry/provider.py
index f9a43f54f6..cd2118ed33 100644
--- a/faststream/kafka/opentelemetry/provider.py
+++ b/faststream/kafka/opentelemetry/provider.py
@@ -11,6 +11,7 @@
from aiokafka import ConsumerRecord
from faststream._internal.basic_types import AnyDict
+ from faststream.kafka.response import KafkaPublishCommand
from faststream.message import StreamMessage
@@ -20,29 +21,29 @@ class BaseKafkaTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "kafka"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "KafkaPublishCommand",
) -> "AnyDict":
attrs = {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["topic"],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
- if (partition := kwargs.get("partition")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = partition
+ if cmd.partition is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = cmd.partition
- if (key := kwargs.get("key")) is not None:
- attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key
+ if cmd.key is not None:
+ attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = cmd.key
return attrs
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "KafkaPublishCommand",
) -> str:
- return cast(str, kwargs["topic"])
+ return cmd.destination
class KafkaTelemetrySettingsProvider(
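A sketch of the new command-based attribute extraction (field values are illustrative); note that `partition` and `key` only land in the span attributes when they are actually set on the command:

```python
from faststream.kafka.opentelemetry.provider import KafkaTelemetrySettingsProvider
from faststream.kafka.response import KafkaPublishCommand
from faststream.response.publish_type import PublishType

cmd = KafkaPublishCommand(
    b"payload",
    topic="orders",
    key=b"user-1",
    partition=3,
    correlation_id="abc-123",
    _publish_type=PublishType.PUBLISH,
)

provider = KafkaTelemetrySettingsProvider()
attrs = provider.get_publish_attrs_from_cmd(cmd)  # includes partition and key
assert provider.get_publish_destination_name(cmd) == "orders"
```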
diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py
index c4ce947a34..94275cc041 100644
--- a/faststream/kafka/parser.py
+++ b/faststream/kafka/parser.py
@@ -1,7 +1,6 @@
from typing import TYPE_CHECKING, Any, Optional, cast
-from faststream._internal.context.repository import context
-from faststream.kafka.message import FAKE_CONSUMER, KafkaMessage
+from faststream.kafka.message import FAKE_CONSUMER, ConsumerProtocol, KafkaMessage
from faststream.message import decode_message
if TYPE_CHECKING:
@@ -10,7 +9,6 @@
from aiokafka import ConsumerRecord
from faststream._internal.basic_types import DecodedMessage
- from faststream.kafka.subscriber.usecase import LogicSubscriber
from faststream.message import StreamMessage
@@ -25,13 +23,17 @@ def __init__(
self.msg_class = msg_class
self.regex = regex
+ self._consumer: ConsumerProtocol = FAKE_CONSUMER
+
+ def _setup(self, consumer: ConsumerProtocol) -> None:
+ self._consumer = consumer
+
async def parse_message(
self,
message: "ConsumerRecord",
) -> "StreamMessage[ConsumerRecord]":
"""Parses a Kafka message."""
headers = {i: j.decode() for i, j in message.headers}
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
return self.msg_class(
body=message.value,
@@ -42,7 +44,7 @@ async def parse_message(
correlation_id=headers.get("correlation_id"),
raw_message=message,
path=self.get_path(message.topic),
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
+ consumer=self._consumer,
)
async def decode_message(
@@ -76,8 +78,6 @@ async def parse_message(
headers = next(iter(batch_headers), {})
- handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_")
-
return self.msg_class(
body=body,
headers=headers,
@@ -88,7 +88,7 @@ async def parse_message(
correlation_id=headers.get("correlation_id"),
raw_message=message,
path=self.get_path(first.topic),
- consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER,
+ consumer=self._consumer,
)
async def decode_message(
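The parser no longer fetches the handler from the context repository; the subscriber injects its consumer directly. A sketch of the new wiring (the real `AIOKafkaConsumer` is bound during `start()`):

```python
from faststream.kafka.message import FAKE_CONSUMER, KafkaMessage
from faststream.kafka.parser import AioKafkaParser

parser = AioKafkaParser(msg_class=KafkaMessage, regex=None)
assert parser._consumer is FAKE_CONSUMER  # safe default before the broker starts

# during LogicSubscriber.start() the subscriber calls:
# parser._setup(consumer)
```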
diff --git a/faststream/kafka/prometheus/__init__.py b/faststream/kafka/prometheus/__init__.py
new file mode 100644
index 0000000000..e5ae7e2d4f
--- /dev/null
+++ b/faststream/kafka/prometheus/__init__.py
@@ -0,0 +1,3 @@
+from faststream.kafka.prometheus.middleware import KafkaPrometheusMiddleware
+
+__all__ = ("KafkaPrometheusMiddleware",)
diff --git a/faststream/kafka/prometheus/middleware.py b/faststream/kafka/prometheus/middleware.py
new file mode 100644
index 0000000000..fd5948945a
--- /dev/null
+++ b/faststream/kafka/prometheus/middleware.py
@@ -0,0 +1,27 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.constants import EMPTY
+from faststream.kafka.prometheus.provider import settings_provider_factory
+from faststream.prometheus.middleware import PrometheusMiddleware
+
+if TYPE_CHECKING:
+ from prometheus_client import CollectorRegistry
+
+
+class KafkaPrometheusMiddleware(PrometheusMiddleware):
+ def __init__(
+ self,
+ *,
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ super().__init__(
+ settings_provider_factory=settings_provider_factory,
+ registry=registry,
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
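A minimal usage sketch for the new middleware (the bootstrap server is illustrative; registry wiring is plain `prometheus_client`):

```python
from prometheus_client import CollectorRegistry

from faststream.kafka import KafkaBroker
from faststream.kafka.prometheus import KafkaPrometheusMiddleware

registry = CollectorRegistry()

broker = KafkaBroker(
    "localhost:9092",
    middlewares=(KafkaPrometheusMiddleware(registry=registry),),
)
```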
diff --git a/faststream/kafka/prometheus/provider.py b/faststream/kafka/prometheus/provider.py
new file mode 100644
index 0000000000..9ea5ffbd3c
--- /dev/null
+++ b/faststream/kafka/prometheus/provider.py
@@ -0,0 +1,64 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union, cast
+
+from faststream.message.message import MsgType, StreamMessage
+from faststream.prometheus import (
+ MetricsSettingsProvider,
+)
+
+if TYPE_CHECKING:
+ from aiokafka import ConsumerRecord
+
+ from faststream.kafka.response import KafkaPublishCommand
+ from faststream.prometheus import ConsumeAttrs
+
+
+class BaseKafkaMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
+ __slots__ = ("messaging_system",)
+
+ def __init__(self) -> None:
+ self.messaging_system = "kafka"
+
+ def get_publish_destination_name_from_cmd(
+ self,
+ cmd: "KafkaPublishCommand",
+ ) -> str:
+ return cmd.destination
+
+
+class KafkaMetricsSettingsProvider(BaseKafkaMetricsSettingsProvider["ConsumerRecord"]):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[ConsumerRecord]",
+ ) -> "ConsumeAttrs":
+ return {
+ "destination_name": msg.raw_message.topic,
+ "message_size": len(msg.body),
+ "messages_count": 1,
+ }
+
+
+class BatchKafkaMetricsSettingsProvider(
+ BaseKafkaMetricsSettingsProvider[tuple["ConsumerRecord", ...]]
+):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[tuple[ConsumerRecord, ...]]",
+ ) -> "ConsumeAttrs":
+ raw_message = msg.raw_message[0]
+ return {
+ "destination_name": raw_message.topic,
+ "message_size": len(bytearray().join(cast(Sequence[bytes], msg.body))),
+ "messages_count": len(msg.raw_message),
+ }
+
+
+def settings_provider_factory(
+ msg: Union["ConsumerRecord", Sequence["ConsumerRecord"], None],
+) -> Union[
+ KafkaMetricsSettingsProvider,
+ BatchKafkaMetricsSettingsProvider,
+]:
+ if isinstance(msg, Sequence):
+ return BatchKafkaMetricsSettingsProvider()
+ return KafkaMetricsSettingsProvider()
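The factory dispatches purely on message shape: any `Sequence` of records is treated as a batch, everything else (including `None`) gets the single-record provider:

```python
from faststream.kafka.prometheus.provider import (
    BatchKafkaMetricsSettingsProvider,
    KafkaMetricsSettingsProvider,
    settings_provider_factory,
)

assert isinstance(settings_provider_factory(None), KafkaMetricsSettingsProvider)
assert isinstance(settings_provider_factory(()), BatchKafkaMetricsSettingsProvider)
```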
diff --git a/faststream/kafka/publisher/factory.py b/faststream/kafka/publisher/factory.py
new file mode 100644
index 0000000000..16c2b69e60
--- /dev/null
+++ b/faststream/kafka/publisher/factory.py
@@ -0,0 +1,138 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Literal,
+ Optional,
+ Union,
+ overload,
+)
+
+from faststream.exceptions import SetupError
+
+from .specified import SpecificationBatchPublisher, SpecificationDefaultPublisher
+
+if TYPE_CHECKING:
+ from aiokafka import ConsumerRecord
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[True],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[tuple[ConsumerRecord, ...]]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationBatchPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: Literal[False],
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> "SpecificationDefaultPublisher": ...
+
+
+@overload
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable[
+ "BrokerMiddleware[Union[tuple[ConsumerRecord, ...], ConsumerRecord]]"
+ ],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]: ...
+
+
+def create_publisher(
+ *,
+ batch: bool,
+ key: Optional[bytes],
+ topic: str,
+ partition: Optional[int],
+ headers: Optional[dict[str, str]],
+ reply_to: str,
+ # Publisher args
+ broker_middlewares: Iterable[
+ "BrokerMiddleware[Union[tuple[ConsumerRecord, ...], ConsumerRecord]]"
+ ],
+ middlewares: Iterable["PublisherMiddleware"],
+ # Specification args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> Union[
+ "SpecificationBatchPublisher",
+ "SpecificationDefaultPublisher",
+]:
+ if batch:
+ if key:
+ msg = "You can't setup `key` with batch publisher"
+ raise SetupError(msg)
+
+ return SpecificationBatchPublisher(
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+ return SpecificationDefaultPublisher(
+ key=key,
+ # basic args
+ topic=topic,
+ partition=partition,
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
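The only behavioral branch here is the batch/`key` guard; a sketch of the failure mode (argument values are placeholders):

```python
from faststream.exceptions import SetupError
from faststream.kafka.publisher.factory import create_publisher

try:
    create_publisher(
        batch=True,
        key=b"not-allowed",  # keys are per-record, so they can't apply to a whole batch
        topic="t",
        partition=None,
        headers=None,
        reply_to="",
        broker_middlewares=(),
        middlewares=(),
        schema_=None,
        title_=None,
        description_=None,
        include_in_schema=True,
    )
except SetupError:
    pass
```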
diff --git a/faststream/kafka/publisher/fake.py b/faststream/kafka/publisher/fake.py
new file mode 100644
index 0000000000..92ecbabcb8
--- /dev/null
+++ b/faststream/kafka/publisher/fake.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.kafka.response import KafkaPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class KafkaFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ topic: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.topic = topic
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "KafkaPublishCommand"]
+ ) -> "KafkaPublishCommand":
+ real_cmd = KafkaPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.topic
+ return real_cmd
diff --git a/faststream/kafka/publisher/producer.py b/faststream/kafka/publisher/producer.py
index 93441fb2bd..a6574e104d 100644
--- a/faststream/kafka/publisher/producer.py
+++ b/faststream/kafka/publisher/producer.py
@@ -1,19 +1,23 @@
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional
from typing_extensions import override
from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.subscriber.utils import resolve_custom_func
-from faststream.exceptions import OperationForbiddenError
+from faststream.exceptions import FeatureNotSupportedException
from faststream.kafka.message import KafkaMessage
from faststream.kafka.parser import AioKafkaParser
from faststream.message import encode_message
+from .state import EmptyProducerState, ProducerState, RealProducer
+
if TYPE_CHECKING:
+ import asyncio
+
from aiokafka import AIOKafkaProducer
- from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import CustomCallable
+ from faststream.kafka.response import KafkaPublishCommand
class AioKafkaFastProducer(ProducerProto):
@@ -21,87 +25,72 @@ class AioKafkaFastProducer(ProducerProto):
def __init__(
self,
- producer: "AIOKafkaProducer",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._producer = producer
+ self._producer: ProducerState = EmptyProducerState()
# NOTE: register default parser to be compatible with request
default = AioKafkaParser(
msg_class=KafkaMessage,
regex=None,
)
+
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ async def connect(self, producer: "AIOKafkaProducer") -> None:
+ await producer.start()
+ self._producer = RealProducer(producer)
+
+ async def disconnect(self) -> None:
+ await self._producer.stop()
+ self._producer = EmptyProducerState()
+
+ def __bool__(self) -> bool:
+ return bool(self._producer)
+
+ @property
+ def closed(self) -> bool:
+ return self._producer.closed
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- *,
- correlation_id: str,
- key: Union[bytes, Any, None] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- no_confirm: bool = False,
- ) -> None:
+ cmd: "KafkaPublishCommand",
+ ) -> "asyncio.Future":
"""Publish a message to a topic."""
- message, content_type = encode_message(message)
+ message, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- send_future = await self._producer.send(
- topic=topic,
+ send_future = await self._producer.producer.send(
+ topic=cmd.destination,
value=message,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
headers=[(i, (j or "").encode()) for i, j in headers_to_send.items()],
)
- if not no_confirm:
- await send_future
- async def stop(self) -> None:
- await self._producer.stop()
+ if not cmd.no_confirm:
+ await send_future
+ return send_future
async def publish_batch(
self,
- *msgs: "SendableMessage",
- correlation_id: str,
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- no_confirm: bool = False,
- ) -> None:
+ cmd: "KafkaPublishCommand",
+ ) -> "asyncio.Future":
"""Publish a batch of messages to a topic."""
- batch = self._producer.create_batch()
+ batch = self._producer.producer.create_batch()
- headers_to_send = {"correlation_id": correlation_id, **(headers or {})}
+ headers_to_send = cmd.headers_to_publish()
- if reply_to:
- headers_to_send["reply_to"] = headers_to_send.get(
- "reply_to",
- reply_to,
- )
-
- for msg in msgs:
- message, content_type = encode_message(msg)
+ for body in cmd.batch_bodies:
+ message, content_type = encode_message(body)
if content_type:
final_headers = {
@@ -114,17 +103,23 @@ async def publish_batch(
batch.append(
key=None,
value=message,
- timestamp=timestamp_ms,
+ timestamp=cmd.timestamp_ms,
headers=[(i, j.encode()) for i, j in final_headers.items()],
)
- send_future = await self._producer.send_batch(batch, topic, partition=partition)
- if not no_confirm:
+ send_future = await self._producer.producer.send_batch(
+ batch,
+ cmd.destination,
+ partition=cmd.partition,
+ )
+ if not cmd.no_confirm:
await send_future
+ return send_future
@override
- async def request(self, *args: Any, **kwargs: Any) -> Optional[Any]:
+ async def request(
+ self,
+ cmd: "KafkaPublishCommand",
+ ) -> Any:
msg = "Kafka doesn't support `request` method without test client."
- raise OperationForbiddenError(
- msg,
- )
+ raise FeatureNotSupportedException(msg)
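The producer is now constructed eagerly and connected later, instead of being created inside `_connect()`. A lifecycle sketch (the commented `aiokafka` calls show where the broker drives the state transitions):

```python
from faststream.kafka.publisher.producer import AioKafkaFastProducer

producer = AioKafkaFastProducer(parser=None, decoder=None)
assert not producer      # EmptyProducerState until connect()
assert producer.closed

# inside KafkaBroker._connect():
#   await producer.connect(aiokafka.AIOKafkaProducer(...))
# and inside KafkaBroker.close():
#   await producer.disconnect()  # stops aiokafka, resets to the empty state
```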
diff --git a/faststream/kafka/publisher/specified.py b/faststream/kafka/publisher/specified.py
new file mode 100644
index 0000000000..d765cc8f8b
--- /dev/null
+++ b/faststream/kafka/publisher/specified.py
@@ -0,0 +1,56 @@
+from typing import TYPE_CHECKING
+
+from faststream._internal.types import MsgType
+from faststream.kafka.publisher.usecase import (
+ BatchPublisher,
+ DefaultPublisher,
+ LogicPublisher,
+)
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema.bindings import ChannelBinding, kafka
+from faststream.specification.schema.channel import Channel
+from faststream.specification.schema.message import CorrelationId, Message
+from faststream.specification.schema.operation import Operation
+
+if TYPE_CHECKING:
+ from aiokafka import ConsumerRecord
+
+
+class SpecificationPublisher(LogicPublisher[MsgType]):
+ """A class representing a publisher."""
+
+ def get_name(self) -> str:
+ return f"{self.topic}:Publisher"
+
+ def get_schema(self) -> dict[str, Channel]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: Channel(
+ description=self.description,
+ publish=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ correlationId=CorrelationId(
+ location="$message.header#/correlation_id",
+ ),
+ ),
+ ),
+ bindings=ChannelBinding(kafka=kafka.ChannelBinding(topic=self.topic)),
+ ),
+ }
+
+
+class SpecificationBatchPublisher(
+ BatchPublisher,
+ SpecificationPublisher[tuple["ConsumerRecord", ...]],
+):
+ pass
+
+
+class SpecificationDefaultPublisher(
+ DefaultPublisher,
+ SpecificationPublisher["ConsumerRecord"],
+):
+ pass
diff --git a/faststream/kafka/publisher/state.py b/faststream/kafka/publisher/state.py
new file mode 100644
index 0000000000..3094cf02c1
--- /dev/null
+++ b/faststream/kafka/publisher/state.py
@@ -0,0 +1,49 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from aiokafka import AIOKafkaProducer
+
+
+class ProducerState(Protocol):
+ producer: "AIOKafkaProducer"
+ closed: bool
+
+ def __bool__(self) -> bool: ...
+
+ async def stop(self) -> None: ...
+
+
+class EmptyProducerState(ProducerState):
+ __slots__ = ()
+
+ closed = True
+
+ @property
+ def producer(self) -> "AIOKafkaProducer":
+ msg = "You can't use producer here, please connect broker first."
+ raise IncorrectState(msg)
+
+ def __bool__(self) -> bool:
+ return False
+
+ async def stop(self) -> None:
+ pass
+
+
+class RealProducer(ProducerState):
+ __slots__ = ("producer",)
+
+ def __init__(self, producer: "AIOKafkaProducer") -> None:
+ self.producer = producer
+
+ def __bool__(self) -> bool:
+ return True
+
+ async def stop(self) -> None:
+ await self.producer.stop()
+
+ @property
+ def closed(self) -> bool:
+ return self.producer._closed or False
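A short sketch of the guard the empty state provides:

```python
from faststream.exceptions import IncorrectState
from faststream.kafka.publisher.state import EmptyProducerState

state = EmptyProducerState()
assert state.closed and not state

try:
    state.producer  # touching the producer before connect() fails loudly
except IncorrectState:
    pass
```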
diff --git a/faststream/kafka/publisher/usecase.py b/faststream/kafka/publisher/usecase.py
index de0094033e..0f005770de 100644
--- a/faststream/kafka/publisher/usecase.py
+++ b/faststream/kafka/publisher/usecase.py
@@ -1,36 +1,36 @@
-from collections.abc import Awaitable, Iterable
-from functools import partial
-from itertools import chain
+from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
Annotated,
Any,
- Callable,
Optional,
Union,
- cast,
)
from aiokafka import ConsumerRecord
from typing_extensions import Doc, override
from faststream._internal.publisher.usecase import PublisherUsecase
-from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import MsgType
-from faststream.exceptions import NOT_CONNECTED_YET
+from faststream.kafka.message import KafkaMessage
+from faststream.kafka.response import KafkaPublishCommand
from faststream.message import gen_cor_id
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream._internal.basic_types import AsyncFunc, SendableMessage
+ import asyncio
+
+ from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.kafka.message import KafkaMessage
from faststream.kafka.publisher.producer import AioKafkaFastProducer
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[MsgType]):
"""A class to publish messages to a Kafka topic."""
- _producer: Optional["AioKafkaFastProducer"]
+ _producer: "AioKafkaFastProducer"
def __init__(
self,
@@ -61,9 +61,7 @@ def __init__(
self.topic = topic
self.partition = partition
self.reply_to = reply_to
- self.headers = headers
-
- self._producer = None
+ self.headers = headers or {}
def add_prefix(self, prefix: str) -> None:
self.topic = f"{prefix}{self.topic}"
@@ -127,47 +125,20 @@ async def request(
float,
Doc("Timeout to send RPC request."),
] = 0.5,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "KafkaMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- topic = topic or self.topic
- partition = partition or self.partition
- headers = headers or self.headers
- correlation_id = correlation_id or gen_cor_id()
-
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = KafkaPublishCommand(
message,
- topic=topic,
+ topic=topic or self.topic,
key=key,
- partition=partition,
- headers=headers,
- timeout=timeout,
- correlation_id=correlation_id,
+ partition=partition or self.partition,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
timestamp_ms=timestamp_ms,
+ timeout=timeout,
+ _publish_type=PublishType.REQUEST,
)
- msg: KafkaMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
- )
+ msg: KafkaMessage = await self._basic_request(cmd)
return msg
@@ -175,7 +146,7 @@ class DefaultPublisher(LogicPublisher[ConsumerRecord]):
def __init__(
self,
*,
- key: Optional[bytes],
+ key: Union[bytes, str, None],
topic: str,
partition: Optional[int],
headers: Optional[dict[str, str]],
@@ -269,43 +240,39 @@ async def publish(
bool,
Doc("Do not wait for Kafka publish confirmation."),
] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- topic = topic or self.topic
- key = key or self.key
- partition = partition or self.partition
- headers = headers or self.headers
- reply_to = reply_to or self.reply_to
- correlation_id = correlation_id or gen_cor_id()
-
- call: Callable[..., Awaitable[None]] = self._producer.publish
-
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
-
- await call(
+ ) -> "asyncio.Future":
+ cmd = KafkaPublishCommand(
message,
- topic=topic,
- key=key,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- correlation_id=correlation_id,
+ topic=topic or self.topic,
+ key=key or self.key,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
timestamp_ms=timestamp_ms,
no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd)
+
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+
+ cmd.partition = cmd.partition or self.partition
+ cmd.key = cmd.key or self.key
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -366,14 +333,9 @@ async def request(
float,
Doc("Timeout to send RPC request."),
] = 0.5,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "KafkaMessage":
return await super().request(
- message=message,
+ message,
topic=topic,
key=key or self.key,
partition=partition,
@@ -381,7 +343,6 @@ async def request(
headers=headers,
correlation_id=correlation_id,
timeout=timeout,
- _extra_middlewares=_extra_middlewares,
)
@@ -389,11 +350,7 @@ class BatchPublisher(LogicPublisher[tuple["ConsumerRecord", ...]]):
@override
async def publish(
self,
- message: Annotated[
- Union["SendableMessage", Iterable["SendableMessage"]],
- Doc("One message or iterable messages bodies to send."),
- ],
- *extra_messages: Annotated[
+ *messages: Annotated[
"SendableMessage",
Doc("Messages bodies to send."),
],
@@ -438,44 +395,36 @@ async def publish(
bool,
Doc("Do not wait for Kafka publish confirmation."),
] = False,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ ) -> "asyncio.Future":
+ cmd = KafkaPublishCommand(
+ *messages,
+ key=None,
+ topic=topic or self.topic,
+ partition=partition or self.partition,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ timestamp_ms=timestamp_ms,
+ no_confirm=no_confirm,
+ _publish_type=PublishType.PUBLISH,
+ )
- msgs: Iterable[SendableMessage]
- if extra_messages:
- msgs = (cast("SendableMessage", message), *extra_messages)
- else:
- msgs = cast(Iterable["SendableMessage"], message)
+ return await self._basic_publish_batch(cmd, _extra_middlewares=())
- topic = topic or self.topic
- partition = partition or self.partition
- headers = headers or self.headers
- reply_to = reply_to or self.reply_to
- correlation_id = correlation_id or gen_cor_id()
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = KafkaPublishCommand.from_cmd(cmd, batch=True)
- call: AsyncFunc = self._producer.publish_batch
+ cmd.destination = self.topic
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ cmd.partition = cmd.partition or self.partition
- await call(
- *msgs,
- topic=topic,
- partition=partition,
- headers=headers,
- reply_to=reply_to,
- correlation_id=correlation_id,
- timestamp_ms=timestamp_ms,
- no_confirm=no_confirm,
- )
+ await self._basic_publish_batch(cmd, _extra_middlewares=_extra_middlewares)
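The batch publisher's call convention changes from one iterable (plus optional extras) to plain variadic bodies, and it now returns the delivery future too. A hedged sketch (topic invented, broker assumed connected):

```python
from faststream.kafka import KafkaBroker

broker = KafkaBroker()
publisher = broker.publisher("out-topic", batch=True)


async def produce() -> None:
    # before: await publisher.publish(["a", "b", "c"])
    future = await publisher.publish("a", "b", "c", no_confirm=True)
    await future  # resolves once Kafka confirms the whole batch
```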
diff --git a/faststream/kafka/response.py b/faststream/kafka/response.py
index 3d04d6ab35..13d3c186bf 100644
--- a/faststream/kafka/response.py
+++ b/faststream/kafka/response.py
@@ -1,8 +1,11 @@
-from typing import TYPE_CHECKING, Optional
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Optional, Union
from typing_extensions import override
-from faststream.response import Response
+from faststream._internal.constants import EMPTY
+from faststream.response.publish_type import PublishType
+from faststream.response.response import PublishCommand, Response
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, SendableMessage
@@ -28,9 +31,99 @@ def __init__(
self.key = key
@override
- def as_publish_kwargs(self) -> "AnyDict":
- return {
- **super().as_publish_kwargs(),
- "timestamp_ms": self.timestamp_ms,
- "key": self.key,
- }
+ def as_publish_command(self) -> "KafkaPublishCommand":
+ return KafkaPublishCommand(
+ self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ # Kafka specific
+ topic="",
+ key=self.key,
+ timestamp_ms=self.timestamp_ms,
+ )
+
+
+class KafkaPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ message: "SendableMessage",
+ /,
+ *messages: "SendableMessage",
+ topic: str,
+ _publish_type: PublishType,
+ key: Union[bytes, Any, None] = None,
+ partition: Optional[int] = None,
+ timestamp_ms: Optional[int] = None,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ reply_to: str = "",
+ no_confirm: bool = False,
+ timeout: float = 0.5,
+ ) -> None:
+ super().__init__(
+ message,
+ destination=topic,
+ reply_to=reply_to,
+ correlation_id=correlation_id,
+ headers=headers,
+ _publish_type=_publish_type,
+ )
+ self.extra_bodies = messages
+
+ self.key = key
+ self.partition = partition
+ self.timestamp_ms = timestamp_ms
+ self.no_confirm = no_confirm
+
+ # request option
+ self.timeout = timeout
+
+ @property
+ def batch_bodies(self) -> tuple["SendableMessage", ...]:
+ if self.body is EMPTY:
+ return self.extra_bodies
+ return (self.body, *self.extra_bodies)
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "KafkaPublishCommand"],
+ *,
+ batch: bool = False,
+ ) -> "KafkaPublishCommand":
+ if isinstance(cmd, KafkaPublishCommand):
+ # NOTE: this should probably return a copy.
+ return cmd
+
+ body, extra_bodies = cmd.body, []
+ if batch:
+ if body is None:
+ body = EMPTY
+
+ if isinstance(body, Sequence) and not isinstance(body, str):
+ if body:
+ body, extra_bodies = body[0], body[1:]
+ else:
+ body = EMPTY
+
+ return cls(
+ body,
+ *extra_bodies,
+ topic=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
+
+ def headers_to_publish(self) -> dict[str, str]:
+ headers = {}
+
+ if self.correlation_id:
+ headers["correlation_id"] = self.correlation_id
+
+ if self.reply_to:
+ headers["reply_to"] = self.reply_to
+
+ return headers | self.headers
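`KafkaResponse` now materializes into a full `KafkaPublishCommand` instead of a kwargs dict; a handler sketch (topic names invented) showing the Kafka-specific fields it carries into the reply:

```python
from faststream.kafka import KafkaBroker
from faststream.kafka.response import KafkaResponse

broker = KafkaBroker()


@broker.subscriber("in-topic", group_id="replies")
@broker.publisher("out-topic")
async def handler(body: str) -> KafkaResponse:
    # key and timestamp_ms travel through as_publish_command()
    return KafkaResponse(body, key=b"reply-key")
```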
diff --git a/faststream/kafka/router.py b/faststream/kafka/router.py
index e79e422e8b..b4fecbe3e0 100644
--- a/faststream/kafka/router.py
+++ b/faststream/kafka/router.py
@@ -17,13 +17,15 @@
BrokerRouter,
SubscriberRoute,
)
+from faststream._internal.constants import EMPTY
from faststream.kafka.broker.registrator import KafkaRegistrator
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from aiokafka import ConsumerRecord, TopicPartition
from aiokafka.abc import ConsumerRebalanceListener
from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import (
@@ -468,8 +470,8 @@ def __init__(
] = (),
# broker args
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -483,14 +485,10 @@ def __init__(
Iterable["SubscriberMiddleware[KafkaMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -555,8 +553,7 @@ def __init__(
description=description,
include_in_schema=include_in_schema,
# FastDepends args
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
)
@@ -583,9 +580,9 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers.",
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
diff --git a/faststream/kafka/subscriber/factory.py b/faststream/kafka/subscriber/factory.py
index a31a5fde93..bb559873cc 100644
--- a/faststream/kafka/subscriber/factory.py
+++ b/faststream/kafka/subscriber/factory.py
@@ -1,3 +1,4 @@
+import warnings
from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
@@ -7,16 +8,18 @@
overload,
)
+from faststream._internal.constants import EMPTY
from faststream.exceptions import SetupError
-from faststream.kafka.subscriber.subscriber import (
+from faststream.kafka.subscriber.specified import (
SpecificationBatchSubscriber,
SpecificationDefaultSubscriber,
)
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from aiokafka import ConsumerRecord, TopicPartition
from aiokafka.abc import ConsumerRebalanceListener
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
from faststream._internal.types import BrokerMiddleware
@@ -36,10 +39,9 @@ def create_subscriber(
partitions: Iterable["TopicPartition"],
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[tuple[ConsumerRecord, ...]]"],
# Specification args
title_: Optional[str],
@@ -62,10 +64,9 @@ def create_subscriber(
partitions: Iterable["TopicPartition"],
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"],
# Specification args
title_: Optional[str],
@@ -88,10 +89,9 @@ def create_subscriber(
partitions: Iterable["TopicPartition"],
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable[
"BrokerMiddleware[Union[ConsumerRecord, tuple[ConsumerRecord, ...]]]"
],
@@ -118,10 +118,9 @@ def create_subscriber(
partitions: Iterable["TopicPartition"],
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable[
"BrokerMiddleware[Union[ConsumerRecord, tuple[ConsumerRecord, ...]]]"
],
@@ -133,24 +132,17 @@ def create_subscriber(
"SpecificationDefaultSubscriber",
"SpecificationBatchSubscriber",
]:
- if is_manual and not group_id:
- msg = "You must use `group_id` with manual commit mode."
- raise SetupError(msg)
+ _validate_input_for_misconfigure(
+ *topics,
+ pattern=pattern,
+ partitions=partitions,
+ ack_policy=ack_policy,
+ is_manual=is_manual,
+ group_id=group_id,
+ )
- if not topics and not partitions and not pattern:
- msg = "You should provide either `topics` or `partitions` or `pattern`."
- raise SetupError(
- msg,
- )
- if topics and partitions:
- msg = "You can't provide both `topics` and `partitions`."
- raise SetupError(msg)
- if topics and pattern:
- msg = "You can't provide both `topics` and `pattern`."
- raise SetupError(msg)
- if partitions and pattern:
- msg = "You can't provide both `partitions` and `pattern`."
- raise SetupError(msg)
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
if batch:
return SpecificationBatchSubscriber(
@@ -163,9 +155,8 @@ def create_subscriber(
connection_args=connection_args,
partitions=partitions,
is_manual=is_manual,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
@@ -181,12 +172,46 @@ def create_subscriber(
connection_args=connection_args,
partitions=partitions,
is_manual=is_manual,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
+
+
+def _validate_input_for_misconfigure(
+ *topics: str,
+ partitions: Iterable["TopicPartition"],
+ pattern: Optional[str],
+ ack_policy: "AckPolicy",
+ is_manual: bool,
+ group_id: Optional[str],
+) -> None:
+ if ack_policy is not EMPTY and not is_manual:
+ warnings.warn(
+ "You can't use acknowledgement policy with `is_manual=False` subscriber",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if is_manual and not group_id:
+ msg = "You must use `group_id` with manual commit mode."
+ raise SetupError(msg)
+
+ if not topics and not partitions and not pattern:
+ msg = "You should provide either `topics` or `partitions` or `pattern`."
+ raise SetupError(
+ msg,
+ )
+ if topics and partitions:
+ msg = "You can't provide both `topics` and `partitions`."
+ raise SetupError(msg)
+ if topics and pattern:
+ msg = "You can't provide both `topics` and `pattern`."
+ raise SetupError(msg)
+ if partitions and pattern:
+ msg = "You can't provide both `partitions` and `pattern`."
+ raise SetupError(msg)
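The extracted validator keeps the old `SetupError` cases and adds a `RuntimeWarning` for an `ack_policy` set on an auto-commit subscriber; a sketch of both paths (topics invented):

```python
from faststream.exceptions import SetupError
from faststream.kafka import KafkaBroker
from faststream.middlewares import AckPolicy

broker = KafkaBroker()

try:
    broker.subscriber("t1", pattern="t.*")  # topics and pattern are exclusive
except SetupError:
    pass

# warns: an ack policy has no effect without manual commit mode
broker.subscriber("t1", ack_policy=AckPolicy.REJECT_ON_ERROR)
```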
diff --git a/faststream/kafka/subscriber/subscriber.py b/faststream/kafka/subscriber/specified.py
similarity index 100%
rename from faststream/kafka/subscriber/subscriber.py
rename to faststream/kafka/subscriber/specified.py
diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py
index 7a38b79055..fab52a66f2 100644
--- a/faststream/kafka/subscriber/usecase.py
+++ b/faststream/kafka/subscriber/usecase.py
@@ -14,7 +14,6 @@
from aiokafka.errors import ConsumerStoppedError, KafkaError
from typing_extensions import override
-from faststream._internal.publisher.fake import FakePublisher
from faststream._internal.subscriber.usecase import SubscriberUsecase
from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import (
@@ -26,16 +25,18 @@
from faststream._internal.utils.path import compile_path
from faststream.kafka.message import KafkaAckableMessage, KafkaMessage
from faststream.kafka.parser import AioKafkaBatchParser, AioKafkaParser
+from faststream.kafka.publisher.fake import KafkaFakePublisher
if TYPE_CHECKING:
from aiokafka import AIOKafkaConsumer, ConsumerRecord
from aiokafka.abc import ConsumerRebalanceListener
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream._internal.basic_types import AnyDict, LoggerProto
- from faststream._internal.publisher.proto import ProducerProto
- from faststream._internal.setup import SetupState
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
class LogicSubscriber(SubscriberUsecase[MsgType]):
@@ -50,6 +51,7 @@ class LogicSubscriber(SubscriberUsecase[MsgType]):
task: Optional["asyncio.Task[None]"]
client_id: Optional[str]
batch: bool
+ parser: AioKafkaParser
def __init__(
self,
@@ -63,10 +65,9 @@ def __init__(
# Subscriber args
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
# AsyncAPI args
title_: Optional[str],
@@ -77,9 +78,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -110,23 +110,17 @@ def _setup( # type: ignore[override]
client_id: Optional[str],
builder: Callable[..., "AIOKafkaConsumer"],
# basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- state: "SetupState",
+ state: "BrokerState",
) -> None:
self.client_id = client_id
self.builder = builder
super()._setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
@@ -143,6 +137,8 @@ async def start(self) -> None:
**self.__connection_args,
)
+ self.parser._setup(consumer)
+
if self.topics or self._pattern:
consumer.subscribe(
topics=self.topics,
@@ -192,9 +188,13 @@ async def get_one(
((raw_message,),) = raw_messages.values()
+ context = self._state.get().di_state.context
+
msg: StreamMessage[MsgType] = await process_msg(
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -203,16 +203,11 @@ async def get_one(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence[FakePublisher]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "topic": message.reply_to,
- },
+ KafkaFakePublisher(
+ self._state.get().producer,
+ topic=message.reply_to,
),
)
@@ -288,10 +283,9 @@ def __init__(
partitions: Iterable["TopicPartition"],
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"],
# AsyncAPI args
title_: Optional[str],
@@ -308,7 +302,7 @@ def __init__(
else:
reg = None
- parser = AioKafkaParser(
+ self.parser = AioKafkaParser(
msg_class=KafkaAckableMessage if is_manual else KafkaMessage,
regex=reg,
)
@@ -321,12 +315,11 @@ def __init__(
connection_args=connection_args,
partitions=partitions,
# subscriber args
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
+ default_parser=self.parser.parse_message,
+ default_decoder=self.parser.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -369,10 +362,9 @@ def __init__(
partitions: Iterable["TopicPartition"],
is_manual: bool,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable[
"BrokerMiddleware[Sequence[tuple[ConsumerRecord, ...]]]"
],
@@ -394,7 +386,7 @@ def __init__(
else:
reg = None
- parser = AioKafkaBatchParser(
+ self.parser = AioKafkaBatchParser(
msg_class=KafkaAckableMessage if is_manual else KafkaMessage,
regex=reg,
)
@@ -407,12 +399,11 @@ def __init__(
connection_args=connection_args,
partitions=partitions,
# subscriber args
- default_parser=parser.parse_message,
- default_decoder=parser.decode_message,
+ default_parser=self.parser.parse_message,
+ default_decoder=self.parser.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
diff --git a/faststream/kafka/testing.py b/faststream/kafka/testing.py
index a0135e9083..96e0614183 100755
--- a/faststream/kafka/testing.py
+++ b/faststream/kafka/testing.py
@@ -1,5 +1,6 @@
import re
-from collections.abc import Generator, Iterable
+from collections.abc import Generator, Iterable, Iterator
+from contextlib import contextmanager
from datetime import datetime, timezone
from typing import (
TYPE_CHECKING,
@@ -21,13 +22,14 @@
from faststream.kafka.message import KafkaMessage
from faststream.kafka.parser import AioKafkaParser
from faststream.kafka.publisher.producer import AioKafkaFastProducer
-from faststream.kafka.publisher.publisher import SpecificationBatchPublisher
-from faststream.kafka.subscriber.subscriber import SpecificationBatchSubscriber
+from faststream.kafka.publisher.specified import SpecificationBatchPublisher
+from faststream.kafka.subscriber.usecase import BatchSubscriber
from faststream.message import encode_message, gen_cor_id
if TYPE_CHECKING:
from faststream._internal.basic_types import SendableMessage
- from faststream.kafka.publisher.publisher import SpecificationPublisher
+ from faststream.kafka.publisher.specified import SpecificationPublisher
+ from faststream.kafka.response import KafkaPublishCommand
from faststream.kafka.subscriber.usecase import LogicSubscriber
__all__ = ("TestKafkaBroker",)
@@ -36,13 +38,19 @@
class TestKafkaBroker(TestBroker[KafkaBroker]):
"""A class to test Kafka brokers."""
+ @contextmanager
+ def _patch_producer(self, broker: KafkaBroker) -> Iterator[None]:
+ old_producer = broker._state.get().producer
+ broker._state.patch_value(producer=FakeProducer(broker))
+ # restore the real producer even if the test body raises
+ try:
+ yield
+ finally:
+ broker._state.patch_value(producer=old_producer)
+
@staticmethod
async def _fake_connect( # type: ignore[override]
broker: KafkaBroker,
*args: Any,
**kwargs: Any,
) -> Callable[..., AsyncMock]:
- broker._producer = FakeProducer(broker)
return _fake_connection
@staticmethod
@@ -96,120 +104,101 @@ def __init__(self, broker: KafkaBroker) -> None:
self._parser = resolve_custom_func(broker._parser, default.parse_message)
self._decoder = resolve_custom_func(broker._decoder, default.decode_message)
+ def __bool__(self) -> bool:
+ return True
+
+ @property
+ def closed(self) -> bool:
+ return False
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- reply_to: str = "",
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a message to the Kafka broker."""
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id,
- reply_to=reply_to,
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
for handler in _find_handler(
self.broker._subscribers,
- topic,
- partition,
+ cmd.destination,
+ cmd.partition,
):
msg_to_send = (
- [incoming]
- if isinstance(handler, SpecificationBatchSubscriber)
- else incoming
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
)
- await self._execute_handler(msg_to_send, topic, handler)
+ await self._execute_handler(msg_to_send, cmd.destination, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- topic: str,
- key: Optional[bytes] = None,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- *,
- timeout: Optional[float] = 0.5,
+ cmd: "KafkaPublishCommand",
) -> "ConsumerRecord":
incoming = build_message(
- message=message,
- topic=topic,
- key=key,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id,
+ message=cmd.body,
+ topic=cmd.destination,
+ key=cmd.key,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
for handler in _find_handler(
self.broker._subscribers,
- topic,
- partition,
+ cmd.destination,
+ cmd.partition,
):
msg_to_send = (
- [incoming]
- if isinstance(handler, SpecificationBatchSubscriber)
- else incoming
+ [incoming] if isinstance(handler, BatchSubscriber) else incoming
)
- with anyio.fail_after(timeout):
- return await self._execute_handler(msg_to_send, topic, handler)
+ with anyio.fail_after(cmd.timeout):
+ return await self._execute_handler(
+ msg_to_send, cmd.destination, handler
+ )
raise SubscriberNotFound
async def publish_batch(
self,
- *msgs: "SendableMessage",
- topic: str,
- partition: Optional[int] = None,
- timestamp_ms: Optional[int] = None,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- correlation_id: Optional[str] = None,
- no_confirm: bool = False,
+ cmd: "KafkaPublishCommand",
) -> None:
"""Publish a batch of messages to the Kafka broker."""
for handler in _find_handler(
self.broker._subscribers,
- topic,
- partition,
+ cmd.destination,
+ cmd.partition,
):
messages = (
build_message(
message=message,
- topic=topic,
- partition=partition,
- timestamp_ms=timestamp_ms,
- headers=headers,
- correlation_id=correlation_id,
- reply_to=reply_to,
+ topic=cmd.destination,
+ partition=cmd.partition,
+ timestamp_ms=cmd.timestamp_ms,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
- for message in msgs
+ for message in cmd.batch_bodies
)
- if isinstance(handler, SpecificationBatchSubscriber):
- await self._execute_handler(list(messages), topic, handler)
+ if isinstance(handler, BatchSubscriber):
+ await self._execute_handler(list(messages), cmd.destination, handler)
else:
for m in messages:
- await self._execute_handler(m, topic, handler)
+ await self._execute_handler(m, cmd.destination, handler)
async def _execute_handler(
self,
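# --- Editor's sketch (not part of the diff): with `_patch_producer` above,
# `TestKafkaBroker` swaps the broker's producer for `FakeProducer` inside the
# test context and restores it afterwards, so publishes are routed straight
# to in-memory subscribers. Topic name and payload are illustrative.
import asyncio

from faststream.kafka import KafkaBroker, TestKafkaBroker

broker = KafkaBroker()


@broker.subscriber("test-topic")
async def handler(body: str) -> None:
    print("received:", body)


async def main() -> None:
    async with TestKafkaBroker(broker) as br:
        # goes through FakeProducer.publish(cmd) directly to the handler
        await br.publish("hello", topic="test-topic")


asyncio.run(main())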
diff --git a/faststream/message/__init__.py b/faststream/message/__init__.py
index 51064d78a4..2dd53d6c4e 100644
--- a/faststream/message/__init__.py
+++ b/faststream/message/__init__.py
@@ -1,8 +1,10 @@
from .message import AckStatus, StreamMessage
+from .source_type import SourceType
from .utils import decode_message, encode_message, gen_cor_id
__all__ = (
"AckStatus",
+ "SourceType",
"StreamMessage",
"decode_message",
"encode_message",
diff --git a/faststream/message/message.py b/faststream/message/message.py
index 4d41404771..a7db6f895d 100644
--- a/faststream/message/message.py
+++ b/faststream/message/message.py
@@ -9,17 +9,20 @@
)
from uuid import uuid4
+from .source_type import SourceType
+
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, DecodedMessage
+ from faststream._internal.types import AsyncCallable
# prevent circular imports
MsgType = TypeVar("MsgType")
class AckStatus(str, Enum):
- acked = "acked"
- nacked = "nacked"
- rejected = "rejected"
+ ACKED = "ACKED"
+ NACKED = "NACKED"
+ REJECTED = "REJECTED"
class StreamMessage(Generic[MsgType]):
@@ -37,11 +40,13 @@ def __init__(
content_type: Optional[str] = None,
correlation_id: Optional[str] = None,
message_id: Optional[str] = None,
+ source_type: SourceType = SourceType.CONSUME,
) -> None:
self.raw_message = raw_message
self.body = body
self.reply_to = reply_to
self.content_type = content_type
+ self._source_type = source_type
self.headers = headers or {}
self.batch_headers = batch_headers or []
@@ -49,11 +54,21 @@ def __init__(
self.correlation_id = correlation_id or str(uuid4())
self.message_id = message_id or self.correlation_id
- # Setup later
- self._decoded_body: Optional[DecodedMessage] = None
self.committed: Optional[AckStatus] = None
self.processed = False
+ # Setup later
+ self.__decoder: Optional[AsyncCallable] = None
+ self.__decoded_caches: dict[
+ Any, Any
+ ] = {} # Cache values between filters and tests
+
+ def set_decoder(self, decoder: "AsyncCallable") -> None:
+ self.__decoder = decoder
+
+ def clear_cache(self) -> None:
+ self.__decoded_caches.clear()
+
def __repr__(self) -> str:
inner = ", ".join(
filter(
@@ -75,18 +90,26 @@ def __repr__(self) -> str:
return f"{self.__class__.__name__}({inner})"
async def decode(self) -> Optional["DecodedMessage"]:
- """Serialize the message by lazy decoder."""
- # TODO: make it lazy after `decoded_body` removed
- return self._decoded_body
+ """Serialize the message by lazy decoder.
+
+ Returns a cache after first usage. To prevent such behavior, please call
+ `message.clear_cache()` after `message.body` changes.
+ """
+ assert self.__decoder, "You should call `set_decoder()` method first." # nosec B101
+
+ if (result := self.__decoded_caches.get(self.__decoder)) is None:
+ result = self.__decoded_caches[self.__decoder] = await self.__decoder(self)
+
+ return result
async def ack(self) -> None:
if self.committed is None:
- self.committed = AckStatus.acked
+ self.committed = AckStatus.ACKED
async def nack(self) -> None:
if self.committed is None:
- self.committed = AckStatus.nacked
+ self.committed = AckStatus.NACKED
async def reject(self) -> None:
if self.committed is None:
- self.committed = AckStatus.rejected
+ self.committed = AckStatus.REJECTED
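# --- Editor's sketch (not part of the diff): a standalone toy mirroring the
# new per-decoder cache in `StreamMessage.decode()`. As in the real code, a
# decoder that returns `None` is simply re-invoked on the next call.
import asyncio
import json
from typing import Any, Awaitable, Callable, Optional


class LazyMessage:
    def __init__(self, body: bytes) -> None:
        self.body = body
        self._decoder: Optional[Callable[["LazyMessage"], Awaitable[Any]]] = None
        self._decoded_caches: dict[Any, Any] = {}

    def set_decoder(self, decoder: Callable[["LazyMessage"], Awaitable[Any]]) -> None:
        self._decoder = decoder

    def clear_cache(self) -> None:
        # required after mutating `body`, as the diff's docstring notes
        self._decoded_caches.clear()

    async def decode(self) -> Any:
        assert self._decoder, "You should call `set_decoder()` method first."
        if (result := self._decoded_caches.get(self._decoder)) is None:
            result = self._decoded_caches[self._decoder] = await self._decoder(self)
        return result


async def demo() -> None:
    msg = LazyMessage(b'{"a": 1}')
    msg.set_decoder(lambda m: asyncio.sleep(0, json.loads(m.body)))
    first = await msg.decode()   # decoder runs once
    second = await msg.decode()  # cache hit: the very same object
    assert first is second


asyncio.run(demo())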
diff --git a/faststream/message/source_type.py b/faststream/message/source_type.py
new file mode 100644
index 0000000000..b6e4f95fd9
--- /dev/null
+++ b/faststream/message/source_type.py
@@ -0,0 +1,9 @@
+from enum import Enum
+
+
+class SourceType(str, Enum):
+ CONSUME = "CONSUME"
+ """Message consumed by basic subscriber flow."""
+
+ RESPONSE = "RESPONSE"
+ """RPC response consumed."""
diff --git a/faststream/message/utils.py b/faststream/message/utils.py
index 5483c27bb5..715c336bd1 100644
--- a/faststream/message/utils.py
+++ b/faststream/message/utils.py
@@ -32,10 +32,10 @@ def decode_message(message: "StreamMessage[Any]") -> "DecodedMessage":
if content_type := getattr(message, "content_type", False):
content_type = ContentTypes(cast(str, content_type))
- if content_type is ContentTypes.text:
+ if content_type is ContentTypes.TEXT:
m = body.decode()
- elif content_type is ContentTypes.json:
+ elif content_type is ContentTypes.JSON:
m = json_loads(body)
else:
@@ -65,10 +65,10 @@ def encode_message(
if isinstance(msg, str):
return (
msg.encode(),
- ContentTypes.text.value,
+ ContentTypes.TEXT.value,
)
return (
dump_json(msg),
- ContentTypes.json.value,
+ ContentTypes.JSON.value,
)
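# --- Editor's sketch (not part of the diff): the renamed enum members in
# action. `encode_message` stamps TEXT for `str` bodies and JSON for other
# JSON-serializable objects; `decode_message` dispatches back on that
# content type.
from faststream.message import encode_message

body, content_type = encode_message("hello")
print(content_type)  # text/plain

body, content_type = encode_message({"answer": 42})
print(content_type)  # application/json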
diff --git a/faststream/middlewares/__init__.py b/faststream/middlewares/__init__.py
index 0615c88194..f8d57bdf50 100644
--- a/faststream/middlewares/__init__.py
+++ b/faststream/middlewares/__init__.py
@@ -1,4 +1,11 @@
+from faststream.middlewares.acknowledgement.conf import AckPolicy
+from faststream.middlewares.acknowledgement.middleware import AcknowledgementMiddleware
from faststream.middlewares.base import BaseMiddleware
from faststream.middlewares.exception import ExceptionMiddleware
-__all__ = ("BaseMiddleware", "ExceptionMiddleware")
+__all__ = (
+ "AckPolicy",
+ "AcknowledgementMiddleware",
+ "BaseMiddleware",
+ "ExceptionMiddleware",
+)
diff --git a/faststream/middlewares/acknowledgement/__init__.py b/faststream/middlewares/acknowledgement/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/faststream/middlewares/acknowledgement/conf.py b/faststream/middlewares/acknowledgement/conf.py
new file mode 100644
index 0000000000..60b910264d
--- /dev/null
+++ b/faststream/middlewares/acknowledgement/conf.py
@@ -0,0 +1,8 @@
+from enum import Enum
+
+
+class AckPolicy(str, Enum):
+ ACK = "ack"
+ REJECT_ON_ERROR = "reject_on_error"
+ NACK_ON_ERROR = "nack_on_error"
+ DO_NOTHING = "do_nothing"
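# --- Editor's sketch (not part of the diff): choosing a policy at
# subscription time. Judging by this changeset, the removed `retry=True`
# flag corresponds roughly to `AckPolicy.NACK_ON_ERROR`; broker and subject
# names are illustrative.
from faststream.middlewares import AckPolicy
from faststream.nats import NatsBroker

broker = NatsBroker()


@broker.subscriber("orders", ack_policy=AckPolicy.NACK_ON_ERROR)
async def handle_order(body: dict) -> None:
    ...  # an exception raised here triggers `nack` instead of `reject`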
diff --git a/faststream/middlewares/acknowledgement/middleware.py b/faststream/middlewares/acknowledgement/middleware.py
new file mode 100644
index 0000000000..dd04e9c22e
--- /dev/null
+++ b/faststream/middlewares/acknowledgement/middleware.py
@@ -0,0 +1,123 @@
+import logging
+from typing import TYPE_CHECKING, Any, Optional
+
+from faststream.exceptions import (
+ AckMessage,
+ HandlerException,
+ NackMessage,
+ RejectMessage,
+)
+from faststream.middlewares.acknowledgement.conf import AckPolicy
+from faststream.middlewares.base import BaseMiddleware
+
+if TYPE_CHECKING:
+ from types import TracebackType
+
+ from faststream._internal.basic_types import AnyDict, AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream._internal.state import LoggerState
+ from faststream.message import StreamMessage
+
+
+class AcknowledgementMiddleware:
+ def __init__(
+ self, logger: "LoggerState", ack_policy: "AckPolicy", extra_options: "AnyDict"
+ ) -> None:
+ self.ack_policy = ack_policy
+ self.extra_options = extra_options
+ self.logger = logger
+
+ def __call__(
+ self, msg: Optional[Any], context: "ContextRepo"
+ ) -> "_AcknowledgementMiddleware":
+ return _AcknowledgementMiddleware(
+ msg,
+ logger=self.logger,
+ ack_policy=self.ack_policy,
+ extra_options=self.extra_options,
+ context=context,
+ )
+
+
+class _AcknowledgementMiddleware(BaseMiddleware):
+ def __init__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ logger: "LoggerState",
+ context: "ContextRepo",
+ extra_options: "AnyDict",
+ # can't be created with AckPolicy.DO_NOTHING
+ ack_policy: AckPolicy,
+ ) -> None:
+ super().__init__(msg, context=context)
+
+ self.ack_policy = ack_policy
+ self.extra_options = extra_options
+ self.logger = logger
+
+ self.message: Optional[StreamMessage[Any]] = None
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[Any]",
+ ) -> Any:
+ self.message = msg
+ return await call_next(msg)
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ if not exc_type:
+ await self.__ack()
+
+ elif isinstance(exc_val, HandlerException):
+ if isinstance(exc_val, AckMessage):
+ await self.__ack(**exc_val.extra_options)
+
+ elif isinstance(exc_val, NackMessage):
+ await self.__nack(**exc_val.extra_options)
+
+ elif isinstance(exc_val, RejectMessage): # pragma: no branch
+ await self.__reject(**exc_val.extra_options)
+
+ # Exception was processed and suppressed
+ return True
+
+ elif self.ack_policy is AckPolicy.REJECT_ON_ERROR:
+ await self.__reject()
+
+ elif self.ack_policy is AckPolicy.NACK_ON_ERROR:
+ await self.__nack()
+
+ # Exception was not processed
+ return False
+
+ async def __ack(self, **exc_extra_options: Any) -> None:
+ if self.message:
+ try:
+ await self.message.ack(**exc_extra_options, **self.extra_options)
+ except Exception as er:
+ if self.logger is not None:
+ self.logger.log(er, logging.CRITICAL, exc_info=er)
+
+ async def __nack(self, **exc_extra_options: Any) -> None:
+ if self.message:
+ try:
+ await self.message.nack(**exc_extra_options, **self.extra_options)
+ except Exception as er:
+ if self.logger is not None:
+ self.logger.log(er, logging.CRITICAL, exc_info=er)
+
+ async def __reject(self, **exc_extra_options: Any) -> None:
+ if self.message:
+ try:
+ await self.message.reject(**exc_extra_options, **self.extra_options)
+ except Exception as er:
+ if self.logger is not None:
+ self.logger.log(er, logging.CRITICAL, exc_info=er)
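# --- Editor's sketch (not part of the diff): the `__aexit__` decision table
# above, condensed into one function. The string results stand in for the
# middleware's private ack/nack/reject calls.
from typing import Optional

from faststream.exceptions import AckMessage, NackMessage, RejectMessage
from faststream.middlewares import AckPolicy


def resolve_ack_action(exc: Optional[BaseException], policy: AckPolicy) -> str:
    if exc is None:
        return "ack"  # success path: always acknowledge
    if isinstance(exc, AckMessage):
        return "ack"  # HandlerException family: explicit user choice,
    if isinstance(exc, NackMessage):  # and the exception is suppressed
        return "nack"
    if isinstance(exc, RejectMessage):
        return "reject"
    if policy is AckPolicy.REJECT_ON_ERROR:
        return "reject"  # unexpected error: the policy decides,
    if policy is AckPolicy.NACK_ON_ERROR:  # and the exception propagates
        return "nack"
    return "nothing"  # AckPolicy.ACK: an errored message is left untouched


assert resolve_ack_action(None, AckPolicy.ACK) == "ack"
assert resolve_ack_action(ValueError("boom"), AckPolicy.NACK_ON_ERROR) == "nack"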
diff --git a/faststream/middlewares/base.py b/faststream/middlewares/base.py
index a9ff8642ba..49ea3526d9 100644
--- a/faststream/middlewares/base.py
+++ b/faststream/middlewares/base.py
@@ -1,19 +1,29 @@
-from typing import TYPE_CHECKING, Any, Optional
+from collections.abc import Awaitable
+from typing import TYPE_CHECKING, Any, Callable, Optional
from typing_extensions import Self
if TYPE_CHECKING:
from types import TracebackType
- from faststream._internal.basic_types import AsyncFunc, AsyncFuncAny
+ from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
from faststream.message import StreamMessage
+ from faststream.response.response import PublishCommand
class BaseMiddleware:
"""A base middleware class."""
- def __init__(self, msg: Optional[Any] = None) -> None:
+ def __init__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> None:
self.msg = msg
+ self.context = context
async def on_receive(self) -> None:
"""Hook to call on message receive."""
@@ -73,10 +83,8 @@ async def consume_scope(
async def on_publish(
self,
- msg: Any,
- *args: Any,
- **kwargs: Any,
- ) -> Any:
+ msg: "PublishCommand",
+ ) -> "PublishCommand":
"""Asynchronously handle a publish event."""
return msg
@@ -90,19 +98,13 @@ async def after_publish(
async def publish_scope(
self,
- call_next: "AsyncFunc",
- msg: Any,
- *args: Any,
- **kwargs: Any,
+ call_next: Callable[["PublishCommand"], Awaitable[Any]],
+ cmd: "PublishCommand",
) -> Any:
"""Publish a message and return an async iterator."""
err: Optional[Exception] = None
try:
- result = await call_next(
- await self.on_publish(msg, *args, **kwargs),
- *args,
- **kwargs,
- )
+ result = await call_next(await self.on_publish(cmd))
except Exception as e:
err = e
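# --- Editor's sketch (not part of the diff): a custom middleware against the
# updated `BaseMiddleware` API. `msg` is now positional-only, `context` is
# keyword-only, and the publish hooks receive a single `PublishCommand`
# object instead of `(msg, *args, **kwargs)`. The header name is illustrative.
import time
from typing import Any

from faststream.middlewares import BaseMiddleware


class TimingMiddleware(BaseMiddleware):
    async def consume_scope(self, call_next: Any, msg: Any) -> Any:
        started = time.monotonic()
        try:
            return await call_next(msg)
        finally:
            print(f"consumed in {time.monotonic() - started:.3f}s")

    async def on_publish(self, cmd: Any) -> Any:
        # the command object carries body, destination and headers together
        cmd.headers = {**(cmd.headers or {}), "x-traced": "1"}
        return cmd


# attached as usual, e.g. KafkaBroker(middlewares=[TimingMiddleware])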
diff --git a/faststream/middlewares/exception.py b/faststream/middlewares/exception.py
index 2732037945..eb18aa4f24 100644
--- a/faststream/middlewares/exception.py
+++ b/faststream/middlewares/exception.py
@@ -12,7 +12,6 @@
from typing_extensions import Literal, TypeAlias
-from faststream._internal.context import context
from faststream._internal.utils import apply_types
from faststream._internal.utils.functions import sync_fake_context, to_async
from faststream.exceptions import IgnoredException
@@ -23,6 +22,7 @@
from types import TracebackType
from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
from faststream.message import StreamMessage
@@ -48,61 +48,6 @@
]
-class BaseExceptionMiddleware(BaseMiddleware):
- def __init__(
- self,
- handlers: CastedHandlers,
- publish_handlers: CastedPublishingHandlers,
- msg: Optional[Any] = None,
- ) -> None:
- super().__init__(msg)
- self._handlers = handlers
- self._publish_handlers = publish_handlers
-
- async def consume_scope(
- self,
- call_next: "AsyncFuncAny",
- msg: "StreamMessage[Any]",
- ) -> Any:
- try:
- return await call_next(await self.on_consume(msg))
-
- except Exception as exc:
- exc_type = type(exc)
-
- for handler_type, handler in self._publish_handlers:
- if issubclass(exc_type, handler_type):
- return await handler(exc)
-
- raise
-
- async def after_processed(
- self,
- exc_type: Optional[type[BaseException]] = None,
- exc_val: Optional[BaseException] = None,
- exc_tb: Optional["TracebackType"] = None,
- ) -> Optional[bool]:
- if exc_type:
- for handler_type, handler in self._handlers:
- if issubclass(exc_type, handler_type):
- # TODO: remove it after context will be moved to middleware
- # In case parser/decoder error occurred
- scope: AbstractContextManager[Any]
- if not context.get_local("message"):
- scope = context.scope("message", self.msg)
- else:
- scope = sync_fake_context()
-
- with scope:
- await handler(exc_val)
-
- return True
-
- return False
-
- return None
-
-
class ExceptionMiddleware:
__slots__ = ("_handlers", "_publish_handlers")
@@ -195,14 +140,81 @@ def default_wrapper(
return default_wrapper
- def __call__(self, msg: Optional[Any]) -> BaseExceptionMiddleware:
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "_BaseExceptionMiddleware":
"""Real middleware runtime constructor."""
- return BaseExceptionMiddleware(
+ return _BaseExceptionMiddleware(
handlers=self._handlers,
publish_handlers=self._publish_handlers,
+ context=context,
msg=msg,
)
-async def ignore_handler(exception: IgnoredException) -> NoReturn:
+class _BaseExceptionMiddleware(BaseMiddleware):
+ def __init__(
+ self,
+ *,
+ handlers: CastedHandlers,
+ publish_handlers: CastedPublishingHandlers,
+ context: "ContextRepo",
+ msg: Optional[Any],
+ ) -> None:
+ super().__init__(msg, context=context)
+ self._handlers = handlers
+ self._publish_handlers = publish_handlers
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[Any]",
+ ) -> Any:
+ try:
+ return await call_next(await self.on_consume(msg))
+
+ except Exception as exc:
+ exc_type = type(exc)
+
+ for handler_type, handler in self._publish_handlers:
+ if issubclass(exc_type, handler_type):
+ return await handler(exc, context__=self.context)
+
+ raise
+
+ async def after_processed(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_val: Optional[BaseException] = None,
+ exc_tb: Optional["TracebackType"] = None,
+ ) -> Optional[bool]:
+ if exc_type:
+ for handler_type, handler in self._handlers:
+ if issubclass(exc_type, handler_type):
+ # TODO: remove it after context will be moved to middleware
+ # In case parser/decoder error occurred
+ scope: AbstractContextManager[Any]
+ if not self.context.get_local("message"):
+ scope = self.context.scope("message", self.msg)
+ else:
+ scope = sync_fake_context()
+
+ with scope:
+ await handler(exc_val, context__=self.context)
+
+ return True
+
+ return False
+
+ return None
+
+
+async def ignore_handler(
+ exception: IgnoredException,
+ **kwargs: Any, # suppress context
+) -> NoReturn:
raise exception
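# --- Editor's sketch (not part of the diff): the public `ExceptionMiddleware`
# API is unchanged by the rename; only the runtime class became private and
# now carries an explicit `ContextRepo`. The handler and its return value are
# illustrative.
from faststream.middlewares import ExceptionMiddleware

exc_middleware = ExceptionMiddleware()


@exc_middleware.add_handler(ValueError, publish=True)
async def handle_value_error(exc: ValueError) -> str:
    # published as the handler's response instead of propagating
    return f"invalid value: {exc}"


# broker = KafkaBroker(middlewares=[exc_middleware])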
diff --git a/faststream/middlewares/logging.py b/faststream/middlewares/logging.py
index 57ff030a94..fbc7412507 100644
--- a/faststream/middlewares/logging.py
+++ b/faststream/middlewares/logging.py
@@ -1,42 +1,67 @@
import logging
from typing import TYPE_CHECKING, Any, Optional
-from faststream._internal.context.repository import context
-from faststream._internal.setup.logger import LoggerState
from faststream.exceptions import IgnoredException
+from faststream.message.source_type import SourceType
from .base import BaseMiddleware
if TYPE_CHECKING:
from types import TracebackType
+ from faststream._internal.basic_types import AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream._internal.state.logger import LoggerState
from faststream.message import StreamMessage
class CriticalLogMiddleware:
- def __init__(self, logger: LoggerState) -> None:
+ def __init__(self, logger: "LoggerState") -> None:
"""Initialize the class."""
self.logger = logger
- def __call__(self, msg: Optional[Any] = None) -> Any:
- return LoggingMiddleware(logger=self.logger)
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "_LoggingMiddleware":
+ return _LoggingMiddleware(
+ logger=self.logger,
+ msg=msg,
+ context=context,
+ )
-class LoggingMiddleware(BaseMiddleware):
+class _LoggingMiddleware(BaseMiddleware):
"""A middleware class for logging critical errors."""
- def __init__(self, logger: LoggerState) -> None:
+ def __init__(
+ self,
+ *,
+ logger: "LoggerState",
+ context: "ContextRepo",
+ msg: Optional[Any],
+ ) -> None:
+ super().__init__(msg, context=context)
self.logger = logger
+ self._source_type = SourceType.CONSUME
- async def on_consume(
+ async def consume_scope(
self,
+ call_next: "AsyncFuncAny",
msg: "StreamMessage[Any]",
) -> "StreamMessage[Any]":
- self.logger.log(
- "Received",
- extra=context.get_local("log_context", {}),
- )
- return await super().on_consume(msg)
+ source_type = self._source_type = msg._source_type
+
+ if source_type is not SourceType.RESPONSE:
+ self.logger.log(
+ "Received",
+ extra=self.context.get_local("log_context", {}),
+ )
+
+ return await call_next(msg)
async def __aexit__(
self,
@@ -45,26 +70,29 @@ async def __aexit__(
exc_tb: Optional["TracebackType"] = None,
) -> bool:
"""Asynchronously called after processing."""
- c = context.get_local("log_context", {})
-
- if exc_type:
- if issubclass(exc_type, IgnoredException):
- self.logger.log(
- log_level=logging.INFO,
- message=exc_val,
- extra=c,
- )
- else:
- self.logger.log(
- log_level=logging.ERROR,
- message=f"{exc_type.__name__}: {exc_val}",
- exc_info=exc_val,
- extra=c,
- )
-
- self.logger.log(message="Processed", extra=c)
-
- await super().after_processed(exc_type, exc_val, exc_tb)
+ if self._source_type is not SourceType.RESPONSE:
+ c = self.context.get_local("log_context", {})
+
+ if exc_type:
+ # TODO: move critical logging to `subscriber.consume()` method
+ if issubclass(exc_type, IgnoredException):
+ self.logger.log(
+ log_level=logging.INFO,
+ message=exc_val,
+ extra=c,
+ )
+
+ else:
+ self.logger.log(
+ log_level=logging.ERROR,
+ message=f"{exc_type.__name__}: {exc_val}",
+ exc_info=exc_val,
+ extra=c,
+ )
+
+ self.logger.log(message="Processed", extra=c)
+
+ await super().__aexit__(exc_type, exc_val, exc_tb)
# Exception was not processed
return False
diff --git a/faststream/nats/annotations.py b/faststream/nats/annotations.py
index 9bd2e29066..aead2fe490 100644
--- a/faststream/nats/annotations.py
+++ b/faststream/nats/annotations.py
@@ -8,12 +8,9 @@
from faststream.annotations import ContextRepo, Logger
from faststream.nats.broker import NatsBroker as _Broker
from faststream.nats.message import NatsMessage as _Message
-from faststream.nats.publisher.producer import (
- NatsFastProducer as _CoreProducer,
- NatsJSFastProducer as _JsProducer,
+from faststream.nats.subscriber.usecases.object_storage_subscriber import (
+ OBJECT_STORAGE_CONTEXT_KEY,
)
-from faststream.nats.subscriber.usecase import OBJECT_STORAGE_CONTEXT_KEY
-from faststream.params import NoCast
__all__ = (
"Client",
@@ -22,7 +19,6 @@
"Logger",
"NatsBroker",
"NatsMessage",
- "NoCast",
"ObjectStorage",
)
@@ -31,5 +27,3 @@
NatsBroker = Annotated[_Broker, Context("broker")]
Client = Annotated[_NatsClient, Context("broker._connection")]
JsClient = Annotated[_JetStream, Context("broker._stream")]
-NatsProducer = Annotated[_CoreProducer, Context("broker._producer")]
-NatsJsProducer = Annotated[_JsProducer, Context("broker._js_producer")]
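# --- Editor's sketch (not part of the diff): the producer annotations were
# dropped, but the remaining ones still inject through `Context` as before.
# The subject name is illustrative.
from faststream.nats import NatsBroker
from faststream.nats.annotations import Client, NatsMessage

broker = NatsBroker()


@broker.subscriber("events")
async def handler(body: str, message: NatsMessage, client: Client) -> None:
    print(message.correlation_id, client.connected_url)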
diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py
index c2873dcc11..def4199ce5 100644
--- a/faststream/nats/broker/broker.py
+++ b/faststream/nats/broker/broker.py
@@ -1,5 +1,4 @@
import logging
-import warnings
from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
@@ -27,7 +26,7 @@
from nats.aio.msg import Msg
from nats.errors import Error
from nats.js.errors import BadRequestError
-from typing_extensions import Doc, override
+from typing_extensions import Doc, Literal, overload, override
from faststream.__about__ import SERVICE_NAME
from faststream._internal.broker.broker import BrokerUsecase
@@ -35,17 +34,20 @@
from faststream.message import gen_cor_id
from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer
+from faststream.nats.response import NatsPublishCommand
from faststream.nats.security import parse_security
-from faststream.nats.subscriber.subscriber import SpecificationSubscriber
+from faststream.nats.subscriber.specified import SpecificationSubscriber
+from faststream.response.publish_type import PublishType
from .logging import make_nats_logger_state
from .registrator import NatsRegistrator
+from .state import BrokerState, ConnectedState, EmptyBrokerState
if TYPE_CHECKING:
- import ssl
from types import TracebackType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from nats.aio.client import (
Callback,
Credentials,
@@ -53,8 +55,7 @@
JWTCallback,
SignatureCallback,
)
- from nats.js.api import Placement, RePublish, StorageType
- from nats.js.client import JetStreamContext
+ from nats.js.api import Placement, PubAck, RePublish, StorageType
from nats.js.kv import KeyValue
from nats.js.object_store import ObjectStore
from typing_extensions import TypedDict, Unpack
@@ -65,15 +66,14 @@
LoggerProto,
SendableMessage,
)
- from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
)
from faststream.nats.message import NatsMessage
- from faststream.nats.publisher.publisher import SpecificationPublisher
+ from faststream.nats.publisher.specified import SpecificationPublisher
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
class NatsInitKwargs(TypedDict, total=False):
"""NatsBroker.connect() method type hints."""
@@ -151,22 +151,10 @@ class NatsInitKwargs(TypedDict, total=False):
bool,
Doc("Boolean indicating should commands be echoed."),
]
- tls: Annotated[
- Optional["ssl.SSLContext"],
- Doc("Some SSL context to make NATS connections secure."),
- ]
tls_hostname: Annotated[
Optional[str],
Doc("Hostname for TLS."),
]
- user: Annotated[
- Optional[str],
- Doc("Username for NATS auth."),
- ]
- password: Annotated[
- Optional[str],
- Doc("Username password for NATS auth."),
- ]
token: Annotated[
Optional[str],
Doc("Auth token for NATS auth."),
@@ -195,6 +183,10 @@ class NatsInitKwargs(TypedDict, total=False):
Doc("A user credentials file or tuple of files."),
]
nkeys_seed: Annotated[
+ Optional[str],
+ Doc("Path-like object containing nkeys seed that will be used."),
+ ]
+ nkeys_seed_str: Annotated[
Optional[str],
Doc("Nkeys seed to be used."),
]
@@ -221,12 +213,11 @@ class NatsBroker(
"""A class to represent a NATS broker."""
url: list[str]
- stream: Optional["JetStreamContext"]
- _producer: Optional["NatsFastProducer"]
- _js_producer: Optional["NatsJSFastProducer"]
- _kv_declarer: Optional["KVBucketDeclarer"]
- _os_declarer: Optional["OSBucketDeclarer"]
+ _producer: "NatsFastProducer"
+ _js_producer: "NatsJSFastProducer"
+ _kv_declarer: "KVBucketDeclarer"
+ _os_declarer: "OSBucketDeclarer"
def __init__(
self,
@@ -308,22 +299,10 @@ def __init__(
bool,
Doc("Boolean indicating should commands be echoed."),
] = False,
- tls: Annotated[
- Optional["ssl.SSLContext"],
- Doc("Some SSL context to make NATS connections secure."),
- ] = None,
tls_hostname: Annotated[
Optional[str],
Doc("Hostname for TLS."),
] = None,
- user: Annotated[
- Optional[str],
- Doc("Username for NATS auth."),
- ] = None,
- password: Annotated[
- Optional[str],
- Doc("Username password for NATS auth."),
- ] = None,
token: Annotated[
Optional[str],
Doc("Auth token for NATS auth."),
@@ -353,7 +332,11 @@ def __init__(
] = None,
nkeys_seed: Annotated[
Optional[str],
- Doc("Nkeys seed to be used."),
+ Doc("Path-like object containing nkeys seed that will be used."),
+ ] = None,
+ nkeys_seed_str: Annotated[
+ Optional[str],
+ Doc("Raw nkeys seed to be used."),
] = None,
inbox_prefix: Annotated[
Union[str, bytes],
@@ -385,7 +368,7 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
@@ -437,10 +420,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -451,32 +431,7 @@ def __init__(
] = (),
) -> None:
"""Initialize the NatsBroker object."""
- if tls: # pragma: no cover
- warnings.warn(
- (
- "\nNATS `tls` option was deprecated and will be removed in 0.6.0"
- "\nPlease, use `security` with `BaseSecurity` or `SASLPlaintext` instead"
- ),
- DeprecationWarning,
- stacklevel=2,
- )
-
- if user or password:
- warnings.warn(
- (
- "\nNATS `user` and `password` options were deprecated and will be removed in 0.6.0"
- "\nPlease, use `security` with `SASLPlaintext` instead"
- ),
- DeprecationWarning,
- stacklevel=2,
- )
-
- secure_kwargs = {
- "tls": tls,
- "user": user,
- "password": password,
- **parse_security(security),
- }
+ secure_kwargs = parse_security(security)
servers = [servers] if isinstance(servers, str) else list(servers)
@@ -512,6 +467,7 @@ def __init__(
token=token,
user_credentials=user_credentials,
nkeys_seed=nkeys_seed,
+ nkeys_seed_str=nkeys_seed_str,
**secure_kwargs,
# callbacks
error_cb=self._log_connection_broken(error_cb),
@@ -543,32 +499,44 @@ def __init__(
),
# FastDepends args
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
)
- self.__is_connected = False
- self._producer = None
+ self._state.patch_value(
+ producer=NatsFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
+
+ self._js_producer = NatsJSFastProducer(
+ decoder=self._decoder,
+ parser=self._parser,
+ )
+
+ self._kv_declarer = KVBucketDeclarer()
+ self._os_declarer = OSBucketDeclarer()
- # JS options
- self.stream = None
- self._js_producer = None
- self._kv_declarer = None
- self._os_declarer = None
+ self._connection_state: BrokerState = EmptyBrokerState()
@override
async def connect( # type: ignore[override]
self,
- servers: Annotated[
- Union[str, Iterable[str]],
- Doc("NATS cluster addresses to connect."),
- ] = EMPTY,
+ servers: Union[str, Iterable[str]] = EMPTY,
**kwargs: "Unpack[NatsInitKwargs]",
) -> "Client":
"""Connect broker object to NATS cluster.
To start up subscribers as well, use `broker.start()` after or instead of this method.
+
+ Args:
+ servers: NATS cluster addresses to connect.
+ **kwargs: all other options from connection signature.
+
+ Returns:
+ Connected `nats.aio.client.Client` object.
"""
if servers is not EMPTY:
connect_kwargs: AnyDict = {
@@ -581,25 +549,17 @@ async def connect( # type: ignore[override]
return await super().connect(**connect_kwargs)
async def _connect(self, **kwargs: Any) -> "Client":
- self.__is_connected = True
connection = await nats.connect(**kwargs)
- self._producer = NatsFastProducer(
- connection=connection,
- decoder=self._decoder,
- parser=self._parser,
- )
+ stream = connection.jetstream()
- stream = self.stream = connection.jetstream()
+ self._producer.connect(connection)
+ self._js_producer.connect(stream)
- self._js_producer = NatsJSFastProducer(
- connection=stream,
- decoder=self._decoder,
- parser=self._parser,
- )
+ self._kv_declarer.connect(stream)
+ self._os_declarer.connect(stream)
- self._kv_declarer = KVBucketDeclarer(stream)
- self._os_declarer = OSBucketDeclarer(stream)
+ self._connection_state = ConnectedState(connection, stream)
return connection
@@ -615,24 +575,26 @@ async def close(
await self._connection.drain()
self._connection = None
- self.stream = None
- self._producer = None
- self._js_producer = None
- self.__is_connected = False
+ self._producer.disconnect()
+ self._js_producer.disconnect()
+ self._kv_declarer.disconnect()
+ self._os_declarer.disconnect()
+
+ self._connection_state = EmptyBrokerState()
async def start(self) -> None:
"""Connect broker to NATS cluster and startup all subscribers."""
await self.connect()
self._setup()
- assert self.stream, "Broker should be started already" # nosec B101
+ stream_context = self._connection_state.stream
for stream in filter(
lambda x: x.declare,
self._stream_builder.objects.values(),
):
try:
- await self.stream.add_stream(
+ await stream_context.add_stream(
config=stream.config,
subjects=stream.subjects,
)
@@ -645,22 +607,23 @@ async def start(self) -> None:
stream=stream.name,
)
+ logger_state = self._state.get().logger_state
+
if (
e.description
== "stream name already in use with a different configuration"
):
- old_config = (await self.stream.stream_info(stream.name)).config
-
- self._state.logger_state.log(str(e), logging.WARNING, log_context)
- await self.stream.update_stream(
- config=stream.config,
- subjects=tuple(
- set(old_config.subjects or ()).union(stream.subjects),
- ),
- )
+ old_config = (await stream_context.stream_info(stream.name)).config
+
+ logger_state.log(str(e), logging.WARNING, log_context)
+
+ for subject in old_config.subjects or ():
+ stream.add_subject(subject)
+
+ await stream_context.update_stream(config=stream.config)
else: # pragma: no cover
- self._state.logger_state.log(
+ logger_state.log(
str(e),
logging.ERROR,
log_context,
@@ -673,142 +636,138 @@ async def start(self) -> None:
await super().start()
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Literal[None] = None,
+ timeout: Optional[float] = None,
+ ) -> None: ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> "PubAck": ...
+
@override
- async def publish( # type: ignore[override]
+ async def publish(
self,
- message: Annotated[
- "SendableMessage",
- Doc(
- "Message body to send. "
- "Can be any encodable object (native python types or `pydantic.BaseModel`).",
- ),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject to send message."),
- ],
- headers: Annotated[
- Optional[dict[str, str]],
- Doc(
- "Message headers to store metainformation. "
- "**content-type** and **correlation_id** will be set automatically by framework anyway.",
- ),
- ] = None,
- reply_to: Annotated[
- str,
- Doc("NATS subject name to send response."),
- ] = "",
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages.",
- ),
- ] = None,
- stream: Annotated[
- Optional[str],
- Doc(
- "This option validates that the target subject is in presented stream. "
- "Can be omitted without any effect.",
- ),
- ] = None,
- timeout: Annotated[
- Optional[float],
- Doc("Timeout to send message to NATS."),
- ] = None,
- ) -> None:
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> Optional["PubAck"]:
"""Publish message directly.
This method allows you to publish message in not AsyncAPI-documented way. You can use it in another frameworks
applications or to publish messages from time to time.
Please, use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead in a regular way.
- """
- publish_kwargs: AnyDict = {
- "subject": subject,
- "headers": headers,
- "reply_to": reply_to,
- }
-
- producer: Optional[ProducerProto]
- if stream is None:
- producer = self._producer
- else:
- producer = self._js_producer
- publish_kwargs.update(
- {
- "stream": stream,
- "timeout": timeout,
- },
- )
- await super().publish(
- message,
- producer=producer,
+ Args:
+ message:
+ Message body to send.
+ Can be any encodable object (native python types or `pydantic.BaseModel`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
+ **content-type** and **correlation_id** will be set automatically by framework anyway.
+ reply_to:
+ NATS subject name to send response.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ stream:
+ This option validates that the target subject is present in the stream.
+ Can be omitted without any effect if you don't need a PubAck frame.
+ timeout:
+ Timeout to send message to NATS.
+
+ Returns:
+ `None` if you publish a regular message.
+ `nats.js.api.PubAck` if you publish a message to a stream.
+ """
+ cmd = NatsPublishCommand(
+ message=message,
correlation_id=correlation_id or gen_cor_id(),
- **publish_kwargs,
+ subject=subject,
+ headers=headers,
+ reply_to=reply_to,
+ stream=stream,
+ timeout=timeout,
+ _publish_type=PublishType.PUBLISH,
)
+ producer = self._js_producer if stream is not None else self._producer
+
+ return await super()._basic_publish(cmd, producer=producer)
+
@override
async def request( # type: ignore[override]
self,
- message: Annotated[
- "SendableMessage",
- Doc(
- "Message body to send. "
- "Can be any encodable object (native python types or `pydantic.BaseModel`).",
- ),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject to send message."),
- ],
- headers: Annotated[
- Optional[dict[str, str]],
- Doc(
- "Message headers to store metainformation. "
- "**content-type** and **correlation_id** will be set automatically by framework anyway.",
- ),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages.",
- ),
- ] = None,
- stream: Annotated[
- Optional[str],
- Doc(
- "This option validates that the target subject is in presented stream. "
- "Can be omitted without any effect.",
- ),
- ] = None,
- timeout: Annotated[
- float,
- Doc("Timeout to send message to NATS."),
- ] = 0.5,
+ message: "SendableMessage",
+ subject: str,
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: float = 0.5,
) -> "NatsMessage":
- publish_kwargs = {
- "subject": subject,
- "headers": headers,
- "timeout": timeout,
- }
-
- producer: Optional[ProducerProto]
- if stream is None:
- producer = self._producer
-
- else:
- producer = self._js_producer
- publish_kwargs.update({"stream": stream})
-
- msg: NatsMessage = await super().request(
- message,
- producer=producer,
+ """Make a synchronous request to outer subscriber.
+
+ If out subscriber listens subject by stream, you should setup the same **stream** explicitly.
+ Another way you will reseave confirmation frame as a response.
+
+ Args:
+ message:
+ Message body to send.
+ Can be any encodable object (native python types or `pydantic.BaseModel`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
+ **content-type** and **correlation_id** will be set automatically by framework anyway.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ stream:
+ This option validates that the target subject is present in the stream.
+ Can be omitted without any effect.
+ timeout:
+ Timeout to send message to NATS.
+
+ Returns:
+ `faststream.nats.message.NatsMessage` object as an outer subscriber response.
+ """
+ cmd = NatsPublishCommand(
+ message=message,
correlation_id=correlation_id or gen_cor_id(),
- **publish_kwargs,
+ subject=subject,
+ headers=headers,
+ timeout=timeout,
+ stream=stream,
+ _publish_type=PublishType.REQUEST,
)
+
+ producer = self._js_producer if stream is not None else self._producer
+
+ msg: NatsMessage = await super()._basic_request(cmd, producer=producer)
return msg
@override
@@ -816,29 +775,11 @@ def setup_subscriber( # type: ignore[override]
self,
subscriber: "SpecificationSubscriber",
) -> None:
- connection: Union[
- Client,
- JetStreamContext,
- KVBucketDeclarer,
- OSBucketDeclarer,
- None,
- ] = None
-
- if getattr(subscriber, "kv_watch", None):
- connection = self._kv_declarer
-
- elif getattr(subscriber, "obj_watch", None):
- connection = self._os_declarer
-
- elif getattr(subscriber, "stream", None):
- connection = self.stream
-
- else:
- connection = self._connection
-
return super().setup_subscriber(
subscriber,
- connection=connection,
+ connection_state=self._connection_state,
+ kv_declarer=self._kv_declarer,
+ os_declarer=self._os_declarer,
)
@override
@@ -846,14 +787,7 @@ def setup_publisher( # type: ignore[override]
self,
publisher: "SpecificationPublisher",
) -> None:
- producer: Optional[ProducerProto] = None
-
- if publisher.stream is not None:
- if self._js_producer is not None:
- producer = self._js_producer
-
- elif self._producer is not None:
- producer = self._producer
+ producer = self._js_producer if publisher.stream is not None else self._producer
super().setup_publisher(publisher, producer=producer)
@@ -874,8 +808,6 @@ async def key_value(
# custom
declare: bool = True,
) -> "KeyValue":
- assert self._kv_declarer, "Broker should be connected already." # nosec B101
-
return await self._kv_declarer.create_key_value(
bucket=bucket,
description=description,
@@ -904,8 +836,6 @@ async def object_storage(
# custom
declare: bool = True,
) -> "ObjectStore":
- assert self._os_declarer, "Broker should be connected already." # nosec B101
-
return await self._os_declarer.create_object_store(
bucket=bucket,
description=description,
@@ -927,14 +857,14 @@ async def wrapper(err: Exception) -> None:
if error_cb is not None:
await error_cb(err)
- if isinstance(err, Error) and self.__is_connected:
- self._state.logger_state.log(
+ if isinstance(err, Error) and self._connection_state:
+ self._state.get().logger_state.log(
f"Connection broken with {err!r}",
logging.WARNING,
c,
exc_info=err,
)
- self.__is_connected = False
+ self._connection_state = self._connection_state.brake()
return wrapper
@@ -948,9 +878,11 @@ async def wrapper() -> None:
if cb is not None:
await cb()
- if not self.__is_connected:
- self._state.logger_state.log("Connection established", logging.INFO, c)
- self.__is_connected = True
+ if not self._connection_state:
+ self._state.get().logger_state.log(
+ "Connection established", logging.INFO, c
+ )
+ self._connection_state = self._connection_state.reconnect()
return wrapper
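# --- Editor's sketch (not part of the diff): the new overloads from the
# caller's side. A plain publish resolves to `None`; passing `stream=` routes
# through the JetStream producer and resolves to a `PubAck`. Subject and
# stream names are illustrative.
import asyncio

from faststream.nats import NatsBroker


async def main() -> None:
    broker = NatsBroker()
    await broker.connect()

    await broker.publish("ping", subject="core.subject")  # -> None

    ack = await broker.publish(
        {"id": 1},
        subject="js.subject",
        stream="MY_STREAM",  # routes through the JetStream producer
    )
    print(ack)  # nats.js.api.PubAck(stream=..., seq=...)

    await broker.close()


asyncio.run(main())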
diff --git a/faststream/nats/broker/logging.py b/faststream/nats/broker/logging.py
index f4e2500cdb..d67cb8e4bf 100644
--- a/faststream/nats/broker/logging.py
+++ b/faststream/nats/broker/logging.py
@@ -1,14 +1,16 @@
+import logging
from functools import partial
from typing import TYPE_CHECKING, Optional
from faststream._internal.log.logging import get_broker_logger
-from faststream._internal.setup.logger import (
+from faststream._internal.state.logger import (
DefaultLoggerStorage,
make_logger_state,
)
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
class NatsParamsStorage(DefaultLoggerStorage):
@@ -22,6 +24,11 @@ def __init__(
self._max_stream_len = 0
self._max_subject_len = 4
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
def setup_log_contest(self, params: "AnyDict") -> None:
self._max_subject_len = max(
(
@@ -42,7 +49,7 @@ def setup_log_contest(self, params: "AnyDict") -> None:
),
)
- def get_logger(self) -> Optional["LoggerProto"]:
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
message_id_ln = 10
# TODO: generate unique logger names to not share between brokers
@@ -67,10 +74,12 @@ def get_logger(self) -> Optional["LoggerProto"]:
f"%(message_id)-{message_id_ln}s - ",
"%(message)s",
)),
+ context=context,
+ log_level=self.logger_log_level,
)
make_nats_logger_state = partial(
make_logger_state,
- default_storag_cls=NatsParamsStorage,
+ default_storage_cls=NatsParamsStorage,
)
diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py
index 580501f2e8..365c114dcb 100644
--- a/faststream/nats/broker/registrator.py
+++ b/faststream/nats/broker/registrator.py
@@ -5,14 +5,17 @@
from typing_extensions import Doc, override
from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.nats.helpers import StreamBuilder
-from faststream.nats.publisher.publisher import SpecificationPublisher
+from faststream.nats.publisher.factory import create_publisher
+from faststream.nats.publisher.specified import SpecificationPublisher
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
from faststream.nats.subscriber.factory import create_subscriber
-from faststream.nats.subscriber.subscriber import SpecificationSubscriber
+from faststream.nats.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from nats.aio.msg import Msg
from faststream._internal.types import (
@@ -94,9 +97,9 @@ def subscriber( # type: ignore[override]
Doc("Enable Heartbeats for a consumer to detect failures."),
] = None,
flow_control: Annotated[
- bool,
+ Optional[bool],
Doc("Enable Flow Control for a consumer."),
- ] = False,
+ ] = None,
deliver_policy: Annotated[
Optional["api.DeliverPolicy"],
Doc("Deliver Policy to be used for subscription."),
@@ -140,8 +143,8 @@ def subscriber( # type: ignore[override]
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -159,14 +162,10 @@ def subscriber( # type: ignore[override]
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -221,10 +220,9 @@ def subscriber( # type: ignore[override]
inbox_prefix=inbox_prefix,
ack_first=ack_first,
# subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# AsyncAPI
title_=title,
@@ -314,7 +312,7 @@ def publisher( # type: ignore[override]
publisher = cast(
SpecificationPublisher,
super().publisher(
- publisher=SpecificationPublisher.create(
+ publisher=create_publisher(
subject=subject,
headers=headers,
# Core
@@ -323,7 +321,7 @@ def publisher( # type: ignore[override]
timeout=timeout,
stream=stream,
# Specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# AsyncAPI
title_=title,
@@ -345,7 +343,7 @@ def include_router( # type: ignore[override]
router: "NatsRegistrator",
*,
prefix: str = "",
- dependencies: Iterable["Depends"] = (),
+ dependencies: Iterable["Dependant"] = (),
middlewares: Iterable["BrokerMiddleware[Msg]"] = (),
include_in_schema: Optional[bool] = None,
) -> None:
diff --git a/faststream/nats/broker/state.py b/faststream/nats/broker/state.py
new file mode 100644
index 0000000000..08b5821597
--- /dev/null
+++ b/faststream/nats/broker/state.py
@@ -0,0 +1,78 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from nats.aio.client import Client
+ from nats.js import JetStreamContext
+
+
+class BrokerState(Protocol):
+ stream: "JetStreamContext"
+ connection: "Client"
+
+ def __bool__(self) -> bool: ...
+
+ def brake(self) -> "BrokerState": ...
+
+ def reconnect(self) -> "BrokerState": ...
+
+
+class EmptyBrokerState(BrokerState):
+ @property
+ def connection(self) -> "Client":
+ msg = "Connection is not available yet. Please, connect the broker first."
+ raise IncorrectState(msg)
+
+ @property
+ def stream(self) -> "JetStreamContext":
+ msg = "Stream is not available yet. Please, connect the broker first."
+ raise IncorrectState(msg)
+
+ def brake(self) -> "BrokerState":
+ return self
+
+ def reconnect(self) -> "BrokerState":
+ msg = "You can't reconnect an empty state. Please, connect the broker first."
+ raise IncorrectState(msg)
+
+ def __bool__(self) -> bool:
+ return False
+
+
+class ConnectedState(BrokerState):
+ def __init__(
+ self,
+ connection: "Client",
+ stream: "JetStreamContext",
+ ) -> None:
+ self.connection = connection
+ self.stream = stream
+
+ def __bool__(self) -> bool:
+ return True
+
+ def brake(self) -> "ConnectionBrokenState":
+ return ConnectionBrokenState(
+ connection=self.connection,
+ stream=self.stream,
+ )
+
+
+class ConnectionBrokenState(BrokerState):
+ def __init__(
+ self,
+ connection: "Client",
+ stream: "JetStreamContext",
+ ) -> None:
+ self.connection = connection
+ self.stream = stream
+
+ def __bool__(self) -> bool:
+ return False
+
+ def reconnect(self) -> "ConnectedState":
+ return ConnectedState(
+ connection=self.connection,
+ stream=self.stream,
+ )
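# --- Editor's sketch (not part of the diff): walking the state machine
# defined above. Real `Client`/`JetStreamContext` handles are replaced with
# plain placeholders here.
from faststream.nats.broker.state import ConnectedState, EmptyBrokerState

state = EmptyBrokerState()
assert not state  # nothing connected yet

connection, stream = object(), object()  # stand-ins for nats-py objects
state = ConnectedState(connection, stream)  # after a successful connect
assert state

state = state.brake()  # error callback: keeps the handles, turns falsy
assert not state

state = state.reconnect()  # reconnect callback restores a ConnectedState
assert state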
diff --git a/faststream/nats/fastapi/__init__.py b/faststream/nats/fastapi/__init__.py
index 7351e313a2..b7aa38c664 100644
--- a/faststream/nats/fastapi/__init__.py
+++ b/faststream/nats/fastapi/__init__.py
@@ -5,16 +5,14 @@
from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.nats.broker import NatsBroker as NB
-from faststream.nats.fastapi.fastapi import NatsRouter
from faststream.nats.message import NatsMessage as NM
-from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer
+
+from .fastapi import NatsRouter
NatsMessage = Annotated[NM, Context("message")]
NatsBroker = Annotated[NB, Context("broker")]
Client = Annotated[NatsClient, Context("broker._connection")]
JsClient = Annotated[JetStreamContext, Context("broker._stream")]
-NatsProducer = Annotated[NatsFastProducer, Context("broker._producer")]
-NatsJsProducer = Annotated[NatsJSFastProducer, Context("broker._js_producer")]
__all__ = (
"Client",
@@ -23,8 +21,6 @@
"JsClient",
"Logger",
"NatsBroker",
- "NatsJsProducer",
"NatsMessage",
- "NatsProducer",
"NatsRouter",
)
diff --git a/faststream/nats/fastapi/fastapi.py b/faststream/nats/fastapi/fastapi.py
index e77d7de2b2..3c465c783c 100644
--- a/faststream/nats/fastapi/fastapi.py
+++ b/faststream/nats/fastapi/fastapi.py
@@ -32,12 +32,11 @@
from faststream.__about__ import SERVICE_NAME
from faststream._internal.constants import EMPTY
from faststream._internal.fastapi.router import StreamRouter
+from faststream.middlewares import AckPolicy
from faststream.nats.broker import NatsBroker
-from faststream.nats.publisher.publisher import SpecificationPublisher
-from faststream.nats.subscriber.subscriber import SpecificationSubscriber
+from faststream.nats.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
- import ssl
from enum import Enum
from fastapi import params
@@ -61,9 +60,10 @@
SubscriberMiddleware,
)
from faststream.nats.message import NatsMessage
+ from faststream.nats.publisher.specified import SpecificationPublisher
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
class NatsRouter(StreamRouter["Msg"]):
@@ -153,22 +153,10 @@ def __init__(
bool,
Doc("Boolean indicating should commands be echoed."),
] = False,
- tls: Annotated[
- Optional["ssl.SSLContext"],
- Doc("Some SSL context to make NATS connections secure."),
- ] = None,
tls_hostname: Annotated[
Optional[str],
Doc("Hostname for TLS."),
] = None,
- user: Annotated[
- Optional[str],
- Doc("Username for NATS auth."),
- ] = None,
- password: Annotated[
- Optional[str],
- Doc("Username password for NATS auth."),
- ] = None,
token: Annotated[
Optional[str],
Doc("Auth token for NATS auth."),
@@ -520,10 +508,7 @@ def __init__(
dont_randomize=dont_randomize,
flusher_queue_size=flusher_queue_size,
no_echo=no_echo,
- tls=tls,
tls_hostname=tls_hostname,
- user=user,
- password=password,
token=token,
drain_timeout=drain_timeout,
signature_cb=signature_cb,
@@ -627,9 +612,9 @@ def subscriber( # type: ignore[override]
Doc("Enable Heartbeats for a consumer to detect failures."),
] = None,
flow_control: Annotated[
- bool,
+ Optional[bool],
Doc("Enable Flow Control for a consumer."),
- ] = False,
+ ] = None,
deliver_policy: Annotated[
Optional["api.DeliverPolicy"],
Doc("Deliver Policy to be used for subscription."),
@@ -692,14 +677,10 @@ def subscriber( # type: ignore[override]
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -871,8 +852,7 @@ def subscriber( # type: ignore[override]
decoder=decoder,
middlewares=middlewares,
max_workers=max_workers,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
@@ -945,7 +925,7 @@ def publisher( # type: ignore[override]
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> SpecificationPublisher:
+ ) -> "SpecificationPublisher":
return self.broker.publisher(
subject,
headers=headers,
diff --git a/faststream/nats/helpers/bucket_declarer.py b/faststream/nats/helpers/bucket_declarer.py
index 2a0c3f68e4..617eb4edad 100644
--- a/faststream/nats/helpers/bucket_declarer.py
+++ b/faststream/nats/helpers/bucket_declarer.py
@@ -2,6 +2,8 @@
from nats.js.api import KeyValueConfig
+from .state import ConnectedState, ConnectionState, EmptyConnectionState
+
if TYPE_CHECKING:
from nats.js import JetStreamContext
from nats.js.api import Placement, RePublish, StorageType
@@ -11,10 +13,17 @@
class KVBucketDeclarer:
buckets: dict[str, "KeyValue"]
- def __init__(self, connection: "JetStreamContext") -> None:
- self._connection = connection
+ def __init__(self) -> None:
self.buckets = {}
+ self.__state: ConnectionState[JetStreamContext] = EmptyConnectionState()
+
+ def connect(self, connection: "JetStreamContext") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
async def create_key_value(
self,
bucket: str,
@@ -34,7 +43,7 @@ async def create_key_value(
) -> "KeyValue":
if (key_value := self.buckets.get(bucket)) is None:
if declare:
- key_value = await self._connection.create_key_value(
+ key_value = await self.__state.connection.create_key_value(
config=KeyValueConfig(
bucket=bucket,
description=description,
@@ -50,7 +59,7 @@ async def create_key_value(
),
)
else:
- key_value = await self._connection.key_value(bucket)
+ key_value = await self.__state.connection.key_value(bucket)
self.buckets[bucket] = key_value
diff --git a/faststream/nats/helpers/obj_storage_declarer.py b/faststream/nats/helpers/obj_storage_declarer.py
index f137fa1586..f0f31918d7 100644
--- a/faststream/nats/helpers/obj_storage_declarer.py
+++ b/faststream/nats/helpers/obj_storage_declarer.py
@@ -2,6 +2,8 @@
from nats.js.api import ObjectStoreConfig
+from .state import ConnectedState, ConnectionState, EmptyConnectionState
+
if TYPE_CHECKING:
from nats.js import JetStreamContext
from nats.js.api import Placement, StorageType
@@ -11,10 +13,17 @@
class OSBucketDeclarer:
buckets: dict[str, "ObjectStore"]
- def __init__(self, connection: "JetStreamContext") -> None:
- self._connection = connection
+ def __init__(self) -> None:
self.buckets = {}
+ self.__state: ConnectionState[JetStreamContext] = EmptyConnectionState()
+
+ def connect(self, connection: "JetStreamContext") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
async def create_object_store(
self,
bucket: str,
@@ -30,7 +39,7 @@ async def create_object_store(
) -> "ObjectStore":
if (object_store := self.buckets.get(bucket)) is None:
if declare:
- object_store = await self._connection.create_object_store(
+ object_store = await self.__state.connection.create_object_store(
bucket=bucket,
config=ObjectStoreConfig(
bucket=bucket,
@@ -43,7 +52,7 @@ async def create_object_store(
),
)
else:
- object_store = await self._connection.object_store(bucket)
+ object_store = await self.__state.connection.object_store(bucket)
self.buckets[bucket] = object_store
diff --git a/faststream/nats/helpers/state.py b/faststream/nats/helpers/state.py
new file mode 100644
index 0000000000..91c9f84ff7
--- /dev/null
+++ b/faststream/nats/helpers/state.py
@@ -0,0 +1,27 @@
+from typing import Protocol, TypeVar
+
+from nats.aio.client import Client
+from nats.js import JetStreamContext
+
+from faststream.exceptions import IncorrectState
+
+ClientT = TypeVar("ClientT", Client, JetStreamContext)
+
+
+class ConnectionState(Protocol[ClientT]):
+ connection: ClientT
+
+
+class EmptyConnectionState(ConnectionState[ClientT]):
+ __slots__ = ()
+
+ @property
+ def connection(self) -> ClientT:
+ raise IncorrectState
+
+
+class ConnectedState(ConnectionState[ClientT]):
+ __slots__ = ("connection",)
+
+ def __init__(self, connection: ClientT) -> None:
+ self.connection = connection
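A short sketch of the declarer lifecycle built on this state object. The early `IncorrectState` follows directly from `EmptyConnectionState.connection`; the commented `js` handle is an assumed `JetStreamContext`, and the remaining `create_key_value` options are assumed to keep their defaults:

import asyncio

from faststream.exceptions import IncorrectState
from faststream.nats.helpers import KVBucketDeclarer

declarer = KVBucketDeclarer()

try:
    # No `connect()` call yet, so EmptyConnectionState.connection raises.
    asyncio.run(declarer.create_key_value(bucket="demo"))
except IncorrectState:
    pass

# declarer.connect(js)   # called by the broker once JetStream is available
# declarer.disconnect()  # and reset again on shutdown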
diff --git a/faststream/nats/message.py b/faststream/nats/message.py
index c3e5b5a158..cbefcce62d 100644
--- a/faststream/nats/message.py
+++ b/faststream/nats/message.py
@@ -15,7 +15,7 @@ async def ack(self) -> None:
# to be compatible with `self.raw_message.ack()`
if not self.raw_message._ackd:
await self.raw_message.ack()
- await super().ack()
+ await super().ack()
async def nack(
self,
@@ -23,12 +23,12 @@ async def nack(
) -> None:
if not self.raw_message._ackd:
await self.raw_message.nak(delay=delay)
- await super().nack()
+ await super().nack()
async def reject(self) -> None:
if not self.raw_message._ackd:
await self.raw_message.term()
- await super().reject()
+ await super().reject()
async def in_progress(self) -> None:
if not self.raw_message._ackd:
diff --git a/faststream/nats/opentelemetry/provider.py b/faststream/nats/opentelemetry/provider.py
index 93364d7bb2..32d9d2d4e1 100644
--- a/faststream/nats/opentelemetry/provider.py
+++ b/faststream/nats/opentelemetry/provider.py
@@ -4,7 +4,6 @@
from nats.aio.msg import Msg
from opentelemetry.semconv.trace import SpanAttributes
-from faststream.__about__ import SERVICE_NAME
from faststream._internal.types import MsgType
from faststream.opentelemetry import TelemetrySettingsProvider
from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME
@@ -12,6 +11,7 @@
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict
from faststream.message import StreamMessage
+ from faststream.nats.response import NatsPublishCommand
class BaseNatsTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
@@ -20,22 +20,21 @@ class BaseNatsTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]):
def __init__(self) -> None:
self.messaging_system = "nats"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "NatsPublishCommand",
) -> "AnyDict":
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["subject"],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "NatsPublishCommand",
) -> str:
- subject: str = kwargs.get("subject", SERVICE_NAME)
- return subject
+ return cmd.destination
class NatsTelemetrySettingsProvider(BaseNatsTelemetrySettingsProvider["Msg"]):
diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py
index 0f3b2f4c8c..d5ddcfe316 100644
--- a/faststream/nats/parser.py
+++ b/faststream/nats/parser.py
@@ -54,9 +54,8 @@ async def decode_message(
class NatsParser(NatsBaseParser):
"""A class to parse NATS core messages."""
- def __init__(self, *, pattern: str, no_ack: bool) -> None:
+ def __init__(self, *, pattern: str) -> None:
super().__init__(pattern=pattern)
- self.no_ack = no_ack
async def parse_message(
self,
@@ -69,8 +68,7 @@ async def parse_message(
headers = message.header or {}
- if not self.no_ack:
- message._ackd = True # prevent message from acking
+ message._ackd = True # prevent Core message from being acknowledged
return NatsMessage(
raw_message=message,
diff --git a/faststream/nats/prometheus/__init__.py b/faststream/nats/prometheus/__init__.py
new file mode 100644
index 0000000000..564d3ea4f4
--- /dev/null
+++ b/faststream/nats/prometheus/__init__.py
@@ -0,0 +1,3 @@
+from faststream.nats.prometheus.middleware import NatsPrometheusMiddleware
+
+__all__ = ("NatsPrometheusMiddleware",)
diff --git a/faststream/nats/prometheus/middleware.py b/faststream/nats/prometheus/middleware.py
new file mode 100644
index 0000000000..9620cd651c
--- /dev/null
+++ b/faststream/nats/prometheus/middleware.py
@@ -0,0 +1,27 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.constants import EMPTY
+from faststream.nats.prometheus.provider import settings_provider_factory
+from faststream.prometheus.middleware import PrometheusMiddleware
+
+if TYPE_CHECKING:
+ from prometheus_client import CollectorRegistry
+
+
+class NatsPrometheusMiddleware(PrometheusMiddleware):
+ def __init__(
+ self,
+ *,
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ super().__init__(
+ settings_provider_factory=settings_provider_factory,
+ registry=registry,
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
diff --git a/faststream/nats/prometheus/provider.py b/faststream/nats/prometheus/provider.py
new file mode 100644
index 0000000000..6b5585eeb9
--- /dev/null
+++ b/faststream/nats/prometheus/provider.py
@@ -0,0 +1,66 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Union
+
+from nats.aio.msg import Msg
+
+from faststream.message.message import MsgType, StreamMessage
+from faststream.prometheus import (
+ ConsumeAttrs,
+ MetricsSettingsProvider,
+)
+
+if TYPE_CHECKING:
+ from faststream.nats.response import NatsPublishCommand
+
+
+class BaseNatsMetricsSettingsProvider(MetricsSettingsProvider[MsgType]):
+ __slots__ = ("messaging_system",)
+
+ def __init__(self) -> None:
+ self.messaging_system = "nats"
+
+ def get_publish_destination_name_from_cmd(
+ self,
+ cmd: "NatsPublishCommand",
+ ) -> str:
+ return cmd.destination
+
+
+class NatsMetricsSettingsProvider(BaseNatsMetricsSettingsProvider["Msg"]):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[Msg]",
+ ) -> ConsumeAttrs:
+ return {
+ "destination_name": msg.raw_message.subject,
+ "message_size": len(msg.body),
+ "messages_count": 1,
+ }
+
+
+class BatchNatsMetricsSettingsProvider(BaseNatsMetricsSettingsProvider[list["Msg"]]):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[list[Msg]]",
+ ) -> ConsumeAttrs:
+ raw_message = msg.raw_message[0]
+ return {
+ "destination_name": raw_message.subject,
+ "message_size": len(msg.body),
+ "messages_count": len(msg.raw_message),
+ }
+
+
+def settings_provider_factory(
+ msg: Union["Msg", Sequence["Msg"], None],
+) -> Union[
+ NatsMetricsSettingsProvider,
+ BatchNatsMetricsSettingsProvider,
+ None,
+]:
+ if isinstance(msg, Sequence):
+ return BatchNatsMetricsSettingsProvider()
+ if isinstance(msg, Msg) or msg is None:
+ return NatsMetricsSettingsProvider()
+ # KeyValue and Object Storage watch cases
+ return None
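A hedged wiring sketch for the new Prometheus support; the broker's `middlewares=` constructor argument follows the usual FastStream pattern and is an assumption here:

from prometheus_client import CollectorRegistry

from faststream.nats import NatsBroker
from faststream.nats.prometheus import NatsPrometheusMiddleware

registry = CollectorRegistry()

broker = NatsBroker(
    middlewares=(
        NatsPrometheusMiddleware(
            registry=registry,
            app_name="orders-service",  # illustrative name
        ),
    ),
)
# Consume/publish metrics are then collected into `registry`; batch messages
# are counted via BatchNatsMetricsSettingsProvider, single ones via the plain provider.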
diff --git a/faststream/nats/publisher/factory.py b/faststream/nats/publisher/factory.py
new file mode 100644
index 0000000000..8f03f94b9d
--- /dev/null
+++ b/faststream/nats/publisher/factory.py
@@ -0,0 +1,43 @@
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Any, Optional
+
+from .specified import SpecificationPublisher
+
+if TYPE_CHECKING:
+ from nats.aio.msg import Msg
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.nats.schemas.js_stream import JStream
+
+
+def create_publisher(
+ *,
+ subject: str,
+ reply_to: str,
+ headers: Optional[dict[str, str]],
+ stream: Optional["JStream"],
+ timeout: Optional[float],
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # AsyncAPI args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> SpecificationPublisher:
+ return SpecificationPublisher(
+ subject=subject,
+ reply_to=reply_to,
+ headers=headers,
+ stream=stream,
+ timeout=timeout,
+ # Publisher args
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
diff --git a/faststream/nats/publisher/fake.py b/faststream/nats/publisher/fake.py
new file mode 100644
index 0000000000..7c70536e34
--- /dev/null
+++ b/faststream/nats/publisher/fake.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.nats.response import NatsPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class NatsFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ subject: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.subject = subject
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "NatsPublishCommand"]
+ ) -> "NatsPublishCommand":
+ real_cmd = NatsPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.subject
+ return real_cmd
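What `patch_command` guarantees, in a tiny sketch: whatever command arrives, the reply is redirected to the stored subject. The `producer=None` stub is an assumption for illustration only:

from faststream.nats.publisher.fake import NatsFakePublisher
from faststream.nats.response import NatsPublishCommand
from faststream.response.publish_type import PublishType

publisher = NatsFakePublisher(producer=None, subject="reply.inbox")  # stub producer

cmd = NatsPublishCommand("pong", subject="ignored", _publish_type=PublishType.REPLY)
patched = publisher.patch_command(cmd)
assert patched.destination == "reply.inbox"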
diff --git a/faststream/nats/publisher/producer.py b/faststream/nats/publisher/producer.py
index 05d3a81402..aba0f78349 100644
--- a/faststream/nats/publisher/producer.py
+++ b/faststream/nats/publisher/producer.py
@@ -1,5 +1,5 @@
import asyncio
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Optional
import anyio
import nats
@@ -7,19 +7,25 @@
from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream.exceptions import FeatureNotSupportedException
from faststream.message import encode_message
+from faststream.nats.helpers.state import (
+ ConnectedState,
+ ConnectionState,
+ EmptyConnectionState,
+)
from faststream.nats.parser import NatsParser
if TYPE_CHECKING:
from nats.aio.client import Client
from nats.aio.msg import Msg
- from nats.js import JetStreamContext
+ from nats.js import JetStreamContext, api
- from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import (
AsyncCallable,
CustomCallable,
)
+ from faststream.nats.response import NatsPublishCommand
class NatsFastProducer(ProducerProto):
@@ -30,68 +36,67 @@ class NatsFastProducer(ProducerProto):
def __init__(
self,
- *,
- connection: "Client",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._connection = connection
-
- default = NatsParser(pattern="", no_ack=False)
+ default = NatsParser(pattern="")
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ self.__state: ConnectionState[Client] = EmptyConnectionState()
+
+ def connect(self, connection: "Client") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- **kwargs: Any, # suprress stream option
+ cmd: "NatsPublishCommand",
) -> None:
- payload, content_type = encode_message(message)
+ payload, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- await self._connection.publish(
- subject=subject,
+ return await self.__state.connection.publish(
+ subject=cmd.destination,
payload=payload,
- reply=reply_to,
+ reply=cmd.reply_to,
headers=headers_to_send,
)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[dict[str, str]] = None,
- timeout: float = 0.5,
+ cmd: "NatsPublishCommand",
) -> "Msg":
- payload, content_type = encode_message(message)
+ payload, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(),
}
- return await self._connection.request(
- subject=subject,
+ return await self.__state.connection.request(
+ subject=cmd.destination,
payload=payload,
headers=headers_to_send,
- timeout=timeout,
+ timeout=cmd.timeout,
)
+ @override
+ async def publish_batch(
+ self,
+ cmd: "NatsPublishCommand",
+ ) -> None:
+ msg = "NATS doesn't support publishing in batches."
+ raise FeatureNotSupportedException(msg)
+
class NatsJSFastProducer(ProducerProto):
"""A class to represent a NATS JetStream producer."""
@@ -102,81 +107,68 @@ class NatsJSFastProducer(ProducerProto):
def __init__(
self,
*,
- connection: "JetStreamContext",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._connection = connection
-
- default = NatsParser(pattern="", no_ack=False)
+ default = NatsParser(pattern="") # core parser to serialize responses
self._parser = resolve_custom_func(parser, default.parse_message)
self._decoder = resolve_custom_func(decoder, default.decode_message)
+ self.__state: ConnectionState[JetStreamContext] = EmptyConnectionState()
+
+ def connect(self, connection: "Client") -> None:
+ self.__state = ConnectedState(connection)
+
+ def disconnect(self) -> None:
+ self.__state = EmptyConnectionState()
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[dict[str, str]] = None,
- reply_to: str = "",
- stream: Optional[str] = None,
- timeout: Optional[float] = None,
- ) -> Optional[Any]:
- payload, content_type = encode_message(message)
+ cmd: "NatsPublishCommand",
+ ) -> "api.PubAck":
+ payload, content_type = encode_message(cmd.body)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
- **(headers or {}),
+ **cmd.headers_to_publish(js=True),
}
- if reply_to:
- headers_to_send.update({"reply_to": reply_to})
-
- await self._connection.publish(
- subject=subject,
+ return await self.__state.connection.publish(
+ subject=cmd.destination,
payload=payload,
headers=headers_to_send,
- stream=stream,
- timeout=timeout,
+ stream=cmd.stream,
+ timeout=cmd.timeout,
)
- return None
-
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: str,
- headers: Optional[dict[str, str]] = None,
- stream: Optional[str] = None,
- timeout: float = 0.5,
+ cmd: "NatsPublishCommand",
) -> "Msg":
- payload, content_type = encode_message(message)
+ payload, content_type = encode_message(cmd.body)
- reply_to = self._connection._nc.new_inbox()
+ reply_to = self.__state.connection._nc.new_inbox()
future: asyncio.Future[Msg] = asyncio.Future()
- sub = await self._connection._nc.subscribe(reply_to, future=future, max_msgs=1)
+ sub = await self.__state.connection._nc.subscribe(
+ reply_to, future=future, max_msgs=1
+ )
await sub.unsubscribe(limit=1)
headers_to_send = {
"content-type": content_type or "",
- "correlation_id": correlation_id,
"reply_to": reply_to,
- **(headers or {}),
+ **cmd.headers_to_publish(js=False),
}
- with anyio.fail_after(timeout):
- await self._connection.publish(
- subject=subject,
+ with anyio.fail_after(cmd.timeout):
+ await self.__state.connection.publish(
+ subject=cmd.destination,
payload=payload,
headers=headers_to_send,
- stream=stream,
- timeout=timeout,
+ stream=cmd.stream,
+ timeout=cmd.timeout,
)
msg = await future
@@ -191,3 +183,11 @@ async def request( # type: ignore[override]
raise nats.errors.NoRespondersError
return msg
+
+ @override
+ async def publish_batch(
+ self,
+ cmd: "NatsPublishCommand",
+ ) -> None:
+ msg = "NATS doesn't support publishing in batches."
+ raise FeatureNotSupportedException(msg)
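A sketch of the new connect/disconnect contract: publishing before `connect()` now fails fast with `IncorrectState` instead of an assertion. The explicit `headers={}` below is a defensive assumption about the base `PublishCommand` defaults:

import asyncio

from faststream.exceptions import IncorrectState
from faststream.nats.publisher.producer import NatsFastProducer
from faststream.nats.response import NatsPublishCommand
from faststream.response.publish_type import PublishType

producer = NatsFastProducer(parser=None, decoder=None)

cmd = NatsPublishCommand(
    "hello",
    subject="demo",
    headers={},
    _publish_type=PublishType.PUBLISH,
)

try:
    asyncio.run(producer.publish(cmd))  # EmptyConnectionState -> IncorrectState
except IncorrectState:
    pass

# producer.connect(nc)  # called by the broker once the NATS client is connected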
diff --git a/faststream/nats/publisher/specified.py b/faststream/nats/publisher/specified.py
new file mode 100644
index 0000000000..41cfdc27b9
--- /dev/null
+++ b/faststream/nats/publisher/specified.py
@@ -0,0 +1,36 @@
+from faststream.nats.publisher.usecase import LogicPublisher
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema.bindings import ChannelBinding, nats
+from faststream.specification.schema.channel import Channel
+from faststream.specification.schema.message import CorrelationId, Message
+from faststream.specification.schema.operation import Operation
+
+
+class SpecificationPublisher(LogicPublisher):
+ """A class to represent a NATS publisher."""
+
+ def get_name(self) -> str:
+ return f"{self.subject}:Publisher"
+
+ def get_schema(self) -> dict[str, Channel]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: Channel(
+ description=self.description,
+ publish=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ correlationId=CorrelationId(
+ location="$message.header#/correlation_id",
+ ),
+ ),
+ ),
+ bindings=ChannelBinding(
+ nats=nats.ChannelBinding(
+ subject=self.subject,
+ ),
+ ),
+ ),
+ }
diff --git a/faststream/nats/publisher/usecase.py b/faststream/nats/publisher/usecase.py
index f76e24d070..9d3ccd92dc 100644
--- a/faststream/nats/publisher/usecase.py
+++ b/faststream/nats/publisher/usecase.py
@@ -1,35 +1,34 @@
-from collections.abc import Awaitable, Iterable
-from functools import partial
-from itertools import chain
+from collections.abc import Iterable
from typing import (
TYPE_CHECKING,
- Annotated,
Any,
- Callable,
Optional,
Union,
)
from nats.aio.msg import Msg
-from typing_extensions import Doc, override
+from typing_extensions import Literal, overload, override
from faststream._internal.publisher.usecase import PublisherUsecase
-from faststream._internal.subscriber.utils import process_msg
-from faststream.exceptions import NOT_CONNECTED_YET
from faststream.message import gen_cor_id
+from faststream.nats.response import NatsPublishCommand
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from faststream._internal.basic_types import AnyDict, SendableMessage
+ from nats.js import api
+
+ from faststream._internal.basic_types import SendableMessage
from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.nats.message import NatsMessage
from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer
from faststream.nats.schemas import JStream
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[Msg]):
"""A class to represent a NATS publisher."""
- _producer: Union["NatsFastProducer", "NatsJSFastProducer", None]
+ _producer: Union["NatsFastProducer", "NatsJSFastProducer"]
def __init__(
self,
@@ -62,137 +61,150 @@ def __init__(
self.subject = subject
self.stream = stream
self.timeout = timeout
- self.headers = headers
+ self.headers = headers or {}
self.reply_to = reply_to
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str = "",
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Literal[None] = None,
+ timeout: Optional[float] = None,
+ ) -> None: ...
+
+ @overload
+ async def publish(
+ self,
+ message: "SendableMessage",
+ subject: str = "",
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ correlation_id: Optional[str] = None,
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ ) -> "api.PubAck": ...
+
@override
async def publish(
self,
message: "SendableMessage",
subject: str = "",
- *,
headers: Optional[dict[str, str]] = None,
reply_to: str = "",
correlation_id: Optional[str] = None,
stream: Optional[str] = None,
timeout: Optional[float] = None,
- # publisher specific
- _extra_middlewares: Iterable["PublisherMiddleware"] = (),
- ) -> None:
+ ) -> Optional["api.PubAck"]:
"""Publish message directly.
Args:
- message (SendableMessage): Message body to send.
+ message:
+ Message body to send.
Can be any encodable object (native python types or `pydantic.BaseModel`).
- subject (str): NATS subject to send message (default is `''`).
- headers (:obj:`dict` of :obj:`str`: :obj:`str`, optional): Message headers to store metainformation (default is `None`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
**content-type** and **correlation_id** will be set automatically by framework anyway.
-
- reply_to (str): NATS subject name to send response (default is `None`).
- correlation_id (str, optional): Manual message **correlation_id** setter (default is `None`).
+ reply_to:
+ NATS subject name to send the response to.
+ correlation_id:
+ Manual message **correlation_id** setter.
**correlation_id** is a useful option to trace messages.
-
- stream (str, optional): This option validates that the target subject is in presented stream (default is `None`).
- Can be omitted without any effect.
- timeout (float, optional): Timeout to send message to NATS in seconds (default is `None`).
-
- _extra_middlewares (:obj:`Iterable` of :obj:`PublisherMiddleware`): Extra middlewares to wrap publishing process (default is `()`).
+ stream:
+ This option validates that the target subject is in the presented stream.
+ Can be omitted without any effect if you don't need the PubAck frame.
+ timeout:
+ Timeout to send message to NATS.
+
+ Returns:
+ `None` if you publish a regular message.
+ `nats.js.api.PubAck` if you publish a message to a stream.
"""
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "subject": subject or self.subject,
- "headers": headers or self.headers,
- "reply_to": reply_to or self.reply_to,
- "correlation_id": correlation_id or gen_cor_id(),
- }
+ cmd = NatsPublishCommand(
+ message,
+ subject=subject or self.subject,
+ headers=self.headers | (headers or {}),
+ reply_to=reply_to or self.reply_to,
+ correlation_id=correlation_id or gen_cor_id(),
+ stream=stream or getattr(self.stream, "name", None),
+ timeout=timeout or self.timeout,
+ _publish_type=PublishType.PUBLISH,
+ )
+ return await self._basic_publish(cmd, _extra_middlewares=())
- if stream := stream or getattr(self.stream, "name", None):
- kwargs.update({"stream": stream, "timeout": timeout or self.timeout})
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "NatsPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = NatsPublishCommand.from_cmd(cmd)
- call: Callable[..., Awaitable[Any]] = self._producer.publish
+ cmd.destination = self.subject
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ if self.stream:
+ cmd.stream = self.stream.name
+ cmd.timeout = self.timeout
- await call(message, **kwargs)
+ return await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
self,
- message: Annotated[
- "SendableMessage",
- Doc(
- "Message body to send. "
- "Can be any encodable object (native python types or `pydantic.BaseModel`).",
- ),
- ],
- subject: Annotated[
- str,
- Doc("NATS subject to send message."),
- ] = "",
- *,
- headers: Annotated[
- Optional[dict[str, str]],
- Doc(
- "Message headers to store metainformation. "
- "**content-type** and **correlation_id** will be set automatically by framework anyway.",
- ),
- ] = None,
- correlation_id: Annotated[
- Optional[str],
- Doc(
- "Manual message **correlation_id** setter. "
- "**correlation_id** is a useful option to trace messages.",
- ),
- ] = None,
- timeout: Annotated[
- float,
- Doc("Timeout to send message to NATS."),
- ] = 0.5,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
+ message: "SendableMessage",
+ subject: str = "",
+ headers: Optional[dict[str, str]] = None,
+ correlation_id: Optional[str] = None,
+ timeout: float = 0.5,
) -> "NatsMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "subject": subject or self.subject,
- "headers": headers or self.headers,
- "timeout": timeout or self.timeout,
- "correlation_id": correlation_id or gen_cor_id(),
- }
-
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
- message,
- **kwargs,
- )
+ """Make a synchronous request to outer subscriber.
+
+ If out subscriber listens subject by stream, you should setup the same **stream** explicitly.
+ Another way you will reseave confirmation frame as a response.
+
+ Note:
+ To setup **stream** option, please use `__init__` method.
+
+ Args:
+ message:
+ Message body to send.
+ Can be any encodable object (native python types or `pydantic.BaseModel`).
+ subject:
+ NATS subject to send message.
+ headers:
+ Message headers to store metainformation.
+ **content-type** and **correlation_id** will be set automatically by the framework anyway.
+ correlation_id:
+ Manual message **correlation_id** setter.
+ **correlation_id** is a useful option to trace messages.
+ timeout:
+ Timeout to send message to NATS.
- msg: NatsMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
+ Returns:
+ The `faststream.nats.message.NatsMessage` object containing the outer subscriber's response.
+ """
+ cmd = NatsPublishCommand(
+ message=message,
+ subject=subject or self.subject,
+ headers=self.headers | (headers or {}),
+ timeout=timeout or self.timeout,
+ correlation_id=correlation_id or gen_cor_id(),
+ stream=getattr(self.stream, "name", None),
+ _publish_type=PublishType.REQUEST,
)
+
+ msg: NatsMessage = await self._basic_request(cmd)
return msg
def add_prefix(self, prefix: str) -> None:
diff --git a/faststream/nats/response.py b/faststream/nats/response.py
index 625ac866f0..f8121bf883 100644
--- a/faststream/nats/response.py
+++ b/faststream/nats/response.py
@@ -1,11 +1,12 @@
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import override
-from faststream.response import Response
+from faststream.response.publish_type import PublishType
+from faststream.response.response import PublishCommand, Response
if TYPE_CHECKING:
- from faststream._internal.basic_types import AnyDict, SendableMessage
+ from faststream._internal.basic_types import SendableMessage
class NatsResponse(Response):
@@ -25,8 +26,81 @@ def __init__(
self.stream = stream
@override
- def as_publish_kwargs(self) -> "AnyDict":
- return {
- **super().as_publish_kwargs(),
- "stream": self.stream,
- }
+ def as_publish_command(self) -> "NatsPublishCommand":
+ return NatsPublishCommand(
+ message=self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ # Nats specific
+ subject="",
+ stream=self.stream,
+ )
+
+
+class NatsPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ message: "SendableMessage",
+ *,
+ subject: str = "",
+ correlation_id: Optional[str] = None,
+ headers: Optional[dict[str, str]] = None,
+ reply_to: str = "",
+ stream: Optional[str] = None,
+ timeout: Optional[float] = None,
+ _publish_type: PublishType,
+ ) -> None:
+ super().__init__(
+ body=message,
+ destination=subject,
+ correlation_id=correlation_id,
+ headers=headers,
+ reply_to=reply_to,
+ _publish_type=_publish_type,
+ )
+
+ self.stream = stream
+ self.timeout = timeout
+
+ def headers_to_publish(self, *, js: bool = False) -> dict[str, str]:
+ headers = {}
+
+ if self.correlation_id:
+ headers["correlation_id"] = self.correlation_id
+
+ if js and self.reply_to:
+ headers["reply_to"] = self.reply_to
+
+ return headers | self.headers
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "NatsPublishCommand"],
+ ) -> "NatsPublishCommand":
+ if isinstance(cmd, NatsPublishCommand):
+ # NOTE: This should probably return a copy.
+ return cmd
+
+ return cls(
+ message=cmd.body,
+ subject=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
+
+ def __repr__(self) -> str:
+ body = [f"body='{self.body}'", f"subject='{self.destination}'"]
+ if self.stream:
+ body.append(f"stream={self.stream}")
+ if self.reply_to:
+ body.append(f"reply_to='{self.reply_to}'")
+ body.extend((
+ f"headers={self.headers}",
+ f"correlation_id='{self.correlation_id}'",
+ f"publish_type={self.publish_type}",
+ ))
+ return f"{self.__class__.__name__}({', '.join(body)})"
diff --git a/faststream/nats/router.py b/faststream/nats/router.py
index 982f274b50..be895eb8af 100644
--- a/faststream/nats/router.py
+++ b/faststream/nats/router.py
@@ -16,10 +16,12 @@
BrokerRouter,
SubscriberRoute,
)
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.nats.broker.registrator import NatsRegistrator
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from nats.aio.msg import Msg
from faststream._internal.basic_types import SendableMessage
@@ -185,9 +187,9 @@ def __init__(
Doc("Enable Heartbeats for a consumer to detect failures."),
] = None,
flow_control: Annotated[
- bool,
+ Optional[bool],
Doc("Enable Flow Control for a consumer."),
- ] = False,
+ ] = None,
deliver_policy: Annotated[
Optional["api.DeliverPolicy"],
Doc("Deliver Policy to be used for subscription."),
@@ -231,8 +233,8 @@ def __init__(
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -250,14 +252,10 @@ def __init__(
int,
Doc("Number of workers to process messages concurrently."),
] = 1,
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -307,8 +305,7 @@ def __init__(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
@@ -334,9 +331,9 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers.",
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
diff --git a/faststream/nats/schemas/js_stream.py b/faststream/nats/schemas/js_stream.py
index da6e41f3bf..3ad4fc2e4f 100644
--- a/faststream/nats/schemas/js_stream.py
+++ b/faststream/nats/schemas/js_stream.py
@@ -6,6 +6,7 @@
from faststream._internal.proto import NameRequired
from faststream._internal.utils.path import compile_path
+from faststream.middlewares import AckPolicy
if TYPE_CHECKING:
from re import Pattern
@@ -120,13 +121,10 @@ def __init__(
"cluster may be available but for reads only.",
),
] = None,
- no_ack: Annotated[
- bool,
- Doc(
- "Should stream acknowledge writes or not. Without acks publisher can't determine, does message "
- "received by stream or not.",
- ),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = AckPolicy.REJECT_ON_ERROR,
template_owner: Optional[str] = None,
duplicate_window: Annotated[
float,
@@ -191,9 +189,11 @@ def __init__(
super().__init__(name)
subjects = subjects or []
+ no_ack = ack_policy is AckPolicy.DO_NOTHING
self.subjects = subjects
self.declare = declare
+
self.config = StreamConfig(
name=name,
description=description,
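The boolean-to-enum mapping above can be read off directly; a sketch assuming the computed `no_ack` still feeds `StreamConfig` as before:

from faststream.middlewares import AckPolicy
from faststream.nats import JStream

assert JStream("events").config.no_ack is False  # REJECT_ON_ERROR default

silent = JStream("events", ack_policy=AckPolicy.DO_NOTHING)  # old `no_ack=True`
assert silent.config.no_ack is True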
diff --git a/faststream/nats/subscriber/subscription.py b/faststream/nats/subscriber/adapters.py
similarity index 100%
rename from faststream/nats/subscriber/subscription.py
rename to faststream/nats/subscriber/adapters.py
diff --git a/faststream/nats/subscriber/factory.py b/faststream/nats/subscriber/factory.py
index b3170988b8..c17e6e808d 100644
--- a/faststream/nats/subscriber/factory.py
+++ b/faststream/nats/subscriber/factory.py
@@ -1,3 +1,4 @@
+import warnings
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any, Optional, Union
@@ -5,14 +6,16 @@
DEFAULT_SUB_PENDING_BYTES_LIMIT,
DEFAULT_SUB_PENDING_MSGS_LIMIT,
)
-from nats.js.api import ConsumerConfig
+from nats.js.api import ConsumerConfig, DeliverPolicy
from nats.js.client import (
DEFAULT_JS_SUB_PENDING_BYTES_LIMIT,
DEFAULT_JS_SUB_PENDING_MSGS_LIMIT,
)
+from faststream._internal.constants import EMPTY
from faststream.exceptions import SetupError
-from faststream.nats.subscriber.subscriber import (
+from faststream.middlewares import AckPolicy
+from faststream.nats.subscriber.specified import (
SpecificationBatchPullStreamSubscriber,
SpecificationConcurrentCoreSubscriber,
SpecificationConcurrentPullStreamSubscriber,
@@ -21,11 +24,11 @@
SpecificationKeyValueWatchSubscriber,
SpecificationObjStoreWatchSubscriber,
SpecificationPullStreamSubscriber,
- SpecificationStreamSubscriber,
+ SpecificationPushStreamSubscriber,
)
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from nats.js import api
from faststream._internal.basic_types import AnyDict
@@ -46,7 +49,7 @@ def create_subscriber(
config: Optional["api.ConsumerConfig"],
ordered_consumer: bool,
idle_heartbeat: Optional[float],
- flow_control: bool,
+ flow_control: Optional[bool],
deliver_policy: Optional["api.DeliverPolicy"],
headers_only: Optional[bool],
# pull args
@@ -59,10 +62,9 @@ def create_subscriber(
max_workers: int,
stream: Optional["JStream"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Any]"],
# Specification information
title_: Optional[str],
@@ -71,7 +73,7 @@ def create_subscriber(
) -> Union[
"SpecificationCoreSubscriber",
"SpecificationConcurrentCoreSubscriber",
- "SpecificationStreamSubscriber",
+ "SpecificationPushStreamSubscriber",
"SpecificationConcurrentPushStreamSubscriber",
"SpecificationPullStreamSubscriber",
"SpecificationConcurrentPullStreamSubscriber",
@@ -79,20 +81,43 @@ def create_subscriber(
"SpecificationKeyValueWatchSubscriber",
"SpecificationObjStoreWatchSubscriber",
]:
- if pull_sub is not None and stream is None:
- msg = "Pull subscriber can be used only with a stream"
- raise SetupError(msg)
+ _validate_input_for_misconfigure(
+ subject=subject,
+ queue=queue,
+ pending_msgs_limit=pending_msgs_limit,
+ pending_bytes_limit=pending_bytes_limit,
+ max_msgs=max_msgs,
+ durable=durable,
+ config=config,
+ ordered_consumer=ordered_consumer,
+ idle_heartbeat=idle_heartbeat,
+ flow_control=flow_control,
+ deliver_policy=deliver_policy,
+ headers_only=headers_only,
+ pull_sub=pull_sub,
+ ack_policy=ack_policy,
+ kv_watch=kv_watch,
+ obj_watch=obj_watch,
+ ack_first=ack_first,
+ max_workers=max_workers,
+ stream=stream,
+ )
- if not subject and not config:
- msg = "You must provide either `subject` or `config` option."
- raise SetupError(msg)
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
config = config or ConsumerConfig(filter_subjects=[])
+ if config.durable_name is None:
+ config.durable_name = durable
+ if config.idle_heartbeat is None:
+ config.idle_heartbeat = idle_heartbeat
+ if config.headers_only is None:
+ config.headers_only = headers_only
+ if config.deliver_policy is DeliverPolicy.ALL:
+ config.deliver_policy = deliver_policy or DeliverPolicy.ALL
if stream:
- # TODO: pull & queue warning
- # TODO: push & durable warning
-
+ # Both JS Subscribers
extra_options: AnyDict = {
"pending_msgs_limit": pending_msgs_limit
or DEFAULT_JS_SUB_PENDING_MSGS_LIMIT,
@@ -103,9 +128,11 @@ def create_subscriber(
}
if pull_sub is not None:
+ # JS Pull Subscriber
extra_options.update({"inbox_prefix": inbox_prefix})
else:
+ # JS Push Subscriber
extra_options.update(
{
"ordered_consumer": ordered_consumer,
@@ -118,6 +145,7 @@ def create_subscriber(
)
else:
+ # Core Subscriber
extra_options = {
"pending_msgs_limit": pending_msgs_limit or DEFAULT_SUB_PENDING_MSGS_LIMIT,
"pending_bytes_limit": pending_bytes_limit
@@ -159,9 +187,7 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification
@@ -177,9 +203,7 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification
@@ -199,9 +223,8 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification
@@ -219,9 +242,8 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification
@@ -240,9 +262,8 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification
@@ -259,9 +280,8 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification
@@ -270,7 +290,7 @@ def create_subscriber(
include_in_schema=include_in_schema,
)
- return SpecificationStreamSubscriber(
+ return SpecificationPushStreamSubscriber(
stream=stream,
subject=subject,
queue=queue,
@@ -278,9 +298,8 @@ def create_subscriber(
# basic args
extra_options=extra_options,
# Subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# Specification information
@@ -288,3 +307,202 @@ def create_subscriber(
description_=description_,
include_in_schema=include_in_schema,
)
+
+
+def _validate_input_for_misconfigure( # noqa: PLR0915
+ subject: str,
+ queue: str, # default ""
+ pending_msgs_limit: Optional[int],
+ pending_bytes_limit: Optional[int],
+ max_msgs: int, # default 0
+ durable: Optional[str],
+ config: Optional["api.ConsumerConfig"],
+ ordered_consumer: bool, # default False
+ idle_heartbeat: Optional[float],
+ flow_control: Optional[bool],
+ deliver_policy: Optional["api.DeliverPolicy"],
+ headers_only: Optional[bool],
+ pull_sub: Optional["PullSub"],
+ kv_watch: Optional["KvWatch"],
+ obj_watch: Optional["ObjWatch"],
+ ack_policy: "AckPolicy", # default EMPTY
+ ack_first: bool, # default False
+ max_workers: int, # default 1
+ stream: Optional["JStream"],
+) -> None:
+ if not subject and not config:
+ msg = "You must provide either the `subject` or `config` option."
+ raise SetupError(msg)
+
+ if stream and kv_watch:
+ msg = "You can't use both the `stream` and `kv_watch` options simultaneously."
+ raise SetupError(msg)
+
+ if stream and obj_watch:
+ msg = "You can't use both the `stream` and `obj_watch` options simultaneously."
+ raise SetupError(msg)
+
+ if kv_watch and obj_watch:
+ msg = (
+ "You can't use both the `kv_watch` and `obj_watch` options simultaneously."
+ )
+ raise SetupError(msg)
+
+ if pull_sub and not stream:
+ msg = "JetStream Pull Subscriber can only be used with the `stream` option."
+ raise SetupError(msg)
+
+ if ack_policy is not EMPTY:
+ if obj_watch is not None:
+ warnings.warn(
+ "You can't use acknowledgement policy with ObjectStorage watch subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ elif kv_watch is not None:
+ warnings.warn(
+ "You can't use acknowledgement policy with KeyValue watch subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ elif stream is None:
+ warnings.warn(
+ "You can't use acknowledgement policy with core subscriber. Use JetStream instead.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if max_msgs > 0 and any((stream, kv_watch, obj_watch)):
+ warnings.warn(
+ "The `max_msgs` option can be used only with a NATS Core Subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if not stream:
+ if obj_watch or kv_watch:
+ # Obj/Kv Subscriber
+ if pending_msgs_limit is not None:
+ warnings.warn(
+ message="The `pending_msgs_limit` option can be used only with JetStream (Pull/Push) or Core Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if pending_bytes_limit is not None:
+ warnings.warn(
+ message="The `pending_bytes_limit` option can be used only with JetStream (Pull/Push) or Core Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if queue:
+ warnings.warn(
+ message="The `queue` option can be used only with JetStream Push or Core Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if max_workers > 1:
+ warnings.warn(
+ message="The `max_workers` option can be used only with JetStream (Pull/Push) or Core Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ # Core/Obj/Kv Subscriber
+ if durable:
+ warnings.warn(
+ message="The `durable` option can be used only with JetStream (Pull/Push) Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if config is not None:
+ warnings.warn(
+ message="The `config` option can be used only with JetStream (Pull/Push) Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if ordered_consumer:
+ warnings.warn(
+ message="The `ordered_consumer` option can be used only with JetStream (Pull/Push) Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if idle_heartbeat is not None:
+ warnings.warn(
+ message="The `idle_heartbeat` option can be used only with JetStream (Pull/Push) Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if flow_control:
+ warnings.warn(
+ message="The `flow_control` option can be used only with JetStream Push Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if deliver_policy:
+ warnings.warn(
+ message="The `deliver_policy` option can be used only with JetStream (Pull/Push) Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if headers_only:
+ warnings.warn(
+ message="The `headers_only` option can be used only with JetStream (Pull/Push) Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if ack_first:
+ warnings.warn(
+ message="The `ack_first` option can be used only with JetStream Push Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ # JetStream Subscribers
+ elif pull_sub:
+ if queue:
+ warnings.warn(
+ message="The `queue` option has no effect with JetStream Pull Subscription. You probably wanted to use the `durable` option instead.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if ordered_consumer:
+ warnings.warn(
+ "The `ordered_consumer` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if ack_first:
+ warnings.warn(
+ message="The `ack_first` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if flow_control:
+ warnings.warn(
+ message="The `flow_control` option has no effect with JetStream Pull Subscription. It can only be used with JetStream Push Subscription.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
+
+ # JS PushSub
+ elif durable is not None:
+ warnings.warn(
+ message="The JetStream Push consumer with the `durable` option can't be scaled horizontally across multiple instances. You probably wanted to use the `queue` option instead. Also, we strongly recommend using the Jetstream PullSubsriber with the `durable` option as the default.",
+ category=RuntimeWarning,
+ stacklevel=4,
+ )
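How these guards surface in user code, sketched with the usual broker-level API; the exact `subscriber()` keyword set and the string-to-JStream coercion are assumptions consistent with this diff:

import warnings

from faststream.exceptions import SetupError
from faststream.nats import NatsBroker, PullSub

broker = NatsBroker()

try:
    broker.subscriber("demo", pull_sub=PullSub())  # pull without `stream`
except SetupError:
    pass  # "JetStream Pull Subscriber can only be used with the `stream` option."

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    broker.subscriber("demo", stream="stream", max_msgs=10)

# `max_msgs` only applies to core subscriptions, so a RuntimeWarning is emitted.
assert any("max_msgs" in str(w.message) for w in caught)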
diff --git a/faststream/nats/subscriber/subscriber.py b/faststream/nats/subscriber/specified.py
similarity index 95%
rename from faststream/nats/subscriber/subscriber.py
rename to faststream/nats/subscriber/specified.py
index 0b3efb1317..1700052680 100644
--- a/faststream/nats/subscriber/subscriber.py
+++ b/faststream/nats/subscriber/specified.py
@@ -2,7 +2,7 @@
from typing_extensions import override
-from faststream.nats.subscriber.usecase import (
+from faststream.nats.subscriber.usecases import (
BatchPullStreamSubscriber,
ConcurrentCoreSubscriber,
ConcurrentPullStreamSubscriber,
@@ -21,7 +21,7 @@
from faststream.specification.schema.operation import Operation
-class SpecificationSubscriber(LogicSubscriber[Any, Any]):
+class SpecificationSubscriber(LogicSubscriber[Any]):
"""A class to represent a NATS handler."""
def get_default_name(self) -> str:
@@ -64,7 +64,7 @@ class SpecificationConcurrentCoreSubscriber(
"""One-message core concurrent consumer with Specification methods."""
-class SpecificationStreamSubscriber(
+class SpecificationPushStreamSubscriber(
SpecificationSubscriber,
PushStreamSubscription,
):
diff --git a/faststream/nats/subscriber/state.py b/faststream/nats/subscriber/state.py
new file mode 100644
index 0000000000..d8e2825d83
--- /dev/null
+++ b/faststream/nats/subscriber/state.py
@@ -0,0 +1,60 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from nats.aio.client import Client
+ from nats.js import JetStreamContext
+
+ from faststream.nats.broker.state import BrokerState
+ from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
+
+
+class SubscriberState(Protocol):
+ client: "Client"
+ js: "JetStreamContext"
+ kv_declarer: "KVBucketDeclarer"
+ os_declarer: "OSBucketDeclarer"
+
+
+class EmptySubscriberState(SubscriberState):
+ @property
+ def client(self) -> "Client":
+ msg = "Connection is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+ @property
+ def js(self) -> "JetStreamContext":
+ msg = "Stream is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+ @property
+ def kv_declarer(self) -> "KVBucketDeclarer":
+ msg = "KeyValue is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+ @property
+ def os_declarer(self) -> "OSBucketDeclarer":
+ msg = "ObjectStorage is not available yet. Please, setup the subscriber first."
+ raise IncorrectState(msg)
+
+
+class ConnectedSubscriberState(SubscriberState):
+ def __init__(
+ self,
+ *,
+ parent_state: "BrokerState",
+ kv_declarer: "KVBucketDeclarer",
+ os_declarer: "OSBucketDeclarer",
+ ) -> None:
+ self._parent_state = parent_state
+ self.kv_declarer = kv_declarer
+ self.os_declarer = os_declarer
+
+ @property
+ def client(self) -> "Client":
+ return self._parent_state.connection
+
+ @property
+ def js(self) -> "JetStreamContext":
+ return self._parent_state.stream
diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py
index 457db20587..e2c6b207e1 100644
--- a/faststream/nats/subscriber/usecase.py
+++ b/faststream/nats/subscriber/usecase.py
@@ -1,6 +1,5 @@
-import asyncio
from abc import abstractmethod
-from collections.abc import Awaitable, Coroutine, Iterable, Sequence
+from collections.abc import Awaitable, Iterable
from contextlib import suppress
from typing import (
TYPE_CHECKING,
@@ -9,23 +8,21 @@
Callable,
Generic,
Optional,
- TypeVar,
- Union,
cast,
)
import anyio
-from fast_depends.dependencies import Depends
from nats.errors import ConnectionClosedError, TimeoutError
from nats.js.api import ConsumerConfig, ObjectInfo
from typing_extensions import Doc, override
-from faststream._internal.context.repository import context
-from faststream._internal.publisher.fake import FakePublisher
+from faststream._internal.subscriber.mixins import ConcurrentMixin, TasksMixin
from faststream._internal.subscriber.usecase import SubscriberUsecase
from faststream._internal.subscriber.utils import process_msg
from faststream._internal.types import MsgType
-from faststream.exceptions import NOT_CONNECTED_YET
+from faststream.middlewares import AckPolicy
+from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
+from faststream.nats.message import NatsMessage
from faststream.nats.parser import (
BatchParser,
JsParser,
@@ -33,15 +30,17 @@
NatsParser,
ObjParser,
)
+from faststream.nats.publisher.fake import NatsFakePublisher
from faststream.nats.schemas.js_stream import compile_nats_wildcard
-from faststream.nats.subscriber.subscription import (
+from faststream.nats.subscriber.adapters import (
UnsubscribeAdapter,
Unsubscriptable,
)
+from .state import ConnectedSubscriberState, EmptySubscriberState, SubscriberState
+
if TYPE_CHECKING:
- from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
- from nats.aio.client import Client
+ from fast_depends.dependencies import Dependant
from nats.aio.msg import Msg
from nats.aio.subscription import Subscription
from nats.js import JetStreamContext
@@ -50,32 +49,31 @@
from faststream._internal.basic_types import (
AnyDict,
- LoggerProto,
SendableMessage,
)
- from faststream._internal.publisher.proto import ProducerProto
- from faststream._internal.setup import SetupState
+ from faststream._internal.publisher.proto import BasePublisherProto, ProducerProto
+ from faststream._internal.state import (
+ BrokerState as BasicState,
+ Pointer,
+ )
from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
CustomCallable,
)
from faststream.message import StreamMessage
+ from faststream.nats.broker.state import BrokerState
from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
- from faststream.nats.message import NatsKvMessage, NatsMessage, NatsObjMessage
+ from faststream.nats.message import NatsKvMessage, NatsObjMessage
from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub
-ConnectionType = TypeVar("ConnectionType")
-
-
-class LogicSubscriber(SubscriberUsecase[MsgType], Generic[ConnectionType, MsgType]):
+class LogicSubscriber(SubscriberUsecase[MsgType], Generic[MsgType]):
"""A class to represent a NATS handler."""
subscription: Optional[Unsubscriptable]
_fetch_sub: Optional[Unsubscriptable]
producer: Optional["ProducerProto"]
- _connection: Optional[ConnectionType]
def __init__(
self,
@@ -86,10 +84,9 @@ def __init__(
# Subscriber args
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
# AsyncAPI args
title_: Optional[str],
@@ -105,9 +102,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -116,33 +112,34 @@ def __init__(
include_in_schema=include_in_schema,
)
- self._connection = None
self._fetch_sub = None
self.subscription = None
self.producer = None
+ self._connection_state: SubscriberState = EmptySubscriberState()
+
@override
def _setup( # type: ignore[override]
self,
*,
- connection: ConnectionType,
+ connection_state: "BrokerState",
+ os_declarer: "OSBucketDeclarer",
+ kv_declarer: "KVBucketDeclarer",
# basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- state: "SetupState",
+ state: "Pointer[BasicState]",
) -> None:
- self._connection = connection
+ self._connection_state = ConnectedSubscriberState(
+ parent_state=connection_state,
+ os_declarer=os_declarer,
+ kv_declarer=kv_declarer,
+ )
super()._setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
@@ -157,12 +154,10 @@ def clear_subject(self) -> str:
async def start(self) -> None:
"""Create NATS subscription and start consume tasks."""
- assert self._connection, NOT_CONNECTED_YET # nosec B101
-
await super().start()
if self.calls:
- await self._create_subscription(connection=self._connection)
+ await self._create_subscription()
async def close(self) -> None:
"""Clean up handler subscription, cancel consume task in graceful mode."""
@@ -177,11 +172,7 @@ async def close(self) -> None:
self.subscription = None
@abstractmethod
- async def _create_subscription(
- self,
- *,
- connection: ConnectionType,
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription object to consume messages."""
raise NotImplementedError
@@ -227,7 +218,7 @@ def _resolved_subject_string(self) -> str:
return self.subject or ", ".join(self.config.filter_subjects or ())
-class _DefaultSubscriber(LogicSubscriber[ConnectionType, MsgType]):
+class _DefaultSubscriber(LogicSubscriber[MsgType]):
def __init__(
self,
*,
@@ -238,10 +229,9 @@ def __init__(
# Subscriber args
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
# AsyncAPI args
title_: Optional[str],
@@ -256,9 +246,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -270,17 +259,12 @@ def __init__(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence[FakePublisher]:
- """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope."""
- if self._producer is None:
- return ()
-
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "subject": message.reply_to,
- },
+ NatsFakePublisher(
+ producer=self._state.get().producer,
+ subject=message.reply_to,
),
)
@@ -298,74 +282,7 @@ def get_log_context(
)
-class _TasksMixin(LogicSubscriber[Any, Any]):
- def __init__(self, **kwargs: Any) -> None:
- self.tasks: list[asyncio.Task[Any]] = []
-
- super().__init__(**kwargs)
-
- def add_task(self, coro: Coroutine[Any, Any, Any]) -> None:
- self.tasks.append(asyncio.create_task(coro))
-
- async def close(self) -> None:
- """Clean up handler subscription, cancel consume task in graceful mode."""
- await super().close()
-
- for task in self.tasks:
- if not task.done():
- task.cancel()
-
- self.tasks = []
-
-
-class _ConcurrentMixin(_TasksMixin):
- send_stream: "MemoryObjectSendStream[Msg]"
- receive_stream: "MemoryObjectReceiveStream[Msg]"
-
- def __init__(
- self,
- *,
- max_workers: int,
- **kwargs: Any,
- ) -> None:
- self.max_workers = max_workers
-
- self.send_stream, self.receive_stream = anyio.create_memory_object_stream(
- max_buffer_size=max_workers,
- )
- self.limiter = anyio.Semaphore(max_workers)
-
- super().__init__(**kwargs)
-
- def start_consume_task(self) -> None:
- self.add_task(self._serve_consume_queue())
-
- async def _serve_consume_queue(
- self,
- ) -> None:
- """Endless task consuming messages from in-memory queue.
-
- Suitable to batch messages by amount, timestamps, etc and call `consume` for this batches.
- """
- async with anyio.create_task_group() as tg:
- async for msg in self.receive_stream:
- tg.start_soon(self._consume_msg, msg)
-
- async def _consume_msg(
- self,
- msg: "Msg",
- ) -> None:
- """Proxy method to call `self.consume` with semaphore block."""
- async with self.limiter:
- await self.consume(msg)
-
- async def _put_msg(self, msg: "Msg") -> None:
- """Proxy method to put msg into in-memory queue with semaphore block."""
- async with self.limiter:
- await self.send_stream.send(msg)
-
-
-class CoreSubscriber(_DefaultSubscriber["Client", "Msg"]):
+class CoreSubscriber(_DefaultSubscriber["Msg"]):
subscription: Optional["Subscription"]
_fetch_sub: Optional["Subscription"]
@@ -378,17 +295,15 @@ def __init__(
queue: str,
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
# AsyncAPI args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> None:
- parser_ = NatsParser(pattern=subject, no_ack=no_ack)
+ parser_ = NatsParser(pattern=subject)
self.queue = queue
@@ -400,9 +315,8 @@ def __init__(
default_parser=parser_.parse_message,
default_decoder=parser_.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=AckPolicy.DO_NOTHING,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -417,13 +331,12 @@ async def get_one(
*,
timeout: float = 5.0,
) -> "Optional[NatsMessage]":
- assert self._connection, "Please, start() subscriber first" # nosec B101
assert ( # nosec B101
not self.calls
), "You can't use `get_one` method if subscriber has registered handlers."
if self._fetch_sub is None:
- fetch_sub = self._fetch_sub = await self._connection.subscribe(
+ fetch_sub = self._fetch_sub = await self._connection_state.client.subscribe(
subject=self.clear_subject,
queue=self.queue,
**self.extra_options,
@@ -436,25 +349,25 @@ async def get_one(
except TimeoutError:
return None
+ context = self._state.get().di_state.context
+
msg: NatsMessage = await process_msg( # type: ignore[assignment]
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
return msg
@override
- async def _create_subscription(
- self,
- *,
- connection: "Client",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
- self.subscription = await connection.subscribe(
+ self.subscription = await self._connection_state.client.subscribe(
subject=self.clear_subject,
queue=self.queue,
cb=self.consume,
@@ -477,7 +390,7 @@ def get_log_context(
class ConcurrentCoreSubscriber(
- _ConcurrentMixin,
+ ConcurrentMixin,
CoreSubscriber,
):
def __init__(
@@ -490,10 +403,8 @@ def __init__(
queue: str,
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
# AsyncAPI args
title_: Optional[str],
@@ -508,9 +419,7 @@ def __init__(
queue=queue,
extra_options=extra_options,
# Propagated args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -520,18 +429,14 @@ def __init__(
)
@override
- async def _create_subscription(
- self,
- *,
- connection: "Client",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
self.start_consume_task()
- self.subscription = await connection.subscribe(
+ self.subscription = await self._connection_state.client.subscribe(
subject=self.clear_subject,
queue=self.queue,
cb=self._put_msg,
@@ -539,7 +444,7 @@ async def _create_subscription(
)
-class _StreamSubscriber(_DefaultSubscriber["JetStreamContext", "Msg"]):
+class _StreamSubscriber(_DefaultSubscriber["Msg"]):
_fetch_sub: Optional["JetStreamContext.PullSubscription"]
def __init__(
@@ -552,10 +457,9 @@ def __init__(
queue: str,
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
# AsyncAPI args
title_: Optional[str],
@@ -575,9 +479,8 @@ def __init__(
default_parser=parser_.parse_message,
default_decoder=parser_.decode_message,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -607,7 +510,6 @@ async def get_one(
*,
timeout: float = 5,
) -> Optional["NatsMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
assert ( # nosec B101
not self.calls
), "You can't use `get_one` method if subscriber has registered handlers."
@@ -622,7 +524,7 @@ async def get_one(
if inbox_prefix := self.extra_options.get("inbox_prefix"):
extra_options["inbox_prefix"] = inbox_prefix
- self._fetch_sub = await self._connection.pull_subscribe(
+ self._fetch_sub = await self._connection_state.js.pull_subscribe(
subject=self.clear_subject,
config=self.config,
**extra_options,
@@ -638,9 +540,13 @@ async def get_one(
except (TimeoutError, ConnectionClosedError):
return None
+ context = self._state.get().di_state.context
+
msg: NatsMessage = await process_msg( # type: ignore[assignment]
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -651,16 +557,12 @@ class PushStreamSubscription(_StreamSubscriber):
subscription: Optional["JetStreamContext.PushSubscription"]
@override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
- self.subscription = await connection.subscribe(
+ self.subscription = await self._connection_state.js.subscribe(
subject=self.clear_subject,
queue=self.queue,
cb=self.consume,
@@ -670,7 +572,7 @@ async def _create_subscription(
class ConcurrentPushStreamSubscriber(
- _ConcurrentMixin,
+ ConcurrentMixin,
_StreamSubscriber,
):
subscription: Optional["JetStreamContext.PushSubscription"]
@@ -686,10 +588,9 @@ def __init__(
queue: str,
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
# AsyncAPI args
title_: Optional[str],
@@ -705,9 +606,8 @@ def __init__(
queue=queue,
extra_options=extra_options,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -717,18 +617,14 @@ def __init__(
)
@override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
self.start_consume_task()
- self.subscription = await connection.subscribe(
+ self.subscription = await self._connection_state.js.subscribe(
subject=self.clear_subject,
queue=self.queue,
cb=self._put_msg,
@@ -738,7 +634,7 @@ async def _create_subscription(
class PullStreamSubscriber(
- _TasksMixin,
+ TasksMixin,
_StreamSubscriber,
):
subscription: Optional["JetStreamContext.PullSubscription"]
@@ -753,10 +649,9 @@ def __init__(
config: "ConsumerConfig",
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
# AsyncAPI args
title_: Optional[str],
@@ -773,9 +668,8 @@ def __init__(
extra_options=extra_options,
queue="",
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -785,16 +679,12 @@ def __init__(
)
@override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
- self.subscription = await connection.pull_subscribe(
+ self.subscription = await self._connection_state.js.pull_subscribe(
subject=self.clear_subject,
config=self.config,
**self.extra_options,
@@ -823,7 +713,7 @@ async def _consume_pull(
class ConcurrentPullStreamSubscriber(
- _ConcurrentMixin,
+ ConcurrentMixin,
PullStreamSubscriber,
):
def __init__(
@@ -837,10 +727,9 @@ def __init__(
config: "ConsumerConfig",
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
# AsyncAPI args
title_: Optional[str],
@@ -856,9 +745,8 @@ def __init__(
config=config,
extra_options=extra_options,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -868,18 +756,14 @@ def __init__(
)
@override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
self.start_consume_task()
- self.subscription = await connection.pull_subscribe(
+ self.subscription = await self._connection_state.js.pull_subscribe(
subject=self.clear_subject,
config=self.config,
**self.extra_options,
@@ -888,8 +772,8 @@ async def _create_subscription(
class BatchPullStreamSubscriber(
- _TasksMixin,
- _DefaultSubscriber["JetStreamContext", list["Msg"]],
+ TasksMixin,
+ _DefaultSubscriber[list["Msg"]],
):
"""Batch-message consumer class."""
@@ -906,10 +790,9 @@ def __init__(
pull_sub: "PullSub",
extra_options: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[list[Msg]]"],
# AsyncAPI args
title_: Optional[str],
@@ -929,9 +812,8 @@ def __init__(
default_parser=parser.parse_batch,
default_decoder=parser.decode_batch,
# Propagated args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI args
@@ -946,13 +828,14 @@ async def get_one(
*,
timeout: float = 5,
) -> Optional["NatsMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
assert ( # nosec B101
not self.calls
), "You can't use `get_one` method if subscriber has registered handlers."
if not self._fetch_sub:
- fetch_sub = self._fetch_sub = await self._connection.pull_subscribe(
+ fetch_sub = (
+ self._fetch_sub
+ ) = await self._connection_state.js.pull_subscribe(
subject=self.clear_subject,
config=self.config,
**self.extra_options,
@@ -968,25 +851,27 @@ async def get_one(
except TimeoutError:
return None
- msg: NatsMessage = await process_msg(
- msg=raw_message,
- middlewares=self._broker_middlewares,
- parser=self._parser,
- decoder=self._decoder,
+ context = self._state.get().di_state.context
+
+ return cast(
+ NatsMessage,
+ await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ ),
)
- return msg
@override
- async def _create_subscription(
- self,
- *,
- connection: "JetStreamContext",
- ) -> None:
+ async def _create_subscription(self) -> None:
"""Create NATS subscription and start consume task."""
if self.subscription:
return
- self.subscription = await connection.pull_subscribe(
+ self.subscription = await self._connection_state.js.pull_subscribe(
subject=self.clear_subject,
config=self.config,
**self.extra_options,
@@ -1009,8 +894,8 @@ async def _consume_pull(self) -> None:
class KeyValueWatchSubscriber(
- _TasksMixin,
- LogicSubscriber["KVBucketDeclarer", "KeyValue.Entry"],
+ TasksMixin,
+ LogicSubscriber["KeyValue.Entry"],
):
subscription: Optional["UnsubscribeAdapter[KeyValue.KeyWatcher]"]
_fetch_sub: Optional[UnsubscribeAdapter["KeyValue.KeyWatcher"]]
@@ -1021,7 +906,7 @@ def __init__(
subject: str,
config: "ConsumerConfig",
kv_watch: "KvWatch",
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[KeyValue.Entry]"],
# AsyncAPI args
title_: Optional[str],
@@ -1035,9 +920,8 @@ def __init__(
subject=subject,
config=config,
extra_options=None,
- no_ack=True,
+ ack_policy=AckPolicy.DO_NOTHING,
no_reply=True,
- retry=False,
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
broker_middlewares=broker_middlewares,
@@ -1054,13 +938,12 @@ async def get_one(
*,
timeout: float = 5,
) -> Optional["NatsKvMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
assert ( # nosec B101
not self.calls
), "You can't use `get_one` method if subscriber has registered handlers."
if not self._fetch_sub:
- bucket = await self._connection.create_key_value(
+ bucket = await self._connection_state.kv_declarer.create_key_value(
bucket=self.kv_watch.name,
declare=self.kv_watch.declare,
)
@@ -1081,28 +964,29 @@ async def get_one(
sleep_interval = timeout / 10
with anyio.move_on_after(timeout):
while ( # noqa: ASYNC110
raw_message := await fetch_sub.obj.updates(timeout) # type: ignore[no-untyped-call]
) is None:
await anyio.sleep(sleep_interval)
+ context = self._state.get().di_state.context
+
msg: NatsKvMessage = await process_msg(
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
return msg
@override
- async def _create_subscription(
- self,
- *,
- connection: "KVBucketDeclarer",
- ) -> None:
+ async def _create_subscription(self) -> None:
if self.subscription:
return
- bucket = await connection.create_key_value(
+ bucket = await self._connection_state.kv_declarer.create_key_value(
bucket=self.kv_watch.name,
declare=self.kv_watch.declare,
)
@@ -1117,9 +1001,9 @@ async def _create_subscription(
),
)
- self.add_task(self._consume_watch())
+ self.add_task(self.__consume_watch())
- async def _consume_watch(self) -> None:
+ async def __consume_watch(self) -> None:
assert self.subscription, "You should call `create_subscription` at first." # nosec B101
key_watcher = self.subscription.obj
@@ -1128,7 +1012,8 @@ async def _consume_watch(self) -> None:
with suppress(ConnectionClosedError, TimeoutError):
message = cast(
Optional["KeyValue.Entry"],
await key_watcher.updates(self.kv_watch.timeout), # type: ignore[no-untyped-call]
)
if message:
@@ -1140,8 +1025,8 @@ def _make_response_publisher(
"StreamMessage[KeyValue.Entry]",
Doc("Message requiring reply"),
],
- ) -> Sequence[FakePublisher]:
- """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope."""
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
return ()
def get_log_context(
@@ -1163,8 +1048,8 @@ def get_log_context(
class ObjStoreWatchSubscriber(
- _TasksMixin,
- LogicSubscriber["OSBucketDeclarer", ObjectInfo],
+ TasksMixin,
+ LogicSubscriber[ObjectInfo],
):
subscription: Optional["UnsubscribeAdapter[ObjectStore.ObjectWatcher]"]
_fetch_sub: Optional[UnsubscribeAdapter["ObjectStore.ObjectWatcher"]]
@@ -1175,7 +1060,7 @@ def __init__(
subject: str,
config: "ConsumerConfig",
obj_watch: "ObjWatch",
- broker_dependencies: Iterable[Depends],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[list[Msg]]"],
# AsyncAPI args
title_: Optional[str],
@@ -1191,9 +1076,8 @@ def __init__(
subject=subject,
config=config,
extra_options=None,
- no_ack=True,
+ ack_policy=AckPolicy.DO_NOTHING,
no_reply=True,
- retry=False,
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
broker_middlewares=broker_middlewares,
@@ -1210,13 +1094,12 @@ async def get_one(
*,
timeout: float = 5,
) -> Optional["NatsObjMessage"]:
- assert self._connection, "Please, start() subscriber first" # nosec B101
assert ( # nosec B101
not self.calls
), "You can't use `get_one` method if subscriber has registered handlers."
if not self._fetch_sub:
- self.bucket = await self._connection.create_object_store(
+ self.bucket = await self._connection_state.os_declarer.create_object_store(
bucket=self.subject,
declare=self.obj_watch.declare,
)
@@ -1236,35 +1119,36 @@ async def get_one(
sleep_interval = timeout / 10
with anyio.move_on_after(timeout):
while ( # noqa: ASYNC110
raw_message := await fetch_sub.obj.updates(timeout) # type: ignore[no-untyped-call]
) is None:
await anyio.sleep(sleep_interval)
+ context = self._state.get().di_state.context
+
msg: NatsObjMessage = await process_msg(
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
return msg
@override
- async def _create_subscription(
- self,
- *,
- connection: "OSBucketDeclarer",
- ) -> None:
+ async def _create_subscription(self) -> None:
if self.subscription:
return
- self.bucket = await connection.create_object_store(
+ self.bucket = await self._connection_state.os_declarer.create_object_store(
bucket=self.subject,
declare=self.obj_watch.declare,
)
- self.add_task(self._consume_watch())
+ self.add_task(self.__consume_watch())
- async def _consume_watch(self) -> None:
+ async def __consume_watch(self) -> None:
assert self.bucket, "You should call `create_subscription` at first." # nosec B101
# Should be created inside task to avoid nats-py lock
@@ -1276,10 +1160,13 @@ async def _consume_watch(self) -> None:
self.subscription = UnsubscribeAdapter["ObjectStore.ObjectWatcher"](obj_watch)
+ context = self._state.get().di_state.context
+
while self.running:
with suppress(TimeoutError):
message = cast(
Optional["ObjectInfo"],
- await obj_watch.updates(self.obj_watch.timeout),
+ await obj_watch.updates(self.obj_watch.timeout), # type: ignore[no-untyped-call]
)
@@ -1293,8 +1180,8 @@ def _make_response_publisher(
"StreamMessage[ObjectInfo]",
Doc("Message requiring reply"),
],
- ) -> Sequence[FakePublisher]:
- """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope."""
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
return ()
def get_log_context(
diff --git a/faststream/nats/subscriber/usecases/__init__.py b/faststream/nats/subscriber/usecases/__init__.py
new file mode 100644
index 0000000000..040a9f9680
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/__init__.py
@@ -0,0 +1,26 @@
+from .basic import LogicSubscriber
+from .core_subscriber import ConcurrentCoreSubscriber, CoreSubscriber
+from .key_value_subscriber import KeyValueWatchSubscriber
+from .object_storage_subscriber import ObjStoreWatchSubscriber
+from .stream_pull_subscriber import (
+ BatchPullStreamSubscriber,
+ ConcurrentPullStreamSubscriber,
+ PullStreamSubscriber,
+)
+from .stream_push_subscriber import (
+ ConcurrentPushStreamSubscriber,
+ PushStreamSubscription,
+)
+
+__all__ = (
+ "BatchPullStreamSubscriber",
+ "ConcurrentCoreSubscriber",
+ "ConcurrentPullStreamSubscriber",
+ "ConcurrentPushStreamSubscriber",
+ "CoreSubscriber",
+ "KeyValueWatchSubscriber",
+ "LogicSubscriber",
+ "ObjStoreWatchSubscriber",
+ "PullStreamSubscriber",
+ "PushStreamSubscription",
+)
diff --git a/faststream/nats/subscriber/usecases/basic.py b/faststream/nats/subscriber/usecases/basic.py
new file mode 100644
index 0000000000..3b5f30e1fc
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/basic.py
@@ -0,0 +1,247 @@
+from abc import abstractmethod
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Generic,
+ Optional,
+)
+
+from typing_extensions import override
+
+from faststream._internal.subscriber.usecase import SubscriberUsecase
+from faststream._internal.types import MsgType
+from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
+from faststream.nats.publisher.fake import NatsFakePublisher
+from faststream.nats.schemas.js_stream import compile_nats_wildcard
+from faststream.nats.subscriber.adapters import (
+ Unsubscriptable,
+)
+from faststream.nats.subscriber.state import (
+ ConnectedSubscriberState,
+ EmptySubscriberState,
+ SubscriberState,
+)
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ )
+ from faststream._internal.publisher.proto import BasePublisherProto, ProducerProto
+ from faststream._internal.state import (
+ BrokerState as BasicState,
+ Pointer,
+ )
+ from faststream._internal.types import (
+ AsyncCallable,
+ BrokerMiddleware,
+ CustomCallable,
+ )
+ from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.broker.state import BrokerState
+ from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer
+
+
+class LogicSubscriber(SubscriberUsecase[MsgType], Generic[MsgType]):
+ """Basic class for all NATS Subscriber types (KeyValue, ObjectStorage, Core & JetStream)."""
+
+ subscription: Optional[Unsubscriptable]
+ _fetch_sub: Optional[Unsubscriptable]
+ producer: Optional["ProducerProto"]
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ self.subject = subject
+ self.config = config
+
+ self.extra_options = extra_options or {}
+
+ super().__init__(
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ self._fetch_sub = None
+ self.subscription = None
+ self.producer = None
+
+ self._connection_state: SubscriberState = EmptySubscriberState()
+
+ @override
+ def _setup( # type: ignore[override]
+ self,
+ *,
+ connection_state: "BrokerState",
+ os_declarer: "OSBucketDeclarer",
+ kv_declarer: "KVBucketDeclarer",
+ # basic args
+ extra_context: "AnyDict",
+ # broker options
+ broker_parser: Optional["CustomCallable"],
+ broker_decoder: Optional["CustomCallable"],
+ # dependant args
+ state: "Pointer[BasicState]",
+ ) -> None:
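+ # Swap the null-object state for a live one now that the broker connection exists.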
+ self._connection_state = ConnectedSubscriberState(
+ parent_state=connection_state,
+ os_declarer=os_declarer,
+ kv_declarer=kv_declarer,
+ )
+
+ super()._setup(
+ extra_context=extra_context,
+ broker_parser=broker_parser,
+ broker_decoder=broker_decoder,
+ state=state,
+ )
+
+ @property
+ def clear_subject(self) -> str:
+ """Compile `test.{name}` to `test.*` subject."""
+ _, path = compile_nats_wildcard(self.subject)
+ return path
+
+ async def start(self) -> None:
+ """Create NATS subscription and start consume tasks."""
+ await super().start()
+
+ if self.calls:
+ await self._create_subscription()
+
+ async def close(self) -> None:
+ """Clean up handler subscription, cancel consume task in graceful mode."""
+ await super().close()
+
+ if self.subscription is not None:
+ await self.subscription.unsubscribe()
+ self.subscription = None
+
+ if self._fetch_sub is not None:
+ await self._fetch_sub.unsubscribe()
+ self._fetch_sub = None
+
+ @abstractmethod
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription object to consume messages."""
+ raise NotImplementedError
+
+ @staticmethod
+ def build_log_context(
+ message: Optional["StreamMessage[MsgType]"],
+ subject: str,
+ *,
+ queue: str = "",
+ stream: str = "",
+ ) -> dict[str, str]:
+ """Static method to build log context out of `self.consume` scope."""
+ return {
+ "subject": subject,
+ "queue": queue,
+ "stream": stream,
+ "message_id": getattr(message, "message_id", ""),
+ }
+
+ def add_prefix(self, prefix: str) -> None:
+ """Include Subscriber in router."""
+ if self.subject:
+ self.subject = f"{prefix}{self.subject}"
+ else:
+ self.config.filter_subjects = [
+ f"{prefix}{subject}" for subject in (self.config.filter_subjects or ())
+ ]
+
+ @property
+ def _resolved_subject_string(self) -> str:
+ return self.subject or ", ".join(self.config.filter_subjects or ())
+
+
+class DefaultSubscriber(LogicSubscriber[MsgType]):
+ """Basic class for Core & JetStream Subscribers."""
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ # default args
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ default_parser: "AsyncCallable",
+ default_decoder: "AsyncCallable",
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[MsgType]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=default_parser,
+ default_decoder=default_decoder,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ def _make_response_publisher(
+ self,
+ message: "StreamMessage[Any]",
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
+ return (
+ NatsFakePublisher(
+ producer=self._state.get().producer,
+ subject=message.reply_to,
+ ),
+ )
+
+ def get_log_context(
+ self,
+ message: Optional["StreamMessage[MsgType]"],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ )
diff --git a/faststream/nats/subscriber/usecases/core_subscriber.py b/faststream/nats/subscriber/usecases/core_subscriber.py
new file mode 100644
index 0000000000..3cff6547d2
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/core_subscriber.py
@@ -0,0 +1,186 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+)
+
+from nats.errors import TimeoutError
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.nats.parser import NatsParser
+
+from .basic import DefaultSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.aio.subscription import Subscription
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.message import StreamMessage
+ from faststream.nats.message import NatsMessage
+
+
+class CoreSubscriber(DefaultSubscriber["Msg"]):
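+ """Subscriber working over a plain core NATS subscription (no JetStream)."""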
+ subscription: Optional["Subscription"]
+ _fetch_sub: Optional["Subscription"]
+
+ def __init__(
+ self,
+ *,
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ queue: str,
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ parser_ = NatsParser(pattern=subject)
+
+ self.queue = queue
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=parser_.parse_message,
+ default_decoder=parser_.decode_message,
+ # Propagated args
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5.0,
+ ) -> "Optional[NatsMessage]":
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
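+ # Lazily create a dedicated fetch subscription on the first call and cache it.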
+ if self._fetch_sub is None:
+ fetch_sub = self._fetch_sub = await self._connection_state.client.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ **self.extra_options,
+ )
+ else:
+ fetch_sub = self._fetch_sub
+
+ try:
+ raw_message = await fetch_sub.next_msg(timeout=timeout)
+ except TimeoutError:
+ return None
+
+ context = self._state.get().di_state.context
+
+ msg: NatsMessage = await process_msg( # type: ignore[assignment]
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.client.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self.consume,
+ **self.extra_options,
+ )
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[Msg]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ queue=self.queue,
+ )
+
+
+class ConcurrentCoreSubscriber(ConcurrentMixin, CoreSubscriber):
+ def __init__(
+ self,
+ *,
+ max_workers: int,
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ queue: str,
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ max_workers=max_workers,
+ # basic args
+ subject=subject,
+ config=config,
+ queue=queue,
+ extra_options=extra_options,
+ # Propagated args
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.start_consume_task()
+
+ self.subscription = await self._connection_state.client.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self._put_msg,
+ **self.extra_options,
+ )
diff --git a/faststream/nats/subscriber/usecases/key_value_subscriber.py b/faststream/nats/subscriber/usecases/key_value_subscriber.py
new file mode 100644
index 0000000000..cf4a2a3f4e
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/key_value_subscriber.py
@@ -0,0 +1,188 @@
+from collections.abc import Iterable
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+ cast,
+)
+
+import anyio
+from nats.errors import ConnectionClosedError, TimeoutError
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.mixins import TasksMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.nats.parser import (
+ KvParser,
+)
+from faststream.nats.subscriber.adapters import (
+ UnsubscribeAdapter,
+)
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.js.api import ConsumerConfig
+ from nats.js.kv import KeyValue
+
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.nats.message import NatsKvMessage
+ from faststream.nats.schemas import KvWatch
+
+
+class KeyValueWatchSubscriber(
+ TasksMixin,
+ LogicSubscriber["KeyValue.Entry"],
+):
+ subscription: Optional["UnsubscribeAdapter[KeyValue.KeyWatcher]"]
+ _fetch_sub: Optional[UnsubscribeAdapter["KeyValue.KeyWatcher"]]
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ kv_watch: "KvWatch",
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[KeyValue.Entry]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ parser = KvParser(pattern=subject)
+ self.kv_watch = kv_watch
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=None,
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=True,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsKvMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
+ bucket = await self._connection_state.kv_declarer.create_key_value(
+ bucket=self.kv_watch.name,
+ declare=self.kv_watch.declare,
+ )
+
+ fetch_sub = self._fetch_sub = UnsubscribeAdapter["KeyValue.KeyWatcher"](
+ await bucket.watch(
+ keys=self.clear_subject,
+ headers_only=self.kv_watch.headers_only,
+ include_history=self.kv_watch.include_history,
+ ignore_deletes=self.kv_watch.ignore_deletes,
+ meta_only=self.kv_watch.meta_only,
+ ),
+ )
+ else:
+ fetch_sub = self._fetch_sub
+
+ raw_message = None
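+ # Poll the watcher in short intervals until an entry arrives or the deadline hits.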
+ sleep_interval = timeout / 10
+ with anyio.move_on_after(timeout):
+ while ( # noqa: ASYNC110
+ raw_message := await fetch_sub.obj.updates(timeout) # type: ignore[no-untyped-call]
+ ) is None:
+ await anyio.sleep(sleep_interval)
+
+ context = self._state.get().di_state.context
+
+ msg: NatsKvMessage = await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ @override
+ async def _create_subscription(self) -> None:
+ if self.subscription:
+ return
+
+ bucket = await self._connection_state.kv_declarer.create_key_value(
+ bucket=self.kv_watch.name,
+ declare=self.kv_watch.declare,
+ )
+
+ self.subscription = UnsubscribeAdapter["KeyValue.KeyWatcher"](
+ await bucket.watch(
+ keys=self.clear_subject,
+ headers_only=self.kv_watch.headers_only,
+ include_history=self.kv_watch.include_history,
+ ignore_deletes=self.kv_watch.ignore_deletes,
+ meta_only=self.kv_watch.meta_only,
+ ),
+ )
+
+ self.add_task(self.__consume_watch())
+
+ async def __consume_watch(self) -> None:
+ assert self.subscription, "You should call `create_subscription` first." # nosec B101
+
+ key_watcher = self.subscription.obj
+
+ while self.running:
+ with suppress(ConnectionClosedError, TimeoutError):
+ message = cast(
+ Optional["KeyValue.Entry"],
+ await key_watcher.updates(self.kv_watch.timeout), # type: ignore[no-untyped-call]
+ )
+
+ if message:
+ await self.consume(message)
+
+ def _make_response_publisher(
+ self,
+ message: Annotated[
+ "StreamMessage[KeyValue.Entry]",
+ Doc("Message requiring reply"),
+ ],
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
+ return ()
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[KeyValue.Entry]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ stream=self.kv_watch.name,
+ )
diff --git a/faststream/nats/subscriber/usecases/object_storage_subscriber.py b/faststream/nats/subscriber/usecases/object_storage_subscriber.py
new file mode 100644
index 0000000000..a1d5bace48
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/object_storage_subscriber.py
@@ -0,0 +1,192 @@
+from collections.abc import Iterable
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+ cast,
+)
+
+import anyio
+from nats.errors import TimeoutError
+from nats.js.api import ConsumerConfig, ObjectInfo
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.mixins import TasksMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
+from faststream.nats.parser import (
+ ObjParser,
+)
+from faststream.nats.subscriber.adapters import (
+ UnsubscribeAdapter,
+)
+
+from .basic import LogicSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js.object_store import ObjectStore
+
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.nats.message import NatsObjMessage
+ from faststream.nats.schemas import ObjWatch
+
+
+OBJECT_STORAGE_CONTEXT_KEY = "__object_storage"
+
+
+class ObjStoreWatchSubscriber(
+ TasksMixin,
+ LogicSubscriber[ObjectInfo],
+):
+ subscription: Optional["UnsubscribeAdapter[ObjectStore.ObjectWatcher]"]
+ _fetch_sub: Optional[UnsubscribeAdapter["ObjectStore.ObjectWatcher"]]
+
+ def __init__(
+ self,
+ *,
+ subject: str,
+ config: "ConsumerConfig",
+ obj_watch: "ObjWatch",
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[list[Msg]]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ parser = ObjParser(pattern="")
+
+ self.obj_watch = obj_watch
+ self.obj_watch_conn = None
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=None,
+ ack_policy=AckPolicy.DO_NOTHING,
+ no_reply=True,
+ default_parser=parser.parse_message,
+ default_decoder=parser.decode_message,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsObjMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
+ self.bucket = await self._connection_state.os_declarer.create_object_store(
+ bucket=self.subject,
+ declare=self.obj_watch.declare,
+ )
+
+ obj_watch = await self.bucket.watch(
+ ignore_deletes=self.obj_watch.ignore_deletes,
+ include_history=self.obj_watch.include_history,
+ meta_only=self.obj_watch.meta_only,
+ )
+ fetch_sub = self._fetch_sub = UnsubscribeAdapter[
+ "ObjectStore.ObjectWatcher"
+ ](obj_watch)
+ else:
+ fetch_sub = self._fetch_sub
+
+ raw_message = None
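+ # Same polling pattern as the KV subscriber: short sleeps until an update or the deadline.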
+ sleep_interval = timeout / 10
+ with anyio.move_on_after(timeout):
+ while ( # noqa: ASYNC110
+ raw_message := await fetch_sub.obj.updates(timeout) # type: ignore[no-untyped-call]
+ ) is None:
+ await anyio.sleep(sleep_interval)
+
+ context = self._state.get().di_state.context
+
+ msg: NatsObjMessage = await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
+
+ @override
+ async def _create_subscription(self) -> None:
+ if self.subscription:
+ return
+
+ self.bucket = await self._connection_state.os_declarer.create_object_store(
+ bucket=self.subject,
+ declare=self.obj_watch.declare,
+ )
+
+ self.add_task(self.__consume_watch())
+
+ async def __consume_watch(self) -> None:
+ assert self.bucket, "You should call `create_subscription` first." # nosec B101
+
+ # Should be created inside task to avoid nats-py lock
+ obj_watch = await self.bucket.watch(
+ ignore_deletes=self.obj_watch.ignore_deletes,
+ include_history=self.obj_watch.include_history,
+ meta_only=self.obj_watch.meta_only,
+ )
+
+ self.subscription = UnsubscribeAdapter["ObjectStore.ObjectWatcher"](obj_watch)
+
+ context = self._state.get().di_state.context
+
+ while self.running:
+ with suppress(TimeoutError):
+ message = cast(
+ Optional["ObjectInfo"],
+ await obj_watch.updates(self.obj_watch.timeout), # type: ignore[no-untyped-call]
+ )
+
+ if message:
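+ # Put the bucket into the DI context for the duration of message processing.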
+ with context.scope(OBJECT_STORAGE_CONTEXT_KEY, self.bucket):
+ await self.consume(message)
+
+ def _make_response_publisher(
+ self,
+ message: Annotated[
+ "StreamMessage[ObjectInfo]",
+ Doc("Message requiring reply"),
+ ],
+ ) -> Iterable["BasePublisherProto"]:
+ """Create Publisher objects to use it as one of `publishers` in `self.consume` scope."""
+ return ()
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[ObjectInfo]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self.subject,
+ )
diff --git a/faststream/nats/subscriber/usecases/stream_basic.py b/faststream/nats/subscriber/usecases/stream_basic.py
new file mode 100644
index 0000000000..c053f2ce5e
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/stream_basic.py
@@ -0,0 +1,142 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Optional,
+)
+
+from nats.errors import ConnectionClosedError, TimeoutError
+from typing_extensions import Doc, override
+
+from faststream._internal.subscriber.utils import process_msg
+from faststream.nats.parser import (
+ JsParser,
+)
+
+from .basic import DefaultSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js import JetStreamContext
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ )
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.message import NatsMessage
+ from faststream.nats.schemas import JStream
+
+
+class StreamSubscriber(DefaultSubscriber["Msg"]):
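+ """Base subscriber for JetStream consumers bound to a stream."""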
+ _fetch_sub: Optional["JetStreamContext.PullSubscription"]
+
+ def __init__(
+ self,
+ *,
+ stream: "JStream",
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ queue: str,
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ parser_ = JsParser(pattern=subject)
+
+ self.queue = queue
+ self.stream = stream
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=parser_.parse_message,
+ default_decoder=parser_.decode_message,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ def get_log_context(
+ self,
+ message: Annotated[
+ Optional["StreamMessage[Msg]"],
+ Doc("Message which we are building context for"),
+ ],
+ ) -> dict[str, str]:
+ """Log context factory using in `self.consume` scope."""
+ return self.build_log_context(
+ message=message,
+ subject=self._resolved_subject_string,
+ queue=self.queue,
+ stream=self.stream.name,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
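+ # Forward only the subset of extra_options that pull_subscribe accepts.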
+ extra_options = {
+ "pending_bytes_limit": self.extra_options["pending_bytes_limit"],
+ "pending_msgs_limit": self.extra_options["pending_msgs_limit"],
+ "durable": self.extra_options["durable"],
+ "stream": self.extra_options["stream"],
+ }
+ if inbox_prefix := self.extra_options.get("inbox_prefix"):
+ extra_options["inbox_prefix"] = inbox_prefix
+
+ self._fetch_sub = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **extra_options,
+ )
+
+ try:
+ raw_message = (
+ await self._fetch_sub.fetch(
+ batch=1,
+ timeout=timeout,
+ )
+ )[0]
+ except (TimeoutError, ConnectionClosedError):
+ return None
+
+ context = self._state.get().di_state.context
+
+ msg: NatsMessage = await process_msg( # type: ignore[assignment]
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ return msg
diff --git a/faststream/nats/subscriber/usecases/stream_pull_subscriber.py b/faststream/nats/subscriber/usecases/stream_pull_subscriber.py
new file mode 100644
index 0000000000..44d82e89dd
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/stream_pull_subscriber.py
@@ -0,0 +1,298 @@
+from collections.abc import Awaitable, Iterable
+from contextlib import suppress
+from typing import (
+ TYPE_CHECKING,
+ Callable,
+ Optional,
+ cast,
+)
+
+import anyio
+from nats.errors import ConnectionClosedError, TimeoutError
+from typing_extensions import override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin, TasksMixin
+from faststream._internal.subscriber.utils import process_msg
+from faststream.nats.message import NatsMessage
+from faststream.nats.parser import (
+ BatchParser,
+)
+
+from .basic import DefaultSubscriber
+from .stream_basic import StreamSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js import JetStreamContext
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ SendableMessage,
+ )
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.schemas import JStream, PullSub
+
+
+class PullStreamSubscriber(
+ TasksMixin,
+ StreamSubscriber,
+):
+ subscription: Optional["JetStreamContext.PullSubscription"]
+
+ def __init__(
+ self,
+ *,
+ pull_sub: "PullSub",
+ stream: "JStream",
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ self.pull_sub = pull_sub
+
+ super().__init__(
+ # basic args
+ stream=stream,
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ queue="",
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ self.add_task(self._consume_pull(cb=self.consume))
+
+ async def _consume_pull(
+ self,
+ cb: Callable[["Msg"], Awaitable["SendableMessage"]],
+ ) -> None:
+ """Endless task consuming messages using NATS Pull subscriber."""
+ assert self.subscription # nosec B101
+
+ while self.running: # pragma: no branch
+ messages = []
+ with suppress(TimeoutError, ConnectionClosedError):
+ messages = await self.subscription.fetch(
+ batch=self.pull_sub.batch_size,
+ timeout=self.pull_sub.timeout,
+ )
+
+ if messages:
+ async with anyio.create_task_group() as tg:
+ for msg in messages:
+ tg.start_soon(cb, msg)
+
+
+class ConcurrentPullStreamSubscriber(
+ ConcurrentMixin,
+ PullStreamSubscriber,
+):
+ def __init__(
+ self,
+ *,
+ max_workers: int,
+ # default args
+ pull_sub: "PullSub",
+ stream: "JStream",
+ subject: str,
+ config: "ConsumerConfig",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ max_workers=max_workers,
+ # basic args
+ pull_sub=pull_sub,
+ stream=stream,
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.start_consume_task()
+
+ self.subscription = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ self.add_task(self._consume_pull(cb=self._put_msg))
+
+
+class BatchPullStreamSubscriber(
+ TasksMixin,
+ DefaultSubscriber[list["Msg"]],
+):
+ """Batch-message consumer class."""
+
+ subscription: Optional["JetStreamContext.PullSubscription"]
+ _fetch_sub: Optional["JetStreamContext.PullSubscription"]
+
+ def __init__(
+ self,
+ *,
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ stream: "JStream",
+ pull_sub: "PullSub",
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[list[Msg]]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ parser = BatchParser(pattern=subject)
+
+ self.stream = stream
+ self.pull_sub = pull_sub
+
+ super().__init__(
+ subject=subject,
+ config=config,
+ extra_options=extra_options,
+ # subscriber args
+ default_parser=parser.parse_batch,
+ default_decoder=parser.decode_batch,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def get_one(
+ self,
+ *,
+ timeout: float = 5,
+ ) -> Optional["NatsMessage"]:
+ assert ( # nosec B101
+ not self.calls
+ ), "You can't use `get_one` method if subscriber has registered handlers."
+
+ if not self._fetch_sub:
+            self._fetch_sub = await self._connection_state.js.pull_subscribe(
+                subject=self.clear_subject,
+                config=self.config,
+                **self.extra_options,
+            )
+        fetch_sub = self._fetch_sub
+
+ try:
+ raw_message = await fetch_sub.fetch(
+ batch=1,
+ timeout=timeout,
+ )
+        except (TimeoutError, ConnectionClosedError):
+ return None
+
+ context = self._state.get().di_state.context
+
+ return cast(
+ NatsMessage,
+ await process_msg(
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
+ parser=self._parser,
+ decoder=self._decoder,
+ ),
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.js.pull_subscribe(
+ subject=self.clear_subject,
+ config=self.config,
+ **self.extra_options,
+ )
+ self.add_task(self._consume_pull())
+
+ async def _consume_pull(self) -> None:
+ """Endless task consuming messages using NATS Pull subscriber."""
+ assert self.subscription, "You should call `create_subscription` at first." # nosec B101
+
+ while self.running: # pragma: no branch
+            messages = []
+            with suppress(TimeoutError, ConnectionClosedError):
+ messages = await self.subscription.fetch(
+ batch=self.pull_sub.batch_size,
+ timeout=self.pull_sub.timeout,
+ )
+
+ if messages:
+ await self.consume(messages)
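
For reference, the endless fetch loop these pull subscribers wrap reduces to the following standalone sketch against the public nats-py API. It assumes a local NATS server with JetStream enabled and an existing stream covering the subject; all names are placeholders.

```python
# Minimal sketch of the pull-consume loop; subject/durable names are placeholders.
import asyncio

import nats
from nats.errors import TimeoutError as NatsTimeoutError


async def main() -> None:
    nc = await nats.connect("nats://localhost:4222")
    js = nc.jetstream()

    sub = await js.pull_subscribe("my-subj", durable="my-durable")

    for _ in range(3):  # the real subscribers loop while `self.running`
        try:
            messages = await sub.fetch(batch=10, timeout=5.0)
        except NatsTimeoutError:
            continue  # nothing arrived within the timeout; try again

        for msg in messages:
            print(msg.data)
            await msg.ack()

    await nc.close()


asyncio.run(main())
```
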
diff --git a/faststream/nats/subscriber/usecases/stream_push_subscriber.py b/faststream/nats/subscriber/usecases/stream_push_subscriber.py
new file mode 100644
index 0000000000..ac14ae3509
--- /dev/null
+++ b/faststream/nats/subscriber/usecases/stream_push_subscriber.py
@@ -0,0 +1,106 @@
+from collections.abc import Iterable
+from typing import (
+ TYPE_CHECKING,
+ Optional,
+)
+
+from typing_extensions import override
+
+from faststream._internal.subscriber.mixins import ConcurrentMixin
+
+from .stream_basic import StreamSubscriber
+
+if TYPE_CHECKING:
+ from fast_depends.dependencies import Dependant
+ from nats.aio.msg import Msg
+ from nats.js import JetStreamContext
+ from nats.js.api import ConsumerConfig
+
+ from faststream._internal.basic_types import (
+ AnyDict,
+ )
+ from faststream._internal.types import (
+ BrokerMiddleware,
+ )
+ from faststream.middlewares import AckPolicy
+ from faststream.nats.schemas import JStream
+
+
+class PushStreamSubscription(StreamSubscriber):
+ subscription: Optional["JetStreamContext.PushSubscription"]
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.subscription = await self._connection_state.js.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self.consume,
+ config=self.config,
+ **self.extra_options,
+ )
+
+
+class ConcurrentPushStreamSubscriber(
+ ConcurrentMixin,
+ StreamSubscriber,
+):
+ subscription: Optional["JetStreamContext.PushSubscription"]
+
+ def __init__(
+ self,
+ *,
+ max_workers: int,
+ stream: "JStream",
+ # default args
+ subject: str,
+ config: "ConsumerConfig",
+ queue: str,
+ extra_options: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[Msg]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
+ super().__init__(
+ max_workers=max_workers,
+ # basic args
+ stream=stream,
+ subject=subject,
+ config=config,
+ queue=queue,
+ extra_options=extra_options,
+ # Propagated args
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_middlewares=broker_middlewares,
+ broker_dependencies=broker_dependencies,
+ # AsyncAPI args
+ description_=description_,
+ title_=title_,
+ include_in_schema=include_in_schema,
+ )
+
+ @override
+ async def _create_subscription(self) -> None:
+ """Create NATS subscription and start consume task."""
+ if self.subscription:
+ return
+
+ self.start_consume_task()
+
+ self.subscription = await self._connection_state.js.subscribe(
+ subject=self.clear_subject,
+ queue=self.queue,
+ cb=self._put_msg,
+ config=self.config,
+ **self.extra_options,
+ )
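
In contrast to the pull variants above, push subscriptions hand delivery to a callback registered with JetStream. A minimal standalone sketch of that mode, again assuming a local server with JetStream and a placeholder subject:

```python
# Minimal sketch of push-based delivery: JetStream invokes the callback per message.
import asyncio

import nats
from nats.aio.msg import Msg


async def main() -> None:
    nc = await nats.connect("nats://localhost:4222")
    js = nc.jetstream()

    async def handler(msg: Msg) -> None:
        print(msg.subject, msg.data)
        await msg.ack()

    sub = await js.subscribe("my-subj", cb=handler)

    await asyncio.sleep(5)  # let the callback drain some messages
    await sub.unsubscribe()
    await nc.close()


asyncio.run(main())
```
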
diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py
index 4df82ae989..63c9fb9a22 100644
--- a/faststream/nats/testing.py
+++ b/faststream/nats/testing.py
@@ -1,4 +1,5 @@
-from collections.abc import Generator, Iterable
+from collections.abc import Generator, Iterable, Iterator
+from contextlib import contextmanager
from typing import (
TYPE_CHECKING,
Any,
@@ -16,14 +17,16 @@
from faststream.exceptions import SubscriberNotFound
from faststream.message import encode_message, gen_cor_id
from faststream.nats.broker import NatsBroker
+from faststream.nats.broker.state import ConnectedState
from faststream.nats.parser import NatsParser
from faststream.nats.publisher.producer import NatsFastProducer
from faststream.nats.schemas.js_stream import is_subject_match_wildcard
if TYPE_CHECKING:
from faststream._internal.basic_types import SendableMessage
- from faststream.nats.publisher.publisher import SpecificationPublisher
- from faststream.nats.subscriber.usecase import LogicSubscriber
+ from faststream.nats.publisher.specified import SpecificationPublisher
+ from faststream.nats.response import NatsPublishCommand
+ from faststream.nats.subscriber.usecases.basic import LogicSubscriber
__all__ = ("TestNatsBroker",)
@@ -51,51 +54,58 @@ def create_publisher_fake_subscriber(
return sub, is_real
- @staticmethod
- async def _fake_connect( # type: ignore[override]
+ @contextmanager
+ def _patch_producer(self, broker: NatsBroker) -> Iterator[None]:
+ old_js_producer, old_producer = broker._js_producer, broker._producer
+ fake_producer = broker._js_producer = FakeProducer(broker)
+
+ broker._state.patch_value(producer=fake_producer)
+ try:
+ yield
+ finally:
+ broker._js_producer = old_js_producer
+ broker._state.patch_value(producer=old_producer)
+
+ async def _fake_connect(
+ self,
broker: NatsBroker,
*args: Any,
**kwargs: Any,
) -> AsyncMock:
- broker.stream = AsyncMock()
- broker._js_producer = broker._producer = FakeProducer( # type: ignore[assignment]
- broker,
- )
+ if not broker._connection_state:
+ broker._connection_state = ConnectedState(AsyncMock(), AsyncMock())
return AsyncMock()
+ def _fake_start(self, broker: NatsBroker, *args: Any, **kwargs: Any) -> None:
+ if not broker._connection_state:
+ broker._connection_state = ConnectedState(AsyncMock(), AsyncMock())
+ return super()._fake_start(broker, *args, **kwargs)
+
class FakeProducer(NatsFastProducer):
def __init__(self, broker: NatsBroker) -> None:
self.broker = broker
- default = NatsParser(pattern="", no_ack=False)
+ default = NatsParser(pattern="")
self._parser = resolve_custom_func(broker._parser, default.parse_message)
self._decoder = resolve_custom_func(broker._decoder, default.decode_message)
@override
async def publish( # type: ignore[override]
- self,
- message: "SendableMessage",
- subject: str,
- reply_to: str = "",
- headers: Optional[dict[str, str]] = None,
- correlation_id: Optional[str] = None,
- # NatsJSFastProducer compatibility
- timeout: Optional[float] = None,
- stream: Optional[str] = None,
+ self, cmd: "NatsPublishCommand"
) -> None:
incoming = build_message(
- message=message,
- subject=subject,
- headers=headers,
- correlation_id=correlation_id,
- reply_to=reply_to,
+ message=cmd.body,
+ subject=cmd.destination,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
)
for handler in _find_handler(
self.broker._subscribers,
- subject,
- stream,
+ cmd.destination,
+ cmd.stream,
):
msg: Union[list[PatchedMessage], PatchedMessage]
@@ -104,31 +114,24 @@ async def publish( # type: ignore[override]
else:
msg = incoming
- await self._execute_handler(msg, subject, handler)
+ await self._execute_handler(msg, cmd.destination, handler)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- subject: str,
- *,
- correlation_id: Optional[str] = None,
- headers: Optional[dict[str, str]] = None,
- timeout: float = 0.5,
- # NatsJSFastProducer compatibility
- stream: Optional[str] = None,
+ cmd: "NatsPublishCommand",
) -> "PatchedMessage":
incoming = build_message(
- message=message,
- subject=subject,
- headers=headers,
- correlation_id=correlation_id,
+ message=cmd.body,
+ subject=cmd.destination,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
for handler in _find_handler(
self.broker._subscribers,
- subject,
- stream,
+ cmd.destination,
+ cmd.stream,
):
msg: Union[list[PatchedMessage], PatchedMessage]
@@ -137,8 +140,8 @@ async def request( # type: ignore[override]
else:
msg = incoming
- with anyio.fail_after(timeout):
- return await self._execute_handler(msg, subject, handler)
+ with anyio.fail_after(cmd.timeout):
+ return await self._execute_handler(msg, cmd.destination, handler)
raise SubscriberNotFound
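
From the user's perspective the in-memory testing flow is unchanged by the producer patching above; only the internals now route `NatsPublishCommand` objects. A typical usage sketch (subject and payload are illustrative):

```python
# Sketch of the in-memory testing flow backed by FakeProducer; no real
# NATS server is needed.
import asyncio

from faststream.nats import NatsBroker, TestNatsBroker

broker = NatsBroker()


@broker.subscriber("test-subject")
async def handler(msg: str) -> None:
    print("got:", msg)


async def main() -> None:
    async with TestNatsBroker(broker) as br:
        # routed in-memory to the handler via the patched producer
        await br.publish("hello", subject="test-subject")


asyncio.run(main())
```
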
diff --git a/faststream/opentelemetry/baggage.py b/faststream/opentelemetry/baggage.py
index 1a84a1d904..b29f24e1bc 100644
--- a/faststream/opentelemetry/baggage.py
+++ b/faststream/opentelemetry/baggage.py
@@ -62,8 +62,7 @@ def to_headers(self, headers: Optional["AnyDict"] = None) -> "AnyDict":
def from_msg(cls, msg: "StreamMessage[Any]") -> Self:
"""Create a Baggage instance from a StreamMessage."""
if len(msg.batch_headers) <= 1:
- payload = baggage.get_all(_BAGGAGE_PROPAGATOR.extract(msg.headers))
- return cls(cast("AnyDict", payload))
+ return cls.from_headers(msg.headers)
cumulative_baggage: AnyDict = {}
batch_baggage: list[AnyDict] = []
@@ -75,5 +74,11 @@ def from_msg(cls, msg: "StreamMessage[Any]") -> Self:
return cls(cumulative_baggage, batch_baggage)
+ @classmethod
+ def from_headers(cls, headers: "AnyDict") -> Self:
+ """Create a Baggage instance from headers."""
+ payload = baggage.get_all(_BAGGAGE_PROPAGATOR.extract(headers))
+ return cls(cast("AnyDict", payload))
+
def __repr__(self) -> str:
return self._baggage.__repr__()
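
The new `from_headers` helper is a thin wrapper over the W3C baggage propagator, so its behavior reduces to roughly this (the header value is illustrative):

```python
# Sketch of what Baggage.from_headers extracts, using the same W3C
# propagator machinery directly.
from opentelemetry import baggage
from opentelemetry.baggage.propagation import W3CBaggagePropagator

propagator = W3CBaggagePropagator()

headers = {"baggage": "user_id=42,tenant=acme"}
ctx = propagator.extract(headers)

print(baggage.get_all(ctx))  # {'user_id': '42', 'tenant': 'acme'}
```
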
diff --git a/faststream/opentelemetry/middleware.py b/faststream/opentelemetry/middleware.py
index 8305e3457a..853b85ccc7 100644
--- a/faststream/opentelemetry/middleware.py
+++ b/faststream/opentelemetry/middleware.py
@@ -10,10 +10,7 @@
from opentelemetry.trace import Link, Span
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
-from faststream import (
- BaseMiddleware,
- context as fs_context,
-)
+from faststream import BaseMiddleware
from faststream.opentelemetry.baggage import Baggage
from faststream.opentelemetry.consts import (
ERROR_TYPE,
@@ -22,7 +19,6 @@
WITH_BATCH,
MessageAction,
)
-from faststream.opentelemetry.provider import TelemetrySettingsProvider
if TYPE_CHECKING:
from contextvars import Token
@@ -33,13 +29,58 @@
from opentelemetry.util.types import Attributes
from faststream._internal.basic_types import AnyDict, AsyncFunc, AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
from faststream.message import StreamMessage
+ from faststream.opentelemetry.provider import TelemetrySettingsProvider
+ from faststream.response.response import PublishCommand
_BAGGAGE_PROPAGATOR = W3CBaggagePropagator()
_TRACE_PROPAGATOR = TraceContextTextMapPropagator()
+class TelemetryMiddleware:
+ # NOTE: should it be class or function?
+ __slots__ = (
+ "_meter",
+ "_metrics",
+ "_settings_provider_factory",
+ "_tracer",
+ )
+
+ def __init__(
+ self,
+ *,
+ settings_provider_factory: Callable[
+ [Any],
+ Optional["TelemetrySettingsProvider[Any]"],
+ ],
+ tracer_provider: Optional["TracerProvider"] = None,
+ meter_provider: Optional["MeterProvider"] = None,
+ meter: Optional["Meter"] = None,
+ include_messages_counters: bool = False,
+ ) -> None:
+ self._tracer = _get_tracer(tracer_provider)
+ self._meter = _get_meter(meter_provider, meter)
+ self._metrics = _MetricsContainer(self._meter, include_messages_counters)
+ self._settings_provider_factory = settings_provider_factory
+
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "_BaseTelemetryMiddleware":
+ return _BaseTelemetryMiddleware(
+ msg,
+ tracer=self._tracer,
+ metrics_container=self._metrics,
+ settings_provider_factory=self._settings_provider_factory,
+ context=context,
+ )
+
+
class _MetricsContainer:
__slots__ = (
"include_messages_counters",
@@ -112,19 +153,21 @@ def observe_consume(
)
-class BaseTelemetryMiddleware(BaseMiddleware):
+class _BaseTelemetryMiddleware(BaseMiddleware):
def __init__(
self,
+ msg: Optional[Any],
+ /,
*,
tracer: "Tracer",
settings_provider_factory: Callable[
[Any],
- Optional[TelemetrySettingsProvider[Any]],
+ Optional["TelemetrySettingsProvider[Any]"],
],
metrics_container: _MetricsContainer,
- msg: Optional[Any] = None,
+ context: "ContextRepo",
) -> None:
- self.msg = msg
+ super().__init__(msg, context=context)
self._tracer = tracer
self._metrics = metrics_container
@@ -136,29 +179,27 @@ def __init__(
async def publish_scope(
self,
call_next: "AsyncFunc",
- msg: Any,
- *args: Any,
- **kwargs: Any,
+ msg: "PublishCommand",
) -> Any:
if (provider := self.__settings_provider) is None:
- return await call_next(msg, *args, **kwargs)
+ return await call_next(msg)
- headers = kwargs.pop("headers", {}) or {}
+ headers = msg.headers
current_context = context.get_current()
- destination_name = provider.get_publish_destination_name(kwargs)
+ destination_name = provider.get_publish_destination_name(msg)
- current_baggage: Optional[Baggage] = fs_context.get_local("baggage")
+ current_baggage: Optional[Baggage] = self.context.get_local("baggage")
if current_baggage:
headers.update(current_baggage.to_headers())
- trace_attributes = provider.get_publish_attrs_from_kwargs(kwargs)
+ trace_attributes = provider.get_publish_attrs_from_cmd(msg)
metrics_attributes = {
SpanAttributes.MESSAGING_SYSTEM: provider.messaging_system,
SpanAttributes.MESSAGING_DESTINATION_NAME: destination_name,
}
# NOTE: if batch with single message?
- if (msg_count := len((msg, *args))) > 1:
+ if (msg_count := len(msg.batch_bodies)) > 1:
trace_attributes[SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT] = msg_count
current_context = _BAGGAGE_PROPAGATOR.extract(headers, current_context)
_BAGGAGE_PROPAGATOR.inject(
@@ -196,7 +237,8 @@ async def publish_scope(
SpanAttributes.MESSAGING_OPERATION,
MessageAction.PUBLISH,
)
- result = await call_next(msg, *args, headers=headers, **kwargs)
+ msg.headers = headers
+ result = await call_next(msg)
except Exception as e:
metrics_attributes[ERROR_TYPE] = type(e).__name__
@@ -207,7 +249,7 @@ async def publish_scope(
self._metrics.observe_publish(metrics_attributes, duration, msg_count)
for key, token in self._scope_tokens:
- fs_context.reset_local(key, token)
+ self.context.reset_local(key, token)
return result
@@ -260,9 +302,15 @@ async def consume_scope(
)
self._current_span = span
- self._scope_tokens.append(("span", fs_context.set_local("span", span)))
+ self._scope_tokens.append((
+ "span",
+ self.context.set_local("span", span),
+ ))
self._scope_tokens.append(
- ("baggage", fs_context.set_local("baggage", Baggage.from_msg(msg))),
+ (
+ "baggage",
+ self.context.set_local("baggage", Baggage.from_msg(msg)),
+ ),
)
new_context = trace.set_span_in_context(span, current_context)
@@ -295,41 +343,6 @@ async def after_processed(
return False
-class TelemetryMiddleware:
- # NOTE: should it be class or function?
- __slots__ = (
- "_meter",
- "_metrics",
- "_settings_provider_factory",
- "_tracer",
- )
-
- def __init__(
- self,
- *,
- settings_provider_factory: Callable[
- [Any],
- Optional[TelemetrySettingsProvider[Any]],
- ],
- tracer_provider: Optional["TracerProvider"] = None,
- meter_provider: Optional["MeterProvider"] = None,
- meter: Optional["Meter"] = None,
- include_messages_counters: bool = False,
- ) -> None:
- self._tracer = _get_tracer(tracer_provider)
- self._meter = _get_meter(meter_provider, meter)
- self._metrics = _MetricsContainer(self._meter, include_messages_counters)
- self._settings_provider_factory = settings_provider_factory
-
- def __call__(self, msg: Optional[Any]) -> BaseMiddleware:
- return BaseTelemetryMiddleware(
- tracer=self._tracer,
- metrics_container=self._metrics,
- settings_provider_factory=self._settings_provider_factory,
- msg=msg,
- )
-
-
def _get_meter(
meter_provider: Optional["MeterProvider"] = None,
meter: Optional["Meter"] = None,
diff --git a/faststream/opentelemetry/provider.py b/faststream/opentelemetry/provider.py
index 304f1f332b..6e2aaa90b6 100644
--- a/faststream/opentelemetry/provider.py
+++ b/faststream/opentelemetry/provider.py
@@ -5,6 +5,7 @@
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict
from faststream.message import StreamMessage
+ from faststream.response.response import PublishCommand
class TelemetrySettingsProvider(Protocol[MsgType]):
@@ -20,12 +21,12 @@ def get_consume_destination_name(
msg: "StreamMessage[MsgType]",
) -> str: ...
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> "AnyDict": ...
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "PublishCommand",
) -> str: ...
diff --git a/faststream/prometheus/__init__.py b/faststream/prometheus/__init__.py
new file mode 100644
index 0000000000..a06f158ff3
--- /dev/null
+++ b/faststream/prometheus/__init__.py
@@ -0,0 +1,9 @@
+from faststream.prometheus.middleware import PrometheusMiddleware
+from faststream.prometheus.provider import MetricsSettingsProvider
+from faststream.prometheus.types import ConsumeAttrs
+
+__all__ = (
+ "ConsumeAttrs",
+ "MetricsSettingsProvider",
+ "PrometheusMiddleware",
+)
diff --git a/faststream/prometheus/consts.py b/faststream/prometheus/consts.py
new file mode 100644
index 0000000000..8e592d14ae
--- /dev/null
+++ b/faststream/prometheus/consts.py
@@ -0,0 +1,17 @@
+from faststream.exceptions import AckMessage, NackMessage, RejectMessage, SkipMessage
+from faststream.message.message import AckStatus
+from faststream.prometheus.types import ProcessingStatus
+
+PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP = {
+ AckMessage: ProcessingStatus.acked,
+ NackMessage: ProcessingStatus.nacked,
+ RejectMessage: ProcessingStatus.rejected,
+ SkipMessage: ProcessingStatus.skipped,
+}
+
+
+PROCESSING_STATUS_BY_ACK_STATUS = {
+ AckStatus.ACKED: ProcessingStatus.acked,
+ AckStatus.NACKED: ProcessingStatus.nacked,
+ AckStatus.REJECTED: ProcessingStatus.rejected,
+}
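
These two maps are chained when a message finishes processing, falling back to `error`, mirroring the lookup in `_PrometheusMiddleware.consume_scope` below. A small sketch with illustrative inputs:

```python
# Sketch of the status-resolution chain used by the Prometheus middleware.
from faststream.exceptions import NackMessage
from faststream.message.message import AckStatus
from faststream.prometheus.consts import (
    PROCESSING_STATUS_BY_ACK_STATUS,
    PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP,
)
from faststream.prometheus.types import ProcessingStatus

committed = AckStatus.NACKED  # illustrative: what msg.committed reports
err = NackMessage()           # illustrative: the exception the handler raised

status = (
    PROCESSING_STATUS_BY_ACK_STATUS.get(committed)
    or PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP.get(type(err))
    or ProcessingStatus.error
)
print(status)  # ProcessingStatus.nacked
```
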
diff --git a/faststream/prometheus/container.py b/faststream/prometheus/container.py
new file mode 100644
index 0000000000..dd93701b05
--- /dev/null
+++ b/faststream/prometheus/container.py
@@ -0,0 +1,101 @@
+from collections.abc import Sequence
+from typing import Optional
+
+from prometheus_client import CollectorRegistry, Counter, Gauge, Histogram
+
+
+class MetricsContainer:
+ __slots__ = (
+ "_metrics_prefix",
+ "_registry",
+ "published_messages_duration_seconds",
+ "published_messages_exceptions_total",
+ "published_messages_total",
+ "received_messages_in_process",
+ "received_messages_size_bytes",
+ "received_messages_total",
+ "received_processed_messages_duration_seconds",
+ "received_processed_messages_exceptions_total",
+ "received_processed_messages_total",
+ )
+
+ DEFAULT_SIZE_BUCKETS = (
+ 2.0**4,
+ 2.0**6,
+ 2.0**8,
+ 2.0**10,
+ 2.0**12,
+ 2.0**14,
+ 2.0**16,
+ 2.0**18,
+ 2.0**20,
+ 2.0**22,
+ 2.0**24,
+ float("inf"),
+ )
+
+ def __init__(
+ self,
+ registry: "CollectorRegistry",
+ *,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ self._registry = registry
+ self._metrics_prefix = metrics_prefix
+
+ self.received_messages_total = Counter(
+ name=f"{metrics_prefix}_received_messages_total",
+ documentation="Count of received messages by broker and handler",
+ labelnames=["app_name", "broker", "handler"],
+ registry=registry,
+ )
+ self.received_messages_size_bytes = Histogram(
+ name=f"{metrics_prefix}_received_messages_size_bytes",
+ documentation="Histogram of received messages size in bytes by broker and handler",
+ labelnames=["app_name", "broker", "handler"],
+ registry=registry,
+ buckets=received_messages_size_buckets or self.DEFAULT_SIZE_BUCKETS,
+ )
+ self.received_messages_in_process = Gauge(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ documentation="Gauge of received messages in process by broker and handler",
+ labelnames=["app_name", "broker", "handler"],
+ registry=registry,
+ )
+ self.received_processed_messages_total = Counter(
+ name=f"{metrics_prefix}_received_processed_messages_total",
+ documentation="Count of received processed messages by broker, handler and status",
+ labelnames=["app_name", "broker", "handler", "status"],
+ registry=registry,
+ )
+ self.received_processed_messages_duration_seconds = Histogram(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds",
+ documentation="Histogram of received processed messages duration in seconds by broker and handler",
+ labelnames=["app_name", "broker", "handler"],
+ registry=registry,
+ )
+ self.received_processed_messages_exceptions_total = Counter(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions_total",
+ documentation="Count of received processed messages exceptions by broker, handler and exception_type",
+ labelnames=["app_name", "broker", "handler", "exception_type"],
+ registry=registry,
+ )
+ self.published_messages_total = Counter(
+ name=f"{metrics_prefix}_published_messages_total",
+ documentation="Count of published messages by destination and status",
+ labelnames=["app_name", "broker", "destination", "status"],
+ registry=registry,
+ )
+ self.published_messages_duration_seconds = Histogram(
+ name=f"{metrics_prefix}_published_messages_duration_seconds",
+ documentation="Histogram of published messages duration in seconds by broker and destination",
+ labelnames=["app_name", "broker", "destination"],
+ registry=registry,
+ )
+ self.published_messages_exceptions_total = Counter(
+ name=f"{metrics_prefix}_published_messages_exceptions_total",
+ documentation="Count of published messages exceptions by broker, destination and exception_type",
+ labelnames=["app_name", "broker", "destination", "exception_type"],
+ registry=registry,
+ )
diff --git a/faststream/prometheus/manager.py b/faststream/prometheus/manager.py
new file mode 100644
index 0000000000..844634475e
--- /dev/null
+++ b/faststream/prometheus/manager.py
@@ -0,0 +1,133 @@
+from faststream.prometheus.container import MetricsContainer
+from faststream.prometheus.types import ProcessingStatus, PublishingStatus
+
+
+class MetricsManager:
+ __slots__ = ("_app_name", "_container")
+
+ def __init__(
+ self, container: MetricsContainer, *, app_name: str = "faststream"
+ ) -> None:
+ self._container = container
+ self._app_name = app_name
+
+ def add_received_message(self, broker: str, handler: str, amount: int = 1) -> None:
+ self._container.received_messages_total.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ ).inc(amount)
+
+ def observe_received_messages_size(
+ self,
+ broker: str,
+ handler: str,
+ size: int,
+ ) -> None:
+ self._container.received_messages_size_bytes.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ ).observe(size)
+
+ def add_received_message_in_process(
+ self,
+ broker: str,
+ handler: str,
+ amount: int = 1,
+ ) -> None:
+ self._container.received_messages_in_process.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ ).inc(amount)
+
+ def remove_received_message_in_process(
+ self,
+ broker: str,
+ handler: str,
+ amount: int = 1,
+ ) -> None:
+ self._container.received_messages_in_process.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ ).dec(amount)
+
+ def add_received_processed_message(
+ self,
+ broker: str,
+ handler: str,
+ status: ProcessingStatus,
+ amount: int = 1,
+ ) -> None:
+ self._container.received_processed_messages_total.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ status=status.value,
+ ).inc(amount)
+
+ def observe_received_processed_message_duration(
+ self,
+ duration: float,
+ broker: str,
+ handler: str,
+ ) -> None:
+ self._container.received_processed_messages_duration_seconds.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ ).observe(duration)
+
+ def add_received_processed_message_exception(
+ self,
+ broker: str,
+ handler: str,
+ exception_type: str,
+ ) -> None:
+ self._container.received_processed_messages_exceptions_total.labels(
+ app_name=self._app_name,
+ broker=broker,
+ handler=handler,
+ exception_type=exception_type,
+ ).inc()
+
+ def add_published_message(
+ self,
+ broker: str,
+ destination: str,
+ status: PublishingStatus,
+ amount: int = 1,
+ ) -> None:
+ self._container.published_messages_total.labels(
+ app_name=self._app_name,
+ broker=broker,
+ destination=destination,
+ status=status.value,
+ ).inc(amount)
+
+ def observe_published_message_duration(
+ self,
+ duration: float,
+ broker: str,
+ destination: str,
+ ) -> None:
+ self._container.published_messages_duration_seconds.labels(
+ app_name=self._app_name,
+ broker=broker,
+ destination=destination,
+ ).observe(duration)
+
+ def add_published_message_exception(
+ self,
+ broker: str,
+ destination: str,
+ exception_type: str,
+ ) -> None:
+ self._container.published_messages_exceptions_total.labels(
+ app_name=self._app_name,
+ broker=broker,
+ destination=destination,
+ exception_type=exception_type,
+ ).inc()
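
The container/manager pair can be exercised in isolation, independent of any broker; label values below are illustrative:

```python
# Sketch: record a couple of metrics and dump the registry.
from prometheus_client import CollectorRegistry, generate_latest

from faststream.prometheus.container import MetricsContainer
from faststream.prometheus.manager import MetricsManager

registry = CollectorRegistry()
container = MetricsContainer(registry, metrics_prefix="faststream")
manager = MetricsManager(container, app_name="demo-app")

manager.add_received_message(broker="nats", handler="test-subject")
manager.observe_received_messages_size(broker="nats", handler="test-subject", size=128)

print(generate_latest(registry).decode())  # Prometheus exposition format
```
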
diff --git a/faststream/prometheus/middleware.py b/faststream/prometheus/middleware.py
new file mode 100644
index 0000000000..d61dc42b0c
--- /dev/null
+++ b/faststream/prometheus/middleware.py
@@ -0,0 +1,209 @@
+import time
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+from faststream import BaseMiddleware
+from faststream._internal.constants import EMPTY
+from faststream.message import SourceType
+from faststream.prometheus.consts import (
+ PROCESSING_STATUS_BY_ACK_STATUS,
+ PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP,
+)
+from faststream.prometheus.container import MetricsContainer
+from faststream.prometheus.manager import MetricsManager
+from faststream.prometheus.provider import MetricsSettingsProvider
+from faststream.prometheus.types import ProcessingStatus, PublishingStatus
+from faststream.response import PublishType
+
+if TYPE_CHECKING:
+ from prometheus_client import CollectorRegistry
+
+ from faststream._internal.basic_types import AsyncFunc, AsyncFuncAny
+ from faststream._internal.context.repository import ContextRepo
+ from faststream.message.message import StreamMessage
+ from faststream.response.response import PublishCommand
+
+
+class PrometheusMiddleware:
+ __slots__ = ("_metrics_container", "_metrics_manager", "_settings_provider_factory")
+
+ def __init__(
+ self,
+ *,
+ settings_provider_factory: Callable[
+ [Any], Optional[MetricsSettingsProvider[Any]]
+ ],
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ if app_name is EMPTY:
+ app_name = metrics_prefix
+
+ self._settings_provider_factory = settings_provider_factory
+ self._metrics_container = MetricsContainer(
+ registry,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
+ self._metrics_manager = MetricsManager(
+ self._metrics_container,
+ app_name=app_name,
+ )
+
+ def __call__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ context: "ContextRepo",
+ ) -> "_PrometheusMiddleware":
+ return _PrometheusMiddleware(
+ msg,
+ metrics_manager=self._metrics_manager,
+ settings_provider_factory=self._settings_provider_factory,
+ context=context,
+ )
+
+
+class _PrometheusMiddleware(BaseMiddleware):
+ def __init__(
+ self,
+ msg: Optional[Any],
+ /,
+ *,
+ settings_provider_factory: Callable[
+ [Any], Optional[MetricsSettingsProvider[Any]]
+ ],
+ metrics_manager: MetricsManager,
+ context: "ContextRepo",
+ ) -> None:
+ self._metrics_manager = metrics_manager
+ self._settings_provider = settings_provider_factory(msg)
+ super().__init__(msg, context=context)
+
+ async def consume_scope(
+ self,
+ call_next: "AsyncFuncAny",
+ msg: "StreamMessage[Any]",
+ ) -> Any:
+ if self._settings_provider is None or msg._source_type is SourceType.RESPONSE:
+ return await call_next(msg)
+
+ messaging_system = self._settings_provider.messaging_system
+ consume_attrs = self._settings_provider.get_consume_attrs_from_message(msg)
+ destination_name = consume_attrs["destination_name"]
+
+ self._metrics_manager.add_received_message(
+ amount=consume_attrs["messages_count"],
+ broker=messaging_system,
+ handler=destination_name,
+ )
+
+ self._metrics_manager.observe_received_messages_size(
+ size=consume_attrs["message_size"],
+ broker=messaging_system,
+ handler=destination_name,
+ )
+
+ self._metrics_manager.add_received_message_in_process(
+ amount=consume_attrs["messages_count"],
+ broker=messaging_system,
+ handler=destination_name,
+ )
+
+ err: Optional[Exception] = None
+ start_time = time.perf_counter()
+
+ try:
+ result = await call_next(await self.on_consume(msg))
+
+ except Exception as e:
+ err = e
+ self._metrics_manager.add_received_processed_message_exception(
+ exception_type=type(err).__name__,
+ broker=messaging_system,
+ handler=destination_name,
+ )
+ raise
+
+ finally:
+ duration = time.perf_counter() - start_time
+ self._metrics_manager.observe_received_processed_message_duration(
+ duration=duration,
+ broker=messaging_system,
+ handler=destination_name,
+ )
+
+ self._metrics_manager.remove_received_message_in_process(
+ amount=consume_attrs["messages_count"],
+ broker=messaging_system,
+ handler=destination_name,
+ )
+
+ status = ProcessingStatus.acked
+
+ if msg.committed or err:
+ status = (
+ PROCESSING_STATUS_BY_ACK_STATUS.get(msg.committed) # type: ignore[arg-type]
+ or PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP.get(type(err))
+ or ProcessingStatus.error
+ )
+
+ self._metrics_manager.add_received_processed_message(
+ amount=consume_attrs["messages_count"],
+ status=status,
+ broker=messaging_system,
+ handler=destination_name,
+ )
+
+ return result
+
+ async def publish_scope(
+ self,
+ call_next: "AsyncFunc",
+ cmd: "PublishCommand",
+ ) -> Any:
+ if self._settings_provider is None or cmd.publish_type is PublishType.REPLY:
+ return await call_next(cmd)
+
+ destination_name = (
+ self._settings_provider.get_publish_destination_name_from_cmd(cmd)
+ )
+ messaging_system = self._settings_provider.messaging_system
+
+ err: Optional[Exception] = None
+ start_time = time.perf_counter()
+
+ try:
+ result = await call_next(cmd)
+
+ except Exception as e:
+ err = e
+ self._metrics_manager.add_published_message_exception(
+ exception_type=type(err).__name__,
+ broker=messaging_system,
+ destination=destination_name,
+ )
+ raise
+
+ finally:
+ duration = time.perf_counter() - start_time
+
+ self._metrics_manager.observe_published_message_duration(
+ duration=duration,
+ broker=messaging_system,
+ destination=destination_name,
+ )
+
+ status = PublishingStatus.error if err else PublishingStatus.success
+
+ self._metrics_manager.add_published_message(
+ amount=len(cmd.batch_bodies),
+ status=status,
+ broker=messaging_system,
+ destination=destination_name,
+ )
+
+ return result
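
Wiring the generic middleware requires a settings-provider factory. In the sketch below `MyProvider` is hypothetical, only there to show the shape; broker packages ship their own providers (e.g. `RabbitPrometheusMiddleware` below):

```python
# Hypothetical wiring sketch for the generic Prometheus middleware.
from typing import Any

from prometheus_client import CollectorRegistry

from faststream.prometheus import (
    ConsumeAttrs,
    MetricsSettingsProvider,
    PrometheusMiddleware,
)


class MyProvider(MetricsSettingsProvider[Any]):
    messaging_system = "my-system"

    def get_consume_attrs_from_message(self, msg: Any) -> ConsumeAttrs:
        return {
            "destination_name": "my-destination",
            "message_size": len(msg.body),
            "messages_count": 1,
        }

    def get_publish_destination_name_from_cmd(self, cmd: Any) -> str:
        return cmd.destination


middleware = PrometheusMiddleware(
    settings_provider_factory=lambda _: MyProvider(),
    registry=CollectorRegistry(),
    app_name="demo-app",
)
```
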
diff --git a/faststream/prometheus/provider.py b/faststream/prometheus/provider.py
new file mode 100644
index 0000000000..acbf68702f
--- /dev/null
+++ b/faststream/prometheus/provider.py
@@ -0,0 +1,21 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.message.message import MsgType, StreamMessage
+
+if TYPE_CHECKING:
+ from faststream.prometheus import ConsumeAttrs
+ from faststream.response.response import PublishCommand
+
+
+class MetricsSettingsProvider(Protocol[MsgType]):
+ messaging_system: str
+
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[MsgType]",
+ ) -> "ConsumeAttrs": ...
+
+ def get_publish_destination_name_from_cmd(
+ self,
+ cmd: "PublishCommand",
+ ) -> str: ...
diff --git a/faststream/prometheus/types.py b/faststream/prometheus/types.py
new file mode 100644
index 0000000000..ae6ffb7161
--- /dev/null
+++ b/faststream/prometheus/types.py
@@ -0,0 +1,21 @@
+from enum import Enum
+from typing import TypedDict
+
+
+class ProcessingStatus(str, Enum):
+ acked = "acked"
+ nacked = "nacked"
+ rejected = "rejected"
+ skipped = "skipped"
+ error = "error"
+
+
+class PublishingStatus(str, Enum):
+ success = "success"
+ error = "error"
+
+
+class ConsumeAttrs(TypedDict):
+ message_size: int
+ destination_name: str
+ messages_count: int
diff --git a/faststream/rabbit/annotations.py b/faststream/rabbit/annotations.py
index 4a135ecae9..dcbea9d20e 100644
--- a/faststream/rabbit/annotations.py
+++ b/faststream/rabbit/annotations.py
@@ -4,7 +4,6 @@
from faststream._internal.context import Context
from faststream.annotations import ContextRepo, Logger
-from faststream.params import NoCast
from faststream.rabbit.broker import RabbitBroker as RB
from faststream.rabbit.message import RabbitMessage as RM
from faststream.rabbit.publisher.producer import AioPikaFastProducer
@@ -14,7 +13,6 @@
"Connection",
"ContextRepo",
"Logger",
- "NoCast",
"RabbitBroker",
"RabbitMessage",
"RabbitProducer",
diff --git a/faststream/rabbit/broker/broker.py b/faststream/rabbit/broker/broker.py
index 920c39f72d..7b7e585829 100644
--- a/faststream/rabbit/broker/broker.py
+++ b/faststream/rabbit/broker/broker.py
@@ -18,10 +18,11 @@
from faststream.__about__ import SERVICE_NAME
from faststream._internal.broker.broker import BrokerUsecase
from faststream._internal.constants import EMPTY
-from faststream.exceptions import NOT_CONNECTED_YET
+from faststream._internal.publisher.proto import PublisherProto
from faststream.message import gen_cor_id
from faststream.rabbit.helpers.declarer import RabbitDeclarer
from faststream.rabbit.publisher.producer import AioPikaFastProducer
+from faststream.rabbit.response import RabbitPublishCommand
from faststream.rabbit.schemas import (
RABBIT_REPLY,
RabbitExchange,
@@ -29,6 +30,7 @@
)
from faststream.rabbit.security import parse_security
from faststream.rabbit.utils import build_url
+from faststream.response.publish_type import PublishType
from .logging import make_rabbit_logger_state
from .registrator import RabbitRegistrator
@@ -44,7 +46,8 @@
RobustQueue,
)
from aio_pika.abc import DateType, HeadersType, SSLOptions, TimeoutType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from pamqp.common import FieldTable
from yarl import URL
@@ -56,7 +59,7 @@
from faststream.rabbit.message import RabbitMessage
from faststream.rabbit.types import AioPikaSendableMessage
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
class RabbitBroker(
@@ -66,9 +69,10 @@ class RabbitBroker(
"""A class to represent a RabbitMQ broker."""
url: str
- _producer: Optional["AioPikaFastProducer"]
- declarer: Optional[RabbitDeclarer]
+ _producer: "AioPikaFastProducer"
+ declarer: RabbitDeclarer
+
_channel: Optional["RobustChannel"]
def __init__(
@@ -161,7 +165,7 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
@@ -213,10 +217,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -280,7 +281,7 @@ def __init__(
),
# FastDepends args
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
)
@@ -290,7 +291,15 @@ def __init__(
self.app_id = app_id
self._channel = None
- self.declarer = None
+
+ declarer = self.declarer = RabbitDeclarer()
+ self._state.patch_value(
+ producer=AioPikaFastProducer(
+ declarer=declarer,
+ decoder=self._decoder,
+ parser=self._parser,
+ )
+ )
@property
def _subscriber_setup_extra(self) -> "AnyDict":
@@ -301,13 +310,21 @@ def _subscriber_setup_extra(self) -> "AnyDict":
"declarer": self.declarer,
}
- @property
- def _publisher_setup_extra(self) -> "AnyDict":
- return {
- **super()._publisher_setup_extra,
- "app_id": self.app_id,
- "virtual_host": self.virtual_host,
- }
+ def setup_publisher(
+ self,
+ publisher: PublisherProto[IncomingMessage],
+ **kwargs: Any,
+ ) -> None:
+ return super().setup_publisher(
+ publisher,
+ **(
+ {
+ "app_id": self.app_id,
+ "virtual_host": self.virtual_host,
+ }
+ | kwargs
+ ),
+ )
@override
async def connect( # type: ignore[override]
@@ -461,18 +478,14 @@ async def _connect( # type: ignore[override]
),
)
- declarer = self.declarer = RabbitDeclarer(channel)
- await declarer.declare_queue(RABBIT_REPLY)
-
- self._producer = AioPikaFastProducer(
- declarer=declarer,
- decoder=self._decoder,
- parser=self._parser,
- )
-
if self._max_consumers:
await channel.set_qos(prefetch_count=int(self._max_consumers))
+ self.declarer.connect(connection=connection, channel=channel)
+ await self.declarer.declare_queue(RABBIT_REPLY)
+
+ self._producer.connect()
+
return connection
async def close(
@@ -493,25 +506,24 @@ async def close(
await self._connection.close()
self._connection = None
- self.declarer = None
- self._producer = None
+ self.declarer.disconnect()
+ self._producer.disconnect()
async def start(self) -> None:
"""Connect broker to RabbitMQ and startup all subscribers."""
await self.connect()
self._setup()
- if self._max_consumers:
- self._state.logger_state.log(f"Set max consumers to {self._max_consumers}")
-
- assert self.declarer, NOT_CONNECTED_YET # nosec B101
-
for publisher in self._publishers:
if publisher.exchange is not None:
await self.declare_exchange(publisher.exchange)
await super().start()
+ logger_state = self._state.get().logger_state
+ if self._max_consumers:
+ logger_state.log(f"Set max consumers to {self._max_consumers}")
+
@override
async def publish( # type: ignore[override]
self,
@@ -619,32 +631,31 @@ async def publish( # type: ignore[override]
Please, use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead in a regular way.
"""
- routing = routing_key or RabbitQueue.validate(queue).routing
- correlation_id = correlation_id or gen_cor_id()
-
- return await super().publish(
+ cmd = RabbitPublishCommand(
message,
- producer=self._producer,
- routing_key=routing,
+ routing_key=routing_key or RabbitQueue.validate(queue).routing,
+ exchange=RabbitExchange.validate(exchange),
+ correlation_id=correlation_id or gen_cor_id(),
app_id=self.app_id,
- exchange=exchange,
mandatory=mandatory,
immediate=immediate,
persist=persist,
reply_to=reply_to,
headers=headers,
- correlation_id=correlation_id,
content_type=content_type,
content_encoding=content_encoding,
expiration=expiration,
message_id=message_id,
- timestamp=timestamp,
message_type=message_type,
+ timestamp=timestamp,
user_id=user_id,
timeout=timeout,
priority=priority,
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
+
@override
async def request( # type: ignore[override]
self,
@@ -739,16 +750,12 @@ async def request( # type: ignore[override]
Doc("The message priority (0 by default)."),
] = None,
) -> "RabbitMessage":
- routing = routing_key or RabbitQueue.validate(queue).routing
- correlation_id = correlation_id or gen_cor_id()
-
- msg: RabbitMessage = await super().request(
+ cmd = RabbitPublishCommand(
message,
- producer=self._producer,
- correlation_id=correlation_id,
- routing_key=routing,
+ routing_key=routing_key or RabbitQueue.validate(queue).routing,
+ exchange=RabbitExchange.validate(exchange),
+ correlation_id=correlation_id or gen_cor_id(),
app_id=self.app_id,
- exchange=exchange,
mandatory=mandatory,
immediate=immediate,
persist=persist,
@@ -757,12 +764,15 @@ async def request( # type: ignore[override]
content_encoding=content_encoding,
expiration=expiration,
message_id=message_id,
- timestamp=timestamp,
message_type=message_type,
+ timestamp=timestamp,
user_id=user_id,
timeout=timeout,
priority=priority,
+ _publish_type=PublishType.REQUEST,
)
+
+ msg: RabbitMessage = await super()._basic_request(cmd, producer=self._producer)
return msg
async def declare_queue(
@@ -773,7 +783,6 @@ async def declare_queue(
],
) -> "RobustQueue":
"""Declares queue object in **RabbitMQ**."""
- assert self.declarer, NOT_CONNECTED_YET # nosec B101
return await self.declarer.declare_queue(queue)
async def declare_exchange(
@@ -784,7 +793,6 @@ async def declare_exchange(
],
) -> "RobustExchange":
"""Declares exchange object in **RabbitMQ**."""
- assert self.declarer, NOT_CONNECTED_YET # nosec B101
return await self.declarer.declare_exchange(exchange)
@override
diff --git a/faststream/rabbit/broker/logging.py b/faststream/rabbit/broker/logging.py
index 4074d3e6df..21b0172004 100644
--- a/faststream/rabbit/broker/logging.py
+++ b/faststream/rabbit/broker/logging.py
@@ -1,14 +1,16 @@
+import logging
from functools import partial
from typing import TYPE_CHECKING, Optional
from faststream._internal.log.logging import get_broker_logger
-from faststream._internal.setup.logger import (
+from faststream._internal.state.logger import (
DefaultLoggerStorage,
make_logger_state,
)
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
class RabbitParamsStorage(DefaultLoggerStorage):
@@ -21,6 +23,11 @@ def __init__(
self._max_exchange_len = 4
self._max_queue_len = 4
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
def setup_log_contest(self, params: "AnyDict") -> None:
self._max_exchange_len = max(
self._max_exchange_len,
@@ -31,7 +38,7 @@ def setup_log_contest(self, params: "AnyDict") -> None:
len(params.get("queue", "")),
)
- def get_logger(self) -> "LoggerProto":
+ def get_logger(self, *, context: "ContextRepo") -> "LoggerProto":
message_id_ln = 10
# TODO: generate unique logger names to not share between brokers
@@ -50,10 +57,12 @@ def get_logger(self) -> "LoggerProto":
f"%(message_id)-{message_id_ln}s "
"- %(message)s"
),
+ context=context,
+ log_level=self.logger_log_level,
)
make_rabbit_logger_state = partial(
make_logger_state,
- default_storag_cls=RabbitParamsStorage,
+ default_storage_cls=RabbitParamsStorage,
)
diff --git a/faststream/rabbit/broker/registrator.py b/faststream/rabbit/broker/registrator.py
index 32aedfcfec..37ca0066f7 100644
--- a/faststream/rabbit/broker/registrator.py
+++ b/faststream/rabbit/broker/registrator.py
@@ -4,19 +4,22 @@
from typing_extensions import Doc, override
from faststream._internal.broker.abc_broker import ABCBroker
-from faststream.rabbit.publisher.publisher import SpecificationPublisher
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
+from faststream.rabbit.publisher.factory import create_publisher
+from faststream.rabbit.publisher.specified import SpecificationPublisher
from faststream.rabbit.publisher.usecase import PublishKwargs
from faststream.rabbit.schemas import (
RabbitExchange,
RabbitQueue,
)
from faststream.rabbit.subscriber.factory import create_subscriber
-from faststream.rabbit.subscriber.subscriber import SpecificationSubscriber
+from faststream.rabbit.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
from aio_pika import IncomingMessage # noqa: F401
from aio_pika.abc import DateType, HeadersType, TimeoutType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
from faststream._internal.types import (
@@ -56,10 +59,14 @@ def subscriber( # type: ignore[override]
Optional["AnyDict"],
Doc("Extra consumer arguments to use in `queue.consume(...)` method."),
] = None,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -73,14 +80,6 @@ def subscriber( # type: ignore[override]
Iterable["SubscriberMiddleware[RabbitMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- Union[bool, int],
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
no_reply: Annotated[
bool,
Doc(
@@ -112,10 +111,9 @@ def subscriber( # type: ignore[override]
exchange=RabbitExchange.validate(exchange),
consume_args=consume_args,
# subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# AsyncAPI
title_=title,
@@ -240,7 +238,7 @@ def publisher( # type: ignore[override]
Optional[str],
Doc("Publisher connection User ID, validated if set."),
] = None,
- ) -> SpecificationPublisher:
+ ) -> "SpecificationPublisher":
"""Creates long-living and AsyncAPI-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
@@ -266,13 +264,13 @@ def publisher( # type: ignore[override]
return cast(
SpecificationPublisher,
super().publisher(
- SpecificationPublisher.create(
+ create_publisher(
routing_key=routing_key,
queue=RabbitQueue.validate(queue),
exchange=RabbitExchange.validate(exchange),
message_kwargs=message_kwargs,
# Specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# AsyncAPI
title_=title,
diff --git a/faststream/rabbit/fastapi/__init__.py b/faststream/rabbit/fastapi/__init__.py
index da296cd366..cb7c7c26d4 100644
--- a/faststream/rabbit/fastapi/__init__.py
+++ b/faststream/rabbit/fastapi/__init__.py
@@ -2,10 +2,11 @@
from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.rabbit.broker import RabbitBroker as RB
-from faststream.rabbit.fastapi.router import RabbitRouter
from faststream.rabbit.message import RabbitMessage as RM
from faststream.rabbit.publisher.producer import AioPikaFastProducer
+from .fastapi import RabbitRouter
+
RabbitMessage = Annotated[RM, Context("message")]
RabbitBroker = Annotated[RB, Context("broker")]
RabbitProducer = Annotated[AioPikaFastProducer, Context("broker._producer")]
diff --git a/faststream/rabbit/fastapi/router.py b/faststream/rabbit/fastapi/fastapi.py
similarity index 98%
rename from faststream/rabbit/fastapi/router.py
rename to faststream/rabbit/fastapi/fastapi.py
index 927e432420..02d2d4b2e9 100644
--- a/faststream/rabbit/fastapi/router.py
+++ b/faststream/rabbit/fastapi/fastapi.py
@@ -20,13 +20,13 @@
from faststream.__about__ import SERVICE_NAME
from faststream._internal.constants import EMPTY
from faststream._internal.fastapi.router import StreamRouter
+from faststream.middlewares import AckPolicy
from faststream.rabbit.broker.broker import RabbitBroker as RB
-from faststream.rabbit.publisher.publisher import SpecificationPublisher
from faststream.rabbit.schemas import (
RabbitExchange,
RabbitQueue,
)
-from faststream.rabbit.subscriber.subscriber import SpecificationSubscriber
+from faststream.rabbit.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
from enum import Enum
@@ -48,8 +48,9 @@
SubscriberMiddleware,
)
from faststream.rabbit.message import RabbitMessage
+ from faststream.rabbit.publisher.specified import SpecificationPublisher
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
class RabbitRouter(StreamRouter["IncomingMessage"]):
@@ -511,14 +512,10 @@ def subscriber( # type: ignore[override]
Iterable["SubscriberMiddleware[RabbitMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- Union[bool, int],
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -675,8 +672,7 @@ def subscriber( # type: ignore[override]
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
@@ -800,7 +796,7 @@ def publisher(
Optional[str],
Doc("Publisher connection User ID, validated if set."),
] = None,
- ) -> SpecificationPublisher:
+ ) -> "SpecificationPublisher":
return self.broker.publisher(
queue=queue,
exchange=exchange,
diff --git a/faststream/rabbit/helpers/declarer.py b/faststream/rabbit/helpers/declarer.py
index b7bf52165c..dc890617d0 100644
--- a/faststream/rabbit/helpers/declarer.py
+++ b/faststream/rabbit/helpers/declarer.py
@@ -1,5 +1,7 @@
from typing import TYPE_CHECKING, cast
+from .state import ConnectedState, ConnectionState, EmptyConnectionState
+
if TYPE_CHECKING:
import aio_pika
@@ -9,12 +11,22 @@
class RabbitDeclarer:
"""An utility class to declare RabbitMQ queues and exchanges."""
- __channel: "aio_pika.RobustChannel"
- __queues: dict["RabbitQueue", "aio_pika.RobustQueue"]
- __exchanges: dict["RabbitExchange", "aio_pika.RobustExchange"]
+ def __init__(self) -> None:
+ self.__queues: dict[RabbitQueue, aio_pika.RobustQueue] = {}
+ self.__exchanges: dict[RabbitExchange, aio_pika.RobustExchange] = {}
+
+ self.__connection: ConnectionState = EmptyConnectionState()
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(<{self.__connection.__class__.__name__}>, queues={list(self.__queues.keys())}, exchanges={list(self.__exchanges.keys())})"
+
+ def connect(
+ self, connection: "aio_pika.RobustConnection", channel: "aio_pika.RobustChannel"
+ ) -> None:
+ self.__connection = ConnectedState(connection=connection, channel=channel)
- def __init__(self, channel: "aio_pika.RobustChannel") -> None:
- self.__channel = channel
+ def disconnect(self) -> None:
+ self.__connection = EmptyConnectionState()
self.__queues = {}
self.__exchanges = {}
@@ -27,7 +39,7 @@ async def declare_queue(
if (q := self.__queues.get(queue)) is None:
self.__queues[queue] = q = cast(
"aio_pika.RobustQueue",
- await self.__channel.declare_queue(
+ await self.__connection.channel.declare_queue(
name=queue.name,
durable=queue.durable,
exclusive=queue.exclusive,
@@ -48,12 +60,12 @@ async def declare_exchange(
) -> "aio_pika.RobustExchange":
"""Declare an exchange, parent exchanges and bind them each other."""
if not exchange.name:
- return self.__channel.default_exchange
+ return self.__connection.channel.default_exchange
if (exch := self.__exchanges.get(exchange)) is None:
self.__exchanges[exchange] = exch = cast(
"aio_pika.RobustExchange",
- await self.__channel.declare_exchange(
+ await self.__connection.channel.declare_exchange(
name=exchange.name,
type=exchange.type.value,
durable=exchange.durable,
diff --git a/faststream/rabbit/helpers/state.py b/faststream/rabbit/helpers/state.py
new file mode 100644
index 0000000000..182b588557
--- /dev/null
+++ b/faststream/rabbit/helpers/state.py
@@ -0,0 +1,35 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from aio_pika import RobustChannel, RobustConnection
+
+
+class ConnectionState(Protocol):
+ connection: "RobustConnection"
+ channel: "RobustChannel"
+
+
+class EmptyConnectionState(ConnectionState):
+ __slots__ = ()
+
+ error_msg = "You should connect broker first."
+
+ @property
+ def connection(self) -> "RobustConnection":
+ raise IncorrectState(self.error_msg)
+
+ @property
+ def channel(self) -> "RobustChannel":
+ raise IncorrectState(self.error_msg)
+
+
+class ConnectedState(ConnectionState):
+ __slots__ = ("channel", "connection")
+
+ def __init__(
+ self, connection: "RobustConnection", channel: "RobustChannel"
+ ) -> None:
+ self.connection = connection
+ self.channel = channel
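
The state objects give the declarer a clear failure mode: touching the channel before `connect()` raises `IncorrectState` instead of an `AttributeError`. In isolation:

```python
# Sketch of the connection-state guard introduced above.
from faststream.exceptions import IncorrectState
from faststream.rabbit.helpers.state import ConnectionState, EmptyConnectionState

state: ConnectionState = EmptyConnectionState()

try:
    state.channel
except IncorrectState as e:
    print(e)  # You should connect broker first.
```
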
diff --git a/faststream/rabbit/opentelemetry/provider.py b/faststream/rabbit/opentelemetry/provider.py
index ffa14e60e6..e9bd12c7fd 100644
--- a/faststream/rabbit/opentelemetry/provider.py
+++ b/faststream/rabbit/opentelemetry/provider.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING
from opentelemetry.semconv.trace import SpanAttributes
@@ -10,7 +10,7 @@
from faststream._internal.basic_types import AnyDict
from faststream.message import StreamMessage
- from faststream.rabbit.schemas.exchange import RabbitExchange
+ from faststream.rabbit.response import RabbitPublishCommand
class RabbitTelemetrySettingsProvider(TelemetrySettingsProvider["IncomingMessage"]):
@@ -41,28 +41,19 @@ def get_consume_destination_name(
routing_key = msg.raw_message.routing_key
return f"{exchange}.{routing_key}"
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "RabbitPublishCommand",
) -> "AnyDict":
- exchange: Union[None, str, RabbitExchange] = kwargs.get("exchange")
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: getattr(
- exchange,
- "name",
- exchange or "",
- ),
- SpanAttributes.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY: kwargs[
- "routing_key"
- ],
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.exchange.name,
+ SpanAttributes.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "RabbitPublishCommand",
) -> str:
- exchange: str = kwargs.get("exchange") or "default"
- routing_key: str = kwargs["routing_key"]
- return f"{exchange}.{routing_key}"
+ return f"{cmd.exchange.name or 'default'}.{cmd.destination}"
diff --git a/faststream/rabbit/parser.py b/faststream/rabbit/parser.py
index 70b2ba5492..de43697e17 100644
--- a/faststream/rabbit/parser.py
+++ b/faststream/rabbit/parser.py
@@ -61,19 +61,19 @@ async def decode_message(
def encode_message(
message: "AioPikaSendableMessage",
*,
- persist: bool,
- reply_to: Optional[str],
- headers: Optional["HeadersType"],
- content_type: Optional[str],
- content_encoding: Optional[str],
- priority: Optional[int],
- correlation_id: Optional[str],
- expiration: Optional["DateType"],
- message_id: Optional[str],
- timestamp: Optional["DateType"],
- message_type: Optional[str],
- user_id: Optional[str],
- app_id: Optional[str],
+ persist: bool = False,
+ reply_to: Optional[str] = None,
+ headers: Optional["HeadersType"] = None,
+ content_type: Optional[str] = None,
+ content_encoding: Optional[str] = None,
+ priority: Optional[int] = None,
+ correlation_id: Optional[str] = None,
+ expiration: "DateType" = None,
+ message_id: Optional[str] = None,
+ timestamp: "DateType" = None,
+ message_type: Optional[str] = None,
+ user_id: Optional[str] = None,
+ app_id: Optional[str] = None,
) -> Message:
"""Encodes a message for sending using AioPika."""
if isinstance(message, Message):
diff --git a/faststream/rabbit/prometheus/__init__.py b/faststream/rabbit/prometheus/__init__.py
new file mode 100644
index 0000000000..bdb07907ee
--- /dev/null
+++ b/faststream/rabbit/prometheus/__init__.py
@@ -0,0 +1,3 @@
+from faststream.rabbit.prometheus.middleware import RabbitPrometheusMiddleware
+
+__all__ = ("RabbitPrometheusMiddleware",)
diff --git a/faststream/rabbit/prometheus/middleware.py b/faststream/rabbit/prometheus/middleware.py
new file mode 100644
index 0000000000..78dd498576
--- /dev/null
+++ b/faststream/rabbit/prometheus/middleware.py
@@ -0,0 +1,27 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.constants import EMPTY
+from faststream.prometheus.middleware import PrometheusMiddleware
+from faststream.rabbit.prometheus.provider import RabbitMetricsSettingsProvider
+
+if TYPE_CHECKING:
+ from prometheus_client import CollectorRegistry
+
+
+class RabbitPrometheusMiddleware(PrometheusMiddleware):
+ def __init__(
+ self,
+ *,
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ super().__init__(
+ settings_provider_factory=lambda _: RabbitMetricsSettingsProvider(),
+ registry=registry,
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
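+
+
+# Usage sketch (an assumed minimal example, not part of this diff):
+#
+#     from prometheus_client import CollectorRegistry
+#     from faststream.rabbit import RabbitBroker
+#
+#     registry = CollectorRegistry()
+#     broker = RabbitBroker(middlewares=(RabbitPrometheusMiddleware(registry=registry),))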
diff --git a/faststream/rabbit/prometheus/provider.py b/faststream/rabbit/prometheus/provider.py
new file mode 100644
index 0000000000..f4fa0d977f
--- /dev/null
+++ b/faststream/rabbit/prometheus/provider.py
@@ -0,0 +1,38 @@
+from typing import TYPE_CHECKING
+
+from faststream.prometheus import (
+ ConsumeAttrs,
+ MetricsSettingsProvider,
+)
+
+if TYPE_CHECKING:
+ from aio_pika import IncomingMessage
+
+ from faststream.message.message import StreamMessage
+ from faststream.rabbit.response import RabbitPublishCommand
+
+
+class RabbitMetricsSettingsProvider(MetricsSettingsProvider["IncomingMessage"]):
+ __slots__ = ("messaging_system",)
+
+ def __init__(self) -> None:
+ self.messaging_system = "rabbitmq"
+
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[IncomingMessage]",
+ ) -> ConsumeAttrs:
+ exchange = msg.raw_message.exchange or "default"
+ routing_key = msg.raw_message.routing_key
+
+ return {
+ "destination_name": f"{exchange}.{routing_key}",
+ "message_size": len(msg.body),
+ "messages_count": 1,
+ }
+
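+    # The default ("") exchange is reported as "default" so metric labels stay non-empty.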
+ def get_publish_destination_name_from_cmd(
+ self,
+ cmd: "RabbitPublishCommand",
+ ) -> str:
+ return f"{cmd.exchange.name or 'default'}.{cmd.destination}"
diff --git a/faststream/rabbit/publisher/factory.py b/faststream/rabbit/publisher/factory.py
new file mode 100644
index 0000000000..cf2bc27c86
--- /dev/null
+++ b/faststream/rabbit/publisher/factory.py
@@ -0,0 +1,43 @@
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Any, Optional
+
+from .specified import SpecificationPublisher
+
+if TYPE_CHECKING:
+ from aio_pika import IncomingMessage
+
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
+
+ from .usecase import PublishKwargs
+
+
+def create_publisher(
+ *,
+ routing_key: str,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ message_kwargs: "PublishKwargs",
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # AsyncAPI args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+) -> SpecificationPublisher:
+ return SpecificationPublisher(
+ routing_key=routing_key,
+ queue=queue,
+ exchange=exchange,
+ message_kwargs=message_kwargs,
+ # Publisher args
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ schema_=schema_,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
diff --git a/faststream/rabbit/publisher/fake.py b/faststream/rabbit/publisher/fake.py
new file mode 100644
index 0000000000..e5c67848e6
--- /dev/null
+++ b/faststream/rabbit/publisher/fake.py
@@ -0,0 +1,30 @@
+from typing import TYPE_CHECKING, Optional, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.rabbit.response import RabbitPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class RabbitFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ routing_key: str,
+ app_id: Optional[str],
+ ) -> None:
+ super().__init__(producer=producer)
+ self.routing_key = routing_key
+        self.app_id = app_id
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "RabbitPublishCommand"]
+ ) -> "RabbitPublishCommand":
+ real_cmd = RabbitPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.routing_key
+ real_cmd.app_id = self.app_id
+ return real_cmd
diff --git a/faststream/rabbit/publisher/options.py b/faststream/rabbit/publisher/options.py
new file mode 100644
index 0000000000..81343b7cf9
--- /dev/null
+++ b/faststream/rabbit/publisher/options.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Optional
+
+from typing_extensions import TypedDict
+
+if TYPE_CHECKING:
+ from aio_pika.abc import DateType, HeadersType, TimeoutType
+
+
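+# PublishOptions carry the per-call publish flags; MessageOptions carry the
+# aio_pika.Message properties consumed by AioPikaParser.encode_message().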
+class PublishOptions(TypedDict, total=False):
+ mandatory: bool
+ immediate: bool
+ timeout: "TimeoutType"
+
+
+class MessageOptions(TypedDict, total=False):
+ persist: bool
+ reply_to: Optional[str]
+ headers: Optional["HeadersType"]
+ content_type: Optional[str]
+ content_encoding: Optional[str]
+ priority: Optional[int]
+ expiration: "DateType"
+ message_id: Optional[str]
+ timestamp: "DateType"
+ message_type: Optional[str]
+ user_id: Optional[str]
+ app_id: Optional[str]
+ correlation_id: Optional[str]
diff --git a/faststream/rabbit/publisher/producer.py b/faststream/rabbit/publisher/producer.py
index 780ef16424..55fe050c19 100644
--- a/faststream/rabbit/publisher/producer.py
+++ b/faststream/rabbit/publisher/producer.py
@@ -1,15 +1,16 @@
from typing import (
TYPE_CHECKING,
Optional,
- Union,
+ Protocol,
cast,
)
import anyio
-from typing_extensions import override
+from typing_extensions import Unpack, override
from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.subscriber.utils import resolve_custom_func
+from faststream.exceptions import FeatureNotSupportedException, IncorrectState
from faststream.rabbit.parser import AioPikaParser
from faststream.rabbit.schemas import RABBIT_REPLY, RabbitExchange
@@ -18,7 +19,7 @@
import aiormq
from aio_pika import IncomingMessage, RobustQueue
- from aio_pika.abc import AbstractIncomingMessage, DateType, HeadersType, TimeoutType
+ from aio_pika.abc import AbstractIncomingMessage, TimeoutType
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
from faststream._internal.types import (
@@ -26,9 +27,30 @@
CustomCallable,
)
from faststream.rabbit.helpers.declarer import RabbitDeclarer
+ from faststream.rabbit.response import MessageOptions, RabbitPublishCommand
from faststream.rabbit.types import AioPikaSendableMessage
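+
+# Same null-object pattern as the declarer's ConnectionState: the real
+# anyio.Lock only exists once `connect()` has run inside the event loop.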
+class LockState(Protocol):
+ lock: "anyio.Lock"
+
+
+class LockUnset(LockState):
+ __slots__ = ()
+
+ @property
+ def lock(self) -> "anyio.Lock":
+ msg = "You should call `producer.connect()` method at first."
+ raise IncorrectState(msg)
+
+
+class RealLock(LockState):
+ __slots__ = ("lock",)
+
+ def __init__(self) -> None:
+ self.lock = anyio.Lock()
+
+
class AioPikaFastProducer(ProducerProto):
"""A class for fast producing messages using aio-pika."""
@@ -44,108 +66,59 @@ def __init__(
) -> None:
self.declarer = declarer
- self._rpc_lock = anyio.Lock()
+ self.__lock: LockState = LockUnset()
default_parser = AioPikaParser()
self._parser = resolve_custom_func(parser, default_parser.parse_message)
self._decoder = resolve_custom_func(decoder, default_parser.decode_message)
+ def connect(self) -> None:
+ """Lock initialization.
+
+        Must be called in an async context, because an `anyio.Lock` can't be created outside a running event loop.
+ """
+ self.__lock = RealLock()
+
+ def disconnect(self) -> None:
+ self.__lock = LockUnset()
+
@override
async def publish( # type: ignore[override]
self,
- message: "AioPikaSendableMessage",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: "TimeoutType" = None,
- persist: bool = False,
- reply_to: Optional[str] = None,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
+ cmd: "RabbitPublishCommand",
) -> Optional["aiormq.abc.ConfirmationFrameType"]:
"""Publish a message to a RabbitMQ queue."""
return await self._publish(
- message=message,
- exchange=exchange,
- routing_key=routing_key,
- mandatory=mandatory,
- immediate=immediate,
- timeout=timeout,
- persist=persist,
- reply_to=reply_to,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- priority=priority,
- correlation_id=correlation_id,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- message_type=message_type,
- user_id=user_id,
- app_id=app_id,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
+ reply_to=cmd.reply_to,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ **cmd.publish_options,
+ **cmd.message_options,
)
@override
async def request( # type: ignore[override]
self,
- message: "AioPikaSendableMessage",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: Optional[float] = None,
- persist: bool = False,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
+ cmd: "RabbitPublishCommand",
) -> "IncomingMessage":
"""Publish a message to a RabbitMQ queue."""
async with _RPCCallback(
- self._rpc_lock,
+ self.__lock.lock,
await self.declarer.declare_queue(RABBIT_REPLY),
) as response_queue:
- with anyio.fail_after(timeout):
+ with anyio.fail_after(cmd.timeout):
await self._publish(
- message=message,
- exchange=exchange,
- routing_key=routing_key,
- mandatory=mandatory,
- immediate=immediate,
- timeout=timeout,
- persist=persist,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
reply_to=RABBIT_REPLY.name,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- priority=priority,
- correlation_id=correlation_id,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- message_type=message_type,
- user_id=user_id,
- app_id=app_id,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
+ **cmd.publish_options,
+ **cmd.message_options,
)
return await response_queue.receive()
@@ -153,45 +126,18 @@ async def _publish(
self,
message: "AioPikaSendableMessage",
*,
- correlation_id: str,
- exchange: Union["RabbitExchange", str, None],
+ exchange: "RabbitExchange",
routing_key: str,
- mandatory: bool,
- immediate: bool,
- timeout: "TimeoutType",
- persist: bool,
- reply_to: Optional[str],
- headers: Optional["HeadersType"],
- content_type: Optional[str],
- content_encoding: Optional[str],
- priority: Optional[int],
- expiration: Optional["DateType"],
- message_id: Optional[str],
- timestamp: Optional["DateType"],
- message_type: Optional[str],
- user_id: Optional[str],
- app_id: Optional[str],
+ mandatory: bool = True,
+ immediate: bool = False,
+ timeout: "TimeoutType" = None,
+ **message_options: Unpack["MessageOptions"],
) -> Optional["aiormq.abc.ConfirmationFrameType"]:
"""Publish a message to a RabbitMQ exchange."""
- message = AioPikaParser.encode_message(
- message=message,
- persist=persist,
- reply_to=reply_to,
- headers=headers,
- content_type=content_type,
- content_encoding=content_encoding,
- priority=priority,
- correlation_id=correlation_id,
- expiration=expiration,
- message_id=message_id,
- timestamp=timestamp,
- message_type=message_type,
- user_id=user_id,
- app_id=app_id,
- )
+ message = AioPikaParser.encode_message(message=message, **message_options)
exchange_obj = await self.declarer.declare_exchange(
- exchange=RabbitExchange.validate(exchange),
+ exchange=exchange,
passive=True,
)
@@ -203,6 +149,14 @@ async def _publish(
timeout=timeout,
)
+ @override
+ async def publish_batch(
+ self,
+ cmd: "RabbitPublishCommand",
+ ) -> None:
+ msg = "RabbitMQ doesn't support publishing in batches."
+ raise FeatureNotSupportedException(msg)
+
class _RPCCallback:
"""A class provides an RPC lock."""
diff --git a/faststream/rabbit/publisher/publisher.py b/faststream/rabbit/publisher/specified.py
similarity index 63%
rename from faststream/rabbit/publisher/publisher.py
rename to faststream/rabbit/publisher/specified.py
index 14926fad2d..5d5356b661 100644
--- a/faststream/rabbit/publisher/publisher.py
+++ b/faststream/rabbit/publisher/specified.py
@@ -1,9 +1,14 @@
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Any, Optional
-
-from typing_extensions import override
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+)
-from faststream.rabbit.publisher.usecase import LogicPublisher, PublishKwargs
+from faststream._internal.publisher.specified import (
+ SpecificationPublisher as SpecificationPublisherMixin,
+)
+from faststream.rabbit.schemas.proto import BaseRMQInformation as RMQSpecificationMixin
from faststream.rabbit.utils import is_routing_exchange
from faststream.specification.asyncapi.utils import resolve_payloads
from faststream.specification.schema import Message, Operation, PublisherSpec
@@ -13,6 +18,8 @@
amqp,
)
+from .usecase import LogicPublisher, PublishKwargs
+
if TYPE_CHECKING:
from aio_pika import IncomingMessage
@@ -20,21 +27,53 @@
from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
-class SpecificationPublisher(LogicPublisher):
- """AsyncAPI-compatible Rabbit Publisher class.
+class SpecificationPublisher(
+ SpecificationPublisherMixin,
+ RMQSpecificationMixin,
+ LogicPublisher,
+):
+ """AsyncAPI-compatible Rabbit Publisher class."""
- Creating by
+ def __init__(
+ self,
+ *,
+ routing_key: str,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ # PublishCommand options
+ message_kwargs: "PublishKwargs",
+ # Publisher args
+ broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # AsyncAPI args
+ schema_: Optional[Any],
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
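+        # The three bases take disjoint kwargs, so each __init__ is invoked
+        # explicitly rather than through cooperative super() chaining.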
+ SpecificationPublisherMixin.__init__(
+ self,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ schema_=schema_,
+ )
- ```python
- publisher: SpecificationPublisher = (
- broker.publisher(...)
- )
- # or
- publisher: SpecificationPublisher = (
- router.publisher(...)
- )
- ```
- """
+ RMQSpecificationMixin.__init__(
+ self,
+ queue=queue,
+ exchange=exchange,
+ )
+
+ LogicPublisher.__init__(
+ self,
+ queue=queue,
+ exchange=exchange,
+ routing_key=routing_key,
+ message_kwargs=message_kwargs,
+ middlewares=middlewares,
+ broker_middlewares=broker_middlewares,
+ )
def get_default_name(self) -> str:
routing = (
@@ -48,19 +87,23 @@ def get_default_name(self) -> str:
def get_schema(self) -> dict[str, PublisherSpec]:
payloads = self.get_payloads()
+ exchange_binding = amqp.Exchange.from_exchange(self.exchange)
+ queue_binding = amqp.Queue.from_queue(self.queue)
+
return {
self.name: PublisherSpec(
description=self.description,
operation=Operation(
bindings=OperationBinding(
amqp=amqp.OperationBinding(
- cc=self.routing or None,
- delivery_mode=2
- if self.message_kwargs.get("persist")
- else 1,
- mandatory=self.message_kwargs.get("mandatory"), # type: ignore[arg-type]
- reply_to=self.message_kwargs.get("reply_to"), # type: ignore[arg-type]
- priority=self.message_kwargs.get("priority"), # type: ignore[arg-type]
+ routing_key=self.routing or None,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ack=True,
+ persist=self.message_options.get("persist"),
+ priority=self.message_options.get("priority"),
+ reply_to=self.message_options.get("reply_to"),
+ mandatory=self.publish_options.get("mandatory"),
),
),
message=Message(
@@ -75,42 +118,9 @@ def get_schema(self) -> dict[str, PublisherSpec]:
bindings=ChannelBinding(
amqp=amqp.ChannelBinding(
virtual_host=self.virtual_host,
- queue=amqp.Queue.from_queue(self.queue),
- exchange=amqp.Exchange.from_exchange(self.exchange),
+ queue=queue_binding,
+ exchange=exchange_binding,
),
),
),
}
-
- @override
- @classmethod
- def create( # type: ignore[override]
- cls,
- *,
- routing_key: str,
- queue: "RabbitQueue",
- exchange: "RabbitExchange",
- message_kwargs: "PublishKwargs",
- # Publisher args
- broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
- middlewares: Iterable["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
- ) -> "SpecificationPublisher":
- return cls(
- routing_key=routing_key,
- queue=queue,
- exchange=exchange,
- message_kwargs=message_kwargs,
- # Publisher args
- broker_middlewares=broker_middlewares,
- middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
- )
diff --git a/faststream/rabbit/publisher/usecase.py b/faststream/rabbit/publisher/usecase.py
index 49cdf71f14..0ae13cf319 100644
--- a/faststream/rabbit/publisher/usecase.py
+++ b/faststream/rabbit/publisher/usecase.py
@@ -1,102 +1,41 @@
-from collections.abc import Awaitable, Iterable
+from collections.abc import Iterable
from copy import deepcopy
-from functools import partial
-from itertools import chain
from typing import (
TYPE_CHECKING,
Annotated,
- Any,
- Callable,
Optional,
Union,
)
from aio_pika import IncomingMessage
-from typing_extensions import Doc, TypedDict, Unpack, override
+from typing_extensions import Doc, Unpack, override
from faststream._internal.publisher.usecase import PublisherUsecase
-from faststream._internal.subscriber.utils import process_msg
-from faststream.exceptions import NOT_CONNECTED_YET
+from faststream._internal.utils.data import filter_by_dict
from faststream.message import gen_cor_id
-from faststream.rabbit.schemas import BaseRMQInformation, RabbitQueue
+from faststream.rabbit.response import RabbitPublishCommand
+from faststream.rabbit.schemas import RabbitExchange, RabbitQueue
+from faststream.response.publish_type import PublishType
+
+from .options import MessageOptions, PublishOptions
if TYPE_CHECKING:
import aiormq
- from aio_pika.abc import DateType, HeadersType, TimeoutType
- from faststream._internal.basic_types import AnyDict
+ from faststream._internal.state import BrokerState
from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.rabbit.message import RabbitMessage
from faststream.rabbit.publisher.producer import AioPikaFastProducer
- from faststream.rabbit.schemas.exchange import RabbitExchange
from faststream.rabbit.types import AioPikaSendableMessage
+ from faststream.response.response import PublishCommand
# should be public to use in imports
-class RequestPublishKwargs(TypedDict, total=False):
+class RequestPublishKwargs(MessageOptions, PublishOptions, total=False):
"""Typed dict to annotate RabbitMQ requesters."""
- headers: Annotated[
- Optional["HeadersType"],
- Doc(
- "Message headers to store metainformation. "
- "Can be overridden by `publish.headers` if specified.",
- ),
- ]
- mandatory: Annotated[
- Optional[bool],
- Doc(
- "Client waits for confirmation that the message is placed to some queue. "
- "RabbitMQ returns message to client if there is no suitable queue.",
- ),
- ]
- immediate: Annotated[
- Optional[bool],
- Doc(
- "Client expects that there is consumer ready to take the message to work. "
- "RabbitMQ returns message to client if there is no suitable consumer.",
- ),
- ]
- timeout: Annotated[
- "TimeoutType",
- Doc("Send confirmation time from RabbitMQ."),
- ]
- persist: Annotated[
- Optional[bool],
- Doc("Restore the message on RabbitMQ reboot."),
- ]
-
- priority: Annotated[
- Optional[int],
- Doc("The message priority (0 by default)."),
- ]
- message_type: Annotated[
- Optional[str],
- Doc("Application-specific message type, e.g. **orders.created**."),
- ]
- content_type: Annotated[
- Optional[str],
- Doc(
- "Message **content-type** header. "
- "Used by application, not core RabbitMQ. "
- "Will be set automatically if not specified.",
- ),
- ]
- user_id: Annotated[
- Optional[str],
- Doc("Publisher connection User ID, validated if set."),
- ]
- expiration: Annotated[
- Optional["DateType"],
- Doc("Message expiration (lifetime) in seconds (or datetime or timedelta)."),
- ]
- content_encoding: Annotated[
- Optional[str],
- Doc("Message body content encoding, e.g. **gzip**."),
- ]
-
-class PublishKwargs(RequestPublishKwargs, total=False):
+class PublishKwargs(MessageOptions, PublishOptions, total=False):
"""Typed dict to annotate RabbitMQ publishers."""
reply_to: Annotated[
@@ -107,10 +46,7 @@ class PublishKwargs(RequestPublishKwargs, total=False):
]
-class LogicPublisher(
- PublisherUsecase[IncomingMessage],
- BaseRMQInformation,
-):
+class LogicPublisher(PublisherUsecase[IncomingMessage]):
"""A class to represent a RabbitMQ publisher."""
app_id: Optional[str]
@@ -123,51 +59,44 @@ def __init__(
routing_key: str,
queue: "RabbitQueue",
exchange: "RabbitExchange",
+ # PublishCommand options
message_kwargs: "PublishKwargs",
# Publisher args
broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
middlewares: Iterable["PublisherMiddleware"],
- # AsyncAPI args
- schema_: Optional[Any],
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
+ self.queue = queue
+ self.routing_key = routing_key
+
+ self.exchange = exchange
+
super().__init__(
broker_middlewares=broker_middlewares,
middlewares=middlewares,
- # AsyncAPI args
- schema_=schema_,
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
- self.routing_key = routing_key
+ request_options = dict(message_kwargs)
+ self.headers = request_options.pop("headers") or {}
+ self.reply_to = request_options.pop("reply_to", "")
+ self.timeout = request_options.pop("timeout", None)
- request_kwargs = dict(message_kwargs)
- self.reply_to = request_kwargs.pop("reply_to", None)
- self.message_kwargs = request_kwargs
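+        # The remaining flat kwargs are split into per-message AMQP properties
+        # (MessageOptions) and per-publish flags (PublishOptions).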
+ message_options, _ = filter_by_dict(MessageOptions, request_options)
+ self.message_options = message_options
- # BaseRMQInformation
- self.queue = queue
- self.exchange = exchange
+ publish_options, _ = filter_by_dict(PublishOptions, request_options)
+ self.publish_options = publish_options
- # Setup it later
self.app_id = None
- self.virtual_host = ""
@override
def _setup( # type: ignore[override]
self,
*,
- producer: Optional["AioPikaFastProducer"],
- app_id: Optional[str],
- virtual_host: str,
+ state: "BrokerState",
) -> None:
- self.app_id = app_id
- self.virtual_host = virtual_host
- super()._setup(producer=producer)
+ # AppId was set in `faststream.rabbit.schemas.proto.BaseRMQInformation`
+ self.message_options["app_id"] = self.app_id
+ super()._setup(state=state)
@property
def routing(self) -> str:
@@ -202,53 +131,52 @@ async def publish(
"**correlation_id** is a useful option to trace messages.",
),
] = None,
- message_id: Annotated[
- Optional[str],
- Doc("Arbitrary message id. Generated automatically if not presented."),
- ] = None,
- timestamp: Annotated[
- Optional["DateType"],
- Doc("Message publish timestamp. Generated automatically if not presented."),
- ] = None,
# publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
**publish_kwargs: "Unpack[PublishKwargs]",
) -> Optional["aiormq.abc.ConfirmationFrameType"]:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "routing_key": routing_key
- or self.routing_key
- or RabbitQueue.validate(queue or self.queue).routing,
- "exchange": exchange or self.exchange.name,
- "app_id": self.app_id,
- "correlation_id": correlation_id or gen_cor_id(),
- "message_id": message_id,
- "timestamp": timestamp,
- # specific args
- "reply_to": self.reply_to,
- **self.message_kwargs,
- **publish_kwargs,
- }
-
- call: Callable[
- ...,
- Awaitable[Optional[aiormq.abc.ConfirmationFrameType]],
- ] = self._producer.publish
-
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
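+        # Routing key precedence: the explicit argument, then the `queue`
+        # argument's binding, then this publisher's own default.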
+ if not routing_key:
+ if q := RabbitQueue.validate(queue):
+ routing_key = q.routing
+ else:
+ routing_key = self.routing
+
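+        # Per-call headers override the publisher's defaults (dict union, right side wins).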
+ headers = self.headers | publish_kwargs.pop("headers", {})
+ cmd = RabbitPublishCommand(
+ message,
+ routing_key=routing_key,
+ exchange=RabbitExchange.validate(exchange or self.exchange),
+ correlation_id=correlation_id or gen_cor_id(),
+ headers=headers,
+ _publish_type=PublishType.PUBLISH,
+ **(self.publish_options | self.message_options | publish_kwargs),
+ )
+
+ frame: Optional[aiormq.abc.ConfirmationFrameType] = await self._basic_publish(
+ cmd,
+ _extra_middlewares=(),
+ )
+ return frame
- return await call(message, **kwargs)
+ @override
+ async def _publish(
+ self,
+ cmd: Union["RabbitPublishCommand", "PublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RabbitPublishCommand.from_cmd(cmd)
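+        # The publisher's routing always wins here; for the remaining options
+        # the incoming command's values take precedence over publisher defaults.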
+
+ cmd.destination = self.routing
+ cmd.reply_to = cmd.reply_to or self.reply_to
+ cmd.add_headers(self.headers, override=False)
+
+ cmd.timeout = cmd.timeout or self.timeout
+
+ cmd.message_options = {**self.message_options, **cmd.message_options}
+ cmd.publish_options = {**self.publish_options, **cmd.publish_options}
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -278,59 +206,27 @@ async def request(
"**correlation_id** is a useful option to trace messages.",
),
] = None,
- message_id: Annotated[
- Optional[str],
- Doc("Arbitrary message id. Generated automatically if not presented."),
- ] = None,
- timestamp: Annotated[
- Optional["DateType"],
- Doc("Message publish timestamp. Generated automatically if not presented."),
- ] = None,
# publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
**publish_kwargs: "Unpack[RequestPublishKwargs]",
) -> "RabbitMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs: AnyDict = {
- "routing_key": routing_key
- or self.routing_key
- or RabbitQueue.validate(queue or self.queue).routing,
- "exchange": exchange or self.exchange.name,
- "app_id": self.app_id,
- "correlation_id": correlation_id or gen_cor_id(),
- "message_id": message_id,
- "timestamp": timestamp,
- # specific args
- **self.message_kwargs,
- **publish_kwargs,
- }
-
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ if not routing_key:
+ if q := RabbitQueue.validate(queue):
+ routing_key = q.routing
+ else:
+ routing_key = self.routing
+
+ headers = self.headers | publish_kwargs.pop("headers", {})
+ cmd = RabbitPublishCommand(
message,
- **kwargs,
+ routing_key=routing_key,
+ exchange=RabbitExchange.validate(exchange or self.exchange),
+ correlation_id=correlation_id or gen_cor_id(),
+ headers=headers,
+ _publish_type=PublishType.PUBLISH,
+ **(self.publish_options | self.message_options | publish_kwargs),
)
- msg: RabbitMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
- )
+ msg: RabbitMessage = await self._basic_request(cmd)
return msg
def add_prefix(self, prefix: str) -> None:
diff --git a/faststream/rabbit/response.py b/faststream/rabbit/response.py
index 756c665c4b..9bac3f6417 100644
--- a/faststream/rabbit/response.py
+++ b/faststream/rabbit/response.py
@@ -1,13 +1,15 @@
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Optional, Union
-from typing_extensions import override
+from typing_extensions import Unpack, override
-from faststream.response import Response
+from faststream.rabbit.schemas.exchange import RabbitExchange
+from faststream.response import PublishCommand, Response
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
- from aio_pika.abc import DateType, TimeoutType
+ from aio_pika.abc import TimeoutType
- from faststream._internal.basic_types import AnyDict
+ from faststream.rabbit.publisher.options import MessageOptions
from faststream.rabbit.types import AioPikaSendableMessage
@@ -16,48 +18,91 @@ def __init__(
self,
body: "AioPikaSendableMessage",
*,
- headers: Optional["AnyDict"] = None,
- correlation_id: Optional[str] = None,
- message_id: Optional[str] = None,
+ timeout: "TimeoutType" = None,
mandatory: bool = True,
immediate: bool = False,
- timeout: "TimeoutType" = None,
- persist: Optional[bool] = None,
- priority: Optional[int] = None,
- message_type: Optional[str] = None,
- content_type: Optional[str] = None,
- expiration: Optional["DateType"] = None,
- content_encoding: Optional[str] = None,
+ **message_options: Unpack["MessageOptions"],
) -> None:
+ headers = message_options.pop("headers", {})
+ correlation_id = message_options.pop("correlation_id", None)
+
super().__init__(
body=body,
headers=headers,
correlation_id=correlation_id,
)
- self.message_id = message_id
- self.mandatory = mandatory
- self.immediate = immediate
- self.timeout = timeout
- self.persist = persist
- self.priority = priority
- self.message_type = message_type
- self.content_type = content_type
- self.expiration = expiration
- self.content_encoding = content_encoding
+ self.message_options = message_options
+ self.publish_options = {
+ "mandatory": mandatory,
+ "immediate": immediate,
+ "timeout": timeout,
+ }
@override
- def as_publish_kwargs(self) -> "AnyDict":
- return {
- **super().as_publish_kwargs(),
- "message_id": self.message_id,
- "mandatory": self.mandatory,
- "immediate": self.immediate,
- "timeout": self.timeout,
- "persist": self.persist,
- "priority": self.priority,
- "message_type": self.message_type,
- "content_type": self.content_type,
- "expiration": self.expiration,
- "content_encoding": self.content_encoding,
+ def as_publish_command(self) -> "RabbitPublishCommand":
+ return RabbitPublishCommand(
+ message=self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ # RMQ specific
+ routing_key="",
+ **self.publish_options,
+ **self.message_options,
+ )
+
+
+class RabbitPublishCommand(PublishCommand):
+ def __init__(
+ self,
+ message: "AioPikaSendableMessage",
+ *,
+ _publish_type: PublishType,
+ routing_key: str = "",
+ exchange: Optional[RabbitExchange] = None,
+ # publish kwargs
+ mandatory: bool = True,
+ immediate: bool = False,
+ timeout: "TimeoutType" = None,
+ correlation_id: Optional[str] = None,
+ **message_options: Unpack["MessageOptions"],
+ ) -> None:
+ headers = message_options.pop("headers", {})
+ reply_to = message_options.pop("reply_to", "")
+
+ super().__init__(
+ body=message,
+ destination=routing_key,
+ correlation_id=correlation_id,
+ headers=headers,
+ reply_to=reply_to,
+ _publish_type=_publish_type,
+ )
+ self.exchange = exchange or RabbitExchange()
+
+ self.timeout = timeout
+
+ self.message_options = message_options
+ self.publish_options = {
+ "mandatory": mandatory,
+ "immediate": immediate,
}
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "RabbitPublishCommand"],
+ ) -> "RabbitPublishCommand":
+ if isinstance(cmd, RabbitPublishCommand):
+ # NOTE: Should return a copy probably.
+ return cmd
+
+ return cls(
+ message=cmd.body,
+ routing_key=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
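+
+# NOTE: from_cmd() keeps body/routing/correlation metadata but resets the
+# RMQ-specific options (exchange, mandatory/immediate, timeout) to defaults.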
diff --git a/faststream/rabbit/router.py b/faststream/rabbit/router.py
index 8106d48078..8eaec725ef 100644
--- a/faststream/rabbit/router.py
+++ b/faststream/rabbit/router.py
@@ -8,18 +8,20 @@
BrokerRouter,
SubscriberRoute,
)
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.rabbit.broker.registrator import RabbitRegistrator
if TYPE_CHECKING:
from aio_pika.abc import DateType, HeadersType, TimeoutType
from aio_pika.message import IncomingMessage
- from broker.types import PublisherMiddleware
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
from faststream._internal.types import (
BrokerMiddleware,
CustomCallable,
+ PublisherMiddleware,
SubscriberMiddleware,
)
from faststream.rabbit.message import RabbitMessage
@@ -214,8 +216,8 @@ def __init__(
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -229,14 +231,10 @@ def __init__(
Iterable["SubscriberMiddleware[RabbitMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- Union[bool, int],
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -270,8 +268,7 @@ def __init__(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
@@ -297,9 +294,9 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers.",
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
diff --git a/faststream/rabbit/schemas/exchange.py b/faststream/rabbit/schemas/exchange.py
index a95f78bba8..af146f78b0 100644
--- a/faststream/rabbit/schemas/exchange.py
+++ b/faststream/rabbit/schemas/exchange.py
@@ -28,6 +28,14 @@ class RabbitExchange(NameRequired):
"type",
)
+ def __repr__(self) -> str:
+ if self.passive:
+ body = ""
+ else:
+ body = f", robust={self.robust}, durable={self.durable}, auto_delete={self.auto_delete})"
+
+ return f"{self.__class__.__name__}({self.name}, type={self.type}, routing_key='{self.routing}'{body})"
+
def __hash__(self) -> int:
"""Supports hash to store real objects in declarer."""
return sum(
diff --git a/faststream/rabbit/schemas/proto.py b/faststream/rabbit/schemas/proto.py
index 226840925e..2109772124 100644
--- a/faststream/rabbit/schemas/proto.py
+++ b/faststream/rabbit/schemas/proto.py
@@ -1,13 +1,40 @@
-from typing import Optional, Protocol
+from typing import TYPE_CHECKING, Any, Optional
-from faststream.rabbit.schemas.exchange import RabbitExchange
-from faststream.rabbit.schemas.queue import RabbitQueue
+if TYPE_CHECKING:
+ from faststream.rabbit.schemas.exchange import RabbitExchange
+ from faststream.rabbit.schemas.queue import RabbitQueue
-class BaseRMQInformation(Protocol):
- """Base class to store AsyncAPI RMQ bindings."""
+class BaseRMQInformation:
+ """Base class to store Specification RMQ bindings."""
virtual_host: str
- queue: RabbitQueue
- exchange: Optional[RabbitExchange]
+ queue: "RabbitQueue"
+ exchange: "RabbitExchange"
app_id: Optional[str]
+
+ def __init__(
+ self,
+ *,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ ) -> None:
+ self.queue = queue
+ self.exchange = exchange
+
+        # Set up later via _setup()
+ self.app_id = None
+ self.virtual_host = ""
+
+ def _setup(
+ self,
+ *,
+ app_id: Optional[str],
+ virtual_host: str,
+ **kwargs: Any,
+ ) -> None:
+ self.app_id = app_id
+ self.virtual_host = virtual_host
+
+        # Continue the cooperative _setup() chain to the next class in the MRO
+ super()._setup(**kwargs)
diff --git a/faststream/rabbit/schemas/queue.py b/faststream/rabbit/schemas/queue.py
index 6a26a64dba..35f9955750 100644
--- a/faststream/rabbit/schemas/queue.py
+++ b/faststream/rabbit/schemas/queue.py
@@ -34,6 +34,14 @@ class RabbitQueue(NameRequired):
"timeout",
)
+ def __repr__(self) -> str:
+ if self.passive:
+ body = ""
+ else:
+ body = f", robust={self.robust}, durable={self.durable}, exclusive={self.exclusive}, auto_delete={self.auto_delete})"
+
+ return f"{self.__class__.__name__}({self.name}, routing_key='{self.routing}'{body})"
+
def __hash__(self) -> int:
"""Supports hash to store real objects in declarer."""
return sum(
diff --git a/faststream/rabbit/subscriber/factory.py b/faststream/rabbit/subscriber/factory.py
index c69884503a..8a4475ec58 100644
--- a/faststream/rabbit/subscriber/factory.py
+++ b/faststream/rabbit/subscriber/factory.py
@@ -1,11 +1,13 @@
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING, Optional
-from faststream.rabbit.subscriber.subscriber import SpecificationSubscriber
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
+from faststream.rabbit.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
from aio_pika import IncomingMessage
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
from faststream._internal.types import BrokerMiddleware
@@ -18,26 +20,33 @@ def create_subscriber(
exchange: "RabbitExchange",
consume_args: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
+ ack_policy: "AckPolicy",
# AsyncAPI args
title_: Optional[str],
description_: Optional[str],
include_in_schema: bool,
) -> SpecificationSubscriber:
+ _validate_input_for_misconfigure()
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
+
return SpecificationSubscriber(
queue=queue,
exchange=exchange,
consume_args=consume_args,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
title_=title_,
description_=description_,
include_in_schema=include_in_schema,
)
+
+
+def _validate_input_for_misconfigure() -> None:
+ """Nothing to check yet."""
diff --git a/faststream/rabbit/subscriber/specified.py b/faststream/rabbit/subscriber/specified.py
new file mode 100644
index 0000000000..64ec790c47
--- /dev/null
+++ b/faststream/rabbit/subscriber/specified.py
@@ -0,0 +1,112 @@
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.subscriber.specified import (
+ SpecificationSubscriber as SpecificationSubscriberMixin,
+)
+from faststream.rabbit.schemas.proto import BaseRMQInformation as RMQSpecificationMixin
+from faststream.rabbit.subscriber.usecase import LogicSubscriber
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Message, Operation, SubscriberSpec
+from faststream.specification.schema.bindings import (
+ ChannelBinding,
+ OperationBinding,
+ amqp,
+)
+
+if TYPE_CHECKING:
+ from aio_pika import IncomingMessage
+ from fast_depends.dependencies import Dependant
+
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware
+ from faststream.middlewares import AckPolicy
+ from faststream.rabbit.schemas.exchange import RabbitExchange
+ from faststream.rabbit.schemas.queue import RabbitQueue
+
+
+class SpecificationSubscriber(
+ SpecificationSubscriberMixin,
+ RMQSpecificationMixin,
+ LogicSubscriber,
+):
+ """AsyncAPI-compatible Rabbit Subscriber class."""
+
+ def __init__(
+ self,
+ *,
+ queue: "RabbitQueue",
+ exchange: "RabbitExchange",
+ consume_args: Optional["AnyDict"],
+ # Subscriber args
+ ack_policy: "AckPolicy",
+ no_reply: bool,
+ broker_dependencies: Iterable["Dependant"],
+ broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ include_in_schema: bool,
+ ) -> None:
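+        # Bases are initialized explicitly (disjoint kwargs), mirroring
+        # SpecificationPublisher.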
+ SpecificationSubscriberMixin.__init__(
+ self,
+ title_=title_,
+ description_=description_,
+ include_in_schema=include_in_schema,
+ )
+
+ RMQSpecificationMixin.__init__(
+ self,
+ queue=queue,
+ exchange=exchange,
+ )
+
+ LogicSubscriber.__init__(
+ self,
+ queue=queue,
+ consume_args=consume_args,
+ ack_policy=ack_policy,
+ no_reply=no_reply,
+ broker_dependencies=broker_dependencies,
+ broker_middlewares=broker_middlewares,
+ )
+
+ def get_default_name(self) -> str:
+ return f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}"
+
+ def get_schema(self) -> dict[str, SubscriberSpec]:
+ payloads = self.get_payloads()
+
+ exchange_binding = amqp.Exchange.from_exchange(self.exchange)
+ queue_binding = amqp.Queue.from_queue(self.queue)
+
+ return {
+ self.name: SubscriberSpec(
+ description=self.description,
+ operation=Operation(
+ bindings=OperationBinding(
+ amqp=amqp.OperationBinding(
+ routing_key=self.queue.routing,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ack=True,
+ reply_to=None,
+ persist=None,
+ mandatory=None,
+ priority=None,
+ ),
+ ),
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads),
+ ),
+ ),
+ bindings=ChannelBinding(
+ amqp=amqp.ChannelBinding(
+ virtual_host=self.virtual_host,
+ queue=queue_binding,
+ exchange=exchange_binding,
+ ),
+ ),
+ ),
+ }
diff --git a/faststream/rabbit/subscriber/subscriber.py b/faststream/rabbit/subscriber/subscriber.py
deleted file mode 100644
index 23a73c2797..0000000000
--- a/faststream/rabbit/subscriber/subscriber.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from faststream.rabbit.subscriber.usecase import LogicSubscriber
-from faststream.specification.asyncapi.utils import resolve_payloads
-from faststream.specification.schema import Message, Operation, SubscriberSpec
-from faststream.specification.schema.bindings import (
- ChannelBinding,
- OperationBinding,
- amqp,
-)
-
-
-class SpecificationSubscriber(LogicSubscriber):
- """AsyncAPI-compatible Rabbit Subscriber class."""
-
- def get_default_name(self) -> str:
- return f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}"
-
- def get_schema(self) -> dict[str, SubscriberSpec]:
- payloads = self.get_payloads()
-
- exchange_binding = amqp.Exchange.from_exchange(self.exchange)
-
- return {
- self.name: SubscriberSpec(
- description=self.description,
- operation=Operation(
- bindings=OperationBinding(
- amqp=amqp.OperationBinding(
- cc=self.queue.routing,
- ),
- )
- if exchange_binding.is_respect_routing_key
- else None,
- message=Message(
- title=f"{self.name}:Message",
- payload=resolve_payloads(payloads),
- ),
- ),
- bindings=ChannelBinding(
- amqp=amqp.ChannelBinding(
- virtual_host=self.virtual_host,
- queue=amqp.Queue.from_queue(self.queue),
- exchange=exchange_binding,
- ),
- ),
- ),
- }
diff --git a/faststream/rabbit/subscriber/usecase.py b/faststream/rabbit/subscriber/usecase.py
index e7fc7e5205..df229a5cc4 100644
--- a/faststream/rabbit/subscriber/usecase.py
+++ b/faststream/rabbit/subscriber/usecase.py
@@ -1,29 +1,31 @@
+import asyncio
+import contextlib
from collections.abc import Iterable, Sequence
from typing import (
TYPE_CHECKING,
Any,
Optional,
- Union,
)
import anyio
from typing_extensions import override
-from faststream._internal.publisher.fake import FakePublisher
from faststream._internal.subscriber.usecase import SubscriberUsecase
from faststream._internal.subscriber.utils import process_msg
from faststream.exceptions import SetupError
from faststream.rabbit.parser import AioPikaParser
-from faststream.rabbit.schemas import BaseRMQInformation
+from faststream.rabbit.publisher.fake import RabbitFakePublisher
if TYPE_CHECKING:
from aio_pika import IncomingMessage, RobustQueue
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream._internal.basic_types import AnyDict, LoggerProto
- from faststream._internal.setup import SetupState
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
from faststream._internal.types import BrokerMiddleware, CustomCallable
from faststream.message import StreamMessage
+ from faststream.middlewares import AckPolicy
from faststream.rabbit.helpers.declarer import RabbitDeclarer
from faststream.rabbit.message import RabbitMessage
from faststream.rabbit.publisher.producer import AioPikaFastProducer
@@ -33,10 +35,7 @@
)
-class LogicSubscriber(
- SubscriberUsecase["IncomingMessage"],
- BaseRMQInformation,
-):
+class LogicSubscriber(SubscriberUsecase["IncomingMessage"]):
"""A class to handle logic for RabbitMQ message consumption."""
app_id: Optional[str]
@@ -50,34 +49,25 @@ def __init__(
self,
*,
queue: "RabbitQueue",
- exchange: "RabbitExchange",
consume_args: Optional["AnyDict"],
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: Union[bool, int],
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"],
- # AsyncAPI args
- title_: Optional[str],
- description_: Optional[str],
- include_in_schema: bool,
) -> None:
+ self.queue = queue
+
parser = AioPikaParser(pattern=queue.path_regex)
super().__init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
- # AsyncAPI
- title_=title_,
- description_=description_,
- include_in_schema=include_in_schema,
)
self.consume_args = consume_args or {}
@@ -85,40 +75,25 @@ def __init__(
self._consumer_tag = None
self._queue_obj = None
- # BaseRMQInformation
- self.queue = queue
- self.exchange = exchange
# Setup it later
- self.app_id = None
- self.virtual_host = ""
self.declarer = None
@override
def _setup( # type: ignore[override]
self,
*,
- app_id: Optional[str],
- virtual_host: str,
declarer: "RabbitDeclarer",
# basic args
- logger: Optional["LoggerProto"],
- producer: Optional["AioPikaFastProducer"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- state: "SetupState",
+ state: "BrokerState",
) -> None:
- self.app_id = app_id
- self.virtual_host = virtual_host
self.declarer = declarer
super()._setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
@@ -184,7 +159,10 @@ async def get_one(
sleep_interval = timeout / 10
raw_message: Optional[IncomingMessage] = None
- with anyio.move_on_after(timeout):
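+        # `move_on_after` cancels a pending `queue.get()` on timeout; the
+        # CancelledError is suppressed so `get_one` returns None instead of raising.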
+ with (
+ contextlib.suppress(asyncio.exceptions.CancelledError),
+ anyio.move_on_after(timeout),
+ ):
while ( # noqa: ASYNC110
raw_message := await self._queue_obj.get(
fail=False,
@@ -194,9 +172,13 @@ async def get_one(
) is None:
await anyio.sleep(sleep_interval)
+ context = self._state.get().di_state.context
+
msg: Optional[RabbitMessage] = await process_msg( # type: ignore[assignment]
msg=raw_message,
- middlewares=self._broker_middlewares,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -205,17 +187,12 @@ async def get_one(
def _make_response_publisher(
self,
message: "StreamMessage[Any]",
- ) -> Sequence["FakePublisher"]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "routing_key": message.reply_to,
- "app_id": self.app_id,
- },
+ RabbitFakePublisher(
+ self._state.get().producer,
+ routing_key=message.reply_to,
+ app_id=self.app_id,
),
)
diff --git a/faststream/rabbit/testing.py b/faststream/rabbit/testing.py
index 5831fc903c..97b5619184 100644
--- a/faststream/rabbit/testing.py
+++ b/faststream/rabbit/testing.py
@@ -1,4 +1,4 @@
-from collections.abc import Generator, Mapping
+from collections.abc import Generator, Iterator, Mapping
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Optional, Union
from unittest import mock
@@ -18,7 +18,6 @@
from faststream.rabbit.broker.broker import RabbitBroker
from faststream.rabbit.parser import AioPikaParser
from faststream.rabbit.publisher.producer import AioPikaFastProducer
-from faststream.rabbit.publisher.publisher import SpecificationPublisher
from faststream.rabbit.schemas import (
ExchangeType,
RabbitExchange,
@@ -26,8 +25,10 @@
)
if TYPE_CHECKING:
- from aio_pika.abc import DateType, HeadersType, TimeoutType
+ from aio_pika.abc import DateType, HeadersType
+ from faststream.rabbit.publisher.specified import SpecificationPublisher
+ from faststream.rabbit.response import RabbitPublishCommand
from faststream.rabbit.subscriber.usecase import LogicSubscriber
from faststream.rabbit.types import AioPikaSendableMessage
@@ -39,7 +40,7 @@ class TestRabbitBroker(TestBroker[RabbitBroker]):
"""A class to test RabbitMQ brokers."""
@contextmanager
- def _patch_broker(self, broker: RabbitBroker) -> Generator[None, None, None]:
+ def _patch_broker(self, broker: "RabbitBroker") -> Generator[None, None, None]:
with (
mock.patch.object(
broker,
@@ -55,14 +56,21 @@ def _patch_broker(self, broker: RabbitBroker) -> Generator[None, None, None]:
):
yield
+ @contextmanager
+ def _patch_producer(self, broker: RabbitBroker) -> Iterator[None]:
+ old_producer = broker._state.get().producer
+ broker._state.patch_value(producer=FakeProducer(broker))
+        try:
+            yield
+        finally:
+            # Restore the real producer even if the test body raises.
+            broker._state.patch_value(producer=old_producer)
+
@staticmethod
- async def _fake_connect(broker: RabbitBroker, *args: Any, **kwargs: Any) -> None:
- broker._producer = FakeProducer(broker)
+ async def _fake_connect(broker: "RabbitBroker", *args: Any, **kwargs: Any) -> None:
+ pass
@staticmethod
def create_publisher_fake_subscriber(
- broker: RabbitBroker,
- publisher: SpecificationPublisher,
+ broker: "RabbitBroker",
+ publisher: "SpecificationPublisher",
) -> tuple["LogicSubscriber", bool]:
sub: Optional[LogicSubscriber] = None
for handler in broker._subscribers:
@@ -131,6 +139,7 @@ def build_message(
routing = routing_key or que.routing
+ correlation_id = correlation_id or gen_cor_id()
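+        # The generated correlation_id also serves as the default message_id below.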
msg = AioPikaParser.encode_message(
message=message,
persist=persist,
@@ -141,7 +150,7 @@ def build_message(
priority=priority,
correlation_id=correlation_id,
expiration=expiration,
- message_id=message_id or gen_cor_id(),
+ message_id=message_id or correlation_id,
timestamp=timestamp,
message_type=message_type,
user_id=user_id,
@@ -194,47 +203,17 @@ def __init__(self, broker: RabbitBroker) -> None:
@override
async def publish( # type: ignore[override]
self,
- message: "AioPikaSendableMessage",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: "TimeoutType" = None,
- persist: bool = False,
- reply_to: Optional[str] = None,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
+ cmd: "RabbitPublishCommand",
) -> None:
"""Publish a message to a RabbitMQ queue or exchange."""
- exch = RabbitExchange.validate(exchange)
-
incoming = build_message(
- message=message,
- exchange=exch,
- routing_key=routing_key,
- reply_to=reply_to,
- app_id=app_id,
- user_id=user_id,
- message_type=message_type,
- headers=headers,
- persist=persist,
- message_id=message_id,
- priority=priority,
- content_encoding=content_encoding,
- content_type=content_type,
- correlation_id=correlation_id,
- expiration=expiration,
- timestamp=timestamp,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ **cmd.message_options,
)
for handler in self.broker._subscribers: # pragma: no branch
@@ -242,52 +221,23 @@ async def publish( # type: ignore[override]
handler,
incoming.routing_key,
incoming.headers,
- exch,
+ cmd.exchange,
):
await self._execute_handler(incoming, handler)
@override
async def request( # type: ignore[override]
self,
- message: "AioPikaSendableMessage" = "",
- exchange: Union["RabbitExchange", str, None] = None,
- *,
- correlation_id: str = "",
- routing_key: str = "",
- mandatory: bool = True,
- immediate: bool = False,
- timeout: Optional[float] = None,
- persist: bool = False,
- headers: Optional["HeadersType"] = None,
- content_type: Optional[str] = None,
- content_encoding: Optional[str] = None,
- priority: Optional[int] = None,
- expiration: Optional["DateType"] = None,
- message_id: Optional[str] = None,
- timestamp: Optional["DateType"] = None,
- message_type: Optional[str] = None,
- user_id: Optional[str] = None,
- app_id: Optional[str] = None,
+ cmd: "RabbitPublishCommand",
) -> "PatchedMessage":
"""Publish a message to a RabbitMQ queue or exchange."""
- exch = RabbitExchange.validate(exchange)
-
incoming = build_message(
- message=message,
- exchange=exch,
- routing_key=routing_key,
- app_id=app_id,
- user_id=user_id,
- message_type=message_type,
- headers=headers,
- persist=persist,
- message_id=message_id,
- priority=priority,
- content_encoding=content_encoding,
- content_type=content_type,
- correlation_id=correlation_id,
- expiration=expiration,
- timestamp=timestamp,
+ message=cmd.body,
+ exchange=cmd.exchange,
+ routing_key=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ **cmd.message_options,
)
for handler in self.broker._subscribers: # pragma: no branch
@@ -295,9 +245,9 @@ async def request( # type: ignore[override]
handler,
incoming.routing_key,
incoming.headers,
- exch,
+ cmd.exchange,
):
- with anyio.fail_after(timeout):
+ with anyio.fail_after(cmd.timeout):
return await self._execute_handler(incoming, handler)
raise SubscriberNotFound
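
The testing changes above collapse FakeProducer's long keyword lists into a single command object, so `publish()` and `request()` now mirror the real producer's interface. A minimal standalone sketch of the pattern (illustrative names only, not the faststream API itself):

```python
# Sketch of the "command object" refactor: one structured argument replaces
# ~18 keyword arguments, so middlewares and test fakes can inspect or rewrite
# the whole command in one place. All names here are hypothetical.
from dataclasses import dataclass, field
from typing import Any, Optional


@dataclass
class PublishCommand:
    body: Any
    destination: str = ""
    correlation_id: Optional[str] = None
    reply_to: str = ""
    headers: dict = field(default_factory=dict)
    timeout: Optional[float] = 30.0


class FakeProducerSketch:
    async def publish(self, cmd: PublishCommand) -> None:
        # The fake only needs the command; no argument re-plumbing required.
        print(f"publish {cmd.body!r} -> {cmd.destination!r}")
```
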
diff --git a/faststream/redis/annotations.py b/faststream/redis/annotations.py
index 4bbbb9b324..b4db951b54 100644
--- a/faststream/redis/annotations.py
+++ b/faststream/redis/annotations.py
@@ -4,14 +4,12 @@
from faststream._internal.context import Context
from faststream.annotations import ContextRepo, Logger
-from faststream.params import NoCast
from faststream.redis.broker.broker import RedisBroker as RB
from faststream.redis.message import UnifyRedisMessage
__all__ = (
"ContextRepo",
"Logger",
- "NoCast",
"Redis",
"RedisBroker",
"RedisMessage",
diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py
index af58975b9b..bae4bd9de3 100644
--- a/faststream/redis/broker/broker.py
+++ b/faststream/redis/broker/broker.py
@@ -1,6 +1,5 @@
import logging
from collections.abc import Iterable, Mapping
-from functools import partial
from typing import (
TYPE_CHECKING,
Annotated,
@@ -27,11 +26,12 @@
from faststream.__about__ import __version__
from faststream._internal.broker.broker import BrokerUsecase
from faststream._internal.constants import EMPTY
-from faststream.exceptions import NOT_CONNECTED_YET
from faststream.message import gen_cor_id
from faststream.redis.message import UnifyRedisDict
from faststream.redis.publisher.producer import RedisFastProducer
+from faststream.redis.response import RedisPublishCommand
from faststream.redis.security import parse_security
+from faststream.response.publish_type import PublishType
from .logging import make_redis_logger_state
from .registrator import RedisRegistrator
@@ -39,13 +39,13 @@
if TYPE_CHECKING:
from types import TracebackType
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
+ from fast_depends.library.serializer import SerializerProto
from redis.asyncio.connection import BaseParser
from typing_extensions import TypedDict, Unpack
from faststream._internal.basic_types import (
AnyDict,
- AsyncFunc,
Decorator,
LoggerProto,
SendableMessage,
@@ -56,7 +56,7 @@
)
from faststream.redis.message import BaseMessage, RedisMessage
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
class RedisInitKwargs(TypedDict, total=False):
host: Optional[str]
@@ -90,7 +90,7 @@ class RedisBroker(
"""Redis broker."""
url: str
- _producer: Optional[RedisFastProducer]
+ _producer: "RedisFastProducer"
def __init__(
self,
@@ -131,7 +131,7 @@ def __init__(
Doc("Custom parser object."),
] = None,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc("Dependencies to apply to all broker subscribers."),
] = (),
middlewares: Annotated[
@@ -183,10 +183,7 @@ def __init__(
bool,
Doc("Whether to use FastDepends or not."),
] = True,
- validate: Annotated[
- bool,
- Doc("Whether to cast types using Pydantic validation."),
- ] = True,
+ serializer: Optional["SerializerProto"] = EMPTY,
_get_dependant: Annotated[
Optional[Callable[..., Any]],
Doc("Custom library dependant generator callback."),
@@ -196,8 +193,6 @@ def __init__(
Doc("Any custom decorator to apply to wrapped functions."),
] = (),
) -> None:
- self._producer = None
-
if specification_url is None:
specification_url = url
@@ -248,11 +243,18 @@ def __init__(
),
# FastDepends args
apply_types=apply_types,
- validate=validate,
+ serializer=serializer,
_get_dependant=_get_dependant,
_call_decorators=_call_decorators,
)
+ self._state.patch_value(
+ producer=RedisFastProducer(
+ parser=self._parser,
+ decoder=self._decoder,
+ )
+ )
+
@override
async def connect( # type: ignore[override]
self,
@@ -331,11 +333,7 @@ async def _connect( # type: ignore[override]
)
client: Redis[bytes] = Redis.from_pool(pool) # type: ignore[attr-defined]
- self._producer = RedisFastProducer(
- connection=client,
- parser=self._parser,
- decoder=self._decoder,
- )
+ self._producer.connect(client)
return client
async def close(
@@ -346,6 +344,8 @@ async def close(
) -> None:
await super().close(exc_type, exc_val, exc_tb)
+ self._producer.disconnect()
+
if self._connection is not None:
await self._connection.aclose() # type: ignore[attr-defined]
self._connection = None
@@ -404,7 +404,7 @@ async def publish( # type: ignore[override]
"Remove eldest message if maxlen exceeded.",
),
] = None,
- ) -> None:
+ ) -> int:
"""Publish message directly.
This method allows you to publish a message in a non-AsyncAPI-documented way. You can use it in other frameworks
@@ -412,9 +412,8 @@ async def publish( # type: ignore[override]
Please use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead for regular publishing.
"""
- await super().publish(
+ cmd = RedisPublishCommand(
message,
- producer=self._producer,
correlation_id=correlation_id or gen_cor_id(),
channel=channel,
list=list,
@@ -422,7 +421,9 @@ async def publish( # type: ignore[override]
maxlen=maxlen,
reply_to=reply_to,
headers=headers,
+ _publish_type=PublishType.PUBLISH,
)
+ return await super()._basic_publish(cmd, producer=self._producer)
@override
async def request( # type: ignore[override]
@@ -437,9 +438,8 @@ async def request( # type: ignore[override]
headers: Optional["AnyDict"] = None,
timeout: Optional[float] = 30.0,
) -> "RedisMessage":
- msg: RedisMessage = await super().request(
+ cmd = RedisPublishCommand(
message,
- producer=self._producer,
correlation_id=correlation_id or gen_cor_id(),
channel=channel,
list=list,
@@ -447,12 +447,14 @@ async def request( # type: ignore[override]
maxlen=maxlen,
headers=headers,
timeout=timeout,
+ _publish_type=PublishType.REQUEST,
)
+ msg: RedisMessage = await super()._basic_request(cmd, producer=self._producer)
return msg
async def publish_batch(
self,
- *msgs: Annotated[
+ *messages: Annotated[
"SendableMessage",
Doc("Messages bodies to send."),
],
@@ -467,23 +469,27 @@ async def publish_batch(
"**correlation_id** is a useful option to trace messages.",
),
] = None,
- ) -> None:
+ reply_to: Annotated[
+ str,
+ Doc("Reply message destination PubSub object name."),
+ ] = "",
+ headers: Annotated[
+ Optional["AnyDict"],
+ Doc("Message headers to store metainformation."),
+ ] = None,
+ ) -> int:
"""Publish multiple messages to Redis List by one request."""
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- correlation_id = correlation_id or gen_cor_id()
-
- call: AsyncFunc = self._producer.publish_batch
-
- for m in self._middlewares:
- call = partial(m(None).publish_scope, call)
-
- await call(
- *msgs,
+ cmd = RedisPublishCommand(
+ *messages,
list=list,
- correlation_id=correlation_id,
+ reply_to=reply_to,
+ headers=headers,
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
)
+ return await self._basic_publish_batch(cmd, producer=self._producer)
+
@override
async def ping(self, timeout: Optional[float]) -> bool:
sleep_time = (timeout or 10) / 10
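
With this refactor, `RedisBroker.publish()` builds a `RedisPublishCommand` internally and returns the underlying client result instead of `None`. A usage sketch of the public API, assuming a reachable Redis instance and the signatures shown above:

```python
import asyncio

from faststream.redis import RedisBroker


async def main() -> None:
    broker = RedisBroker("redis://localhost:6379")
    await broker.connect()

    # Exactly one of channel / list / stream must be given, as before; the
    # call now returns the client result (e.g. the list length for RPUSH).
    result = await broker.publish("hi", list="my-list")
    print(result)

    await broker.close()


asyncio.run(main())
```
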
diff --git a/faststream/redis/broker/logging.py b/faststream/redis/broker/logging.py
index fba197c5da..6fe8f718ae 100644
--- a/faststream/redis/broker/logging.py
+++ b/faststream/redis/broker/logging.py
@@ -1,14 +1,16 @@
+import logging
from functools import partial
from typing import TYPE_CHECKING, Optional
from faststream._internal.log.logging import get_broker_logger
-from faststream._internal.setup.logger import (
+from faststream._internal.state.logger import (
DefaultLoggerStorage,
make_logger_state,
)
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, LoggerProto
+ from faststream._internal.context import ContextRepo
class RedisParamsStorage(DefaultLoggerStorage):
@@ -20,6 +22,11 @@ def __init__(
self._max_channel_name = 4
+ self.logger_log_level = logging.INFO
+
+ def set_level(self, level: int) -> None:
+ self.logger_log_level = level
+
def setup_log_contest(self, params: "AnyDict") -> None:
self._max_channel_name = max(
(
@@ -28,7 +35,7 @@ def setup_log_contest(self, params: "AnyDict") -> None:
),
)
- def get_logger(self) -> Optional["LoggerProto"]:
+ def get_logger(self, *, context: "ContextRepo") -> Optional["LoggerProto"]:
message_id_ln = 10
# TODO: generate unique logger names to not share between brokers
@@ -45,10 +52,12 @@ def get_logger(self) -> Optional["LoggerProto"]:
f"%(message_id)-{message_id_ln}s "
"- %(message)s"
),
+ context=context,
+ log_level=self.logger_log_level,
)
make_redis_logger_state = partial(
make_logger_state,
- default_storag_cls=RedisParamsStorage,
+ default_storage_cls=RedisParamsStorage,
)
diff --git a/faststream/redis/broker/registrator.py b/faststream/redis/broker/registrator.py
index da6759f03a..10cf4afe98 100644
--- a/faststream/redis/broker/registrator.py
+++ b/faststream/redis/broker/registrator.py
@@ -4,13 +4,16 @@
from typing_extensions import Doc, override
from faststream._internal.broker.abc_broker import ABCBroker
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.redis.message import UnifyRedisDict
-from faststream.redis.publisher.publisher import SpecificationPublisher
+from faststream.redis.publisher.factory import create_publisher
+from faststream.redis.publisher.specified import SpecificationPublisher
from faststream.redis.subscriber.factory import SubsciberType, create_subscriber
-from faststream.redis.subscriber.subscriber import SpecificationSubscriber
+from faststream.redis.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict
from faststream._internal.types import (
@@ -19,7 +22,7 @@
SubscriberMiddleware,
)
from faststream.redis.message import UnifyRedisMessage
- from faststream.redis.publisher.publisher import PublisherType
+ from faststream.redis.publisher.specified import PublisherType
from faststream.redis.schemas import ListSub, PubSub, StreamSub
@@ -47,8 +50,8 @@ def subscriber( # type: ignore[override]
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -64,14 +67,10 @@ def subscriber( # type: ignore[override]
Iterable["SubscriberMiddleware[UnifyRedisMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -103,10 +102,9 @@ def subscriber( # type: ignore[override]
list=list,
stream=stream,
# subscriber args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
broker_dependencies=self._dependencies,
# AsyncAPI
title_=title,
@@ -174,7 +172,7 @@ def publisher( # type: ignore[override]
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> SpecificationPublisher:
+ ) -> "SpecificationPublisher":
"""Creates long-living and AsyncAPI-documented publisher object.
You can use it as a handler decorator (handler should be decorated by `@broker.subscriber(...)` too) - `@broker.publisher(...)`.
@@ -185,14 +183,14 @@ def publisher( # type: ignore[override]
return cast(
SpecificationPublisher,
super().publisher(
- SpecificationPublisher.create(
+ create_publisher(
channel=channel,
list=list,
stream=stream,
headers=headers,
reply_to=reply_to,
# Specific
- broker_middlewares=self._middlewares,
+ broker_middlewares=self.middlewares,
middlewares=middlewares,
# AsyncAPI
title_=title,
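
Across the registrator, router, and FastAPI integration, the boolean `retry`/`no_ack` pair is replaced by a single `ack_policy` value. A migration sketch, assuming `AckPolicy.DO_NOTHING` is the equivalent of the removed `no_ack=True` (the mapping encoded in `stream_sub.py` later in this diff):

```python
from faststream.middlewares import AckPolicy
from faststream.redis import RedisBroker

broker = RedisBroker()

# Before (removed):  @broker.subscriber("updates", no_ack=True)
# After:
@broker.subscriber("updates", ack_policy=AckPolicy.DO_NOTHING)
async def handler(msg: str) -> None:
    ...
```
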
diff --git a/faststream/redis/fastapi/__init__.py b/faststream/redis/fastapi/__init__.py
index da6dfd1c85..117c03aae2 100644
--- a/faststream/redis/fastapi/__init__.py
+++ b/faststream/redis/fastapi/__init__.py
@@ -4,9 +4,10 @@
from faststream._internal.fastapi.context import Context, ContextRepo, Logger
from faststream.redis.broker.broker import RedisBroker as RB
-from faststream.redis.fastapi.fastapi import RedisRouter
from faststream.redis.message import BaseMessage as RM # noqa: N814
+from .fastapi import RedisRouter
+
__all__ = (
"Context",
"ContextRepo",
diff --git a/faststream/redis/fastapi/fastapi.py b/faststream/redis/fastapi/fastapi.py
index f1d5473990..25e2d6b186 100644
--- a/faststream/redis/fastapi/fastapi.py
+++ b/faststream/redis/fastapi/fastapi.py
@@ -25,11 +25,11 @@
from faststream.__about__ import SERVICE_NAME
from faststream._internal.constants import EMPTY
from faststream._internal.fastapi.router import StreamRouter
+from faststream.middlewares import AckPolicy
from faststream.redis.broker.broker import RedisBroker as RB
from faststream.redis.message import UnifyRedisDict
-from faststream.redis.publisher.publisher import SpecificationPublisher
from faststream.redis.schemas import ListSub, PubSub, StreamSub
-from faststream.redis.subscriber.subscriber import SpecificationSubscriber
+from faststream.redis.subscriber.specified import SpecificationSubscriber
if TYPE_CHECKING:
from enum import Enum
@@ -48,8 +48,9 @@
SubscriberMiddleware,
)
from faststream.redis.message import UnifyRedisMessage
+ from faststream.redis.publisher.specified import SpecificationPublisher
from faststream.security import BaseSecurity
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import Tag, TagDict
class RedisRouter(StreamRouter[UnifyRedisDict]):
@@ -461,14 +462,10 @@ def subscriber( # type: ignore[override]
Iterable["SubscriberMiddleware[UnifyRedisMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -625,8 +622,7 @@ def subscriber( # type: ignore[override]
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
@@ -692,7 +688,7 @@ def publisher(
bool,
Doc("Whetever to include operation in AsyncAPI schema or not."),
] = True,
- ) -> SpecificationPublisher:
+ ) -> "SpecificationPublisher":
return self.broker.publisher(
channel,
list=list,
diff --git a/faststream/redis/helpers/__init__.py b/faststream/redis/helpers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/faststream/redis/helpers/state.py b/faststream/redis/helpers/state.py
new file mode 100644
index 0000000000..1d3d2a6bad
--- /dev/null
+++ b/faststream/redis/helpers/state.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Protocol
+
+from faststream.exceptions import IncorrectState
+
+if TYPE_CHECKING:
+ from redis.asyncio.client import Redis
+
+
+class ConnectionState(Protocol):
+ client: "Redis[bytes]"
+
+
+class EmptyConnectionState(ConnectionState):
+ __slots__ = ()
+
+ error_msg = "You should connect broker first."
+
+ @property
+ def client(self) -> "Redis[bytes]":
+ raise IncorrectState(self.error_msg)
+
+
+class ConnectedState(ConnectionState):
+ __slots__ = ("client",)
+
+ def __init__(self, client: "Redis[bytes]") -> None:
+ self.client = client
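
The new state helpers replace `Optional` connections with explicit state objects, so touching an unconnected client fails loudly with `IncorrectState` instead of an `AttributeError` on `None`. A minimal sketch using the classes defined above:

```python
from redis.asyncio import Redis

from faststream.exceptions import IncorrectState
from faststream.redis.helpers.state import ConnectedState, EmptyConnectionState

state = EmptyConnectionState()
try:
    state.client  # raises instead of returning None
except IncorrectState as e:
    print(e)

# After broker.connect(), the producer swaps in a connected state:
state = ConnectedState(Redis.from_url("redis://localhost:6379"))
print(state.client)  # a usable Redis client
```
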
diff --git a/faststream/redis/message.py b/faststream/redis/message.py
index b13dad9308..b4b0d443d4 100644
--- a/faststream/redis/message.py
+++ b/faststream/redis/message.py
@@ -61,20 +61,20 @@ class RedisMessage(BrokerStreamMessage[PubSubMessage]):
pass
-class ListMessage(TypedDict):
+class _ListMessage(TypedDict):
"""A class to represent an Abstract List message."""
channel: str
-class DefaultListMessage(ListMessage):
+class DefaultListMessage(_ListMessage):
"""A class to represent a single List message."""
type: Literal["list"]
data: bytes
-class BatchListMessage(ListMessage):
+class BatchListMessage(_ListMessage):
"""A class to represent a List messages batch."""
type: Literal["blist"]
@@ -95,22 +95,22 @@ class RedisBatchListMessage(BrokerStreamMessage[BatchListMessage]):
bDATA_KEY = DATA_KEY.encode() # noqa: N816
-class StreamMessage(TypedDict):
+class _StreamMessage(TypedDict):
channel: str
message_ids: list[bytes]
-class DefaultStreamMessage(StreamMessage):
+class DefaultStreamMessage(_StreamMessage):
type: Literal["stream"]
data: dict[bytes, bytes]
-class BatchStreamMessage(StreamMessage):
+class BatchStreamMessage(_StreamMessage):
type: Literal["bstream"]
data: list[dict[bytes, bytes]]
-_StreamMsgType = TypeVar("_StreamMsgType", bound=StreamMessage)
+_StreamMsgType = TypeVar("_StreamMsgType", bound=_StreamMessage)
class _RedisStreamMessageMixin(BrokerStreamMessage[_StreamMsgType]):
@@ -124,7 +124,7 @@ async def ack(
ids = self.raw_message["message_ids"]
channel = self.raw_message["channel"]
await redis.xack(channel, group, *ids) # type: ignore[no-untyped-call]
- await super().ack()
+ await super().ack()
@override
async def nack(
diff --git a/faststream/redis/opentelemetry/provider.py b/faststream/redis/opentelemetry/provider.py
index ea8c17462c..d818864973 100644
--- a/faststream/redis/opentelemetry/provider.py
+++ b/faststream/redis/opentelemetry/provider.py
@@ -1,4 +1,3 @@
-from collections.abc import Sized
from typing import TYPE_CHECKING, cast
from opentelemetry.semconv.trace import SpanAttributes
@@ -9,6 +8,7 @@
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict
from faststream.message import StreamMessage
+ from faststream.redis.response import RedisPublishCommand
class RedisTelemetrySettingsProvider(TelemetrySettingsProvider["AnyDict"]):
@@ -31,7 +31,7 @@ def get_consume_attrs_from_message(
if cast(str, msg.raw_message.get("type", "")).startswith("b"):
attrs[SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT] = len(
- cast(Sized, msg._decoded_body),
+ msg.raw_message["data"]
)
return attrs
@@ -42,21 +42,21 @@ def get_consume_destination_name(
) -> str:
return self._get_destination(msg.raw_message)
- def get_publish_attrs_from_kwargs(
+ def get_publish_attrs_from_cmd(
self,
- kwargs: "AnyDict",
+ cmd: "RedisPublishCommand",
) -> "AnyDict":
return {
SpanAttributes.MESSAGING_SYSTEM: self.messaging_system,
- SpanAttributes.MESSAGING_DESTINATION_NAME: self._get_destination(kwargs),
- SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"],
+ SpanAttributes.MESSAGING_DESTINATION_NAME: cmd.destination,
+ SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: cmd.correlation_id,
}
def get_publish_destination_name(
self,
- kwargs: "AnyDict",
+ cmd: "RedisPublishCommand",
) -> str:
- return self._get_destination(kwargs)
+ return cmd.destination
@staticmethod
def _get_destination(kwargs: "AnyDict") -> str:
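
The telemetry provider now reads publish attributes straight off the command object instead of a kwargs dict. A sketch, assuming the provider's no-argument constructor:

```python
from faststream.redis.opentelemetry.provider import RedisTelemetrySettingsProvider
from faststream.redis.response import RedisPublishCommand
from faststream.response.publish_type import PublishType

provider = RedisTelemetrySettingsProvider()
cmd = RedisPublishCommand(
    "body",
    channel="updates",
    correlation_id="cid-1",
    _publish_type=PublishType.PUBLISH,
)

# messaging system, destination name, and conversation id come from the cmd
print(provider.get_publish_attrs_from_cmd(cmd))
print(provider.get_publish_destination_name(cmd))  # "updates"
```
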
diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py
index 1d33c0e9f3..382d7be15b 100644
--- a/faststream/redis/parser.py
+++ b/faststream/redis/parser.py
@@ -200,7 +200,7 @@ def _parse_data(
dump_json(body),
{
**first_msg_headers,
- "content-type": ContentTypes.json.value,
+ "content-type": ContentTypes.JSON.value,
},
batch_headers,
)
@@ -239,7 +239,7 @@ def _parse_data(
dump_json(body),
{
**first_msg_headers,
- "content-type": ContentTypes.json.value,
+ "content-type": ContentTypes.JSON.value,
},
batch_headers,
)
diff --git a/faststream/redis/prometheus/__init__.py b/faststream/redis/prometheus/__init__.py
new file mode 100644
index 0000000000..84c831aabb
--- /dev/null
+++ b/faststream/redis/prometheus/__init__.py
@@ -0,0 +1,3 @@
+from faststream.redis.prometheus.middleware import RedisPrometheusMiddleware
+
+__all__ = ("RedisPrometheusMiddleware",)
diff --git a/faststream/redis/prometheus/middleware.py b/faststream/redis/prometheus/middleware.py
new file mode 100644
index 0000000000..8c62c745fd
--- /dev/null
+++ b/faststream/redis/prometheus/middleware.py
@@ -0,0 +1,27 @@
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Optional
+
+from faststream._internal.constants import EMPTY
+from faststream.prometheus.middleware import PrometheusMiddleware
+from faststream.redis.prometheus.provider import settings_provider_factory
+
+if TYPE_CHECKING:
+ from prometheus_client import CollectorRegistry
+
+
+class RedisPrometheusMiddleware(PrometheusMiddleware):
+ def __init__(
+ self,
+ *,
+ registry: "CollectorRegistry",
+ app_name: str = EMPTY,
+ metrics_prefix: str = "faststream",
+ received_messages_size_buckets: Optional[Sequence[float]] = None,
+ ) -> None:
+ super().__init__(
+ settings_provider_factory=settings_provider_factory,
+ registry=registry,
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
diff --git a/faststream/redis/prometheus/provider.py b/faststream/redis/prometheus/provider.py
new file mode 100644
index 0000000000..533905e6a8
--- /dev/null
+++ b/faststream/redis/prometheus/provider.py
@@ -0,0 +1,63 @@
+from typing import TYPE_CHECKING, Optional, Union
+
+from faststream.prometheus import (
+ ConsumeAttrs,
+ MetricsSettingsProvider,
+)
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+ from faststream.message.message import StreamMessage
+ from faststream.redis.response import RedisPublishCommand
+
+
+class BaseRedisMetricsSettingsProvider(MetricsSettingsProvider["AnyDict"]):
+ __slots__ = ("messaging_system",)
+
+ def __init__(self) -> None:
+ self.messaging_system = "redis"
+
+ def get_publish_destination_name_from_cmd(
+ self,
+ cmd: "RedisPublishCommand",
+ ) -> str:
+ return cmd.destination
+
+
+class RedisMetricsSettingsProvider(BaseRedisMetricsSettingsProvider):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[AnyDict]",
+ ) -> ConsumeAttrs:
+ return {
+ "destination_name": _get_destination(msg.raw_message),
+ "message_size": len(msg.body),
+ "messages_count": 1,
+ }
+
+
+class BatchRedisMetricsSettingsProvider(BaseRedisMetricsSettingsProvider):
+ def get_consume_attrs_from_message(
+ self,
+ msg: "StreamMessage[AnyDict]",
+ ) -> ConsumeAttrs:
+ return {
+ "destination_name": _get_destination(msg.raw_message),
+ "message_size": len(msg.body),
+ "messages_count": len(msg.raw_message["data"]),
+ }
+
+
+def settings_provider_factory(
+ msg: Optional["AnyDict"],
+) -> Union[
+ RedisMetricsSettingsProvider,
+ BatchRedisMetricsSettingsProvider,
+]:
+ if msg is not None and msg.get("type", "").startswith("b"):
+ return BatchRedisMetricsSettingsProvider()
+ return RedisMetricsSettingsProvider()
+
+
+def _get_destination(kwargs: "AnyDict") -> str:
+ return kwargs.get("channel") or kwargs.get("list") or kwargs.get("stream") or ""
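
`settings_provider_factory` picks the batch provider whenever the raw message type starts with `"b"` (covering the `"blist"` and `"bstream"` types defined in `message.py`), and falls back to the single-message provider otherwise:

```python
from faststream.redis.prometheus.provider import settings_provider_factory

single = settings_provider_factory({"type": "list", "channel": "jobs"})
batch = settings_provider_factory({"type": "blist", "channel": "jobs"})
default = settings_provider_factory(None)  # no message yet -> single provider

print(type(single).__name__)   # RedisMetricsSettingsProvider
print(type(batch).__name__)    # BatchRedisMetricsSettingsProvider
print(type(default).__name__)  # RedisMetricsSettingsProvider
```
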
diff --git a/faststream/redis/publisher/factory.py b/faststream/redis/publisher/factory.py
new file mode 100644
index 0000000000..2174a0b4c8
--- /dev/null
+++ b/faststream/redis/publisher/factory.py
@@ -0,0 +1,107 @@
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from typing_extensions import TypeAlias
+
+from faststream.exceptions import SetupError
+from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub
+from faststream.redis.schemas.proto import validate_options
+
+from .specified import (
+ SpecificationChannelPublisher,
+ SpecificationListBatchPublisher,
+ SpecificationListPublisher,
+ SpecificationStreamPublisher,
+)
+
+if TYPE_CHECKING:
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
+ from faststream.redis.message import UnifyRedisDict
+
+
+PublisherType: TypeAlias = Union[
+ "SpecificationChannelPublisher",
+ "SpecificationStreamPublisher",
+ "SpecificationListPublisher",
+ "SpecificationListBatchPublisher",
+]
+
+
+def create_publisher(
+ *,
+ channel: Union["PubSub", str, None],
+ list: Union["ListSub", str, None],
+ stream: Union["StreamSub", str, None],
+ headers: Optional["AnyDict"],
+ reply_to: str,
+ broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
+ middlewares: Iterable["PublisherMiddleware"],
+ # AsyncAPI args
+ title_: Optional[str],
+ description_: Optional[str],
+ schema_: Optional[Any],
+ include_in_schema: bool,
+) -> PublisherType:
+ validate_options(channel=channel, list=list, stream=stream)
+
+ if (channel := PubSub.validate(channel)) is not None:
+ return SpecificationChannelPublisher(
+ channel=channel,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+
+ if (stream := StreamSub.validate(stream)) is not None:
+ return SpecificationStreamPublisher(
+ stream=stream,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+
+ if (list := ListSub.validate(list)) is not None:
+ if list.batch:
+ return SpecificationListBatchPublisher(
+ list=list,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+ return SpecificationListPublisher(
+ list=list,
+ # basic args
+ headers=headers,
+ reply_to=reply_to,
+ broker_middlewares=broker_middlewares,
+ middlewares=middlewares,
+ # AsyncAPI args
+ title_=title_,
+ description_=description_,
+ schema_=schema_,
+ include_in_schema=include_in_schema,
+ )
+
+ raise SetupError(INCORRECT_SETUP_MSG)
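
The new factory resolves exactly one destination and returns the matching `Specification*` publisher, with batch lists special-cased. A hypothetical direct call (normally this runs via `broker.publisher(...)`), assuming `ListSub` accepts `batch=True` as referenced above:

```python
from faststream.redis.publisher.factory import create_publisher
from faststream.redis.schemas import ListSub

pub = create_publisher(
    channel=None,
    list=ListSub("jobs", batch=True),  # batch list -> batch publisher
    stream=None,
    headers=None,
    reply_to="",
    broker_middlewares=(),
    middlewares=(),
    title_=None,
    description_=None,
    schema_=None,
    include_in_schema=True,
)
print(type(pub).__name__)  # SpecificationListBatchPublisher
```
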
diff --git a/faststream/redis/publisher/fake.py b/faststream/redis/publisher/fake.py
new file mode 100644
index 0000000000..2fd055e6f2
--- /dev/null
+++ b/faststream/redis/publisher/fake.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Union
+
+from faststream._internal.publisher.fake import FakePublisher
+from faststream.redis.response import RedisPublishCommand
+
+if TYPE_CHECKING:
+ from faststream._internal.publisher.proto import ProducerProto
+ from faststream.response.response import PublishCommand
+
+
+class RedisFakePublisher(FakePublisher):
+ """Publisher Interface implementation to use as RPC or REPLY TO answer publisher."""
+
+ def __init__(
+ self,
+ producer: "ProducerProto",
+ channel: str,
+ ) -> None:
+ super().__init__(producer=producer)
+ self.channel = channel
+
+ def patch_command(
+ self, cmd: Union["PublishCommand", "RedisPublishCommand"]
+ ) -> "RedisPublishCommand":
+ real_cmd = RedisPublishCommand.from_cmd(cmd)
+ real_cmd.destination = self.channel
+ return real_cmd
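
`patch_command` normalizes any outgoing command into a `RedisPublishCommand` and redirects it to the fake publisher's reply channel. A sketch using the names introduced in this diff:

```python
from faststream.redis.publisher.fake import RedisFakePublisher
from faststream.redis.publisher.producer import RedisFastProducer
from faststream.redis.response import RedisPublishCommand
from faststream.response.publish_type import PublishType

producer = RedisFastProducer(parser=None, decoder=None)
fake = RedisFakePublisher(producer, channel="reply-to-me")

cmd = RedisPublishCommand(
    "data",
    channel="original-destination",
    _publish_type=PublishType.REPLY,
)
patched = fake.patch_command(cmd)
print(patched.destination)  # "reply-to-me"
```
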
diff --git a/faststream/redis/publisher/producer.py b/faststream/redis/publisher/producer.py
index 8c196e1a71..77fa98f5ca 100644
--- a/faststream/redis/publisher/producer.py
+++ b/faststream/redis/publisher/producer.py
@@ -6,15 +6,18 @@
from faststream._internal.publisher.proto import ProducerProto
from faststream._internal.subscriber.utils import resolve_custom_func
from faststream._internal.utils.nuid import NUID
-from faststream.exceptions import SetupError
+from faststream.redis.helpers.state import (
+ ConnectedState,
+ ConnectionState,
+ EmptyConnectionState,
+)
from faststream.redis.message import DATA_KEY
from faststream.redis.parser import RawMessage, RedisPubSubParser
-from faststream.redis.schemas import INCORRECT_SETUP_MSG
+from faststream.redis.response import DestinationType, RedisPublishCommand
if TYPE_CHECKING:
from redis.asyncio.client import Redis
- from faststream._internal.basic_types import AnyDict, SendableMessage
from faststream._internal.types import (
AsyncCallable,
CustomCallable,
@@ -24,17 +27,15 @@
class RedisFastProducer(ProducerProto):
"""A class to represent a Redis producer."""
- _connection: "Redis[bytes]"
_decoder: "AsyncCallable"
_parser: "AsyncCallable"
def __init__(
self,
- connection: "Redis[bytes]",
parser: Optional["CustomCallable"],
decoder: Optional["CustomCallable"],
) -> None:
- self._connection = connection
+ self._connection: ConnectionState = EmptyConnectionState()
default = RedisPubSubParser()
self._parser = resolve_custom_func(
@@ -46,96 +47,56 @@ def __init__(
default.decode_message,
)
+ def connect(self, client: "Redis[bytes]") -> None:
+ self._connection = ConnectedState(client)
+
+ def disconnect(self) -> None:
+ self._connection = EmptyConnectionState()
+
@override
async def publish( # type: ignore[override]
self,
- message: "SendableMessage",
- *,
- correlation_id: str,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- reply_to: str = "",
- ) -> None:
- if not any((channel, list, stream)):
- raise SetupError(INCORRECT_SETUP_MSG)
-
+ cmd: "RedisPublishCommand",
+ ) -> int:
msg = RawMessage.encode(
- message=message,
- reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
+ message=cmd.body,
+ reply_to=cmd.reply_to,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
- if channel is not None:
- await self._connection.publish(channel, msg)
- elif list is not None:
- await self._connection.rpush(list, msg)
- elif stream is not None:
- await self._connection.xadd(
- name=stream,
- fields={DATA_KEY: msg},
- maxlen=maxlen,
- )
- else:
- msg = "unreachable"
- raise AssertionError(msg)
+ return await self.__publish(msg, cmd)
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- *,
- correlation_id: str,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- timeout: Optional[float] = 30.0,
+ cmd: "RedisPublishCommand",
) -> "Any":
- if not any((channel, list, stream)):
- raise SetupError(INCORRECT_SETUP_MSG)
-
nuid = NUID()
reply_to = str(nuid.next(), "utf-8")
- psub = self._connection.pubsub()
+ psub = self._connection.client.pubsub()
await psub.subscribe(reply_to)
msg = RawMessage.encode(
- message=message,
+ message=cmd.body,
reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
+ headers=cmd.headers,
+ correlation_id=cmd.correlation_id,
)
- if channel is not None:
- await self._connection.publish(channel, msg)
- elif list is not None:
- await self._connection.rpush(list, msg)
- elif stream is not None:
- await self._connection.xadd(
- name=stream,
- fields={DATA_KEY: msg},
- maxlen=maxlen,
- )
- else:
- msg = "unreachable"
- raise AssertionError(msg)
+ await self.__publish(msg, cmd)
- with anyio.fail_after(timeout) as scope:
+ with anyio.fail_after(cmd.timeout) as scope:
# skip subscribe message
await psub.get_message(
ignore_subscribe_messages=True,
- timeout=timeout or 0.0,
+ timeout=cmd.timeout or 0.0,
)
# get real response
response_msg = await psub.get_message(
ignore_subscribe_messages=True,
- timeout=timeout or 0.0,
+ timeout=cmd.timeout or 0.0,
)
await psub.unsubscribe()
@@ -146,20 +107,33 @@ async def request( # type: ignore[override]
return response_msg
+ @override
async def publish_batch(
self,
- *msgs: "SendableMessage",
- list: str,
- correlation_id: str,
- headers: Optional["AnyDict"] = None,
- ) -> None:
- batch = (
+ cmd: "RedisPublishCommand",
+ ) -> int:
+ batch = [
RawMessage.encode(
message=msg,
- correlation_id=correlation_id,
- reply_to=None,
- headers=headers,
+ correlation_id=cmd.correlation_id,
+ reply_to=cmd.reply_to,
+ headers=cmd.headers,
)
- for msg in msgs
- )
- await self._connection.rpush(list, *batch)
+ for msg in cmd.batch_bodies
+ ]
+ return await self._connection.client.rpush(cmd.destination, *batch)
+
+ async def __publish(self, msg: bytes, cmd: "RedisPublishCommand") -> None:
+ if cmd.destination_type is DestinationType.Channel:
+ await self._connection.client.publish(cmd.destination, msg)
+ elif cmd.destination_type is DestinationType.List:
+ await self._connection.client.rpush(cmd.destination, msg)
+ elif cmd.destination_type is DestinationType.Stream:
+ await self._connection.client.xadd(
+ name=cmd.destination,
+ fields={DATA_KEY: msg},
+ maxlen=cmd.maxlen,
+ )
+ else:
+ error_msg = "unreachable"
+ raise AssertionError(error_msg)
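
`RedisFastProducer` is now constructed eagerly without a connection and bound to a client later; publishing dispatches on `cmd.destination_type` rather than re-validating channel/list/stream keywords. A lifecycle sketch, assuming a running Redis:

```python
import asyncio

from redis.asyncio import Redis

from faststream.redis.publisher.producer import RedisFastProducer
from faststream.redis.response import RedisPublishCommand
from faststream.response.publish_type import PublishType


async def main() -> None:
    producer = RedisFastProducer(parser=None, decoder=None)  # no connection yet
    producer.connect(Redis.from_url("redis://localhost:6379"))

    cmd = RedisPublishCommand(
        "payload",
        stream="events",  # DestinationType.Stream -> XADD
        maxlen=1_000,
        _publish_type=PublishType.PUBLISH,
    )
    await producer.publish(cmd)

    producer.disconnect()  # back to EmptyConnectionState


asyncio.run(main())
```
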
diff --git a/faststream/redis/publisher/specified.py b/faststream/redis/publisher/specified.py
new file mode 100644
index 0000000000..f0598834c6
--- /dev/null
+++ b/faststream/redis/publisher/specified.py
@@ -0,0 +1,89 @@
+from typing import TYPE_CHECKING
+
+from faststream.redis.publisher.usecase import (
+ ChannelPublisher,
+ ListBatchPublisher,
+ ListPublisher,
+ LogicPublisher,
+ StreamPublisher,
+)
+from faststream.redis.schemas.proto import RedisSpecificationProtocol
+from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema.bindings import ChannelBinding, redis
+from faststream.specification.schema.channel import Channel
+from faststream.specification.schema.message import CorrelationId, Message
+from faststream.specification.schema.operation import Operation
+
+if TYPE_CHECKING:
+ from faststream.redis.schemas import ListSub
+
+
+class SpecificationPublisher(LogicPublisher, RedisSpecificationProtocol):
+ """A class to represent a Redis publisher."""
+
+ def get_schema(self) -> dict[str, Channel]:
+ payloads = self.get_payloads()
+
+ return {
+ self.name: Channel(
+ description=self.description,
+ publish=Operation(
+ message=Message(
+ title=f"{self.name}:Message",
+ payload=resolve_payloads(payloads, "Publisher"),
+ correlationId=CorrelationId(
+ location="$message.header#/correlation_id",
+ ),
+ ),
+ ),
+ bindings=ChannelBinding(
+ redis=self.channel_binding,
+ ),
+ ),
+ }
+
+
+class SpecificationChannelPublisher(ChannelPublisher, SpecificationPublisher):
+ def get_name(self) -> str:
+ return f"{self.channel.name}:Publisher"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.channel.name,
+ method="publish",
+ )
+
+
+class _ListPublisherMixin(SpecificationPublisher):
+ list: "ListSub"
+
+ def get_name(self) -> str:
+ return f"{self.list.name}:Publisher"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.list.name,
+ method="rpush",
+ )
+
+
+class SpecificationListPublisher(ListPublisher, _ListPublisherMixin):
+ pass
+
+
+class SpecificationListBatchPublisher(ListBatchPublisher, _ListPublisherMixin):
+ pass
+
+
+class SpecificationStreamPublisher(StreamPublisher, SpecificationPublisher):
+ def get_name(self) -> str:
+ return f"{self.stream.name}:Publisher"
+
+ @property
+ def channel_binding(self) -> "redis.ChannelBinding":
+ return redis.ChannelBinding(
+ channel=self.stream.name,
+ method="xadd",
+ )
diff --git a/faststream/redis/publisher/usecase.py b/faststream/redis/publisher/usecase.py
index 9aae92c837..479b9ccf66 100644
--- a/faststream/redis/publisher/usecase.py
+++ b/faststream/redis/publisher/usecase.py
@@ -1,30 +1,29 @@
from abc import abstractmethod
-from collections.abc import Awaitable, Iterable
+from collections.abc import Iterable
from copy import deepcopy
-from functools import partial
-from itertools import chain
-from typing import TYPE_CHECKING, Annotated, Any, Callable, Optional
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union
from typing_extensions import Doc, override
from faststream._internal.publisher.usecase import PublisherUsecase
-from faststream._internal.subscriber.utils import process_msg
-from faststream.exceptions import NOT_CONNECTED_YET
from faststream.message import gen_cor_id
from faststream.redis.message import UnifyRedisDict
-from faststream.redis.schemas import ListSub, PubSub, StreamSub
+from faststream.redis.response import RedisPublishCommand
+from faststream.response.publish_type import PublishType
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, SendableMessage
from faststream._internal.types import BrokerMiddleware, PublisherMiddleware
from faststream.redis.message import RedisMessage
from faststream.redis.publisher.producer import RedisFastProducer
+ from faststream.redis.schemas import ListSub, PubSub, StreamSub
+ from faststream.response.response import PublishCommand
class LogicPublisher(PublisherUsecase[UnifyRedisDict]):
"""A class to represent a Redis publisher."""
- _producer: Optional["RedisFastProducer"]
+ _producer: "RedisFastProducer"
def __init__(
self,
@@ -51,9 +50,7 @@ def __init__(
)
self.reply_to = reply_to
- self.headers = headers
-
- self._producer = None
+ self.headers = headers or {}
@abstractmethod
def subscriber_property(self, *, name_only: bool) -> "AnyDict":
@@ -128,39 +125,33 @@ async def publish(
"**correlation_id** is a useful option to trace messages.",
),
] = None,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- **kwargs: Any, # option to suppress maxlen
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ ) -> int:
+ cmd = RedisPublishCommand(
+ message,
+ channel=channel or self.channel.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
+ )
+ return await self._basic_publish(cmd, _extra_middlewares=())
- channel_sub = PubSub.validate(channel or self.channel)
- reply_to = reply_to or self.reply_to
- headers = headers or self.headers
- correlation_id = correlation_id or gen_cor_id()
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd)
- call: Callable[..., Awaitable[None]] = self._producer.publish
+ cmd.set_destination(channel=self.channel.name)
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- await call(
- message,
- channel=channel_sub.name,
- # basic args
- reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
- )
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -189,43 +180,17 @@ async def request(
Optional[float],
Doc("RPC reply waiting time."),
] = 30.0,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "RedisMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs = {
- "channel": PubSub.validate(channel or self.channel).name,
- # basic args
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- "timeout": timeout,
- }
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = RedisPublishCommand(
message,
- **kwargs,
+ channel=channel or self.channel.name,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
+ timeout=timeout,
)
- msg: RedisMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
- )
+ msg: RedisMessage = await self._basic_request(cmd)
return msg
@@ -297,38 +262,34 @@ async def publish(
"**correlation_id** is a useful option to trace messages.",
),
] = None,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- **kwargs: Any, # option to suppress maxlen
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ ) -> int:
+ cmd = RedisPublishCommand(
+ message,
+ list=list or self.list.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
+ )
+
+ return await self._basic_publish(cmd, _extra_middlewares=())
- list_sub = ListSub.validate(list or self.list)
- reply_to = reply_to or self.reply_to
- correlation_id = correlation_id or gen_cor_id()
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd)
- call: Callable[..., Awaitable[None]] = self._producer.publish
+ cmd.set_destination(list=self.list.name)
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- await call(
- message,
- list=list_sub.name,
- # basic args
- reply_to=reply_to,
- headers=headers or self.headers,
- correlation_id=correlation_id,
- )
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -357,44 +318,17 @@ async def request(
Optional[float],
Doc("RPC reply waiting time."),
] = 30.0,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "RedisMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs = {
- "list": ListSub.validate(list or self.list).name,
- # basic args
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- "timeout": timeout,
- }
-
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = RedisPublishCommand(
message,
- **kwargs,
+ list=list or self.list.name,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
+ timeout=timeout,
)
- msg: RedisMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
- )
+ msg: RedisMessage = await self._basic_request(cmd)
return msg
@@ -402,52 +336,57 @@ class ListBatchPublisher(ListPublisher):
@override
async def publish( # type: ignore[override]
self,
- message: Annotated[
- Iterable["SendableMessage"],
- Doc("Message body to send."),
- ] = (),
+ *messages: Annotated[
+ "SendableMessage",
+ Doc("Messages bodies to send."),
+ ],
list: Annotated[
- Optional[str],
- Doc("Redis List object name to send message."),
- ] = None,
- *,
+ str,
+ Doc("Redis List object name to send messages."),
+ ],
correlation_id: Annotated[
Optional[str],
- Doc("Has no real effect. Option to be compatible with original protocol."),
+ Doc(
+ "Manual message **correlation_id** setter. "
+ "**correlation_id** is a useful option to trace messages.",
+ ),
] = None,
+ reply_to: Annotated[
+ str,
+ Doc("Reply message destination PubSub object name."),
+ ] = "",
headers: Annotated[
Optional["AnyDict"],
Doc("Message headers to store metainformation."),
] = None,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- **kwargs: Any, # option to suppress maxlen
+ ) -> int:
+ cmd = RedisPublishCommand(
+ *messages,
+ list=list or self.list.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.PUBLISH,
+ )
+
+ return await self._basic_publish_batch(cmd, _extra_middlewares=())
+
+ @override
+ async def _publish( # type: ignore[override]
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd, batch=True)
- list_sub = ListSub.validate(list or self.list)
- correlation_id = correlation_id or gen_cor_id()
+ cmd.set_destination(list=self.list.name)
- call: Callable[..., Awaitable[None]] = self._producer.publish_batch
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
-
- await call(
- *message,
- list=list_sub.name,
- correlation_id=correlation_id,
- headers=headers or self.headers,
- )
+ await self._basic_publish_batch(cmd, _extra_middlewares=_extra_middlewares)
class StreamPublisher(LogicPublisher):
@@ -526,40 +465,36 @@ async def publish(
"Remove eldest message if maxlen exceeded.",
),
] = None,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
- ) -> None:
- assert self._producer, NOT_CONNECTED_YET # nosec B101
+ ) -> Any:
+ cmd = RedisPublishCommand(
+ message,
+ stream=stream or self.stream.name,
+ reply_to=reply_to or self.reply_to,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ maxlen=maxlen or self.stream.maxlen,
+ _publish_type=PublishType.PUBLISH,
+ )
- stream_sub = StreamSub.validate(stream or self.stream)
- maxlen = maxlen or stream_sub.maxlen
- reply_to = reply_to or self.reply_to
- headers = headers or self.headers
- correlation_id = correlation_id or gen_cor_id()
+ return await self._basic_publish(cmd, _extra_middlewares=())
- call: Callable[..., Awaitable[None]] = self._producer.publish
+ @override
+ async def _publish(
+ self,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ _extra_middlewares: Iterable["PublisherMiddleware"],
+ ) -> None:
+ """This method should be called in subscriber flow only."""
+ cmd = RedisPublishCommand.from_cmd(cmd)
- for m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- call = partial(m, call)
+ cmd.set_destination(stream=self.stream.name)
- await call(
- message,
- stream=stream_sub.name,
- maxlen=maxlen,
- # basic args
- reply_to=reply_to,
- headers=headers,
- correlation_id=correlation_id,
- )
+ cmd.add_headers(self.headers, override=False)
+ cmd.reply_to = cmd.reply_to or self.reply_to
+ cmd.maxlen = self.stream.maxlen
+
+ await self._basic_publish(cmd, _extra_middlewares=_extra_middlewares)
@override
async def request(
@@ -595,42 +530,16 @@ async def request(
Optional[float],
Doc("RPC reply waiting time."),
] = 30.0,
- # publisher specific
- _extra_middlewares: Annotated[
- Iterable["PublisherMiddleware"],
- Doc("Extra middlewares to wrap publishing process."),
- ] = (),
) -> "RedisMessage":
- assert self._producer, NOT_CONNECTED_YET # nosec B101
-
- kwargs = {
- "stream": StreamSub.validate(stream or self.stream).name,
- # basic args
- "headers": headers or self.headers,
- "correlation_id": correlation_id or gen_cor_id(),
- "timeout": timeout,
- }
-
- request: Callable[..., Awaitable[Any]] = self._producer.request
-
- for pub_m in chain(
- (
- _extra_middlewares
- or (m(None).publish_scope for m in self._broker_middlewares)
- ),
- self._middlewares,
- ):
- request = partial(pub_m, request)
-
- published_msg = await request(
+ cmd = RedisPublishCommand(
message,
- **kwargs,
+ stream=stream or self.stream.name,
+ headers=self.headers | (headers or {}),
+ correlation_id=correlation_id or gen_cor_id(),
+ _publish_type=PublishType.REQUEST,
+ maxlen=maxlen or self.stream.maxlen,
+ timeout=timeout,
)
- msg: RedisMessage = await process_msg(
- msg=published_msg,
- middlewares=self._broker_middlewares,
- parser=self._producer._parser,
- decoder=self._producer._decoder,
- )
+ msg: RedisMessage = await self._basic_request(cmd)
return msg
diff --git a/faststream/redis/response.py b/faststream/redis/response.py
index b5f4a231f9..d48ee0ba1a 100644
--- a/faststream/redis/response.py
+++ b/faststream/redis/response.py
@@ -1,13 +1,25 @@
-from typing import TYPE_CHECKING, Optional
+from collections.abc import Sequence
+from enum import Enum
+from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import override
-from faststream.response import Response
+from faststream._internal.constants import EMPTY
+from faststream.exceptions import SetupError
+from faststream.redis.schemas import INCORRECT_SETUP_MSG
+from faststream.response.publish_type import PublishType
+from faststream.response.response import PublishCommand, Response
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, SendableMessage
+class DestinationType(str, Enum):
+ Channel = "channel"
+ List = "list"
+ Stream = "stream"
+
+
class RedisResponse(Response):
def __init__(
self,
@@ -25,8 +37,111 @@ def __init__(
self.maxlen = maxlen
@override
- def as_publish_kwargs(self) -> "AnyDict":
- return {
- **super().as_publish_kwargs(),
- "maxlen": self.maxlen,
- }
+ def as_publish_command(self) -> "RedisPublishCommand":
+ return RedisPublishCommand(
+ self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ # Redis specific
+ channel="fake-channel", # it will be replaced by reply-sender
+ maxlen=self.maxlen,
+ )
+
+
+class RedisPublishCommand(PublishCommand):
+ destination_type: DestinationType
+
+ def __init__(
+ self,
+ message: "SendableMessage",
+ /,
+ *messages: "SendableMessage",
+ _publish_type: "PublishType",
+ correlation_id: Optional[str] = None,
+ channel: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: Optional[str] = None,
+ maxlen: Optional[int] = None,
+ headers: Optional["AnyDict"] = None,
+ reply_to: str = "",
+ timeout: Optional[float] = 30.0,
+ ) -> None:
+ super().__init__(
+ message,
+ _publish_type=_publish_type,
+ correlation_id=correlation_id,
+ reply_to=reply_to,
+ destination="",
+ headers=headers,
+ )
+ self.extra_bodies = messages
+
+ self.set_destination(
+ channel=channel,
+ list=list,
+ stream=stream,
+ )
+
+ # Stream option
+ self.maxlen = maxlen
+
+ # Request option
+ self.timeout = timeout
+
+ def set_destination(
+ self,
+ *,
+ channel: Optional[str] = None,
+ list: Optional[str] = None,
+ stream: Optional[str] = None,
+ ) -> str:
+ if channel is not None:
+ self.destination_type = DestinationType.Channel
+ self.destination = channel
+ elif list is not None:
+ self.destination_type = DestinationType.List
+ self.destination = list
+ elif stream is not None:
+ self.destination_type = DestinationType.Stream
+ self.destination = stream
+ else:
+ raise SetupError(INCORRECT_SETUP_MSG)
+
+ @property
+ def batch_bodies(self) -> tuple["SendableMessage", ...]:
+ if self.body is EMPTY:
+ return self.extra_bodies
+ return (self.body, *self.extra_bodies)
+
+ @classmethod
+ def from_cmd(
+ cls,
+ cmd: Union["PublishCommand", "RedisPublishCommand"],
+ *,
+ batch: bool = False,
+ ) -> "RedisPublishCommand":
+ if isinstance(cmd, RedisPublishCommand):
+ # NOTE: this should probably return a copy.
+ return cmd
+
+ body, extra_bodies = cmd.body, []
+ if batch:
+ if body is None:
+ body = EMPTY
+
+ if isinstance(body, Sequence) and not isinstance(body, str):
+ if body:
+ body, extra_bodies = body[0], body[1:]
+ else:
+ body = EMPTY
+
+ return cls(
+ body,
+ *extra_bodies,
+ channel=cmd.destination,
+ correlation_id=cmd.correlation_id,
+ headers=cmd.headers,
+ reply_to=cmd.reply_to,
+ _publish_type=cmd.publish_type,
+ )
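
`batch_bodies` merges the positional body with any extra messages, with `EMPTY` marking "no first body" so an empty batch stays empty. A sketch of the semantics added here:

```python
from faststream.redis.response import RedisPublishCommand
from faststream.response.publish_type import PublishType

cmd = RedisPublishCommand(
    "first",
    "second",
    "third",
    list="jobs",
    _publish_type=PublishType.PUBLISH,
)
print(cmd.destination)   # "jobs"
print(cmd.batch_bodies)  # ("first", "second", "third")

# from_cmd(..., batch=True) splits a sequence body the same way: a generic
# command carrying ["a", "b"] becomes body="a", extra_bodies=["b"].
```
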
diff --git a/faststream/redis/router.py b/faststream/redis/router.py
index 4e651c78eb..fb155829a0 100644
--- a/faststream/redis/router.py
+++ b/faststream/redis/router.py
@@ -8,11 +8,13 @@
BrokerRouter,
SubscriberRoute,
)
+from faststream._internal.constants import EMPTY
+from faststream.middlewares import AckPolicy
from faststream.redis.broker.registrator import RedisRegistrator
from faststream.redis.message import BaseMessage
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.basic_types import AnyDict, SendableMessage
from faststream._internal.types import (
@@ -130,8 +132,8 @@ def __init__(
] = None,
# broker arguments
dependencies: Annotated[
- Iterable["Depends"],
- Doc("Dependencies list (`[Depends(),]`) to apply to the subscriber."),
+ Iterable["Dependant"],
+ Doc("Dependencies list (`[Dependant(),]`) to apply to the subscriber."),
] = (),
parser: Annotated[
Optional["CustomCallable"],
@@ -147,14 +149,10 @@ def __init__(
Iterable["SubscriberMiddleware[UnifyRedisMessage]"],
Doc("Subscriber middlewares to wrap incoming message processing."),
] = (),
- retry: Annotated[
- bool,
- Doc("Whether to `nack` message at processing exception."),
- ] = False,
- no_ack: Annotated[
- bool,
- Doc("Whether to disable **FastStream** autoacknowledgement logic or not."),
- ] = False,
+ ack_policy: Annotated[
+ AckPolicy,
+ Doc("Whether to disable **FastStream** auto acknowledgement logic or not."),
+ ] = EMPTY,
no_reply: Annotated[
bool,
Doc(
@@ -188,8 +186,7 @@ def __init__(
parser=parser,
decoder=decoder,
middlewares=middlewares,
- retry=retry,
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
title=title,
description=description,
@@ -215,9 +212,9 @@ def __init__(
] = (),
*,
dependencies: Annotated[
- Iterable["Depends"],
+ Iterable["Dependant"],
Doc(
- "Dependencies list (`[Depends(),]`) to apply to all routers' publishers/subscribers.",
+ "Dependencies list (`[Dependant(),]`) to apply to all routers' publishers/subscribers.",
),
] = (),
middlewares: Annotated[
diff --git a/faststream/redis/schemas/proto.py b/faststream/redis/schemas/proto.py
index d6cfaae000..83e6451a4e 100644
--- a/faststream/redis/schemas/proto.py
+++ b/faststream/redis/schemas/proto.py
@@ -2,14 +2,14 @@
from typing import TYPE_CHECKING, Any, Union
from faststream.exceptions import SetupError
-from faststream.specification.proto.endpoint import EndpointProto
+from faststream.specification.proto.endpoint import EndpointSpecification
if TYPE_CHECKING:
from faststream.redis.schemas import ListSub, PubSub, StreamSub
from faststream.specification.schema.bindings import redis
-class RedisAsyncAPIProtocol(EndpointProto):
+class RedisSpecificationProtocol(EndpointSpecification):
@property
@abstractmethod
def channel_binding(self) -> "redis.ChannelBinding": ...
@@ -23,7 +23,7 @@ def validate_options(
channel: Union["PubSub", str, None],
list: Union["ListSub", str, None],
stream: Union["StreamSub", str, None],
-) -> None:
+) -> str:
if all((channel, list)):
msg = "You can't use `PubSub` and `ListSub` both"
raise SetupError(msg)
@@ -33,3 +33,4 @@ def validate_options(
if all((list, stream)):
msg = "You can't use `ListSub` and `StreamSub` both"
raise SetupError(msg)
+ return channel or list or stream
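
validate_options now doubles as a normalizer: besides rejecting mutually exclusive destinations, it returns whichever one was configured. The contract, sketched:

from faststream.exceptions import SetupError
from faststream.redis.schemas.proto import validate_options

assert validate_options(channel="events", list=None, stream=None) == "events"

try:
    validate_options(channel="events", list="queue", stream=None)
except SetupError:
    pass  # "You can't use `PubSub` and `ListSub` both"
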
diff --git a/faststream/redis/schemas/stream_sub.py b/faststream/redis/schemas/stream_sub.py
index 50a0b6d606..07488d5f86 100644
--- a/faststream/redis/schemas/stream_sub.py
+++ b/faststream/redis/schemas/stream_sub.py
@@ -3,6 +3,7 @@
from faststream._internal.proto import NameRequired
from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
class StreamSub(NameRequired):
@@ -27,11 +28,13 @@ def __init__(
group: Optional[str] = None,
consumer: Optional[str] = None,
batch: bool = False,
- no_ack: bool = False,
+ ack_policy: AckPolicy = AckPolicy.REJECT_ON_ERROR,
last_id: Optional[str] = None,
maxlen: Optional[int] = None,
max_records: Optional[int] = None,
) -> None:
+ no_ack = ack_policy is AckPolicy.DO_NOTHING
+
if (group and not consumer) or (not group and consumer):
msg = "You should specify `group` and `consumer` both"
raise SetupError(msg)
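
Note the compatibility shim: StreamSub still computes its internal no_ack flag, but now derives it from the policy, so AckPolicy.DO_NOTHING reproduces the old no_ack=True behavior. For example (group/consumer names are illustrative):

from faststream.middlewares import AckPolicy
from faststream.redis import StreamSub

# Old: StreamSub("orders", group="workers", consumer="worker-1", no_ack=True)
sub = StreamSub(
    "orders",
    group="workers",
    consumer="worker-1",
    ack_policy=AckPolicy.DO_NOTHING,
)
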
diff --git a/faststream/redis/subscriber/factory.py b/faststream/redis/subscriber/factory.py
index 8cd414f278..378b561348 100644
--- a/faststream/redis/subscriber/factory.py
+++ b/faststream/redis/subscriber/factory.py
@@ -1,31 +1,34 @@
+import warnings
from collections.abc import Iterable
from typing import TYPE_CHECKING, Optional, Union
from typing_extensions import TypeAlias
+from faststream._internal.constants import EMPTY
from faststream.exceptions import SetupError
+from faststream.middlewares import AckPolicy
from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub
from faststream.redis.schemas.proto import validate_options
-from faststream.redis.subscriber.subscriber import (
- AsyncAPIChannelSubscriber,
- AsyncAPIListBatchSubscriber,
- AsyncAPIListSubscriber,
- AsyncAPIStreamBatchSubscriber,
- AsyncAPIStreamSubscriber,
+from faststream.redis.subscriber.specified import (
+ SpecificationChannelSubscriber,
+ SpecificationListBatchSubscriber,
+ SpecificationListSubscriber,
+ SpecificationStreamBatchSubscriber,
+ SpecificationStreamSubscriber,
)
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
from faststream._internal.types import BrokerMiddleware
from faststream.redis.message import UnifyRedisDict
SubsciberType: TypeAlias = Union[
- "AsyncAPIChannelSubscriber",
- "AsyncAPIStreamBatchSubscriber",
- "AsyncAPIStreamSubscriber",
- "AsyncAPIListBatchSubscriber",
- "AsyncAPIListSubscriber",
+ "SpecificationChannelSubscriber",
+ "SpecificationStreamBatchSubscriber",
+ "SpecificationStreamSubscriber",
+ "SpecificationListBatchSubscriber",
+ "SpecificationListSubscriber",
]
@@ -35,25 +38,30 @@ def create_subscriber(
list: Union["ListSub", str, None],
stream: Union["StreamSub", str, None],
# Subscriber args
- no_ack: bool = False,
+ ack_policy: "AckPolicy",
no_reply: bool = False,
- retry: bool = False,
- broker_dependencies: Iterable["Depends"] = (),
+ broker_dependencies: Iterable["Dependant"] = (),
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"] = (),
# AsyncAPI args
title_: Optional[str] = None,
description_: Optional[str] = None,
include_in_schema: bool = True,
) -> SubsciberType:
- validate_options(channel=channel, list=list, stream=stream)
+ _validate_input_for_misconfigure(
+ channel=channel,
+ list=list,
+ stream=stream,
+ ack_policy=ack_policy,
+ )
+
+ if ack_policy is EMPTY:
+ ack_policy = AckPolicy.REJECT_ON_ERROR
if (channel_sub := PubSub.validate(channel)) is not None:
- return AsyncAPIChannelSubscriber(
+ return SpecificationChannelSubscriber(
channel=channel_sub,
# basic args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -64,12 +72,11 @@ def create_subscriber(
if (stream_sub := StreamSub.validate(stream)) is not None:
if stream_sub.batch:
- return AsyncAPIStreamBatchSubscriber(
+ return SpecificationStreamBatchSubscriber(
stream=stream_sub,
# basic args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -77,12 +84,12 @@ def create_subscriber(
description_=description_,
include_in_schema=include_in_schema,
)
- return AsyncAPIStreamSubscriber(
+
+ return SpecificationStreamSubscriber(
stream=stream_sub,
# basic args
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -93,12 +100,10 @@ def create_subscriber(
if (list_sub := ListSub.validate(list)) is not None:
if list_sub.batch:
- return AsyncAPIListBatchSubscriber(
+ return SpecificationListBatchSubscriber(
list=list_sub,
# basic args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -106,12 +111,11 @@ def create_subscriber(
description_=description_,
include_in_schema=include_in_schema,
)
- return AsyncAPIListSubscriber(
+
+ return SpecificationListSubscriber(
list=list_sub,
# basic args
- no_ack=no_ack,
no_reply=no_reply,
- retry=retry,
broker_dependencies=broker_dependencies,
broker_middlewares=broker_middlewares,
# AsyncAPI args
@@ -121,3 +125,28 @@ def create_subscriber(
)
raise SetupError(INCORRECT_SETUP_MSG)
+
+
+def _validate_input_for_misconfigure(
+ *,
+ channel: Union["PubSub", str, None],
+ list: Union["ListSub", str, None],
+ stream: Union["StreamSub", str, None],
+ ack_policy: AckPolicy,
+) -> None:
+ validate_options(channel=channel, list=list, stream=stream)
+
+ if ack_policy is not EMPTY:
+ if channel:
+ warnings.warn(
+ "You can't use acknowledgement policy with PubSub subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
+
+ if list:
+ warnings.warn(
+ "You can't use acknowledgement policy with List subscriber.",
+ RuntimeWarning,
+ stacklevel=4,
+ )
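
The factory treats EMPTY as "not passed": an explicit policy only makes sense for stream subscribers, so PubSub and List destinations trigger a RuntimeWarning, and the unset case falls back to REJECT_ON_ERROR. The defaulting rule, condensed (illustrative helper name):

from faststream._internal.constants import EMPTY
from faststream.middlewares import AckPolicy


def resolve_ack_policy(ack_policy: AckPolicy) -> AckPolicy:
    # EMPTY is the "user did not pass anything" sentinel
    return AckPolicy.REJECT_ON_ERROR if ack_policy is EMPTY else ack_policy
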
diff --git a/faststream/redis/subscriber/subscriber.py b/faststream/redis/subscriber/specified.py
similarity index 78%
rename from faststream/redis/subscriber/subscriber.py
rename to faststream/redis/subscriber/specified.py
index e915e28351..24ae8407fe 100644
--- a/faststream/redis/subscriber/subscriber.py
+++ b/faststream/redis/subscriber/specified.py
@@ -1,21 +1,21 @@
from faststream.redis.schemas import ListSub, StreamSub
-from faststream.redis.schemas.proto import RedisAsyncAPIProtocol
+from faststream.redis.schemas.proto import RedisSpecificationProtocol
from faststream.redis.subscriber.usecase import (
BatchListSubscriber,
- BatchStreamSubscriber,
ChannelSubscriber,
ListSubscriber,
LogicSubscriber,
+ StreamBatchSubscriber,
StreamSubscriber,
)
from faststream.specification.asyncapi.utils import resolve_payloads
+from faststream.specification.schema import Channel
from faststream.specification.schema.bindings import ChannelBinding, redis
-from faststream.specification.schema.channel import Channel
from faststream.specification.schema.message import CorrelationId, Message
from faststream.specification.schema.operation import Operation
-class SpecificationSubscriber(LogicSubscriber, RedisAsyncAPIProtocol):
+class SpecificationSubscriber(LogicSubscriber, RedisSpecificationProtocol):
"""A class to represent a Redis handler."""
def get_schema(self) -> dict[str, Channel]:
@@ -41,8 +41,8 @@ def get_schema(self) -> dict[str, Channel]:
}
-class AsyncAPIChannelSubscriber(ChannelSubscriber, SpecificationSubscriber):
- def get_default_name(self) -> str:
+class SpecificationChannelSubscriber(ChannelSubscriber, SpecificationSubscriber):
+ def get_name(self) -> str:
return f"{self.channel.name}:{self.call_name}"
@property
@@ -69,11 +69,11 @@ def channel_binding(self) -> "redis.ChannelBinding":
)
-class AsyncAPIStreamSubscriber(StreamSubscriber, _StreamSubscriberMixin):
+class SpecificationStreamSubscriber(StreamSubscriber, _StreamSubscriberMixin):
pass
-class AsyncAPIStreamBatchSubscriber(BatchStreamSubscriber, _StreamSubscriberMixin):
+class SpecificationStreamBatchSubscriber(StreamBatchSubscriber, _StreamSubscriberMixin):
pass
@@ -91,9 +91,9 @@ def channel_binding(self) -> "redis.ChannelBinding":
)
-class AsyncAPIListSubscriber(ListSubscriber, _ListSubscriberMixin):
+class SpecificationListSubscriber(ListSubscriber, _ListSubscriberMixin):
pass
-class AsyncAPIListBatchSubscriber(BatchListSubscriber, _ListSubscriberMixin):
+class SpecificationListBatchSubscriber(BatchListSubscriber, _ListSubscriberMixin):
pass
diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py
index 6ecc4793c6..1f89ca54b4 100644
--- a/faststream/redis/subscriber/usecase.py
+++ b/faststream/redis/subscriber/usecase.py
@@ -19,9 +19,9 @@
from redis.exceptions import ResponseError
from typing_extensions import TypeAlias, override
-from faststream._internal.publisher.fake import FakePublisher
from faststream._internal.subscriber.usecase import SubscriberUsecase
from faststream._internal.subscriber.utils import process_msg
+from faststream.middlewares import AckPolicy
from faststream.redis.message import (
BatchListMessage,
BatchStreamMessage,
@@ -40,14 +40,15 @@
RedisPubSubParser,
RedisStreamParser,
)
+from faststream.redis.publisher.fake import RedisFakePublisher
from faststream.redis.schemas import ListSub, PubSub, StreamSub
if TYPE_CHECKING:
- from fast_depends.dependencies import Depends
+ from fast_depends.dependencies import Dependant
- from faststream._internal.basic_types import AnyDict, LoggerProto
- from faststream._internal.publisher.proto import ProducerProto
- from faststream._internal.setup import SetupState
+ from faststream._internal.basic_types import AnyDict
+ from faststream._internal.publisher.proto import BasePublisherProto
+ from faststream._internal.state import BrokerState
from faststream._internal.types import (
AsyncCallable,
BrokerMiddleware,
@@ -71,10 +72,9 @@ def __init__(
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -85,9 +85,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -105,22 +104,16 @@ def _setup( # type: ignore[override]
*,
connection: Optional["Redis[bytes]"],
# basic args
- logger: Optional["LoggerProto"],
- producer: Optional["ProducerProto"],
- graceful_timeout: Optional[float],
extra_context: "AnyDict",
# broker options
broker_parser: Optional["CustomCallable"],
broker_decoder: Optional["CustomCallable"],
# dependant args
- state: "SetupState",
+ state: "BrokerState",
) -> None:
self._client = connection
super()._setup(
- logger=logger,
- producer=producer,
- graceful_timeout=graceful_timeout,
extra_context=extra_context,
broker_parser=broker_parser,
broker_decoder=broker_decoder,
@@ -130,16 +123,11 @@ def _setup( # type: ignore[override]
def _make_response_publisher(
self,
message: "BrokerStreamMessage[UnifyRedisDict]",
- ) -> Sequence[FakePublisher]:
- if self._producer is None:
- return ()
-
+ ) -> Sequence["BasePublisherProto"]:
return (
- FakePublisher(
- self._producer.publish,
- publish_kwargs={
- "channel": message.reply_to,
- },
+ RedisFakePublisher(
+ self._state.get().producer,
+ channel=message.reply_to,
),
)
@@ -217,10 +205,8 @@ def __init__(
*,
channel: "PubSub",
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -232,9 +218,8 @@ def __init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=AckPolicy.DO_NOTHING,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -292,15 +277,19 @@ async def get_one( # type: ignore[override]
sleep_interval = timeout / 10
- message: Optional[PubSubMessage] = None
+ raw_message: Optional[PubSubMessage] = None
with anyio.move_on_after(timeout):
- while (message := await self._get_message(self.subscription)) is None: # noqa: ASYNC110
+ while (raw_message := await self._get_message(self.subscription)) is None: # noqa: ASYNC110
await anyio.sleep(sleep_interval)
+ context = self._state.get().di_state.context
+
msg: Optional[RedisMessage] = await process_msg( # type: ignore[assignment]
- msg=message,
- middlewares=self._broker_middlewares, # type: ignore[arg-type]
+ msg=raw_message,
+ middlewares=(
+ m(raw_message, context=context) for m in self._broker_middlewares
+ ),
parser=self._parser,
decoder=self._decoder,
)
@@ -340,10 +329,9 @@ def __init__(
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -354,9 +342,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -418,13 +405,19 @@ async def get_one( # type: ignore[override]
if not raw_message:
return None
+ redis_incoming_msg = DefaultListMessage(
+ type="list",
+ data=raw_message,
+ channel=self.list_sub.name,
+ )
+
+ context = self._state.get().di_state.context
+
msg: RedisListMessage = await process_msg( # type: ignore[assignment]
- msg=DefaultListMessage(
- type="list",
- data=raw_message,
- channel=self.list_sub.name,
+ msg=redis_incoming_msg,
+ middlewares=(
+ m(redis_incoming_msg, context=context) for m in self._broker_middlewares
),
- middlewares=self._broker_middlewares, # type: ignore[arg-type]
parser=self._parser,
decoder=self._decoder,
)
@@ -442,10 +435,8 @@ def __init__(
*,
list: ListSub,
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -458,9 +449,8 @@ def __init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=AckPolicy.DO_NOTHING,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -470,19 +460,21 @@ def __init__(
)
async def _get_msgs(self, client: "Redis[bytes]") -> None:
- raw_msg = await client.lpop(name=self.list_sub.name)
+ raw_msg = await client.blpop(
+ self.list_sub.name,
+ timeout=self.list_sub.polling_interval,
+ )
if raw_msg:
+ _, msg_data = raw_msg
+
msg = DefaultListMessage(
type="list",
- data=raw_msg,
+ data=msg_data,
channel=self.list_sub.name,
)
- await self.consume(msg) # type: ignore[arg-type]
-
- else:
- await anyio.sleep(self.list_sub.polling_interval)
+ await self.consume(msg)
class BatchListSubscriber(_ListHandlerMixin):
@@ -491,10 +483,8 @@ def __init__(
*,
list: ListSub,
# Subscriber args
- no_ack: bool,
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -507,9 +497,8 @@ def __init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=AckPolicy.DO_NOTHING,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -545,10 +534,9 @@ def __init__(
default_parser: "AsyncCallable",
default_decoder: "AsyncCallable",
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -559,9 +547,8 @@ def __init__(
default_parser=default_parser,
default_decoder=default_decoder,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -706,14 +693,20 @@ async def get_one( # type: ignore[override]
self.last_id = message_id.decode()
+ redis_incoming_msg = DefaultStreamMessage(
+ type="stream",
+ channel=stream_name.decode(),
+ message_ids=[message_id],
+ data=raw_message,
+ )
+
+ context = self._state.get().di_state.context
+
msg: RedisStreamMessage = await process_msg( # type: ignore[assignment]
- msg=DefaultStreamMessage(
- type="stream",
- channel=stream_name.decode(),
- message_ids=[message_id],
- data=raw_message,
+ msg=redis_incoming_msg,
+ middlewares=(
+ m(redis_incoming_msg, context=context) for m in self._broker_middlewares
),
- middlewares=self._broker_middlewares, # type: ignore[arg-type]
parser=self._parser,
decoder=self._decoder,
)
@@ -731,10 +724,9 @@ def __init__(
*,
stream: StreamSub,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -747,9 +739,8 @@ def __init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
@@ -794,16 +785,15 @@ async def _get_msgs(
await self.consume(msg) # type: ignore[arg-type]
-class BatchStreamSubscriber(_StreamHandlerMixin):
+class StreamBatchSubscriber(_StreamHandlerMixin):
def __init__(
self,
*,
stream: StreamSub,
# Subscriber args
- no_ack: bool,
+ ack_policy: "AckPolicy",
no_reply: bool,
- retry: bool,
- broker_dependencies: Iterable["Depends"],
+ broker_dependencies: Iterable["Dependant"],
broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"],
# AsyncAPI args
title_: Optional[str],
@@ -816,9 +806,8 @@ def __init__(
default_parser=parser.parse_message,
default_decoder=parser.decode_message,
# Propagated options
- no_ack=no_ack,
+ ack_policy=ack_policy,
no_reply=no_reply,
- retry=retry,
broker_middlewares=broker_middlewares,
broker_dependencies=broker_dependencies,
# AsyncAPI
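
Behaviorally, the most visible change in this file is the list poller: BLPOP with the polling interval as its timeout replaces LPOP plus an explicit sleep, so an idle subscriber parks inside Redis instead of waking up to find nothing. Against a bare client the new loop body is roughly (a sketch using the redis.asyncio API):

from typing import Optional

import redis.asyncio as redis


async def pop_one(
    client: "redis.Redis[bytes]",
    name: str,
    interval: float,
) -> Optional[bytes]:
    # BLPOP blocks for up to `interval` seconds; returns (key, value) or None
    raw = await client.blpop(name, timeout=interval)
    if raw:
        _, data = raw
        return data
    return None
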
diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py
index 8f065bf5a7..558a0fd5ae 100644
--- a/faststream/redis/testing.py
+++ b/faststream/redis/testing.py
@@ -1,5 +1,6 @@
import re
-from collections.abc import Sequence
+from collections.abc import Iterator, Sequence
+from contextlib import contextmanager
from typing import (
TYPE_CHECKING,
Any,
@@ -28,6 +29,7 @@
)
from faststream.redis.parser import RawMessage, RedisPubSubParser
from faststream.redis.publisher.producer import RedisFastProducer
+from faststream.redis.response import DestinationType, RedisPublishCommand
from faststream.redis.schemas import INCORRECT_SETUP_MSG
from faststream.redis.subscriber.usecase import (
ChannelSubscriber,
@@ -38,7 +40,7 @@
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, SendableMessage
- from faststream.redis.publisher.publisher import SpecificationPublisher
+ from faststream.redis.publisher.specified import SpecificationPublisher
__all__ = ("TestRedisBroker",)
@@ -71,13 +73,19 @@ def create_publisher_fake_subscriber(
return sub, is_real
+ @contextmanager
+ def _patch_producer(self, broker: RedisBroker) -> Iterator[None]:
+ old_producer = broker._state.get().producer
+ broker._state.patch_value(producer=FakeProducer(broker))
+ yield
+ broker._state.patch_value(producer=old_producer)
+
@staticmethod
async def _fake_connect( # type: ignore[override]
broker: RedisBroker,
*args: Any,
**kwargs: Any,
) -> AsyncMock:
- broker._producer = FakeProducer(broker)
connection = MagicMock()
pub_sub = AsyncMock()
@@ -108,26 +116,16 @@ def __init__(self, broker: RedisBroker) -> None:
@override
async def publish(
self,
- message: "SendableMessage",
- *,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- reply_to: str = "",
- correlation_id: Optional[str] = None,
+ cmd: "RedisPublishCommand",
) -> None:
- correlation_id = correlation_id or gen_cor_id()
-
body = build_message(
- message=message,
- reply_to=reply_to,
- correlation_id=correlation_id,
- headers=headers,
+ message=cmd.body,
+ reply_to=cmd.reply_to,
+ correlation_id=cmd.correlation_id or gen_cor_id(),
+ headers=cmd.headers,
)
- destination = _make_destionation_kwargs(channel, list, stream)
+ destination = _make_destionation_kwargs(cmd)
visitors = (ChannelVisitor(), ListVisitor(), StreamVisitor())
for handler in self.broker._subscribers: # pragma: no branch
@@ -144,25 +142,15 @@ async def publish(
@override
async def request( # type: ignore[override]
self,
- message: "SendableMessage",
- *,
- correlation_id: str,
- channel: Optional[str] = None,
- list: Optional[str] = None,
- stream: Optional[str] = None,
- maxlen: Optional[int] = None,
- headers: Optional["AnyDict"] = None,
- timeout: Optional[float] = 30.0,
+ cmd: "RedisPublishCommand",
) -> "PubSubMessage":
- correlation_id = correlation_id or gen_cor_id()
-
body = build_message(
- message=message,
- correlation_id=correlation_id,
- headers=headers,
+ message=cmd.body,
+ correlation_id=cmd.correlation_id or gen_cor_id(),
+ headers=cmd.headers,
)
- destination = _make_destionation_kwargs(channel, list, stream)
+ destination = _make_destionation_kwargs(cmd)
visitors = (ChannelVisitor(), ListVisitor(), StreamVisitor())
for handler in self.broker._subscribers: # pragma: no branch
@@ -174,34 +162,33 @@ async def request( # type: ignore[override]
handler, # type: ignore[arg-type]
)
- with anyio.fail_after(timeout):
+ with anyio.fail_after(cmd.timeout):
return await self._execute_handler(msg, handler)
raise SubscriberNotFound
async def publish_batch(
self,
- *msgs: "SendableMessage",
- list: str,
- headers: Optional["AnyDict"] = None,
- correlation_id: Optional[str] = None,
+ cmd: "RedisPublishCommand",
) -> None:
data_to_send = [
build_message(
m,
- correlation_id=correlation_id or gen_cor_id(),
- headers=headers,
+ correlation_id=cmd.correlation_id or gen_cor_id(),
+ headers=cmd.headers,
)
- for m in msgs
+ for m in cmd.batch_bodies
]
visitor = ListVisitor()
for handler in self.broker._subscribers: # pragma: no branch
- if visitor.visit(list=list, sub=handler):
+ if visitor.visit(list=cmd.destination, sub=handler):
casted_handler = cast(_ListHandlerMixin, handler)
if casted_handler.list_sub.batch:
- msg = visitor.get_message(list, data_to_send, casted_handler)
+ msg = visitor.get_message(
+ cmd.destination, data_to_send, casted_handler
+ )
await self._execute_handler(msg, handler)
@@ -375,18 +362,14 @@ class _DestinationKwargs(TypedDict, total=False):
stream: str
-def _make_destionation_kwargs(
- channel: Optional[str],
- list: Optional[str],
- stream: Optional[str],
-) -> _DestinationKwargs:
+def _make_destionation_kwargs(cmd: RedisPublishCommand) -> _DestinationKwargs:
destination: _DestinationKwargs = {}
- if channel:
- destination["channel"] = channel
- if list:
- destination["list"] = list
- if stream:
- destination["stream"] = stream
+ if cmd.destination_type is DestinationType.Channel:
+ destination["channel"] = cmd.destination
+ if cmd.destination_type is DestinationType.List:
+ destination["list"] = cmd.destination
+ if cmd.destination_type is DestinationType.Stream:
+ destination["stream"] = cmd.destination
if len(destination) != 1:
raise SetupError(INCORRECT_SETUP_MSG)
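
The in-memory broker now derives its single destination from the command itself: exactly one of channel/list/stream survives, keyed by cmd.destination_type. Assuming the RedisPublishCommand constructor maps a channel= argument the way the from_cmd factory above suggests, the round trip looks like:

from faststream.redis.response import DestinationType, RedisPublishCommand
from faststream.response import PublishType

cmd = RedisPublishCommand("hi", channel="events", _publish_type=PublishType.PUBLISH)
assert cmd.destination_type is DestinationType.Channel
assert cmd.destination == "events"
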
diff --git a/faststream/response/__init__.py b/faststream/response/__init__.py
index 686ec1dc50..9a0cc2410e 100644
--- a/faststream/response/__init__.py
+++ b/faststream/response/__init__.py
@@ -1,7 +1,10 @@
-from .response import Response
+from .publish_type import PublishType
+from .response import PublishCommand, Response
from .utils import ensure_response
__all__ = (
+ "PublishCommand",
+ "PublishType",
"Response",
"ensure_response",
)
diff --git a/faststream/response/publish_type.py b/faststream/response/publish_type.py
new file mode 100644
index 0000000000..ad74910a1e
--- /dev/null
+++ b/faststream/response/publish_type.py
@@ -0,0 +1,12 @@
+from enum import Enum
+
+
+class PublishType(str, Enum):
+ PUBLISH = "PUBLISH"
+ """Regular `broker/publisher.publish(...)` call."""
+
+ REPLY = "REPLY"
+ """Response to RPC/Reply-To request."""
+
+ REQUEST = "REQUEST"
+ """RPC request call."""
diff --git a/faststream/response/response.py b/faststream/response/response.py
index cbb338bed7..ff44643f35 100644
--- a/faststream/response/response.py
+++ b/faststream/response/response.py
@@ -1,5 +1,7 @@
from typing import TYPE_CHECKING, Any, Optional
+from .publish_type import PublishType
+
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict
@@ -17,19 +19,50 @@ def __init__(
self.headers = headers or {}
self.correlation_id = correlation_id
+ def as_publish_command(self) -> "PublishCommand":
+ return PublishCommand(
+ body=self.body,
+ headers=self.headers,
+ correlation_id=self.correlation_id,
+ _publish_type=PublishType.REPLY,
+ )
+
+
+class PublishCommand(Response):
+ def __init__(
+ self,
+ body: Any,
+ *,
+ _publish_type: PublishType,
+ reply_to: str = "",
+ destination: str = "",
+ correlation_id: Optional[str] = None,
+ headers: Optional["AnyDict"] = None,
+ ) -> None:
+ super().__init__(
+ body,
+ headers=headers,
+ correlation_id=correlation_id,
+ )
+
+ self.destination = destination
+ self.reply_to = reply_to
+
+ self.publish_type = _publish_type
+
+ @property
+ def batch_bodies(self) -> tuple["Any", ...]:
+ if self.body:
+ return (self.body,)
+ return ()
+
def add_headers(
self,
- extra_headers: "AnyDict",
+ headers: "AnyDict",
*,
override: bool = True,
) -> None:
if override:
- self.headers = {**self.headers, **extra_headers}
+ self.headers |= headers
else:
- self.headers = {**extra_headers, **self.headers}
-
- def as_publish_kwargs(self) -> "AnyDict":
- return {
- "headers": self.headers,
- "correlation_id": self.correlation_id,
- }
+ self.headers = headers | self.headers
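
add_headers now leans on dict-union ordering (Python 3.9+): with override=True the incoming mapping wins on key conflicts, with override=False the existing headers do. For instance:

from faststream.response import PublishCommand, PublishType

cmd = PublishCommand(
    "body",
    _publish_type=PublishType.PUBLISH,
    headers={"x-trace": "a"},
)

cmd.add_headers({"x-trace": "b", "x-extra": "1"})
assert cmd.headers == {"x-trace": "b", "x-extra": "1"}  # incoming wins

cmd.add_headers({"x-trace": "c"}, override=False)
assert cmd.headers["x-trace"] == "b"  # existing value kept
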
diff --git a/faststream/specification/asyncapi/message.py b/faststream/specification/asyncapi/message.py
index d105200f6f..187cc70af0 100644
--- a/faststream/specification/asyncapi/message.py
+++ b/faststream/specification/asyncapi/message.py
@@ -1,6 +1,6 @@
from collections.abc import Sequence
from inspect import isclass
-from typing import TYPE_CHECKING, Any, Optional, overload
+from typing import TYPE_CHECKING, Optional, overload
from pydantic import BaseModel, create_model
@@ -16,15 +16,15 @@
from fast_depends.core import CallModel
-def parse_handler_params(call: "CallModel[Any, Any]", prefix: str = "") -> AnyDict:
+def parse_handler_params(call: "CallModel", prefix: str = "") -> AnyDict:
"""Parses the handler parameters."""
- model = call.model
+ model = getattr(call, "serializer", call).model
assert model # nosec B101
body = get_model_schema(
create_model( # type: ignore[call-overload]
model.__name__,
- **call.flat_params,
+ **{p.field_name: (p.field_type, p.default_value) for p in call.flat_params},
),
prefix=prefix,
exclude=tuple(call.custom_fields.keys()),
@@ -41,11 +41,11 @@ def get_response_schema(call: None, prefix: str = "") -> None: ...
@overload
-def get_response_schema(call: "CallModel[Any, Any]", prefix: str = "") -> AnyDict: ...
+def get_response_schema(call: "CallModel", prefix: str = "") -> AnyDict: ...
def get_response_schema(
- call: Optional["CallModel[Any, Any]"],
+ call: Optional["CallModel"],
prefix: str = "",
) -> Optional[AnyDict]:
"""Get the response schema for a given call."""
diff --git a/faststream/specification/asyncapi/utils.py b/faststream/specification/asyncapi/utils.py
index 2e6ffadfe2..7f16a215dc 100644
--- a/faststream/specification/asyncapi/utils.py
+++ b/faststream/specification/asyncapi/utils.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict
@@ -49,3 +49,36 @@ def resolve_payloads(
def clear_key(key: str) -> str:
return key.replace("/", ".")
+
+
+def move_pydantic_refs(
+ original: Any,
+ key: str,
+) -> Any:
+ """Remove pydantic references and replacem them by real schemas."""
+ if not isinstance(original, dict):
+ return original
+
+ data = original.copy()
+
+ for k in data:
+ item = data[k]
+
+ if isinstance(item, str):
+ if key in item:
+ data[k] = data[k].replace(key, "components/schemas")
+
+ elif isinstance(item, dict):
+ data[k] = move_pydantic_refs(data[k], key)
+
+ elif isinstance(item, list):
+ for i in range(len(data[k])):
+ data[k][i] = move_pydantic_refs(item[i], key)
+
+ if (
+ isinstance(desciminator := data.get("discriminator"), dict)
+ and "propertyName" in desciminator
+ ):
+ data["discriminator"] = desciminator["propertyName"]
+
+ return data
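
Moved here from the v2.6.0 generator so both schema versions can share it, move_pydantic_refs rewrites pydantic's local $defs references to AsyncAPI's components/schemas location and flattens discriminator objects to their property name:

from faststream.specification.asyncapi.utils import move_pydantic_refs

schema = {
    "payload": {"$ref": "#/$defs/Pet"},
    "discriminator": {"propertyName": "kind"},
}

fixed = move_pydantic_refs(schema, "$defs")
assert fixed["payload"]["$ref"] == "#/components/schemas/Pet"
assert fixed["discriminator"] == "kind"
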
diff --git a/faststream/specification/asyncapi/v2_6_0/facade.py b/faststream/specification/asyncapi/v2_6_0/facade.py
index 4926dd2ad4..d8c4b5618b 100644
--- a/faststream/specification/asyncapi/v2_6_0/facade.py
+++ b/faststream/specification/asyncapi/v2_6_0/facade.py
@@ -9,10 +9,16 @@
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, AnyHttpUrl
from faststream._internal.broker.broker import BrokerUsecase
- from faststream.specification.schema.contact import Contact, ContactDict
- from faststream.specification.schema.docs import ExternalDocs, ExternalDocsDict
- from faststream.specification.schema.license import License, LicenseDict
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+ )
class AsyncAPI2(Specification):
diff --git a/faststream/specification/asyncapi/v2_6_0/generate.py b/faststream/specification/asyncapi/v2_6_0/generate.py
index 38c8d9e6b4..4c81514da7 100644
--- a/faststream/specification/asyncapi/v2_6_0/generate.py
+++ b/faststream/specification/asyncapi/v2_6_0/generate.py
@@ -4,7 +4,7 @@
from faststream._internal._compat import DEF_KEY
from faststream._internal.basic_types import AnyDict, AnyHttpUrl
from faststream._internal.constants import ContentTypes
-from faststream.specification.asyncapi.utils import clear_key
+from faststream.specification.asyncapi.utils import clear_key, move_pydantic_refs
from faststream.specification.asyncapi.v2_6_0.schema import (
ApplicationInfo,
ApplicationSchema,
@@ -13,11 +13,11 @@
Contact,
ExternalDocs,
License,
+ Message,
Reference,
Server,
Tag,
)
-from faststream.specification.asyncapi.v2_6_0.schema.message import Message
if TYPE_CHECKING:
from faststream._internal.broker.broker import BrokerUsecase
@@ -75,7 +75,7 @@ def get_app_schema(
tags=[Tag.from_spec(tag) for tag in tags] or None,
externalDocs=ExternalDocs.from_spec(external_docs),
asyncapi=schema_version,
- defaultContentType=ContentTypes.json.value,
+ defaultContentType=ContentTypes.JSON.value,
id=identifier,
servers=servers,
channels=channels,
@@ -215,36 +215,3 @@ def _resolve_msg_payloads(
message_title = clear_key(m.title)
messages[message_title] = m
return Reference(**{"$ref": f"#/components/messages/{message_title}"})
-
-
-def move_pydantic_refs(
- original: Any,
- key: str,
-) -> Any:
- """Remove pydantic references and replacem them by real schemas."""
- if not isinstance(original, dict):
- return original
-
- data = original.copy()
-
- for k in data:
- item = data[k]
-
- if isinstance(item, str):
- if key in item:
- data[k] = data[k].replace(key, "components/schemas")
-
- elif isinstance(item, dict):
- data[k] = move_pydantic_refs(data[k], key)
-
- elif isinstance(item, list):
- for i in range(len(data[k])):
- data[k][i] = move_pydantic_refs(item[i], key)
-
- if (
- isinstance(desciminator := data.get("discriminator"), dict)
- and "propertyName" in desciminator
- ):
- data["discriminator"] = desciminator["propertyName"]
-
- return data
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py
index bd69d8f4dc..aa729dce29 100644
--- a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/channel.py
@@ -113,14 +113,6 @@ class ChannelBinding(BaseModel):
queue: Optional[Queue] = None
exchange: Optional[Exchange] = None
- @overload
- @classmethod
- def from_sub(cls, binding: None) -> None: ...
-
- @overload
- @classmethod
- def from_sub(cls, binding: amqp.ChannelBinding) -> Self: ...
-
@classmethod
def from_sub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
if binding is None:
@@ -136,14 +128,6 @@ def from_sub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
},
)
- @overload
- @classmethod
- def from_pub(cls, binding: None) -> None: ...
-
- @overload
- @classmethod
- def from_pub(cls, binding: amqp.ChannelBinding) -> Self: ...
-
@classmethod
def from_pub(cls, binding: Optional[amqp.ChannelBinding]) -> Optional[Self]:
if binding is None:
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py
index 46396ee938..e299b986c7 100644
--- a/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/amqp/operation.py
@@ -3,7 +3,7 @@
References: https://github.com/asyncapi/bindings/tree/master/amqp
"""
-from typing import Optional, overload
+from typing import Optional
from pydantic import BaseModel, PositiveInt
from typing_extensions import Self
@@ -21,54 +21,39 @@ class OperationBinding(BaseModel):
bindingVersion : string representing the binding version
"""
- cc: Optional[str] = None
- ack: bool = True
- replyTo: Optional[str] = None
- deliveryMode: Optional[int] = None
- mandatory: Optional[bool] = None
- priority: Optional[PositiveInt] = None
- bindingVersion: str = "0.2.0"
-
- @overload
- @classmethod
- def from_sub(cls, binding: None) -> None: ...
+ cc: Optional[str]
+ ack: bool
+ replyTo: Optional[str]
+ deliveryMode: Optional[int]
+ mandatory: Optional[bool]
+ priority: Optional[PositiveInt]
- @overload
- @classmethod
- def from_sub(cls, binding: amqp.OperationBinding) -> Self: ...
+ bindingVersion: str = "0.2.0"
@classmethod
def from_sub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
- if binding is None:
+ if not binding:
return None
return cls(
- cc=binding.cc,
+ cc=binding.routing_key if binding.exchange.is_respect_routing_key else None,
ack=binding.ack,
replyTo=binding.reply_to,
- deliveryMode=binding.delivery_mode,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
mandatory=binding.mandatory,
priority=binding.priority,
)
- @overload
- @classmethod
- def from_pub(cls, binding: None) -> None: ...
-
- @overload
- @classmethod
- def from_pub(cls, binding: amqp.OperationBinding) -> Self: ...
-
@classmethod
def from_pub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
- if binding is None:
+ if not binding:
return None
return cls(
- cc=binding.cc,
+ cc=binding.routing_key if binding.exchange.is_respect_routing_key else None,
ack=binding.ack,
replyTo=binding.reply_to,
- deliveryMode=binding.delivery_mode,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
mandatory=binding.mandatory,
priority=binding.priority,
)
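
The mapping above packs two spec fields into standard AMQP binding values: persist becomes deliveryMode (1 = transient, 2 = persistent), and cc is only emitted when the exchange type actually respects routing keys. The deliveryMode arithmetic in isolation:

from typing import Optional


def delivery_mode(persist: Optional[bool]) -> Optional[int]:
    # AMQP delivery modes: 1 = transient, 2 = persistent
    return None if persist is None else int(persist) + 1


assert delivery_mode(None) is None
assert delivery_mode(False) == 1
assert delivery_mode(True) == 2
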
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py
index ed00f015ea..bf4b7dbd98 100644
--- a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/channel.py
@@ -23,7 +23,6 @@ class ChannelBinding(BaseModel):
sqs : SQS channel binding (optional)
nats : NATS channel binding (optional)
redis : Redis channel binding (optional)
-
"""
amqp: Optional[amqp_bindings.ChannelBinding] = None
@@ -53,16 +52,29 @@ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
if binding is None:
return None
- if binding.amqp:
- return cls(amqp=amqp_bindings.ChannelBinding.from_sub(binding.amqp))
- if binding.kafka:
- return cls(kafka=kafka_bindings.ChannelBinding.from_sub(binding.kafka))
- if binding.nats:
- return cls(nats=nats_bindings.ChannelBinding.from_sub(binding.nats))
- if binding.redis:
- return cls(redis=redis_bindings.ChannelBinding.from_sub(binding.redis))
- if binding.sqs:
- return cls(sqs=sqs_bindings.ChannelBinding.from_sub(binding.sqs))
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
return None
@overload
@@ -78,14 +90,27 @@ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
if binding is None:
return None
- if binding.amqp:
- return cls(amqp=amqp_bindings.ChannelBinding.from_pub(binding.amqp))
- if binding.kafka:
- return cls(kafka=kafka_bindings.ChannelBinding.from_pub(binding.kafka))
- if binding.nats:
- return cls(nats=nats_bindings.ChannelBinding.from_pub(binding.nats))
- if binding.redis:
- return cls(redis=redis_bindings.ChannelBinding.from_pub(binding.redis))
- if binding.sqs:
- return cls(sqs=sqs_bindings.ChannelBinding.from_pub(binding.sqs))
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
return None
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py
index 4dc930d758..7367b7921f 100644
--- a/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py
+++ b/faststream/specification/asyncapi/v2_6_0/schema/bindings/main/operation.py
@@ -52,16 +52,29 @@ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
if binding is None:
return None
- if binding.amqp:
- return cls(amqp=amqp_bindings.OperationBinding.from_sub(binding.amqp))
- if binding.kafka:
- return cls(kafka=kafka_bindings.OperationBinding.from_sub(binding.kafka))
- if binding.nats:
- return cls(nats=nats_bindings.OperationBinding.from_sub(binding.nats))
- if binding.redis:
- return cls(redis=redis_bindings.OperationBinding.from_sub(binding.redis))
- if binding.sqs:
- return cls(sqs=sqs_bindings.OperationBinding.from_sub(binding.sqs))
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
return None
@overload
@@ -77,14 +90,27 @@ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
if binding is None:
return None
- if binding.amqp:
- return cls(amqp=amqp_bindings.OperationBinding.from_pub(binding.amqp))
- if binding.kafka:
- return cls(kafka=kafka_bindings.OperationBinding.from_pub(binding.kafka))
- if binding.nats:
- return cls(nats=nats_bindings.OperationBinding.from_pub(binding.nats))
- if binding.redis:
- return cls(redis=redis_bindings.OperationBinding.from_pub(binding.redis))
- if binding.sqs:
- return cls(sqs=sqs_bindings.OperationBinding.from_pub(binding.sqs))
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
return None
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/channels.py b/faststream/specification/asyncapi/v2_6_0/schema/channels.py
index 1f04f6538d..02d36311c2 100644
--- a/faststream/specification/asyncapi/v2_6_0/schema/channels.py
+++ b/faststream/specification/asyncapi/v2_6_0/schema/channels.py
@@ -4,10 +4,11 @@
from typing_extensions import Self
from faststream._internal._compat import PYDANTIC_V2
-from faststream.specification.asyncapi.v2_6_0.schema.bindings import ChannelBinding
-from faststream.specification.asyncapi.v2_6_0.schema.operations import Operation
from faststream.specification.schema import PublisherSpec, SubscriberSpec
+from .bindings import ChannelBinding
+from .operations import Operation
+
class Channel(BaseModel):
"""A class to represent a channel.
diff --git a/faststream/specification/asyncapi/v2_6_0/schema/operations.py b/faststream/specification/asyncapi/v2_6_0/schema/operations.py
index bb6c4347db..eac2726517 100644
--- a/faststream/specification/asyncapi/v2_6_0/schema/operations.py
+++ b/faststream/specification/asyncapi/v2_6_0/schema/operations.py
@@ -5,12 +5,13 @@
from faststream._internal._compat import PYDANTIC_V2
from faststream._internal.basic_types import AnyDict
-from faststream.specification.asyncapi.v2_6_0.schema.bindings import OperationBinding
-from faststream.specification.asyncapi.v2_6_0.schema.message import Message
-from faststream.specification.asyncapi.v2_6_0.schema.tag import Tag
-from faststream.specification.asyncapi.v2_6_0.schema.utils import Reference
from faststream.specification.schema.operation import Operation as OperationSpec
+from .bindings import OperationBinding
+from .message import Message
+from .tag import Tag
+from .utils import Reference
+
class Operation(BaseModel):
"""A class to represent an operation.
diff --git a/faststream/specification/asyncapi/v3_0_0/facade.py b/faststream/specification/asyncapi/v3_0_0/facade.py
index 74015210d4..4ce47b6f90 100644
--- a/faststream/specification/asyncapi/v3_0_0/facade.py
+++ b/faststream/specification/asyncapi/v3_0_0/facade.py
@@ -9,10 +9,16 @@
if TYPE_CHECKING:
from faststream._internal.basic_types import AnyDict, AnyHttpUrl
from faststream._internal.broker.broker import BrokerUsecase
- from faststream.specification.schema.contact import Contact, ContactDict
- from faststream.specification.schema.docs import ExternalDocs, ExternalDocsDict
- from faststream.specification.schema.license import License, LicenseDict
- from faststream.specification.schema.tag import Tag, TagDict
+ from faststream.specification.schema.extra import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+ )
class AsyncAPI3(Specification):
diff --git a/faststream/specification/asyncapi/v3_0_0/generate.py b/faststream/specification/asyncapi/v3_0_0/generate.py
index 4937ce3a6d..1efc8c4fdc 100644
--- a/faststream/specification/asyncapi/v3_0_0/generate.py
+++ b/faststream/specification/asyncapi/v3_0_0/generate.py
@@ -5,40 +5,34 @@
from faststream._internal._compat import DEF_KEY
from faststream._internal.basic_types import AnyDict, AnyHttpUrl
from faststream._internal.constants import ContentTypes
-from faststream.specification.asyncapi.utils import clear_key
-from faststream.specification.asyncapi.v2_6_0.generate import move_pydantic_refs
-from faststream.specification.asyncapi.v2_6_0.schema import (
- Reference,
- Tag,
- contact_from_spec,
- docs_from_spec,
- license_from_spec,
- tag_from_spec,
-)
+from faststream.specification.asyncapi.utils import clear_key, move_pydantic_refs
from faststream.specification.asyncapi.v3_0_0.schema import (
ApplicationInfo,
ApplicationSchema,
Channel,
Components,
+ Contact,
+ ExternalDocs,
+ License,
+ Message,
Operation,
+ Reference,
Server,
- channel_from_spec,
- operation_from_spec,
-)
-from faststream.specification.asyncapi.v3_0_0.schema.message import Message
-from faststream.specification.asyncapi.v3_0_0.schema.operations import (
- Action,
+ Tag,
)
if TYPE_CHECKING:
from faststream._internal.broker.broker import BrokerUsecase
from faststream._internal.types import ConnectionType, MsgType
- from faststream.specification.schema.contact import Contact, ContactDict
- from faststream.specification.schema.docs import ExternalDocs, ExternalDocsDict
- from faststream.specification.schema.license import License, LicenseDict
- from faststream.specification.schema.tag import (
- Tag as SpecsTag,
- TagDict as SpecsTagDict,
+ from faststream.specification.schema.extra import (
+ Contact as SpecContact,
+ ContactDict,
+ ExternalDocs as SpecDocs,
+ ExternalDocsDict,
+ License as SpecLicense,
+ LicenseDict,
+ Tag as SpecTag,
+ TagDict,
)
@@ -50,18 +44,17 @@ def get_app_schema(
schema_version: str,
description: str,
terms_of_service: Optional["AnyHttpUrl"],
- contact: Optional[Union["Contact", "ContactDict", "AnyDict"]],
- license: Optional[Union["License", "LicenseDict", "AnyDict"]],
+ contact: Optional[Union["SpecContact", "ContactDict", "AnyDict"]],
+ license: Optional[Union["SpecLicense", "LicenseDict", "AnyDict"]],
identifier: Optional[str],
- tags: Optional[Sequence[Union["SpecsTag", "SpecsTagDict", "AnyDict"]]],
- external_docs: Optional[Union["ExternalDocs", "ExternalDocsDict", "AnyDict"]],
+ tags: Optional[Sequence[Union["SpecTag", "TagDict", "AnyDict"]]],
+ external_docs: Optional[Union["SpecDocs", "ExternalDocsDict", "AnyDict"]],
) -> ApplicationSchema:
"""Get the application schema."""
broker._setup()
servers = get_broker_server(broker)
- channels = get_broker_channels(broker)
- operations = get_broker_operations(broker)
+ channels, operations = get_broker_channels(broker)
messages: dict[str, Message] = {}
payloads: dict[str, AnyDict] = {}
@@ -92,13 +85,13 @@ def get_app_schema(
version=app_version,
description=description,
termsOfService=terms_of_service,
- contact=contact_from_spec(contact) if contact else None,
- license=license_from_spec(license) if license else None,
- tags=[tag_from_spec(tag) for tag in tags] or None,
- externalDocs=docs_from_spec(external_docs) if external_docs else None,
+ contact=Contact.from_spec(contact),
+ license=License.from_spec(license),
+ tags=[Tag.from_spec(tag) for tag in tags] or None,
+ externalDocs=ExternalDocs.from_spec(external_docs),
),
asyncapi=schema_version,
- defaultContentType=ContentTypes.json.value,
+ defaultContentType=ContentTypes.JSON.value,
id=identifier,
servers=servers,
channels=channels,
@@ -121,7 +114,7 @@ def get_broker_server(
tags: Optional[list[Union[Tag, AnyDict]]] = None
if broker.tags:
- tags = [tag_from_spec(tag) for tag in broker.tags]
+ tags = [Tag.from_spec(tag) for tag in broker.tags]
broker_meta: AnyDict = {
"protocol": broker.protocol,
@@ -152,77 +145,52 @@ def get_broker_server(
return servers
-def get_broker_operations(
- broker: "BrokerUsecase[MsgType, ConnectionType]",
-) -> dict[str, Operation]:
- """Get the broker operations for an application."""
- operations = {}
-
- for h in broker._subscribers:
- for channel, specs_channel in h.schema().items():
- channel_name = clear_key(channel)
-
- if specs_channel.subscribe is not None:
- operations[f"{channel_name}Subscribe"] = operation_from_spec(
- specs_channel.subscribe,
- Action.RECEIVE,
- channel_name,
- )
-
- for p in broker._publishers:
- for channel, specs_channel in p.schema().items():
- channel_name = clear_key(channel)
-
- if specs_channel.publish is not None:
- operations[f"{channel_name}"] = operation_from_spec(
- specs_channel.publish,
- Action.SEND,
- channel_name,
- )
-
- return operations
-
-
def get_broker_channels(
broker: "BrokerUsecase[MsgType, ConnectionType]",
-) -> dict[str, Channel]:
+) -> tuple[dict[str, Channel], dict[str, Operation]]:
"""Get the broker channels for an application."""
channels = {}
+ operations = {}
for sub in broker._subscribers:
- channels_schema_v3_0 = {}
- for channel_name, specs_channel in sub.schema().items():
- if specs_channel.subscribe:
- message = specs_channel.subscribe.message
- assert message.title
-
- *left, right = message.title.split(":")
- message.title = ":".join(left) + f":Subscribe{right}"
-
- # TODO: why we are format just a key?
- channels_schema_v3_0[clear_key(channel_name)] = channel_from_spec(
- specs_channel,
- message,
- channel_name,
- "SubscribeMessage",
- )
-
- channels.update(channels_schema_v3_0)
+ for key, channel in sub.schema().items():
+ channel_obj = Channel.from_sub(key, channel)
+
+ channel_key = clear_key(key)
+ # TODO: warn on duplicate channel keys
+ channels[channel_key] = channel_obj
+
+ operations[f"{channel_key}Subscribe"] = Operation.from_sub(
+ messages=[
+ Reference(**{
+ "$ref": f"#/channels/{channel_key}/messages/{msg_name}"
+ })
+ for msg_name in channel_obj.messages
+ ],
+ channel=Reference(**{"$ref": f"#/channels/{channel_key}"}),
+ operation=channel.operation,
+ )
for pub in broker._publishers:
- channels_schema_v3_0 = {}
- for channel_name, specs_channel in pub.schema().items():
- if specs_channel.publish:
- channels_schema_v3_0[clear_key(channel_name)] = channel_from_spec(
- specs_channel,
- specs_channel.publish.message,
- channel_name,
- "Message",
- )
-
- channels.update(channels_schema_v3_0)
-
- return channels
+ for key, channel in pub.schema().items():
+ channel_obj = Channel.from_pub(key, channel)
+
+ channel_key = clear_key(key)
+ # TODO: warn on duplicate channel keys
+ channels[channel_key] = channel_obj
+
+ operations[channel_key] = Operation.from_pub(
+ messages=[
+ Reference(**{
+ "$ref": f"#/channels/{channel_key}/messages/{msg_name}"
+ })
+ for msg_name in channel_obj.messages
+ ],
+ channel=Reference(**{"$ref": f"#/channels/{channel_key}"}),
+ operation=channel.operation,
+ )
+
+ return channels, operations
def _resolve_msg_payloads(
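
The single-pass builder ties each operation back to its channel and messages through JSON references, matching AsyncAPI 3.0's layout where operations live at the top level. For a subscriber on channel key "test", the emitted shape is roughly (a hedged sketch as a plain dict; the real output uses the Operation/Reference models above):

operation = {
    "action": "receive",  # Operation.from_sub
    "channel": {"$ref": "#/channels/test"},
    "messages": [
        {"$ref": "#/channels/test/messages/SubscribeMessage"},
    ],
}
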
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/__init__.py
index 6a44ec6c82..e0cbcbd7b2 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/__init__.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/__init__.py
@@ -1,23 +1,31 @@
-from .channels import (
- Channel,
- from_spec as channel_from_spec,
-)
+from .channels import Channel
from .components import Components
+from .contact import Contact
+from .docs import ExternalDocs
from .info import ApplicationInfo
-from .operations import (
- Operation,
- from_spec as operation_from_spec,
-)
+from .license import License
+from .message import CorrelationId, Message
+from .operations import Operation
from .schema import ApplicationSchema
-from .servers import Server
+from .servers import Server, ServerVariable
+from .tag import Tag
+from .utils import Parameter, Reference
__all__ = (
"ApplicationInfo",
"ApplicationSchema",
"Channel",
+ "Channel",
"Components",
+ "Contact",
+ "CorrelationId",
+ "ExternalDocs",
+ "License",
+ "Message",
"Operation",
+ "Parameter",
+ "Reference",
"Server",
- "channel_from_spec",
- "operation_from_spec",
+ "ServerVariable",
+ "Tag",
)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py
index d477f704cd..c304608c5b 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/__init__.py
@@ -1,11 +1,9 @@
from .main import (
+ ChannelBinding,
OperationBinding,
- channel_binding_from_spec,
- operation_binding_from_spec,
)
__all__ = (
+ "ChannelBinding",
"OperationBinding",
- "channel_binding_from_spec",
- "operation_binding_from_spec",
)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py
index 96c7406698..8555fd981a 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/__init__.py
@@ -1,11 +1,7 @@
-from .channel import from_spec as channel_binding_from_spec
-from .operation import (
- OperationBinding,
- from_spec as operation_binding_from_spec,
-)
+from .channel import ChannelBinding
+from .operation import OperationBinding
__all__ = (
+ "ChannelBinding",
"OperationBinding",
- "channel_binding_from_spec",
- "operation_binding_from_spec",
)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py
index 33ceb3c0a0..ecab8e4a17 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/channel.py
@@ -1,21 +1,7 @@
-from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp import ChannelBinding
-from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp.channel import (
- Exchange,
- Queue,
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp import (
+ ChannelBinding as V2Binding,
)
-from faststream.specification.schema.bindings import amqp
-def from_spec(binding: amqp.ChannelBinding) -> ChannelBinding:
- return ChannelBinding(
- **{
- "is": binding.is_,
- "bindingVersion": "0.3.0",
- "queue": Queue.from_spec(binding.queue)
- if binding.queue is not None
- else None,
- "exchange": Exchange.from_spec(binding.exchange)
- if binding.exchange is not None
- else None,
- },
- )
+class ChannelBinding(V2Binding):
+ bindingVersion: str = "0.3.0"
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py
index 4dbd5478fe..77ba8356a0 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/amqp/operation.py
@@ -5,41 +5,46 @@
from typing import Optional
-from pydantic import BaseModel, PositiveInt
from typing_extensions import Self
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.amqp import (
+ OperationBinding as V2Binding,
+)
from faststream.specification.schema.bindings import amqp
-class OperationBinding(BaseModel):
- """A class to represent an operation binding.
-
- Attributes:
- cc : optional string representing the cc
- ack : boolean indicating if the operation is acknowledged
- replyTo : optional dictionary representing the replyTo
- bindingVersion : string representing the binding version
- """
-
+class OperationBinding(V2Binding):
cc: Optional[list[str]] = None
- ack: bool = True
- replyTo: Optional[str] = None
- deliveryMode: Optional[int] = None
- mandatory: Optional[bool] = None
- priority: Optional[PositiveInt] = None
bindingVersion: str = "0.3.0"
@classmethod
- def from_spec(cls, binding: amqp.OperationBinding) -> Self:
+ def from_sub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
+
return cls(
- cc=[binding.cc] if binding.cc is not None else None,
+ cc=[binding.routing_key]
+ if (binding.routing_key and binding.exchange.is_respect_routing_key)
+ else None,
ack=binding.ack,
- replyTo=binding.replyTo,
- deliveryMode=binding.deliveryMode,
+ replyTo=binding.reply_to,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
mandatory=binding.mandatory,
priority=binding.priority,
)
+ @classmethod
+ def from_pub(cls, binding: Optional[amqp.OperationBinding]) -> Optional[Self]:
+ if not binding:
+ return None
-def from_spec(binding: amqp.OperationBinding) -> OperationBinding:
- return OperationBinding.from_spec(binding)
+ return cls(
+ cc=None
+ if (not binding.routing_key or not binding.exchange.is_respect_routing_key)
+ else [binding.routing_key],
+ ack=binding.ack,
+ replyTo=binding.reply_to,
+ deliveryMode=None if binding.persist is None else int(binding.persist) + 1,
+ mandatory=binding.mandatory,
+ priority=binding.priority,
+ )
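
For reference, a minimal stand-alone sketch of the AMQP persist -> deliveryMode mapping that from_sub/from_pub apply above (the helper name is illustrative and not part of the patch):

from typing import Optional

def delivery_mode(persist: Optional[bool]) -> Optional[int]:
    # Mirrors `None if binding.persist is None else int(binding.persist) + 1`:
    # None -> unset, False -> 1 (transient), True -> 2 (persistent).
    return None if persist is None else int(persist) + 1

assert delivery_mode(None) is None
assert delivery_mode(False) == 1
assert delivery_mode(True) == 2
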
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/kafka.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/kafka.py
new file mode 100644
index 0000000000..5605abeefa
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/kafka.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.kafka import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py
index 96c7406698..8555fd981a 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/__init__.py
@@ -1,11 +1,7 @@
-from .channel import from_spec as channel_binding_from_spec
-from .operation import (
- OperationBinding,
- from_spec as operation_binding_from_spec,
-)
+from .channel import ChannelBinding
+from .operation import OperationBinding
__all__ = (
+ "ChannelBinding",
"OperationBinding",
- "channel_binding_from_spec",
- "operation_binding_from_spec",
)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py
index 41aef76aaa..c7552a11d1 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/channel.py
@@ -1,17 +1,100 @@
-from faststream.specification import schema as spec
-from faststream.specification.asyncapi.v2_6_0.schema.bindings import ChannelBinding
-from faststream.specification.asyncapi.v2_6_0.schema.bindings.main import (
- channel_binding_from_spec,
-)
-from faststream.specification.asyncapi.v3_0_0.schema.bindings.amqp import (
- channel_binding_from_spec as amqp_channel_binding_from_spec,
+from typing import Optional
+
+from pydantic import BaseModel
+from typing_extensions import Self
+
+from faststream._internal._compat import PYDANTIC_V2
+from faststream.specification.asyncapi.v3_0_0.schema.bindings import (
+ amqp as amqp_bindings,
+ kafka as kafka_bindings,
+ nats as nats_bindings,
+ redis as redis_bindings,
+ sqs as sqs_bindings,
)
+from faststream.specification.schema.bindings import ChannelBinding as SpecBinding
+
+
+class ChannelBinding(BaseModel):
+ """A class to represent channel bindings.
+
+ Attributes:
+ amqp : AMQP channel binding (optional)
+ kafka : Kafka channel binding (optional)
+ sqs : SQS channel binding (optional)
+ nats : NATS channel binding (optional)
+ redis : Redis channel binding (optional)
+ """
+
+ amqp: Optional[amqp_bindings.ChannelBinding] = None
+ kafka: Optional[kafka_bindings.ChannelBinding] = None
+ sqs: Optional[sqs_bindings.ChannelBinding] = None
+ nats: Optional[nats_bindings.ChannelBinding] = None
+ redis: Optional[redis_bindings.ChannelBinding] = None
+
+ if PYDANTIC_V2:
+ model_config = {"extra": "allow"}
+
+ else:
+
+ class Config:
+ extra = "allow"
+
+ @classmethod
+ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
+
+ @classmethod
+ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.ChannelBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.ChannelBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+ if binding.nats and (
+ nats := nats_bindings.ChannelBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
-def from_spec(binding: spec.bindings.ChannelBinding) -> ChannelBinding:
- channel_binding = channel_binding_from_spec(binding)
+ if binding.redis and (
+ redis := redis_bindings.ChannelBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
- if binding.amqp:
- channel_binding.amqp = amqp_channel_binding_from_spec(binding.amqp)
+ if binding.sqs and (sqs := sqs_bindings.ChannelBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
- return channel_binding
+ return None
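
The from_sub/from_pub classmethods above all follow the same first-match dispatch: return a binding for the first protocol that is both present in the spec and converts successfully. A condensed sketch of that pattern with stand-in types (not FastStream's real classes):

from dataclasses import dataclass
from typing import Optional

@dataclass
class SpecBinding:
    amqp: Optional[str] = None
    kafka: Optional[str] = None

def first_match(binding: Optional[SpecBinding]) -> Optional[str]:
    if binding is None:
        return None
    # Walrus-assign the converted value and return on the first protocol
    # whose spec binding is set and converts to something non-falsy.
    if binding.amqp and (amqp := binding.amqp.strip()):
        return f"amqp:{amqp}"
    if binding.kafka and (kafka := binding.kafka.strip()):
        return f"kafka:{kafka}"
    return None

assert first_match(SpecBinding(kafka="in")) == "kafka:in"
assert first_match(None) is None
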
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py
index 4e8295b107..fc37c3dc75 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/main/operation.py
@@ -4,16 +4,14 @@
from typing_extensions import Self
from faststream._internal._compat import PYDANTIC_V2
-from faststream.specification import schema as spec
-from faststream.specification.asyncapi.v2_6_0.core.bindings import (
+from faststream.specification.asyncapi.v3_0_0.schema.bindings import (
+ amqp as amqp_bindings,
kafka as kafka_bindings,
nats as nats_bindings,
redis as redis_bindings,
sqs as sqs_bindings,
)
-from faststream.specification.asyncapi.v3_0_0.core.bindings import (
- amqp as amqp_bindings,
-)
+from faststream.specification.schema.bindings import OperationBinding as SpecBinding
class OperationBinding(BaseModel):
@@ -25,7 +23,6 @@ class OperationBinding(BaseModel):
sqs : SQS operation binding (optional)
nats : NATS operation binding (optional)
redis : Redis operation binding (optional)
-
"""
amqp: Optional[amqp_bindings.OperationBinding] = None
@@ -43,25 +40,61 @@ class Config:
extra = "allow"
@classmethod
- def from_spec(cls, binding: spec.bindings.OperationBinding) -> Self:
- return cls(
- amqp=amqp_bindings.operation_binding_from_spec(binding.amqp)
- if binding.amqp is not None
- else None,
- kafka=kafka_bindings.operation_binding_from_spec(binding.kafka)
- if binding.kafka is not None
- else None,
- sqs=sqs_bindings.operation_binding_from_spec(binding.sqs)
- if binding.sqs is not None
- else None,
- nats=nats_bindings.operation_binding_from_spec(binding.nats)
- if binding.nats is not None
- else None,
- redis=redis_bindings.operation_binding_from_spec(binding.redis)
- if binding.redis is not None
- else None,
- )
-
-
-def from_spec(binding: spec.bindings.OperationBinding) -> OperationBinding:
- return OperationBinding.from_spec(binding)
+ def from_sub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_sub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_sub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_sub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_sub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_sub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
+
+ @classmethod
+ def from_pub(cls, binding: Optional[SpecBinding]) -> Optional[Self]:
+ if binding is None:
+ return None
+
+ if binding.amqp and (
+ amqp := amqp_bindings.OperationBinding.from_pub(binding.amqp)
+ ):
+ return cls(amqp=amqp)
+
+ if binding.kafka and (
+ kafka := kafka_bindings.OperationBinding.from_pub(binding.kafka)
+ ):
+ return cls(kafka=kafka)
+
+ if binding.nats and (
+ nats := nats_bindings.OperationBinding.from_pub(binding.nats)
+ ):
+ return cls(nats=nats)
+
+ if binding.redis and (
+ redis := redis_bindings.OperationBinding.from_pub(binding.redis)
+ ):
+ return cls(redis=redis)
+
+ if binding.sqs and (sqs := sqs_bindings.OperationBinding.from_pub(binding.sqs)):
+ return cls(sqs=sqs)
+
+ return None
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/nats.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/nats.py
new file mode 100644
index 0000000000..21d5c46926
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/nats.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.nats import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/redis.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/redis.py
new file mode 100644
index 0000000000..26d44644f7
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/redis.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.redis import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/bindings/sqs.py b/faststream/specification/asyncapi/v3_0_0/schema/bindings/sqs.py
new file mode 100644
index 0000000000..e437a1cc58
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/bindings/sqs.py
@@ -0,0 +1,9 @@
+from faststream.specification.asyncapi.v2_6_0.schema.bindings.sqs import (
+ ChannelBinding,
+ OperationBinding,
+)
+
+__all__ = (
+ "ChannelBinding",
+ "OperationBinding",
+)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/channels.py b/faststream/specification/asyncapi/v3_0_0/schema/channels.py
index afa7e9fd37..c0a2dbe553 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/channels.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/channels.py
@@ -4,16 +4,11 @@
from typing_extensions import Self
from faststream._internal._compat import PYDANTIC_V2
-from faststream.specification import schema as spec
-from faststream.specification.asyncapi.v2_6_0.schema.bindings import ChannelBinding
-from faststream.specification.asyncapi.v2_6_0.schema.message import (
- Message,
- from_spec as message_from_spec,
-)
-from faststream.specification.asyncapi.v2_6_0.schema.utils import Reference
-from faststream.specification.asyncapi.v3_0_0.schema.bindings.main import (
- channel_binding_from_spec,
-)
+from faststream.specification.asyncapi.v3_0_0.schema.bindings import ChannelBinding
+from faststream.specification.asyncapi.v3_0_0.schema.message import Message
+from faststream.specification.schema import PublisherSpec, SubscriberSpec
+
+from .utils import Reference
class Channel(BaseModel):
@@ -49,30 +44,31 @@ class Config:
extra = "allow"
@classmethod
- def from_spec(
- cls,
- channel: spec.channel.Channel,
- message: spec.message.Message,
- channel_name: str,
- message_name: str,
- ) -> Self:
+ def from_sub(cls, address: str, subscriber: SubscriberSpec) -> Self:
+ message = subscriber.operation.message
+ assert message.title
+
+ *left, right = message.title.split(":")
+ message.title = ":".join((*left, f"Subscribe{right}"))
+
return cls(
- address=channel_name,
+ description=subscriber.description,
+ address=address,
messages={
- message_name: message_from_spec(message),
+ "SubscribeMessage": Message.from_spec(message),
},
- description=channel.description,
- servers=channel.servers,
- bindings=channel_binding_from_spec(channel.bindings)
- if channel.bindings
- else None,
+ bindings=ChannelBinding.from_sub(subscriber.bindings),
+ servers=None,
)
-
-def from_spec(
- channel: spec.channel.Channel,
- message: spec.message.Message,
- channel_name: str,
- message_name: str,
-) -> Channel:
- return Channel.from_spec(channel, message, channel_name, message_name)
+ @classmethod
+ def from_pub(cls, address: str, publisher: PublisherSpec) -> Self:
+ return cls(
+ description=publisher.description,
+ address=address,
+ messages={
+ "Message": Message.from_spec(publisher.operation.message),
+ },
+ bindings=ChannelBinding.from_pub(publisher.bindings),
+ servers=None,
+ )
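
Channel.from_sub above rewrites the message title so that only the last colon-separated segment gains a "Subscribe" prefix; a stand-alone sketch of just that transform:

def subscribe_title(title: str) -> str:
    *left, right = title.split(":")
    return ":".join((*left, f"Subscribe{right}"))

assert subscribe_title("input_data:Consume:Message") == "input_data:Consume:SubscribeMessage"
assert subscribe_title("Message") == "SubscribeMessage"
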
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/contact.py b/faststream/specification/asyncapi/v3_0_0/schema/contact.py
new file mode 100644
index 0000000000..c42e750b28
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/contact.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import Contact
+
+__all__ = ("Contact",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/docs.py b/faststream/specification/asyncapi/v3_0_0/schema/docs.py
new file mode 100644
index 0000000000..0a71688697
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/docs.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import ExternalDocs
+
+__all__ = ("ExternalDocs",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/license.py b/faststream/specification/asyncapi/v3_0_0/schema/license.py
new file mode 100644
index 0000000000..44ee4b2813
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/license.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import License
+
+__all__ = ("License",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/message.py b/faststream/specification/asyncapi/v3_0_0/schema/message.py
index 5e8517eb38..fa665082e9 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/message.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/message.py
@@ -1,3 +1,6 @@
-from faststream.specification.asyncapi.v2_6_0.schema.message import Message
+from faststream.specification.asyncapi.v2_6_0.schema.message import (
+ CorrelationId,
+ Message,
+)
-__all__ = ("Message",)
+__all__ = ("CorrelationId", "Message")
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/operations.py b/faststream/specification/asyncapi/v3_0_0/schema/operations.py
index 5583db86e5..2acb5c4f79 100644
--- a/faststream/specification/asyncapi/v3_0_0/schema/operations.py
+++ b/faststream/specification/asyncapi/v3_0_0/schema/operations.py
@@ -6,16 +6,12 @@
from faststream._internal._compat import PYDANTIC_V2
from faststream._internal.basic_types import AnyDict
-from faststream.specification import schema as spec
-from faststream.specification.asyncapi.v2_6_0.schema.tag import Tag
-from faststream.specification.asyncapi.v2_6_0.schema.utils import (
- Reference,
-)
-from faststream.specification.asyncapi.v3_0_0.schema.bindings import OperationBinding
-from faststream.specification.asyncapi.v3_0_0.schema.bindings.main import (
- operation_binding_from_spec,
-)
-from faststream.specification.asyncapi.v3_0_0.schema.channels import Channel
+from faststream.specification.schema.operation import Operation as OperationSpec
+
+from .bindings import OperationBinding
+from .channels import Channel
+from .tag import Tag
+from .utils import Reference
class Action(str, Enum):
@@ -34,24 +30,23 @@ class Operation(BaseModel):
message : message of the operation
security : security details of the operation
tags : tags associated with the operation
-
"""
action: Action
- summary: Optional[str] = None
- description: Optional[str] = None
+ summary: Optional[str]
+ description: Optional[str]
- bindings: Optional[OperationBinding] = None
+ bindings: Optional[OperationBinding]
messages: list[Reference]
channel: Union[Channel, Reference]
- security: Optional[dict[str, list[str]]] = None
+ security: Optional[dict[str, list[str]]]
# TODO
# traits
- tags: Optional[list[Union[Tag, AnyDict]]] = None
+ tags: Optional[list[Union[Tag, AnyDict]]]
if PYDANTIC_V2:
model_config = {"extra": "allow"}
@@ -62,38 +57,37 @@ class Config:
extra = "allow"
@classmethod
- def from_spec(
+ def from_sub(
cls,
- operation: spec.operation.Operation,
- action: Action,
- channel_name: str,
+ messages: list[Reference],
+ channel: Reference,
+ operation: OperationSpec,
) -> Self:
return cls(
- action=action,
- summary=operation.summary,
- description=operation.description,
- bindings=operation_binding_from_spec(operation.bindings)
- if operation.bindings
- else None,
- messages=[
- Reference(
- **{
- "$ref": f"#/channels/{channel_name}/messages/SubscribeMessage"
- if action is Action.RECEIVE
- else f"#/channels/{channel_name}/messages/Message",
- },
- ),
- ],
- channel=Reference(
- **{"$ref": f"#/channels/{channel_name}"},
- ),
- security=operation.security,
+ action=Action.RECEIVE,
+ messages=messages,
+ channel=channel,
+ bindings=OperationBinding.from_sub(operation.bindings),
+ summary=None,
+ description=None,
+ security=None,
+ tags=None,
)
-
-def from_spec(
- operation: spec.operation.Operation,
- action: Action,
- channel_name: str,
-) -> Operation:
- return Operation.from_spec(operation, action, channel_name)
+ @classmethod
+ def from_pub(
+ cls,
+ messages: list[Reference],
+ channel: Reference,
+ operation: OperationSpec,
+ ) -> Self:
+ return cls(
+ action=Action.SEND,
+ messages=messages,
+ channel=channel,
+ bindings=OperationBinding.from_pub(operation.bindings),
+ summary=None,
+ description=None,
+ security=None,
+ tags=None,
+ )
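
Operation.from_sub/from_pub above now receive pre-built references instead of composing them. A sketch of how a caller might assemble those payloads, matching the $ref shapes the removed from_spec produced (helper names are illustrative, not part of the patch):

def channel_ref(channel_name: str) -> dict:
    return {"$ref": f"#/channels/{channel_name}"}

def message_refs(channel_name: str, *, subscriber: bool) -> list[dict]:
    # Subscribers point at "SubscribeMessage", publishers at "Message",
    # matching Channel.from_sub / Channel.from_pub above.
    name = "SubscribeMessage" if subscriber else "Message"
    return [{"$ref": f"#/channels/{channel_name}/messages/{name}"}]

assert channel_ref("in:Sub") == {"$ref": "#/channels/in:Sub"}
assert message_refs("in:Sub", subscriber=True) == [
    {"$ref": "#/channels/in:Sub/messages/SubscribeMessage"}
]
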
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/tag.py b/faststream/specification/asyncapi/v3_0_0/schema/tag.py
new file mode 100644
index 0000000000..e16c4f61cd
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/tag.py
@@ -0,0 +1,3 @@
+from faststream.specification.asyncapi.v2_6_0.schema import Tag
+
+__all__ = ("Tag",)
diff --git a/faststream/specification/asyncapi/v3_0_0/schema/utils.py b/faststream/specification/asyncapi/v3_0_0/schema/utils.py
new file mode 100644
index 0000000000..c53f3ce1a0
--- /dev/null
+++ b/faststream/specification/asyncapi/v3_0_0/schema/utils.py
@@ -0,0 +1,6 @@
+from faststream.specification.asyncapi.v2_6_0.schema import Parameter, Reference
+
+__all__ = (
+ "Parameter",
+ "Reference",
+)
diff --git a/faststream/specification/proto/__init__.py b/faststream/specification/proto/__init__.py
index 4f48245445..3189e7cc8f 100644
--- a/faststream/specification/proto/__init__.py
+++ b/faststream/specification/proto/__init__.py
@@ -1,3 +1,4 @@
from .broker import ServerSpecification
+from .endpoint import EndpointSpecification
-__all__ = ("ServerSpecification",)
+__all__ = ("EndpointSpecification", "ServerSpecification")
diff --git a/faststream/specification/schema/__init__.py b/faststream/specification/schema/__init__.py
index c56c1fd570..009a6a63d7 100644
--- a/faststream/specification/schema/__init__.py
+++ b/faststream/specification/schema/__init__.py
@@ -1,11 +1,29 @@
+from .extra import (
+ Contact,
+ ContactDict,
+ ExternalDocs,
+ ExternalDocsDict,
+ License,
+ LicenseDict,
+ Tag,
+ TagDict,
+)
from .message import Message
from .operation import Operation
from .publisher import PublisherSpec
from .subscriber import SubscriberSpec
__all__ = (
+ "Contact",
+ "ContactDict",
+ "ExternalDocs",
+ "ExternalDocsDict",
+ "License",
+ "LicenseDict",
"Message",
"Operation",
"PublisherSpec",
"SubscriberSpec",
+ "Tag",
+ "TagDict",
)
diff --git a/faststream/specification/schema/bindings/amqp.py b/faststream/specification/schema/bindings/amqp.py
index 66eeaf9017..f15201bb8e 100644
--- a/faststream/specification/schema/bindings/amqp.py
+++ b/faststream/specification/schema/bindings/amqp.py
@@ -69,9 +69,11 @@ class ChannelBinding:
@dataclass
class OperationBinding:
- cc: Optional[str] = None # TODO: rename
- ack: bool = True
- reply_to: Optional[str] = None
- delivery_mode: Optional[int] = None
- mandatory: Optional[bool] = None
- priority: Optional[int] = None
+ routing_key: Optional[str]
+ queue: Queue
+ exchange: Exchange
+ ack: bool
+ reply_to: Optional[str]
+ persist: Optional[bool]
+ mandatory: Optional[bool]
+ priority: Optional[int]
diff --git a/pyproject.toml b/pyproject.toml
index 3b66109b8b..85ef4ac24d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,7 +56,7 @@ dynamic = ["version"]
dependencies = [
"anyio>=3.7.1,<5",
- "fast-depends>=2.4.0b0,<3.0.0",
+ "fast-depends[pydantic]>=3.0.0a3,<4.0.0",
"typing-extensions>=4.8.0",
]
@@ -64,9 +64,12 @@ dependencies = [
# public distributions
rabbit = ["aio-pika>=9,<10"]
-kafka = ["aiokafka>=0.9,<0.12"]
+kafka = ["aiokafka>=0.9,<0.13"]
-confluent = ["confluent-kafka>=2,<3"]
+confluent = [
+ "confluent-kafka>=2,<3; python_version < '3.13'",
+ "confluent-kafka>=2.6,<3; python_version >= '3.13'",
+]
nats = ["nats-py>=2.7.0,<=3.0.0"]
@@ -79,19 +82,21 @@ cli = [
"watchfiles>=0.15.0,<0.25.0"
]
+prometheus = ["prometheus-client>=0.20.0,<0.30.0"]
+
# dev dependencies
-optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel,cli]"]
+optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel,cli,prometheus]"]
devdocs = [
- "mkdocs-material==9.5.39",
+ "mkdocs-material==9.5.42",
"mkdocs-static-i18n==1.2.3",
"mdx-include==1.4.2",
- "mkdocstrings[python]==0.26.1",
+ "mkdocstrings[python]==0.26.2",
"mkdocs-literate-nav==0.6.1",
"mkdocs-git-revision-date-localized-plugin==1.2.9",
"mike==2.1.3", # versioning
"mkdocs-minify-plugin==0.8.0",
- "mkdocs-macros-plugin==1.2.0", # includes with variables
+ "mkdocs-macros-plugin==1.3.6", # includes with variables
"mkdocs-glightbox==0.4.0", # img zoom
"pillow", # required for mkdocs-glightbo
"cairosvg", # required for mkdocs-glightbo
@@ -100,7 +105,7 @@ devdocs = [
types = [
"faststream[optionals]",
- "mypy==1.11.2",
+ "mypy==1.12.1",
# mypy extensions
"types-Deprecated",
"types-PyYAML",
@@ -109,19 +114,21 @@ types = [
"types-redis",
"types-Pygments",
"types-docutils",
+ "types-aiofiles",
"confluent-kafka-stubs; python_version >= '3.11'",
]
lint = [
"faststream[types]",
- "ruff==0.6.9",
+ "ruff==0.7.0",
"bandit==1.7.10",
- "semgrep==1.90.0",
+ "semgrep==1.92.0",
"codespell==2.3.0",
]
test-core = [
- "coverage[toml]==7.6.1",
+ "coverage[toml]==7.6.1; python_version == '3.8'",
+ "coverage[toml]==7.6.4; python_version >= '3.9'",
"pytest==8.3.3",
"pytest-asyncio==0.24.0",
"dirty-equals==0.8.0",
@@ -129,7 +136,7 @@ test-core = [
testing = [
"faststream[test-core]",
- "fastapi==0.115.0",
+ "fastapi==0.115.2",
"pydantic-settings>=2.0.0,<3.0.0",
"httpx==0.27.2",
"PyYAML==6.0.2",
@@ -138,7 +145,7 @@ testing = [
dev = [
"faststream[optionals,lint,testing,devdocs]",
- "pre-commit==4.0.0",
+ "pre-commit==4.0.1",
"detect-secrets==1.5.0",
]
@@ -164,8 +171,6 @@ files = ["faststream", "tests/mypy"]
strict = true
python_version = "3.9"
ignore_missing_imports = true
-install_types = true
-non_interactive = true
plugins = ["pydantic.mypy"]
# from https://blog.wolt.com/engineering/2021/09/30/professional-grade-mypy-configuration/
diff --git a/scripts/start_test_env.sh b/scripts/start_test_env.sh
index a0ae1627b8..906556db41 100755
--- a/scripts/start_test_env.sh
+++ b/scripts/start_test_env.sh
@@ -2,4 +2,4 @@
source ./scripts/set_variables.sh
-docker-compose -p $DOCKER_COMPOSE_PROJECT -f docs/includes/docker-compose.yaml up -d --no-recreate
+docker compose -p $DOCKER_COMPOSE_PROJECT -f docs/includes/docker-compose.yaml up -d --no-recreate
diff --git a/scripts/stop_test_env.sh b/scripts/stop_test_env.sh
index 5d77186357..76ab4a3ee0 100755
--- a/scripts/stop_test_env.sh
+++ b/scripts/stop_test_env.sh
@@ -2,4 +2,4 @@
source ./scripts/set_variables.sh
-docker-compose -p $DOCKER_COMPOSE_PROJECT -f docs/includes/docker-compose.yaml down
+docker compose -p $DOCKER_COMPOSE_PROJECT -f docs/includes/docker-compose.yaml down
diff --git a/serve.py b/serve.py
new file mode 100644
index 0000000000..0c04f9281e
--- /dev/null
+++ b/serve.py
@@ -0,0 +1,9 @@
+from faststream import FastStream
+from faststream.rabbit import RabbitBroker
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+@app.after_startup
+async def _():
+ raise ValueError
\ No newline at end of file
diff --git a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
index ed5736c601..3b852203be 100644
--- a/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
+++ b/tests/a_docs/getting_started/asyncapi/asyncapi_customization/test_handler.py
@@ -1,3 +1,5 @@
+from dirty_equals import IsPartialDict
+
from docs.docs_src.getting_started.asyncapi.asyncapi_customization.custom_handler import (
docs_obj,
)
@@ -6,23 +8,31 @@
def test_handler_customization() -> None:
schema = docs_obj.to_jsonable()
- assert schema["channels"] == {
- "input_data:Consume": {
- "description": "Consumer function\n\n Args:\n msg: input msg\n ",
- "servers": ["development"],
- "bindings": {"kafka": {"topic": "input_data", "bindingVersion": "0.4.0"}},
- "subscribe": {
- "message": {"$ref": "#/components/messages/input_data:Consume:Message"},
- },
+ (subscriber_key, subscriber_value), (publisher_key, publisher_value) = schema[
+ "channels"
+ ].items()
+
+ assert subscriber_key == "input_data:Consume", subscriber_key
+ assert subscriber_value == IsPartialDict({
+ "servers": ["development"],
+ "bindings": {"kafka": {"topic": "input_data", "bindingVersion": "0.4.0"}},
+ "subscribe": {
+ "message": {"$ref": "#/components/messages/input_data:Consume:Message"},
},
- "output_data:Produce": {
- "description": "My publisher description",
- "servers": ["development"],
- "bindings": {"kafka": {"topic": "output_data", "bindingVersion": "0.4.0"}},
- "publish": {
- "message": {
- "$ref": "#/components/messages/output_data:Produce:Message"
- },
- },
+ }), subscriber_value
+ desc = subscriber_value["description"]
+ assert ( # noqa: PT018
+ "Consumer function\n\n" in desc
+ and "Args:\n" in desc
+ and " msg: input msg" in desc
+ ), desc
+
+ assert publisher_key == "output_data:Produce", publisher_key
+ assert publisher_value == {
+ "description": "My publisher description",
+ "servers": ["development"],
+ "bindings": {"kafka": {"topic": "output_data", "bindingVersion": "0.4.0"}},
+ "publish": {
+ "message": {"$ref": "#/components/messages/output_data:Produce:Message"}
},
}
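
dirty_equals.IsPartialDict, used in the rewritten test above, matches a dict when the given subset of keys is present with the expected values, ignoring any extras; a minimal example:

from dirty_equals import IsPartialDict

assert {"a": 1, "b": 2} == IsPartialDict({"a": 1})
assert {"b": 2} != IsPartialDict({"a": 1})
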
diff --git a/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py b/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py
index cb6aa4d83e..3615e32d80 100644
--- a/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py
+++ b/tests/a_docs/getting_started/cli/confluent/test_confluent_context.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
from faststream.confluent import TestKafkaBroker
from tests.marks import pydantic_v2
from tests.mocks import mock_pydantic_settings_env
@@ -13,4 +13,4 @@ async def test() -> None:
from docs.docs_src.getting_started.cli.confluent_context import app, broker
async with TestKafkaBroker(broker), TestApp(app, {"env": ""}):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py b/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py
index a3d1d24557..9b26e90f34 100644
--- a/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py
+++ b/tests/a_docs/getting_started/cli/kafka/test_kafka_context.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
from faststream.kafka import TestKafkaBroker
from tests.marks import pydantic_v2
from tests.mocks import mock_pydantic_settings_env
@@ -13,4 +13,4 @@ async def test() -> None:
from docs.docs_src.getting_started.cli.kafka_context import app, broker
async with TestKafkaBroker(broker), TestApp(app, {"env": ""}):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/cli/nats/test_nats_context.py b/tests/a_docs/getting_started/cli/nats/test_nats_context.py
index f562be059b..fcb8ed5bb9 100644
--- a/tests/a_docs/getting_started/cli/nats/test_nats_context.py
+++ b/tests/a_docs/getting_started/cli/nats/test_nats_context.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
from faststream.nats import TestNatsBroker
from tests.marks import pydantic_v2
from tests.mocks import mock_pydantic_settings_env
@@ -13,4 +13,4 @@ async def test() -> None:
from docs.docs_src.getting_started.cli.nats_context import app, broker
async with TestNatsBroker(broker), TestApp(app, {"env": ""}):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py b/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py
index 99d8cb9951..2fef4df0cd 100644
--- a/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py
+++ b/tests/a_docs/getting_started/cli/rabbit/test_rabbit_context.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
from faststream.rabbit import TestRabbitBroker
from tests.marks import pydantic_v2
from tests.mocks import mock_pydantic_settings_env
@@ -16,6 +16,6 @@ async def test() -> None:
async with TestRabbitBroker(broker), TestApp(app, {"env": ".env"}):
assert (
- context.get("settings").host
+ app.context.get("settings").host
== "amqp://guest:guest@localhost:5673/" # pragma: allowlist secret
)
diff --git a/tests/a_docs/getting_started/cli/redis/test_redis_context.py b/tests/a_docs/getting_started/cli/redis/test_redis_context.py
index 283934f6f3..07536cbfdc 100644
--- a/tests/a_docs/getting_started/cli/redis/test_redis_context.py
+++ b/tests/a_docs/getting_started/cli/redis/test_redis_context.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
from faststream.redis import TestRedisBroker
from tests.marks import pydantic_v2
from tests.mocks import mock_pydantic_settings_env
@@ -13,4 +13,4 @@ async def test() -> None:
from docs.docs_src.getting_started.cli.redis_context import app, broker
async with TestRedisBroker(broker), TestApp(app, {"env": ".env"}):
- assert context.get("settings").host == "redis://localhost:6380"
+ assert app.context.get("settings").host == "redis://localhost:6380"
diff --git a/tests/a_docs/getting_started/context/test_initial.py b/tests/a_docs/getting_started/context/test_initial.py
index 8291ffca80..7b973b8dfc 100644
--- a/tests/a_docs/getting_started/context/test_initial.py
+++ b/tests/a_docs/getting_started/context/test_initial.py
@@ -1,6 +1,5 @@
import pytest
-from faststream import context
from tests.marks import (
python39,
require_aiokafka,
@@ -22,8 +21,8 @@ async def test_kafka() -> None:
await br.publish("", "test-topic")
await br.publish("", "test-topic")
- assert context.get("collector") == ["", ""]
- context.clear()
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
@pytest.mark.asyncio()
@@ -37,8 +36,8 @@ async def test_confluent() -> None:
await br.publish("", "test-topic")
await br.publish("", "test-topic")
- assert context.get("collector") == ["", ""]
- context.clear()
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
@pytest.mark.asyncio()
@@ -52,8 +51,8 @@ async def test_rabbit() -> None:
await br.publish("", "test-queue")
await br.publish("", "test-queue")
- assert context.get("collector") == ["", ""]
- context.clear()
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
@pytest.mark.asyncio()
@@ -67,8 +66,8 @@ async def test_nats() -> None:
await br.publish("", "test-subject")
await br.publish("", "test-subject")
- assert context.get("collector") == ["", ""]
- context.clear()
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
@pytest.mark.asyncio()
@@ -82,5 +81,5 @@ async def test_redis() -> None:
await br.publish("", "test-channel")
await br.publish("", "test-channel")
- assert context.get("collector") == ["", ""]
- context.clear()
+ assert broker.context.get("collector") == ["", ""]
+ broker.context.clear()
diff --git a/tests/a_docs/getting_started/lifespan/test_basic.py b/tests/a_docs/getting_started/lifespan/test_basic.py
index 6b4e98cbc9..97706e94ab 100644
--- a/tests/a_docs/getting_started/lifespan/test_basic.py
+++ b/tests/a_docs/getting_started/lifespan/test_basic.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
from tests.marks import (
pydantic_v2,
require_aiokafka,
@@ -22,7 +22,7 @@ async def test_rabbit_basic_lifespan() -> None:
from docs.docs_src.getting_started.lifespan.rabbit.basic import app, broker
async with TestRabbitBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
@pydantic_v2
@@ -35,7 +35,7 @@ async def test_kafka_basic_lifespan() -> None:
from docs.docs_src.getting_started.lifespan.kafka.basic import app, broker
async with TestKafkaBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
@pydantic_v2
@@ -48,7 +48,7 @@ async def test_confluent_basic_lifespan() -> None:
from docs.docs_src.getting_started.lifespan.confluent.basic import app, broker
async with TestConfluentKafkaBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
@pydantic_v2
@@ -61,7 +61,7 @@ async def test_nats_basic_lifespan() -> None:
from docs.docs_src.getting_started.lifespan.nats.basic import app, broker
async with TestNatsBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
@pydantic_v2
@@ -74,4 +74,4 @@ async def test_redis_basic_lifespan() -> None:
from docs.docs_src.getting_started.lifespan.redis.basic import app, broker
async with TestRedisBroker(broker), TestApp(app):
- assert context.get("settings").host == "localhost"
+ assert app.context.get("settings").host == "localhost"
diff --git a/tests/a_docs/getting_started/lifespan/test_multi.py b/tests/a_docs/getting_started/lifespan/test_multi.py
index 02bbfedfae..8d4b0e2a98 100644
--- a/tests/a_docs/getting_started/lifespan/test_multi.py
+++ b/tests/a_docs/getting_started/lifespan/test_multi.py
@@ -1,6 +1,6 @@
import pytest
-from faststream import TestApp, context
+from faststream import TestApp
@pytest.mark.asyncio()
@@ -8,4 +8,4 @@ async def test_multi_lifespan() -> None:
from docs.docs_src.getting_started.lifespan.multiple import app
async with TestApp(app):
- assert context.get("field") == 1
+ assert app.context.get("field") == 1
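
The test migrations above replace the process-global `from faststream import context` with the instance-scoped `app.context` / `broker.context`. A schematic of the new access pattern with stand-in classes (FastStream's real ContextRepo API is richer; the names here are assumptions for illustration):

class ContextRepo:
    def __init__(self) -> None:
        self._storage: dict = {}

    def get(self, key, default=None):
        return self._storage.get(key, default)

    def set_global(self, key, value) -> None:
        self._storage[key] = value

class App:
    def __init__(self) -> None:
        self.context = ContextRepo()  # scoped to this app, not the process

app = App()
app.context.set_global("field", 1)
assert app.context.get("field") == 1
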
diff --git a/tests/a_docs/getting_started/subscription/test_annotated.py b/tests/a_docs/getting_started/subscription/test_annotated.py
index d3b608277c..0c9d24a927 100644
--- a/tests/a_docs/getting_started/subscription/test_annotated.py
+++ b/tests/a_docs/getting_started/subscription/test_annotated.py
@@ -1,7 +1,8 @@
-from typing import Any, TypeAlias
+from typing import Any
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
+from typing_extensions import TypeAlias
from faststream._internal.broker.broker import BrokerUsecase
from faststream._internal.subscriber.usecase import SubscriberUsecase
diff --git a/tests/asgi/testcase.py b/tests/asgi/testcase.py
index 47fdafe0e4..438bd60d49 100644
--- a/tests/asgi/testcase.py
+++ b/tests/asgi/testcase.py
@@ -1,4 +1,5 @@
from typing import Any
+from unittest.mock import AsyncMock
import pytest
from starlette.testclient import TestClient
@@ -22,14 +23,14 @@ def get_test_broker(self, broker) -> Any:
raise NotImplementedError
def test_not_found(self) -> None:
- app = AsgiFastStream()
+ app = AsgiFastStream(AsyncMock())
with TestClient(app) as client:
response = client.get("/")
assert response.status_code == 404
def test_ws_not_found(self) -> None:
- app = AsgiFastStream()
+ app = AsgiFastStream(AsyncMock())
with TestClient(app) as client: # noqa: SIM117
with pytest.raises(WebSocketDisconnect):
@@ -40,6 +41,7 @@ def test_asgi_ping_unhealthy(self) -> None:
broker = self.get_broker()
app = AsgiFastStream(
+ AsyncMock(),
asgi_routes=[
("/health", make_ping_asgi(broker, timeout=5.0)),
],
@@ -47,7 +49,7 @@ def test_asgi_ping_unhealthy(self) -> None:
with TestClient(app) as client:
response = client.get("/health")
- assert response.status_code == 500
+ assert response.status_code == 500, response.status_code
@pytest.mark.asyncio()
async def test_asgi_ping_healthy(self) -> None:
@@ -68,7 +70,8 @@ async def test_asyncapi_asgi(self) -> None:
broker = self.get_broker()
app = AsgiFastStream(
- broker, asgi_routes=[("/docs", make_asyncapi_asgi(AsyncAPI(broker)))]
+ broker,
+ asgi_routes=[("/docs", make_asyncapi_asgi(AsyncAPI(broker)))],
)
async with self.get_test_broker(broker):
@@ -82,7 +85,7 @@ def test_get_decorator(self) -> None:
async def some_handler(scope) -> AsgiResponse:
return AsgiResponse(body=b"test", status_code=200)
- app = AsgiFastStream(asgi_routes=[("/test", some_handler)])
+ app = AsgiFastStream(AsyncMock(), asgi_routes=[("/test", some_handler)])
with TestClient(app) as client:
response = client.get("/test")
diff --git a/tests/asyncapi/base/v2_6_0/arguments.py b/tests/asyncapi/base/v2_6_0/arguments.py
index 685be4bd6f..aaac817e85 100644
--- a/tests/asyncapi/base/v2_6_0/arguments.py
+++ b/tests/asyncapi/base/v2_6_0/arguments.py
@@ -66,7 +66,7 @@ async def handle(msg) -> None:
assert key == "custom_name"
assert schema["channels"][key]["description"] == "Test description.", schema[
"channels"
- ][key]["description"]
+ ][key]
def test_empty(self) -> None:
broker = self.broker_class()
diff --git a/tests/asyncapi/base/v3_0_0/fastapi.py b/tests/asyncapi/base/v3_0_0/fastapi.py
index cdc986b568..edf8cfe993 100644
--- a/tests/asyncapi/base/v3_0_0/fastapi.py
+++ b/tests/asyncapi/base/v3_0_0/fastapi.py
@@ -26,7 +26,6 @@ async def test_fastapi_full_information(self) -> None:
)
app = FastAPI(
- lifespan=broker.lifespan_context,
title="CustomApp",
version="1.1.1",
description="Test description",
@@ -77,7 +76,7 @@ async def test_fastapi_asyncapi_routes(self) -> None:
@router.subscriber("test")
async def handler() -> None: ...
- app = FastAPI(lifespan=router.lifespan_context)
+ app = FastAPI()
app.include_router(router)
async with self.broker_wrapper(router.broker):
@@ -107,7 +106,7 @@ async def handler() -> None: ...
async def test_fastapi_asyncapi_not_fount(self) -> None:
broker = self.router_factory(include_in_schema=False)
- app = FastAPI(lifespan=broker.lifespan_context)
+ app = FastAPI()
app.include_router(broker)
async with self.broker_wrapper(broker.broker):
@@ -125,7 +124,7 @@ async def test_fastapi_asyncapi_not_fount(self) -> None:
async def test_fastapi_asyncapi_not_fount_by_url(self) -> None:
broker = self.router_factory(schema_url=None)
- app = FastAPI(lifespan=broker.lifespan_context)
+ app = FastAPI()
app.include_router(broker)
async with self.broker_wrapper(broker.broker):
diff --git a/tests/asyncapi/confluent/v2_6_0/test_connection.py b/tests/asyncapi/confluent/v2_6_0/test_connection.py
index 56ad2af682..368bbc00dd 100644
--- a/tests/asyncapi/confluent/v2_6_0/test_connection.py
+++ b/tests/asyncapi/confluent/v2_6_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.confluent import KafkaBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/confluent/v3_0_0/test_connection.py b/tests/asyncapi/confluent/v3_0_0/test_connection.py
index d49503ef9a..63b9c51da3 100644
--- a/tests/asyncapi/confluent/v3_0_0/test_connection.py
+++ b/tests/asyncapi/confluent/v3_0_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.confluent import KafkaBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/kafka/v2_6_0/test_app.py b/tests/asyncapi/kafka/v2_6_0/test_app.py
index 77470ef1cd..2bd9b5a916 100644
--- a/tests/asyncapi/kafka/v2_6_0/test_app.py
+++ b/tests/asyncapi/kafka/v2_6_0/test_app.py
@@ -1,9 +1,6 @@
from faststream.kafka import KafkaBroker
+from faststream.specification import Contact, ExternalDocs, License, Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.contact import Contact
-from faststream.specification.schema.docs import ExternalDocs
-from faststream.specification.schema.license import License
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/kafka/v2_6_0/test_connection.py b/tests/asyncapi/kafka/v2_6_0/test_connection.py
index cc7b61114b..2107e3882b 100644
--- a/tests/asyncapi/kafka/v2_6_0/test_connection.py
+++ b/tests/asyncapi/kafka/v2_6_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.kafka import KafkaBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/kafka/v3_0_0/test_connection.py b/tests/asyncapi/kafka/v3_0_0/test_connection.py
index 280cb798d1..e1fb6cfaab 100644
--- a/tests/asyncapi/kafka/v3_0_0/test_connection.py
+++ b/tests/asyncapi/kafka/v3_0_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.kafka import KafkaBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/nats/v2_6_0/test_connection.py b/tests/asyncapi/nats/v2_6_0/test_connection.py
index 8cb4110d78..486bbb8033 100644
--- a/tests/asyncapi/nats/v2_6_0/test_connection.py
+++ b/tests/asyncapi/nats/v2_6_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.nats import NatsBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/nats/v3_0_0/test_connection.py b/tests/asyncapi/nats/v3_0_0/test_connection.py
index f4913252ef..f88fc0fb83 100644
--- a/tests/asyncapi/nats/v3_0_0/test_connection.py
+++ b/tests/asyncapi/nats/v3_0_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.nats import NatsBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/rabbit/v2_6_0/test_publisher.py b/tests/asyncapi/rabbit/v2_6_0/test_publisher.py
index c9c8ff7ea7..abe3255a3d 100644
--- a/tests/asyncapi/rabbit/v2_6_0/test_publisher.py
+++ b/tests/asyncapi/rabbit/v2_6_0/test_publisher.py
@@ -106,6 +106,14 @@ async def handle(msg) -> None: ...
},
},
"publish": {
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.2.0",
+ "deliveryMode": 1,
+ "mandatory": True,
+ },
+ },
"message": {
"$ref": "#/components/messages/_:test-ex:Publisher:Message",
},
@@ -185,4 +193,4 @@ async def handle(msg) -> None: ...
},
"servers": ["development"],
},
- }
+ }, schema["channels"]
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_connection.py b/tests/asyncapi/rabbit/v3_0_0/test_connection.py
index 0403cef9c5..971a89afec 100644
--- a/tests/asyncapi/rabbit/v3_0_0/test_connection.py
+++ b/tests/asyncapi/rabbit/v3_0_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.rabbit import RabbitBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/rabbit/v3_0_0/test_publisher.py b/tests/asyncapi/rabbit/v3_0_0/test_publisher.py
index 1456b5b86d..a108270da5 100644
--- a/tests/asyncapi/rabbit/v3_0_0/test_publisher.py
+++ b/tests/asyncapi/rabbit/v3_0_0/test_publisher.py
@@ -141,13 +141,19 @@ async def handle(msg) -> None: ...
assert schema["operations"] == {
"_:test-ex:Publisher": {
"action": "send",
- "channel": {
- "$ref": "#/channels/_:test-ex:Publisher",
+ "bindings": {
+ "amqp": {
+ "ack": True,
+ "bindingVersion": "0.3.0",
+ "deliveryMode": 1,
+ "mandatory": True,
+ }
},
+ "channel": {"$ref": "#/channels/_:test-ex:Publisher"},
"messages": [
- {"$ref": "#/channels/_:test-ex:Publisher/messages/Message"},
+ {"$ref": "#/channels/_:test-ex:Publisher/messages/Message"}
],
- },
+ }
}
def test_reusable_exchange(self) -> None:
diff --git a/tests/asyncapi/redis/v2_6_0/test_connection.py b/tests/asyncapi/redis/v2_6_0/test_connection.py
index 221e4cd430..194371e767 100644
--- a/tests/asyncapi/redis/v2_6_0/test_connection.py
+++ b/tests/asyncapi/redis/v2_6_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.redis import RedisBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/asyncapi/redis/v3_0_0/test_connection.py b/tests/asyncapi/redis/v3_0_0/test_connection.py
index 51d7224c50..968e67b464 100644
--- a/tests/asyncapi/redis/v3_0_0/test_connection.py
+++ b/tests/asyncapi/redis/v3_0_0/test_connection.py
@@ -1,6 +1,6 @@
from faststream.redis import RedisBroker
+from faststream.specification import Tag
from faststream.specification.asyncapi import AsyncAPI
-from faststream.specification.schema.tag import Tag
def test_base() -> None:
diff --git a/tests/brokers/base/consume.py b/tests/brokers/base/consume.py
index f6d6510a67..4fdb0e118d 100644
--- a/tests/brokers/base/consume.py
+++ b/tests/brokers/base/consume.py
@@ -1,5 +1,4 @@
import asyncio
-from typing import NoReturn
from unittest.mock import MagicMock
import anyio
@@ -17,8 +16,8 @@ class BrokerConsumeTestcase(BaseTestcaseConfig):
async def test_consume(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
consume_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@@ -201,12 +200,13 @@ async def handler2(m) -> None:
async def test_consume_validate_false(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(
apply_types=True,
- validate=False,
+ serializer=None,
)
class Foo(BaseModel):
@@ -241,8 +241,9 @@ async def handler(
async def test_dynamic_sub(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
async def subscriber(m) -> None:
@@ -335,15 +336,16 @@ async def test_get_one_timeout(
async def test_stop_consume_exc(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@consume_broker.subscriber(*args, **kwargs)
- def subscriber(m) -> NoReturn:
+ def subscriber(m):
mock()
event.set()
raise StopConsume
diff --git a/tests/brokers/base/fastapi.py b/tests/brokers/base/fastapi.py
index 0523511cf1..602a040840 100644
--- a/tests/brokers/base/fastapi.py
+++ b/tests/brokers/base/fastapi.py
@@ -8,7 +8,7 @@
from fastapi.exceptions import RequestValidationError
from fastapi.testclient import TestClient
-from faststream import Response, context
+from faststream import Response
from faststream._internal.broker.broker import BrokerUsecase
from faststream._internal.broker.router import BrokerRouter
from faststream._internal.fastapi.context import Context
@@ -24,9 +24,9 @@ class FastAPITestcase(BaseTestcaseConfig):
router_class: type[StreamRouter[BrokerUsecase]]
broker_router_class: type[BrokerRouter[Any]]
- async def test_base_real(
- self, mock: Mock, queue: str, event: asyncio.Event
- ) -> None:
+ async def test_base_real(self, mock: Mock, queue: str) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@@ -50,8 +50,12 @@ async def hello(msg):
mock.assert_called_with("hi")
async def test_background(
- self, mock: Mock, queue: str, event: asyncio.Event
+ self,
+ mock: Mock,
+ queue: str,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
def task(msg):
@@ -77,8 +81,11 @@ async def hello(msg, tasks: BackgroundTasks) -> None:
assert event.is_set()
mock.assert_called_with("hi")
- async def test_context(self, mock: Mock, queue: str, event: asyncio.Event) -> None:
+ async def test_context(self, mock: Mock, queue: str) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
+ context = router.context
context_key = "message.headers"
@@ -86,14 +93,19 @@ async def test_context(self, mock: Mock, queue: str, event: asyncio.Event) -> No
@router.subscriber(*args, **kwargs)
async def hello(msg=Context(context_key)):
- event.set()
- return mock(msg == context.resolve(context_key))
+ try:
+ mock(msg == context.resolve(context_key) and msg["1"] == "1")
+ finally:
+ event.set()
+ router._setup()
async with router.broker:
await router.broker.start()
await asyncio.wait(
(
- asyncio.create_task(router.broker.publish("", queue)),
+ asyncio.create_task(
+ router.broker.publish("", queue, headers={"1": "1"})
+ ),
asyncio.create_task(event.wait()),
),
timeout=self.timeout,
@@ -102,8 +114,11 @@ async def hello(msg=Context(context_key)):
assert event.is_set()
mock.assert_called_with(True)
- async def test_initial_context(self, queue: str, event: asyncio.Event) -> None:
+ async def test_initial_context(self, queue: str) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
+ context = router.context
args, kwargs = self.get_subscriber_params(queue)
@@ -113,6 +128,7 @@ async def hello(msg: int, data=Context(queue, initial=set)) -> None:
if len(data) == 2:
event.set()
+ router._setup()
async with router.broker:
await router.broker.start()
await asyncio.wait(
@@ -128,10 +144,10 @@ async def hello(msg: int, data=Context(queue, initial=set)) -> None:
assert context.get(queue) == {1, 2}
context.reset_global(queue)
- async def test_double_real(
- self, mock: Mock, queue: str, event: asyncio.Event
- ) -> None:
+ async def test_double_real(self, mock: Mock, queue: str) -> None:
+ event = asyncio.Event()
event2 = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@@ -168,8 +184,9 @@ async def test_base_publisher_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue)
@@ -526,7 +543,6 @@ async def hello_router2() -> str:
async def test_dependency_overrides(self, mock: Mock, queue: str) -> None:
router = self.router_class()
- router2 = self.router_class()
def dep1() -> None:
mock.not_call()
@@ -539,11 +555,10 @@ def dep2() -> None:
args, kwargs = self.get_subscriber_params(queue)
- @router2.subscriber(*args, **kwargs)
+ @router.subscriber(*args, **kwargs)
async def hello_router2(dep: None = Depends(dep1)) -> str:
return "hi"
- router.include_router(router2)
app.include_router(router)
async with self.patch_broker(router.broker) as br:
@@ -559,3 +574,29 @@ async def hello_router2(dep: None = Depends(dep1)) -> str:
mock.assert_called_once()
assert not mock.not_call.called
+
+ async def test_nested_router(self, queue: str) -> None:
+ router = self.router_class()
+ router2 = self.router_class()
+
+ app = FastAPI()
+
+ args, kwargs = self.get_subscriber_params(queue)
+
+ @router2.subscriber(*args, **kwargs)
+ async def hello_router2() -> str:
+ return "hi"
+
+ router.include_router(router2)
+ app.include_router(router)
+
+ async with self.patch_broker(router.broker) as br:
+ with TestClient(app) as client:
+ assert client.app_state["broker"] is br
+
+ r = await br.request(
+ "hi",
+ queue,
+ timeout=0.5,
+ )
+ assert r.body == b"hi"
diff --git a/tests/brokers/base/middlewares.py b/tests/brokers/base/middlewares.py
index 22111f4ebc..47e07c6398 100644
--- a/tests/brokers/base/middlewares.py
+++ b/tests/brokers/base/middlewares.py
@@ -1,5 +1,4 @@
import asyncio
-from typing import NoReturn
from unittest.mock import Mock, call
import pytest
@@ -8,6 +7,7 @@
from faststream._internal.basic_types import DecodedMessage
from faststream.exceptions import SkipMessage
from faststream.middlewares import BaseMiddleware, ExceptionMiddleware
+from faststream.response import PublishCommand
from .basic import BaseTestcaseConfig
@@ -16,10 +16,11 @@
class LocalMiddlewareTestcase(BaseTestcaseConfig):
async def test_subscriber_middleware(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
async def mid(call_next, msg):
mock.start(await msg.decode())
result = await call_next(msg)
@@ -54,10 +55,11 @@ async def handler(m) -> str:
async def test_publisher_middleware(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
async def mid(call_next, msg, **kwargs):
mock.enter()
result = await call_next(msg, **kwargs)
@@ -194,7 +196,9 @@ async def handler2(m) -> str:
mock.end.assert_called_once()
assert mock.call_count == 2
- async def test_error_traceback(self, queue: str, mock: Mock, event) -> None:
+ async def test_error_traceback(self, queue: str, mock: Mock) -> None:
+ event = asyncio.Event()
+
async def mid(call_next, msg):
try:
result = await call_next(msg)
@@ -209,7 +213,7 @@ async def mid(call_next, msg):
args, kwargs = self.get_subscriber_params(queue, middlewares=(mid,))
@broker.subscriber(*args, **kwargs)
- async def handler2(m) -> NoReturn:
+ async def handler2(m):
event.set()
raise ValueError
@@ -232,10 +236,11 @@ async def handler2(m) -> NoReturn:
class MiddlewareTestcase(LocalMiddlewareTestcase):
async def test_global_middleware(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
class mid(BaseMiddleware): # noqa: N801
async def on_receive(self):
mock.start(self.msg)
@@ -272,10 +277,11 @@ async def handler(m) -> str:
async def test_add_global_middleware(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
class mid(BaseMiddleware): # noqa: N801
async def on_receive(self):
mock.start(self.msg)
@@ -328,11 +334,13 @@ async def test_patch_publish(
self,
queue: str,
mock: Mock,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
class Mid(BaseMiddleware):
- async def on_publish(self, msg: str, *args, **kwargs) -> str:
- return msg * 2
+ async def on_publish(self, msg: PublishCommand) -> PublishCommand:
+ msg.body *= 2
+ return msg
broker = self.get_broker(middlewares=(Mid,))
@@ -365,16 +373,16 @@ async def handler_resp(m) -> None:
async def test_global_publisher_middleware(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
class Mid(BaseMiddleware):
- async def on_publish(self, msg: str, *args, **kwargs) -> str:
- data = msg * 2
- assert args or kwargs
- mock.enter(data)
- return data
+ async def on_publish(self, msg: PublishCommand) -> PublishCommand:
+ msg.body *= 2
+ mock.enter(msg.body)
+ return msg
async def after_publish(self, *args, **kwargs) -> None:
mock.end()
@@ -413,10 +421,11 @@ async def handler(m):
class ExceptionMiddlewareTestcase(BaseTestcaseConfig):
async def test_exception_middleware_default_msg(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ValueError, publish=True)
@@ -429,7 +438,7 @@ async def value_error_handler(exc) -> str:
@broker.subscriber(*args, **kwargs)
@broker.publisher(queue + "1")
- async def subscriber1(m) -> NoReturn:
+ async def subscriber1(m):
raise ValueError
args, kwargs = self.get_subscriber_params(queue + "1")
@@ -455,14 +464,15 @@ async def subscriber2(msg=Context("message")) -> None:
async def test_exception_middleware_skip_msg(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ValueError, publish=True)
- async def value_error_handler(exc) -> NoReturn:
+ async def value_error_handler(exc):
event.set()
raise SkipMessage
@@ -471,7 +481,7 @@ async def value_error_handler(exc) -> NoReturn:
@broker.subscriber(*args, **kwargs)
@broker.publisher(queue + "1")
- async def subscriber1(m) -> NoReturn:
+ async def subscriber1(m):
raise ValueError
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@@ -495,10 +505,11 @@ async def subscriber2(msg=Context("message")) -> None:
async def test_exception_middleware_do_not_catch_skip_msg(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(Exception)
@@ -509,7 +520,7 @@ async def value_error_handler(exc) -> None:
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def subscriber(m) -> NoReturn:
+ async def subscriber(m):
event.set()
raise SkipMessage
@@ -529,14 +540,15 @@ async def subscriber(m) -> NoReturn:
async def test_exception_middleware_reraise(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ValueError, publish=True)
- async def value_error_handler(exc) -> NoReturn:
+ async def value_error_handler(exc):
event.set()
raise exc
@@ -545,7 +557,7 @@ async def value_error_handler(exc) -> NoReturn:
@broker.subscriber(*args, **kwargs)
@broker.publisher(queue + "1")
- async def subscriber1(m) -> NoReturn:
+ async def subscriber1(m):
raise ValueError
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@@ -569,10 +581,11 @@ async def subscriber2(msg=Context("message")) -> None:
async def test_exception_middleware_different_handler(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = ExceptionMiddleware()
@mid.add_handler(ZeroDivisionError, publish=True)
@@ -590,14 +603,14 @@ async def value_error_handler(exc) -> str:
@broker.subscriber(*args, **kwargs)
@publisher
- async def subscriber1(m) -> NoReturn:
+ async def subscriber1(m):
raise ZeroDivisionError
args2, kwargs2 = self.get_subscriber_params(queue + "1")
@broker.subscriber(*args2, **kwargs2)
@publisher
- async def subscriber2(m) -> NoReturn:
+ async def subscriber2(m):
raise ValueError
args3, kwargs3 = self.get_subscriber_params(queue + "2")
@@ -649,10 +662,11 @@ async def value_error_handler(exc) -> str:
async def test_exception_middleware_decoder_error(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
async def decoder(
msg,
original_decoder,
@@ -670,7 +684,7 @@ async def value_error_handler(exc) -> None:
args, kwargs = self.get_subscriber_params(queue)
@broker.subscriber(*args, **kwargs)
- async def subscriber1(m) -> NoReturn:
+ async def subscriber1(m):
raise ZeroDivisionError
async with self.patch_broker(broker) as br:
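
Note on the hunks above: `BaseMiddleware.on_publish` now receives a single `PublishCommand` and must return one, replacing the old `(msg, *args, **kwargs) -> msg` form. A minimal sketch of the new contract, mirroring the test code (class name illustrative):

```python
from faststream import BaseMiddleware
from faststream.response import PublishCommand


class DoubleBody(BaseMiddleware):
    async def on_publish(self, msg: PublishCommand) -> PublishCommand:
        msg.body *= 2  # the outgoing payload now lives on the command object
        return msg
```
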
diff --git a/tests/brokers/base/parser.py b/tests/brokers/base/parser.py
index 143dc24b75..859c508c53 100644
--- a/tests/brokers/base/parser.py
+++ b/tests/brokers/base/parser.py
@@ -12,8 +12,9 @@ async def test_local_parser(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
async def custom_parser(msg, original):
@@ -45,8 +46,9 @@ async def test_local_sync_decoder(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
def custom_decoder(msg):
@@ -77,8 +79,9 @@ async def test_global_sync_decoder(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
def custom_decoder(msg):
mock(msg.body)
return msg
@@ -107,10 +110,11 @@ async def handle(m) -> None:
async def test_local_parser_no_share_between_subscribers(
self,
- event: asyncio.Event,
mock: Mock,
queue: str,
) -> None:
+ event = asyncio.Event()
+
event2 = asyncio.Event()
broker = self.get_broker()
@@ -151,8 +155,9 @@ async def test_local_parser_no_share_between_handlers(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@@ -196,8 +201,9 @@ async def test_global_parser(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
async def custom_parser(msg, original):
msg = await original(msg)
mock(msg.body)
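
The recurring change across these base test modules drops the shared `event: asyncio.Event` pytest fixture in favor of an event constructed inside each test, so the event is always bound to the test's own running loop. The wait pattern around it is unchanged; a condensed sketch (helper name illustrative):

```python
import asyncio


async def run_until_handled(publish_coro, event: asyncio.Event) -> None:
    # Race the publish against the handler-set event, with a timeout so a
    # broken subscriber cannot hang the suite.
    await asyncio.wait(
        (
            asyncio.create_task(publish_coro),
            asyncio.create_task(event.wait()),
        ),
        timeout=3,
    )
    assert event.is_set()
```
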
diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py
index 75154bd9f2..feac1efac4 100644
--- a/tests/brokers/base/publish.py
+++ b/tests/brokers/base/publish.py
@@ -141,9 +141,10 @@ async def test_serialize(
message,
message_type,
expected_message,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -171,9 +172,10 @@ async def handler(m: message_type) -> None:
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -215,9 +217,10 @@ async def m_next(msg=Context("message")) -> None:
async def test_unwrap_dict(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -250,8 +253,9 @@ async def test_unwrap_list(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -278,9 +282,10 @@ async def m(a: int, b: int, *args: tuple[int, ...]) -> None:
async def test_base_publisher(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
@@ -314,9 +319,10 @@ async def resp(msg) -> None:
async def test_publisher_object(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
publisher = pub_broker.publisher(queue + "resp")
@@ -352,9 +358,10 @@ async def resp(msg) -> None:
async def test_publish_manual(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
publisher = pub_broker.publisher(queue + "resp")
@@ -491,9 +498,10 @@ async def resp() -> None:
async def test_reply_to(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue + "reply")
@@ -529,9 +537,10 @@ async def handler(m):
async def test_no_reply(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
class Mid(BaseMiddleware):
async def after_processed(self, *args: Any, **kwargs: Any):
event.set()
@@ -573,9 +582,10 @@ async def handler(m):
async def test_publisher_after_start(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
diff --git a/tests/brokers/base/requests.py b/tests/brokers/base/requests.py
index a9beb8f764..16414f47d3 100644
--- a/tests/brokers/base/requests.py
+++ b/tests/brokers/base/requests.py
@@ -1,4 +1,4 @@
-from typing import NoReturn
+import asyncio
import anyio
import pytest
@@ -7,10 +7,10 @@
class RequestsTestcase(BaseTestcaseConfig):
- def get_middleware(self, **kwargs) -> NoReturn:
+ def get_middleware(self, **kwargs):
raise NotImplementedError
- def get_router(self, **kwargs) -> NoReturn:
+ def get_router(self, **kwargs):
raise NotImplementedError
async def test_request_timeout(self, queue: str) -> None:
@@ -26,7 +26,7 @@ async def handler(msg) -> str:
async with self.patch_broker(broker):
await broker.start()
- with pytest.raises(TimeoutError):
+ with pytest.raises((TimeoutError, asyncio.TimeoutError)):
await broker.request(
None,
queue,
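
The widened `pytest.raises((TimeoutError, asyncio.TimeoutError))` accounts for a stdlib detail: on Python 3.11+ `asyncio.TimeoutError` is an alias of the builtin `TimeoutError`, while on 3.9 and 3.10 (both still in the test matrix) they are distinct classes:

```python
import asyncio
import sys

# True on 3.11+, False on 3.9/3.10 — hence the tuple in the test.
print((asyncio.TimeoutError is TimeoutError) == (sys.version_info >= (3, 11)))
```
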
diff --git a/tests/brokers/base/router.py b/tests/brokers/base/router.py
index 68c8c8552c..382f54dc0c 100644
--- a/tests/brokers/base/router.py
+++ b/tests/brokers/base/router.py
@@ -3,7 +3,7 @@
import pytest
-from faststream import BaseMiddleware, Depends
+from faststream import Depends
from faststream._internal.broker.router import (
ArgsContainer,
BrokerRouter,
@@ -25,8 +25,9 @@ async def test_empty_prefix(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@@ -53,8 +54,9 @@ async def test_not_empty_prefix(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
router.prefix = "test_"
@@ -83,8 +85,9 @@ async def test_include_with_prefix(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@@ -111,8 +114,9 @@ async def test_empty_prefix_publisher(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
@@ -146,8 +150,9 @@ async def test_not_empty_prefix_publisher(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
router.prefix = "test_"
@@ -183,8 +188,9 @@ async def test_manual_publisher(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
router.prefix = "test_"
@@ -219,10 +225,11 @@ def response(m) -> None:
async def test_delayed_handlers(
self,
- event: asyncio.Event,
router: BrokerRouter,
queue: str,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
def response(m) -> None:
@@ -251,11 +258,12 @@ def response(m) -> None:
async def test_delayed_publishers(
self,
- event: asyncio.Event,
router: BrokerRouter,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
def response(m):
@@ -303,9 +311,10 @@ async def test_nested_routers_sub(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
core_router = type(router)(prefix="test1_")
@@ -340,8 +349,9 @@ async def test_nested_routers_pub(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
core_router = type(router)(prefix="test1_")
@@ -433,8 +443,8 @@ async def test_router_middlewares(
) -> None:
pub_broker = self.get_broker()
- router = type(router)(middlewares=(BaseMiddleware,))
- router2 = type(router)(middlewares=(BaseMiddleware,))
+ router = type(router)(middlewares=(1,))
+ router2 = type(router)(middlewares=(2,))
args, kwargs = self.get_subscriber_params(queue, middlewares=(3,))
@@ -448,8 +458,14 @@ def subscriber() -> None: ...
sub = next(iter(pub_broker._subscribers))
publisher = next(iter(pub_broker._publishers))
- assert len((*sub._broker_middlewares, *sub.calls[0].item_middlewares)) == 3
- assert len((*publisher._broker_middlewares, *publisher._middlewares)) == 3
+ subscriber_middlewares = (
+ *sub._broker_middlewares,
+ *sub.calls[0].item_middlewares,
+ )
+ assert subscriber_middlewares == (1, 2, 3)
+
+ publisher_middlewares = (*publisher._broker_middlewares, *publisher.middlewares)
+ assert publisher_middlewares == (1, 2, 3)
async def test_router_include_with_middlewares(
self,
@@ -466,23 +482,26 @@ async def test_router_include_with_middlewares(
@router2.publisher(queue, middlewares=(3,))
def subscriber() -> None: ...
- router.include_router(router2, middlewares=(BaseMiddleware,))
- pub_broker.include_router(router, middlewares=(BaseMiddleware,))
+ router.include_router(router2, middlewares=(2,))
+ pub_broker.include_router(router, middlewares=(1,))
sub = next(iter(pub_broker._subscribers))
publisher = next(iter(pub_broker._publishers))
sub_middlewares = (*sub._broker_middlewares, *sub.calls[0].item_middlewares)
- assert len(sub_middlewares) == 3, sub_middlewares
- assert len((*publisher._broker_middlewares, *publisher._middlewares)) == 3
+ assert sub_middlewares == (1, 2, 3), sub_middlewares
+
+ publisher_middlewares = (*publisher._broker_middlewares, *publisher.middlewares)
+ assert publisher_middlewares == (1, 2, 3)
async def test_router_parser(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
async def parser(msg, original):
@@ -524,9 +543,10 @@ async def test_router_parser_override(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
async def global_parser(msg, original): # pragma: no cover
@@ -580,8 +600,9 @@ async def test_publisher_mock(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
pub = router.publisher(queue + "resp")
@@ -613,8 +634,9 @@ async def test_subscriber_mock(
self,
router: BrokerRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
diff --git a/tests/brokers/base/testclient.py b/tests/brokers/base/testclient.py
index a594eebca4..2076561581 100644
--- a/tests/brokers/base/testclient.py
+++ b/tests/brokers/base/testclient.py
@@ -1,6 +1,5 @@
import asyncio
from abc import abstractmethod
-from typing import NoReturn
from unittest.mock import Mock
import anyio
@@ -117,7 +116,7 @@ async def test_exception_raises(self, queue: str) -> None:
args, kwargs = self.get_subscriber_params(queue)
@test_broker.subscriber(*args, **kwargs)
- async def m(msg) -> NoReturn: # pragma: no cover
+ async def m(msg): # pragma: no cover
raise ValueError
async with self.patch_broker(test_broker) as br:
@@ -126,10 +125,31 @@ async def m(msg) -> NoReturn: # pragma: no cover
with pytest.raises(ValueError): # noqa: PT011
await br.publish("hello", queue)
+ @pytest.mark.asyncio()
+ async def test_parser_exception_raises(self, queue: str) -> None:
+ test_broker = self.get_broker()
+
+ def parser(msg):
+ raise ValueError
+
+ args, kwargs = self.get_subscriber_params(queue, parser=parser)
+
+ @test_broker.subscriber(*args, **kwargs)
+ async def m(msg): # pragma: no cover
+ pass
+
+ async with self.patch_broker(test_broker) as br:
+ await br.start()
+
+ with pytest.raises(ValueError): # noqa: PT011
+ await br.publish("hello", queue)
+
async def test_broker_gets_patched_attrs_within_cm(self, fake_producer_cls) -> None:
test_broker = self.get_broker()
await test_broker.start()
+ old_producer = test_broker._producer
+
async with self.patch_broker(test_broker) as br:
assert isinstance(br.start, Mock)
assert isinstance(br._connect, Mock)
@@ -140,7 +160,7 @@ async def test_broker_gets_patched_attrs_within_cm(self, fake_producer_cls) -> N
assert not isinstance(br._connect, Mock)
assert not isinstance(br.close, Mock)
assert br._connection is not None
- assert not isinstance(br._producer, fake_producer_cls)
+ assert br._producer == old_producer
async def test_broker_with_real_doesnt_get_patched(self) -> None:
test_broker = self.get_broker()
diff --git a/tests/brokers/confluent/test_consume.py b/tests/brokers/confluent/test_consume.py
index 1cbb6889f7..2e886bc1f6 100644
--- a/tests/brokers/confluent/test_consume.py
+++ b/tests/brokers/confluent/test_consume.py
@@ -1,9 +1,10 @@
import asyncio
-from typing import Any, NoReturn
+from typing import Any
from unittest.mock import patch
import pytest
+from faststream import AckPolicy
from faststream.confluent import KafkaBroker
from faststream.confluent.annotations import KafkaMessage
from faststream.confluent.client import AsyncConfluentConsumer
@@ -49,9 +50,10 @@ async def handler(msg) -> None:
async def test_consume_batch_headers(
self,
mock,
- event: asyncio.Event,
queue: str,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue, batch=True)
@@ -87,8 +89,9 @@ def subscriber(m, msg: KafkaMessage) -> None:
async def test_consume_ack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
@@ -130,8 +133,9 @@ async def handler(msg: KafkaMessage) -> None:
async def test_consume_ack_manual(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
@@ -169,8 +173,9 @@ async def handler(msg: KafkaMessage) -> None:
async def test_consume_ack_raise(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
@@ -180,7 +185,7 @@ async def test_consume_ack_raise(
)
@consume_broker.subscriber(*args, **kwargs)
- async def handler(msg: KafkaMessage) -> NoReturn:
+ async def handler(msg: KafkaMessage):
event.set()
raise AckMessage
@@ -208,8 +213,9 @@ async def handler(msg: KafkaMessage) -> NoReturn:
async def test_nack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
@@ -247,11 +253,14 @@ async def handler(msg: KafkaMessage) -> None:
async def test_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- args, kwargs = self.get_subscriber_params(queue, group_id="test", no_ack=True)
+ args, kwargs = self.get_subscriber_params(
+ queue, group_id="test", ack_policy=AckPolicy.DO_NOTHING
+ )
@consume_broker.subscriber(*args, **kwargs)
async def handler(msg: KafkaMessage) -> None:
@@ -286,8 +295,9 @@ async def handler(msg: KafkaMessage) -> None:
async def test_consume_with_no_auto_commit(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(
diff --git a/tests/brokers/confluent/test_fastapi.py b/tests/brokers/confluent/test_fastapi.py
index 6d2944ce94..d47612c91e 100644
--- a/tests/brokers/confluent/test_fastapi.py
+++ b/tests/brokers/confluent/test_fastapi.py
@@ -21,8 +21,9 @@ async def test_batch_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue, batch=True)
@@ -57,8 +58,9 @@ async def test_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
args, kwargs = self.get_subscriber_params(queue, batch=True)
diff --git a/tests/brokers/confluent/test_publish.py b/tests/brokers/confluent/test_publish.py
index f4c7b2e251..0d6fc9f4e1 100644
--- a/tests/brokers/confluent/test_publish.py
+++ b/tests/brokers/confluent/test_publish.py
@@ -110,9 +110,10 @@ async def pub(m):
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
args, kwargs = self.get_subscriber_params(queue)
diff --git a/tests/brokers/confluent/test_publish_command.py b/tests/brokers/confluent/test_publish_command.py
new file mode 100644
index 0000000000..43e089afbb
--- /dev/null
+++ b/tests/brokers/confluent/test_publish_command.py
@@ -0,0 +1,47 @@
+from typing import Any
+
+import pytest
+
+from faststream import Response
+from faststream.confluent.response import KafkaPublishCommand, KafkaResponse
+from faststream.response import ensure_response
+
+
+def test_simple_response():
+ response = ensure_response(1)
+ cmd = KafkaPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+
+
+def test_base_response_class():
+ response = ensure_response(Response(body=1, headers={1: 1}))
+ cmd = KafkaPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {1: 1}
+
+
+def test_kafka_response_class():
+ response = ensure_response(KafkaResponse(body=1, headers={1: 1}, key=b"1"))
+ cmd = KafkaPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {1: 1}
+ assert cmd.key == b"1"
+
+
+@pytest.mark.parametrize(
+ ("data", "expected_body"),
+ (
+ pytest.param(None, (), id="None Response"),
+ pytest.param((), (), id="Empty Sequence"),
+ pytest.param("123", ("123",), id="String Response"),
+ pytest.param([1, 2, 3], (1, 2, 3), id="Sequence Data"),
+ pytest.param([0, 1, 2], (0, 1, 2), id="Sequence Data with falsy first element"),
+ ),
+)
+def test_batch_response(data: Any, expected_body: Any):
+ response = ensure_response(data)
+ cmd = KafkaPublishCommand.from_cmd(
+ response.as_publish_command(),
+ batch=True,
+ )
+ assert cmd.batch_bodies == expected_body
diff --git a/tests/brokers/confluent/test_requests.py b/tests/brokers/confluent/test_requests.py
index 190cff5df6..a0343ced57 100644
--- a/tests/brokers/confluent/test_requests.py
+++ b/tests/brokers/confluent/test_requests.py
@@ -14,7 +14,7 @@ async def on_receive(self) -> None:
self.msg._raw_msg *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body *= 2
+ msg.body *= 2
return await call_next(msg)
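
The `consume_scope` hooks in the request tests switch from the private `_decoded_body` attribute to the public `body` accessor on the incoming message; the middleware shape itself is unchanged:

```python
from faststream import BaseMiddleware


class Mid(BaseMiddleware):
    async def consume_scope(self, call_next, msg):
        msg.body *= 2  # public accessor replaces msg._decoded_body
        return await call_next(msg)
```
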
diff --git a/tests/brokers/confluent/test_test_client.py b/tests/brokers/confluent/test_test_client.py
index e5d915404c..73077ec147 100644
--- a/tests/brokers/confluent/test_test_client.py
+++ b/tests/brokers/confluent/test_test_client.py
@@ -52,8 +52,9 @@ async def m(msg: KafkaMessage) -> None:
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
args, kwargs = self.get_subscriber_params(queue)
diff --git a/tests/brokers/kafka/test_consume.py b/tests/brokers/kafka/test_consume.py
index 84db65b32d..f725f90371 100644
--- a/tests/brokers/kafka/test_consume.py
+++ b/tests/brokers/kafka/test_consume.py
@@ -1,10 +1,11 @@
import asyncio
-from typing import Any, NoReturn
+from typing import Any
from unittest.mock import patch
import pytest
from aiokafka import AIOKafkaConsumer
+from faststream import AckPolicy
from faststream.exceptions import AckMessage
from faststream.kafka import KafkaBroker, TopicPartition
from faststream.kafka.annotations import KafkaMessage
@@ -21,8 +22,9 @@ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> KafkaBroker:
async def test_consume_by_pattern(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(queue)
@@ -78,9 +80,10 @@ async def handler(msg) -> None:
async def test_consume_batch_headers(
self,
mock,
- event: asyncio.Event,
queue: str,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, batch=True)
@@ -114,8 +117,9 @@ def subscriber(m, msg: KafkaMessage) -> None:
async def test_consume_ack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, group_id="test", auto_commit=False)
@@ -150,8 +154,9 @@ async def handler(msg: KafkaMessage) -> None:
async def test_manual_partition_consume(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
tp1 = TopicPartition(queue, partition=0)
@@ -178,8 +183,9 @@ async def handler_tp1(msg) -> None:
async def test_consume_ack_manual(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, group_id="test", auto_commit=False)
@@ -216,12 +222,13 @@ async def handler(msg: KafkaMessage) -> None:
async def test_consume_ack_raise(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, group_id="test", auto_commit=False)
- async def handler(msg: KafkaMessage) -> NoReturn:
+ async def handler(msg: KafkaMessage):
event.set()
raise AckMessage
@@ -254,8 +261,9 @@ async def handler(msg: KafkaMessage) -> NoReturn:
async def test_nack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, group_id="test", auto_commit=False)
@@ -292,11 +300,14 @@ async def handler(msg: KafkaMessage) -> None:
async def test_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, group_id="test", no_ack=True)
+ @consume_broker.subscriber(
+ queue, group_id="test", ack_policy=AckPolicy.DO_NOTHING
+ )
async def handler(msg: KafkaMessage) -> None:
event.set()
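
As in the Confluent suite, `no_ack=True` gives way to the explicit `AckPolicy` enum. A hedged sketch of the new subscriber configuration (topic name illustrative; `DO_NOTHING` is taken from these tests to mean "perform no automatic ack"):

```python
from faststream import AckPolicy
from faststream.kafka import KafkaBroker

broker = KafkaBroker()


# Previously: @broker.subscriber(queue, group_id="test", no_ack=True)
@broker.subscriber("topic", group_id="test", ack_policy=AckPolicy.DO_NOTHING)
async def handler(msg) -> None:
    print(msg)
```
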
diff --git a/tests/brokers/kafka/test_fastapi.py b/tests/brokers/kafka/test_fastapi.py
index 442e9517f2..899deaffce 100644
--- a/tests/brokers/kafka/test_fastapi.py
+++ b/tests/brokers/kafka/test_fastapi.py
@@ -18,8 +18,9 @@ async def test_batch_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(queue, batch=True)
@@ -52,8 +53,9 @@ async def test_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(queue, batch=True)
diff --git a/tests/brokers/kafka/test_publish.py b/tests/brokers/kafka/test_publish.py
index 19946156fa..80cb7b017b 100644
--- a/tests/brokers/kafka/test_publish.py
+++ b/tests/brokers/kafka/test_publish.py
@@ -100,9 +100,10 @@ async def pub(m):
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
diff --git a/tests/brokers/kafka/test_publish_command.py b/tests/brokers/kafka/test_publish_command.py
new file mode 100644
index 0000000000..912989aa1c
--- /dev/null
+++ b/tests/brokers/kafka/test_publish_command.py
@@ -0,0 +1,47 @@
+from typing import Any
+
+import pytest
+
+from faststream import Response
+from faststream.kafka.response import KafkaPublishCommand, KafkaResponse
+from faststream.response import ensure_response
+
+
+def test_simple_response():
+ response = ensure_response(1)
+ cmd = KafkaPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+
+
+def test_base_response_class():
+ response = ensure_response(Response(body=1, headers={1: 1}))
+ cmd = KafkaPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {1: 1}
+
+
+def test_kafka_response_class():
+ response = ensure_response(KafkaResponse(body=1, headers={1: 1}, key=b"1"))
+ cmd = KafkaPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {1: 1}
+ assert cmd.key == b"1"
+
+
+@pytest.mark.parametrize(
+ ("data", "expected_body"),
+ (
+ pytest.param(None, (), id="None Response"),
+ pytest.param((), (), id="Empty Sequence"),
+ pytest.param("123", ("123",), id="String Response"),
+ pytest.param([1, 2, 3], (1, 2, 3), id="Sequence Data"),
+ pytest.param([0, 1, 2], (0, 1, 2), id="Sequence Data with falsy first element"),
+ ),
+)
+def test_batch_response(data: Any, expected_body: Any):
+ response = ensure_response(data)
+ cmd = KafkaPublishCommand.from_cmd(
+ response.as_publish_command(),
+ batch=True,
+ )
+ assert cmd.batch_bodies == expected_body
diff --git a/tests/brokers/kafka/test_requests.py b/tests/brokers/kafka/test_requests.py
index 41a1687b09..f84ba2a5db 100644
--- a/tests/brokers/kafka/test_requests.py
+++ b/tests/brokers/kafka/test_requests.py
@@ -12,7 +12,7 @@ async def on_receive(self) -> None:
self.msg.value *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body *= 2
+ msg.body *= 2
return await call_next(msg)
diff --git a/tests/brokers/kafka/test_test_client.py b/tests/brokers/kafka/test_test_client.py
index b27444656b..b490604453 100644
--- a/tests/brokers/kafka/test_test_client.py
+++ b/tests/brokers/kafka/test_test_client.py
@@ -94,8 +94,9 @@ async def m(msg: KafkaMessage) -> None:
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py
index cc72c36073..82ff607e4a 100644
--- a/tests/brokers/nats/test_consume.py
+++ b/tests/brokers/nats/test_consume.py
@@ -1,10 +1,12 @@
import asyncio
-from typing import Any, NoReturn
+from typing import Any
from unittest.mock import Mock, patch
import pytest
from nats.aio.msg import Msg
+from nats.js.api import PubAck
+from faststream import AckPolicy
from faststream.exceptions import AckMessage
from faststream.nats import ConsumerConfig, JStream, NatsBroker, PullSub
from faststream.nats.annotations import NatsMessage
@@ -21,8 +23,9 @@ async def test_consume_js(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(queue, stream=stream)
@@ -31,7 +34,7 @@ def subscriber(m) -> None:
async with self.patch_broker(consume_broker) as br:
await br.start()
- await asyncio.wait(
+ completed, _ = await asyncio.wait(
(
asyncio.create_task(br.publish("hello", queue, stream=stream.name)),
asyncio.create_task(event.wait()),
@@ -39,14 +42,22 @@ def subscriber(m) -> None:
timeout=3,
)
+ publish_with_stream_returns_ack_frame = False
+ for task in completed:
+ if isinstance(task.result(), PubAck):
+ publish_with_stream_returns_ack_frame = True
+ break
+
assert event.is_set()
+ assert publish_with_stream_returns_ack_frame
async def test_consume_with_filter(
self,
queue,
mock: Mock,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -74,9 +85,10 @@ async def test_consume_pull(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -106,9 +118,10 @@ async def test_consume_batch(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -137,9 +150,10 @@ def subscriber(m) -> None:
async def test_consume_ack(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
@@ -164,35 +178,37 @@ async def handler(msg: NatsMessage) -> None:
async def test_core_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
- stream: JStream,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, no_ack=True)
+ @consume_broker.subscriber(queue)
async def handler(msg: NatsMessage) -> None:
- if not msg.raw_message._ackd:
- event.set()
+ event.set()
async with self.patch_broker(consume_broker) as br:
await br.start()
- await asyncio.wait(
- (
- asyncio.create_task(br.publish("hello", queue)),
- asyncio.create_task(event.wait()),
- ),
- timeout=3,
- )
+ with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m:
+ await asyncio.wait(
+ (
+ asyncio.create_task(br.publish("hello", queue)),
+ asyncio.create_task(event.wait()),
+ ),
+ timeout=3,
+ )
+ assert not m.mock.called
assert event.is_set()
async def test_consume_ack_manual(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
@@ -218,13 +234,14 @@ async def handler(msg: NatsMessage) -> None:
async def test_consume_ack_raise(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
- async def handler(msg: NatsMessage) -> NoReturn:
+ async def handler(msg: NatsMessage):
event.set()
raise AckMessage
@@ -246,9 +263,10 @@ async def handler(msg: NatsMessage) -> NoReturn:
async def test_nack(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, stream=stream)
@@ -274,11 +292,15 @@ async def handler(msg: NatsMessage) -> None:
async def test_consume_no_ack(
self,
queue: str,
- event: asyncio.Event,
+ stream: JStream,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, no_ack=True)
+ @consume_broker.subscriber(
+ queue, stream=stream, ack_policy=AckPolicy.DO_NOTHING
+ )
async def handler(msg: NatsMessage) -> None:
event.set()
@@ -301,9 +323,10 @@ async def test_consume_batch_headers(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
@@ -339,9 +362,10 @@ def subscriber(m, msg: NatsMessage) -> None:
async def test_consume_kv(
self,
queue: str,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, kv_watch=queue + "1")
@@ -373,9 +397,10 @@ async def handler(m) -> None:
async def test_consume_os(
self,
queue: str,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue, obj_watch=True)
@@ -406,7 +431,6 @@ async def handler(filename: str) -> None:
async def test_get_one_js(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
broker = self.get_broker(apply_types=True)
@@ -453,7 +477,6 @@ async def test_get_one_timeout_js(
async def test_get_one_pull(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
broker = self.get_broker(apply_types=True)
@@ -489,7 +512,6 @@ async def publish() -> None:
async def test_get_one_pull_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
mock: Mock,
) -> None:
@@ -509,7 +531,6 @@ async def test_get_one_pull_timeout(
async def test_get_one_batch(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
broker = self.get_broker(apply_types=True)
@@ -545,7 +566,6 @@ async def publish() -> None:
async def test_get_one_batch_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
mock: Mock,
) -> None:
@@ -565,7 +585,6 @@ async def test_get_one_batch_timeout(
async def test_get_one_with_filter(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
broker = self.get_broker(apply_types=True)
@@ -600,7 +619,6 @@ async def publish() -> None:
async def test_get_one_kv(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
broker = self.get_broker(apply_types=True)
@@ -633,7 +651,6 @@ async def publish() -> None:
async def test_get_one_kv_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
mock: Mock,
) -> None:
@@ -649,7 +666,6 @@ async def test_get_one_kv_timeout(
async def test_get_one_os(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
) -> None:
broker = self.get_broker(apply_types=True)
@@ -683,7 +699,6 @@ async def publish() -> None:
async def test_get_one_os_timeout(
self,
queue: str,
- event: asyncio.Event,
stream: JStream,
mock: Mock,
) -> None:
diff --git a/tests/brokers/nats/test_fastapi.py b/tests/brokers/nats/test_fastapi.py
index d5a421a8a4..bbcdac2113 100644
--- a/tests/brokers/nats/test_fastapi.py
+++ b/tests/brokers/nats/test_fastapi.py
@@ -17,9 +17,10 @@ class TestRouter(FastAPITestcase):
async def test_path(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(queue + ".{name}")
@@ -46,9 +47,10 @@ async def test_consume_batch(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(
@@ -85,9 +87,10 @@ async def test_consume_batch(
self,
queue: str,
stream: JStream,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(
diff --git a/tests/brokers/nats/test_publish.py b/tests/brokers/nats/test_publish.py
index 3b92b0673e..c7367ac389 100644
--- a/tests/brokers/nats/test_publish.py
+++ b/tests/brokers/nats/test_publish.py
@@ -19,9 +19,10 @@ def get_broker(self, apply_types: bool = False, **kwargs) -> NatsBroker:
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@@ -58,7 +59,6 @@ async def handle_next(msg=Context("message")) -> None:
async def test_response_for_rpc(
self,
queue: str,
- event: asyncio.Event,
) -> None:
pub_broker = self.get_broker(apply_types=True)
diff --git a/tests/brokers/nats/test_requests.py b/tests/brokers/nats/test_requests.py
index af52ca02da..579f13113d 100644
--- a/tests/brokers/nats/test_requests.py
+++ b/tests/brokers/nats/test_requests.py
@@ -12,7 +12,7 @@ async def on_receive(self) -> None:
self.msg.data *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body *= 2
+ msg.body *= 2
return await call_next(msg)
diff --git a/tests/brokers/nats/test_router.py b/tests/brokers/nats/test_router.py
index c424aa4da1..8af70bcfa0 100644
--- a/tests/brokers/nats/test_router.py
+++ b/tests/brokers/nats/test_router.py
@@ -128,10 +128,11 @@ async def h(
async def test_delayed_handlers_with_queue(
self,
- event,
router: NatsRouter,
queue: str,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
def response(m) -> None:
diff --git a/tests/brokers/nats/test_test_client.py b/tests/brokers/nats/test_test_client.py
index cc4a3106e7..7ca172e6ce 100644
--- a/tests/brokers/nats/test_test_client.py
+++ b/tests/brokers/nats/test_test_client.py
@@ -55,8 +55,9 @@ async def m(msg) -> None: ...
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
diff --git a/tests/brokers/rabbit/specific/test_declare.py b/tests/brokers/rabbit/specific/test_declare.py
index 02467a1704..874fb403cc 100644
--- a/tests/brokers/rabbit/specific/test_declare.py
+++ b/tests/brokers/rabbit/specific/test_declare.py
@@ -6,7 +6,8 @@
@pytest.mark.asyncio()
async def test_declare_queue(async_mock, queue: str) -> None:
- declarer = RabbitDeclarer(async_mock)
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
q1 = await declarer.declare_queue(RabbitQueue(queue))
q2 = await declarer.declare_queue(RabbitQueue(queue))
@@ -20,7 +21,8 @@ async def test_declare_exchange(
async_mock,
queue: str,
) -> None:
- declarer = RabbitDeclarer(async_mock)
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
ex1 = await declarer.declare_exchange(RabbitExchange(queue))
ex2 = await declarer.declare_exchange(RabbitExchange(queue))
@@ -34,7 +36,8 @@ async def test_declare_nested_exchange_cash_nested(
async_mock,
queue: str,
) -> None:
- declarer = RabbitDeclarer(async_mock)
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
exchange = RabbitExchange(queue)
@@ -50,7 +53,8 @@ async def test_publisher_declare(
async_mock,
queue: str,
) -> None:
- declarer = RabbitDeclarer(async_mock)
+ declarer = RabbitDeclarer()
+ declarer.connect(async_mock, async_mock)
broker = RabbitBroker()
broker._connection = async_mock
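
`RabbitDeclarer` is no longer constructed around a connection; it starts empty and is wired up through `connect(...)` (the tests pass the same mock for both positional arguments, presumably the connection and a channel). A sketch under those assumptions:

```python
from faststream.rabbit import RabbitQueue

# RabbitDeclarer import path is not shown in this diff — assumed available.


async def declare(connection, channel) -> None:
    declarer = RabbitDeclarer()            # no constructor arguments anymore
    declarer.connect(connection, channel)  # wire the transport in explicitly
    await declarer.declare_queue(RabbitQueue("my-queue"))
```
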
diff --git a/tests/brokers/rabbit/test_consume.py b/tests/brokers/rabbit/test_consume.py
index 37d79b7728..19317948fa 100644
--- a/tests/brokers/rabbit/test_consume.py
+++ b/tests/brokers/rabbit/test_consume.py
@@ -1,10 +1,11 @@
import asyncio
-from typing import Any, NoReturn
+from typing import Any
from unittest.mock import patch
import pytest
from aio_pika import IncomingMessage, Message
+from faststream import AckPolicy
from faststream.exceptions import AckMessage, NackMessage, RejectMessage, SkipMessage
from faststream.rabbit import RabbitBroker, RabbitExchange, RabbitQueue
from faststream.rabbit.annotations import RabbitMessage
@@ -22,11 +23,12 @@ async def test_consume_from_exchange(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
def h(m) -> None:
event.set()
@@ -49,14 +51,14 @@ async def test_consume_with_get_old(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
queue=RabbitQueue(name=queue, passive=True),
exchange=RabbitExchange(name=exchange.name, passive=True),
- retry=True,
)
def h(m) -> None:
event.set()
@@ -88,11 +90,12 @@ async def test_consume_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage) -> None:
event.set()
@@ -122,11 +125,12 @@ async def test_consume_manual_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage) -> None:
await msg.ack()
event.set()
@@ -156,11 +160,12 @@ async def test_consume_exception_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage) -> None:
try:
raise AckMessage
@@ -192,12 +197,13 @@ async def test_consume_manual_nack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage) -> NoReturn:
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage):
await msg.nack()
event.set()
raise ValueError
@@ -227,11 +233,12 @@ async def test_consume_exception_nack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage) -> None:
try:
raise NackMessage
@@ -263,12 +270,13 @@ async def test_consume_manual_reject(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
- async def handler(msg: RabbitMessage) -> NoReturn:
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
+ async def handler(msg: RabbitMessage):
await msg.reject()
event.set()
raise ValueError
@@ -298,11 +306,12 @@ async def test_consume_exception_reject(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @consume_broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage) -> None:
try:
raise RejectMessage
@@ -333,8 +342,9 @@ async def handler(msg: RabbitMessage) -> None:
async def test_consume_skip_message(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(queue)
@@ -382,11 +392,14 @@ async def test_consume_no_ack(
self,
queue: str,
exchange: RabbitExchange,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
- @consume_broker.subscriber(queue, exchange=exchange, retry=1, no_ack=True)
+ @consume_broker.subscriber(
+ queue, exchange=exchange, ack_policy=AckPolicy.DO_NOTHING
+ )
async def handler(msg: RabbitMessage) -> None:
event.set()
diff --git a/tests/brokers/rabbit/test_fastapi.py b/tests/brokers/rabbit/test_fastapi.py
index df20f2fdeb..3bd99eae62 100644
--- a/tests/brokers/rabbit/test_fastapi.py
+++ b/tests/brokers/rabbit/test_fastapi.py
@@ -18,9 +18,10 @@ class TestRouter(FastAPITestcase):
async def test_path(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(
diff --git a/tests/brokers/rabbit/test_publish.py b/tests/brokers/rabbit/test_publish.py
index 92545e6a1b..f5000d9104 100644
--- a/tests/brokers/rabbit/test_publish.py
+++ b/tests/brokers/rabbit/test_publish.py
@@ -1,5 +1,5 @@
import asyncio
-from typing import Any
+from typing import TYPE_CHECKING, Any
from unittest.mock import Mock, patch
import pytest
@@ -10,6 +10,9 @@
from tests.brokers.base.publish import BrokerPublishTestcase
from tests.tools import spy_decorator
+if TYPE_CHECKING:
+ from faststream.rabbit.response import RabbitPublishCommand
+
@pytest.mark.rabbit()
class TestPublish(BrokerPublishTestcase):
@@ -20,9 +23,10 @@ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RabbitBroker:
async def test_reply_config(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
reply_queue = queue + "reply"
@@ -54,8 +58,9 @@ async def handler(m):
timeout=3,
)
- assert m.mock.call_args.kwargs.get("persist")
- assert m.mock.call_args.kwargs.get("immediate") is False
+ cmd: RabbitPublishCommand = m.mock.call_args[0][1]
+ assert cmd.message_options["persist"]
+ assert not cmd.publish_options["immediate"]
assert event.is_set()
mock.assert_called_with("Hello!")
@@ -64,18 +69,16 @@ async def handler(m):
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(queue)
@pub_broker.publisher(queue + "1")
async def handle():
- return RabbitResponse(
- 1,
- persist=True,
- )
+ return RabbitResponse(1, persist=True)
@pub_broker.subscriber(queue + "1")
async def handle_next(msg=Context("message")) -> None:
@@ -100,7 +103,8 @@ async def handle_next(msg=Context("message")) -> None:
assert event.is_set()
- assert m.mock.call_args.kwargs.get("persist")
+ cmd: RabbitPublishCommand = m.mock.call_args[0][1]
+ assert cmd.message_options["persist"]
mock.assert_called_once_with(body=b"1")
@@ -108,7 +112,6 @@ async def handle_next(msg=Context("message")) -> None:
async def test_response_for_rpc(
self,
queue: str,
- event: asyncio.Event,
) -> None:
pub_broker = self.get_broker(apply_types=True)
diff --git a/tests/brokers/rabbit/test_requests.py b/tests/brokers/rabbit/test_requests.py
index 2d5226a99f..fd5fb93ebf 100644
--- a/tests/brokers/rabbit/test_requests.py
+++ b/tests/brokers/rabbit/test_requests.py
@@ -13,7 +13,7 @@ async def on_receive(self) -> None:
self.msg.body *= 2
async def consume_scope(self, call_next, msg):
- msg._decoded_body *= 2
+ msg.body *= 2
return await call_next(msg)
diff --git a/tests/brokers/rabbit/test_router.py b/tests/brokers/rabbit/test_router.py
index d037375900..0fb2b8babf 100644
--- a/tests/brokers/rabbit/test_router.py
+++ b/tests/brokers/rabbit/test_router.py
@@ -112,8 +112,9 @@ async def test_queue_obj(
self,
router: RabbitRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
router.prefix = "test/"
@@ -145,8 +146,9 @@ async def test_queue_obj_with_routing_key(
self,
router: RabbitRouter,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
router.prefix = "test/"
@@ -177,10 +179,11 @@ def subscriber(m) -> None:
async def test_delayed_handlers_with_queue(
self,
- event: asyncio.Event,
router: RabbitRouter,
queue: str,
) -> None:
+ event = asyncio.Event()
+
def response(m) -> None:
event.set()
diff --git a/tests/brokers/rabbit/test_test_client.py b/tests/brokers/rabbit/test_test_client.py
index f42fee9d01..0784da9bf3 100644
--- a/tests/brokers/rabbit/test_test_client.py
+++ b/tests/brokers/rabbit/test_test_client.py
@@ -1,5 +1,5 @@
import asyncio
-from typing import Any, NoReturn
+from typing import Any
import pytest
@@ -31,8 +31,9 @@ def patch_broker(self, broker: RabbitBroker, **kwargs: Any) -> RabbitBroker:
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
@@ -207,19 +208,19 @@ async def test_consume_manual_ack(
consume2 = asyncio.Event()
consume3 = asyncio.Event()
- @broker.subscriber(queue=queue, exchange=exchange, retry=1)
+ @broker.subscriber(queue=queue, exchange=exchange)
async def handler(msg: RabbitMessage) -> None:
await msg.raw_message.ack()
consume.set()
- @broker.subscriber(queue=queue + "1", exchange=exchange, retry=1)
- async def handler2(msg: RabbitMessage) -> NoReturn:
+ @broker.subscriber(queue=queue + "1", exchange=exchange)
+ async def handler2(msg: RabbitMessage):
await msg.raw_message.nack()
consume2.set()
raise ValueError
- @broker.subscriber(queue=queue + "2", exchange=exchange, retry=1)
- async def handler3(msg: RabbitMessage) -> NoReturn:
+ @broker.subscriber(queue=queue + "2", exchange=exchange)
+ async def handler3(msg: RabbitMessage):
await msg.raw_message.reject()
consume3.set()
raise ValueError
diff --git a/tests/brokers/redis/test_consume.py b/tests/brokers/redis/test_consume.py
index 899a941813..7c7a1e4152 100644
--- a/tests/brokers/redis/test_consume.py
+++ b/tests/brokers/redis/test_consume.py
@@ -18,10 +18,11 @@ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
async def test_consume_native(
self,
- event: asyncio.Event,
mock: MagicMock,
queue: str,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(queue)
@@ -44,9 +45,10 @@ async def handler(msg) -> None:
async def test_pattern_with_path(
self,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber("test.{name}")
@@ -69,9 +71,10 @@ async def handler(msg) -> None:
async def test_pattern_without_path(
self,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(PubSub("test.*", pattern=True))
@@ -104,10 +107,11 @@ def patch_broker(self, broker):
async def test_consume_list(
self,
- event: asyncio.Event,
queue: str,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(list=queue)
@@ -130,10 +134,11 @@ async def handler(msg) -> None:
async def test_consume_list_native(
self,
- event: asyncio.Event,
queue: str,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(list=queue)
@@ -158,9 +163,10 @@ async def handler(msg) -> None:
async def test_consume_list_batch_with_one(
self,
queue: str,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -187,9 +193,10 @@ async def handler(msg) -> None:
async def test_consume_list_batch_headers(
self,
queue: str,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
@@ -314,7 +321,6 @@ async def handler(msg) -> None:
async def test_get_one(
self,
queue: str,
- event: asyncio.Event,
) -> None:
broker = self.get_broker(apply_types=True)
subscriber = broker.subscriber(list=queue)
@@ -369,10 +375,11 @@ def patch_broker(self, broker):
@pytest.mark.slow()
async def test_consume_stream(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(stream=StreamSub(queue, polling_interval=10))
@@ -396,10 +403,11 @@ async def handler(msg) -> None:
@pytest.mark.slow()
async def test_consume_stream_native(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(stream=StreamSub(queue, polling_interval=10))
@@ -425,10 +433,11 @@ async def handler(msg) -> None:
@pytest.mark.slow()
async def test_consume_stream_batch(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -455,9 +464,10 @@ async def handler(msg) -> None:
async def test_consume_stream_batch_headers(
self,
queue: str,
- event: asyncio.Event,
mock,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
@@ -525,10 +535,11 @@ async def handler(msg: list[Data]) -> None:
@pytest.mark.slow()
async def test_consume_stream_batch_native(
self,
- event: asyncio.Event,
mock: MagicMock,
queue,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker()
@consume_broker.subscriber(
@@ -582,8 +593,9 @@ async def handler(msg: RedisMessage) -> None: ...
async def test_consume_nack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
@@ -612,8 +624,9 @@ async def handler(msg: RedisMessage) -> None:
async def test_consume_ack(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
consume_broker = self.get_broker(apply_types=True)
@consume_broker.subscriber(
diff --git a/tests/brokers/redis/test_fastapi.py b/tests/brokers/redis/test_fastapi.py
index 41de233318..9c66a73230 100644
--- a/tests/brokers/redis/test_fastapi.py
+++ b/tests/brokers/redis/test_fastapi.py
@@ -18,9 +18,10 @@ class TestRouter(FastAPITestcase):
async def test_path(
self,
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber("in.{name}")
@@ -57,8 +58,9 @@ async def test_batch_real(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(list=ListSub(queue, batch=True, max_records=1))
@@ -82,10 +84,11 @@ async def hello(msg: list[str]):
@pytest.mark.slow()
async def test_consume_stream(
self,
- event: asyncio.Event,
mock: Mock,
queue,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(stream=StreamSub(queue, polling_interval=10))
@@ -110,10 +113,11 @@ async def handler(msg) -> None:
@pytest.mark.slow()
async def test_consume_stream_batch(
self,
- event: asyncio.Event,
mock: Mock,
queue,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True))
@@ -147,8 +151,9 @@ async def test_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(list=ListSub(queue, batch=True, max_records=1))
@@ -172,8 +177,9 @@ async def test_stream_batch_testclient(
self,
mock: Mock,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
router = self.router_class()
@router.subscriber(stream=StreamSub(queue, batch=True))
diff --git a/tests/brokers/redis/test_publish.py b/tests/brokers/redis/test_publish.py
index 25bf9023d0..8967aa0778 100644
--- a/tests/brokers/redis/test_publish.py
+++ b/tests/brokers/redis/test_publish.py
@@ -20,9 +20,10 @@ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
async def test_list_publisher(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
@pub_broker.subscriber(list=queue)
@@ -79,9 +80,10 @@ async def handler(msg) -> None:
async def test_batch_list_publisher(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
batch_list = ListSub(queue + "resp", batch=True)
@@ -113,9 +115,10 @@ async def resp(msg) -> None:
async def test_publisher_with_maxlen(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
stream = StreamSub(queue + "resp", maxlen=1)
@@ -150,9 +153,10 @@ async def resp(msg) -> None:
async def test_response(
self,
queue: str,
- event: asyncio.Event,
mock: MagicMock,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker(apply_types=True)
@pub_broker.subscriber(list=queue)
@@ -188,7 +192,6 @@ async def resp(msg=Context("message")) -> None:
async def test_response_for_rpc(
self,
queue: str,
- event: asyncio.Event,
) -> None:
pub_broker = self.get_broker(apply_types=True)
diff --git a/tests/brokers/redis/test_publish_command.py b/tests/brokers/redis/test_publish_command.py
new file mode 100644
index 0000000000..6539ee0b62
--- /dev/null
+++ b/tests/brokers/redis/test_publish_command.py
@@ -0,0 +1,51 @@
+from typing import Any
+
+import pytest
+
+from faststream import Response
+from faststream.redis.response import RedisPublishCommand, RedisResponse
+from faststream.response import ensure_response
+
+
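+# ensure_response() wraps raw data in a Response object; as_publish_command() then
+# yields the broker-agnostic command that RedisPublishCommand.from_cmd() specializes.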
+def test_simple_response():
+ response = ensure_response(1)
+ cmd = RedisPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+
+
+def test_base_response_class():
+ response = ensure_response(Response(body=1, headers={1: 1}))
+ cmd = RedisPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {1: 1}
+
+
+def test_redis_response_class():
+ response = ensure_response(RedisResponse(body=1, headers={1: 1}, maxlen=1))
+ cmd = RedisPublishCommand.from_cmd(response.as_publish_command())
+ assert cmd.body == 1
+ assert cmd.headers == {1: 1}
+ assert cmd.maxlen == 1
+
+
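+# With batch=True, batch_bodies normalizes the payload: None and empty sequences
+# become (), a string stays a single message, and other sequences become tuples.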
+@pytest.mark.parametrize(
+ ("data", "expected_body"),
+ (
+ pytest.param(None, (), id="None Response"),
+ pytest.param((), (), id="Empty Sequence"),
+ pytest.param("123", ("123",), id="String Response"),
+ pytest.param([1, 2, 3], (1, 2, 3), id="Sequence Data"),
+ pytest.param([0, 1, 2], (0, 1, 2), id="Sequence Data with falsy first element"),
+ ),
+)
+def test_batch_response(data: Any, expected_body: Any):
+ response = ensure_response(data)
+ cmd = RedisPublishCommand.from_cmd(
+ response.as_publish_command(),
+ batch=True,
+ )
+ assert cmd.batch_bodies == expected_body
diff --git a/tests/brokers/redis/test_requests.py b/tests/brokers/redis/test_requests.py
index b9d2e3f244..f1d4fc3c0f 100644
--- a/tests/brokers/redis/test_requests.py
+++ b/tests/brokers/redis/test_requests.py
@@ -14,7 +14,7 @@ async def on_receive(self) -> None:
self.msg["data"] = json.dumps(data)
async def consume_scope(self, call_next, msg):
- msg._decoded_body *= 2
+ msg.body *= 2
return await call_next(msg)
diff --git a/tests/brokers/redis/test_router.py b/tests/brokers/redis/test_router.py
index 5b6af70eee..ef53e47a37 100644
--- a/tests/brokers/redis/test_router.py
+++ b/tests/brokers/redis/test_router.py
@@ -118,9 +118,10 @@ async def h(
async def test_delayed_channel_handlers(
self,
- event: asyncio.Event,
queue: str,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
def response(m) -> None:
@@ -145,9 +146,10 @@ def response(m) -> None:
async def test_delayed_list_handlers(
self,
- event: asyncio.Event,
queue: str,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
def response(m) -> None:
@@ -172,9 +174,10 @@ def response(m) -> None:
async def test_delayed_stream_handlers(
self,
- event: asyncio.Event,
queue: str,
) -> None:
+ event = asyncio.Event()
+
pub_broker = self.get_broker()
def response(m) -> None:
diff --git a/tests/brokers/redis/test_test_client.py b/tests/brokers/redis/test_test_client.py
index 2eb6ecaf3c..c9bcbe5527 100644
--- a/tests/brokers/redis/test_test_client.py
+++ b/tests/brokers/redis/test_test_client.py
@@ -23,8 +23,9 @@ def patch_broker(self, broker: RedisBroker, **kwargs: Any) -> TestRedisBroker:
async def test_with_real_testclient(
self,
queue: str,
- event: asyncio.Event,
) -> None:
+ event = asyncio.Event()
+
broker = self.get_broker()
@broker.subscriber(queue)
diff --git a/tests/brokers/test_pushback.py b/tests/brokers/test_pushback.py
deleted file mode 100644
index fd1bd976d5..0000000000
--- a/tests/brokers/test_pushback.py
+++ /dev/null
@@ -1,125 +0,0 @@
-from typing import NoReturn
-from unittest.mock import AsyncMock
-
-import pytest
-
-from faststream._internal.subscriber.acknowledgement_watcher import (
- CounterWatcher,
- EndlessWatcher,
- WatcherContext,
-)
-from faststream.exceptions import NackMessage, SkipMessage
-
-
-@pytest.fixture()
-def message():
- return AsyncMock(message_id=1)
-
-
-@pytest.mark.asyncio()
-async def test_push_back_correct(async_mock: AsyncMock, message) -> None:
- watcher = CounterWatcher(3)
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- await async_mock()
-
- async_mock.assert_awaited_once()
- message.ack.assert_awaited_once()
- assert not watcher.memory.get(message.message_id)
-
-
-@pytest.mark.asyncio()
-async def test_push_back_endless_correct(async_mock: AsyncMock, message) -> None:
- watcher = EndlessWatcher()
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- await async_mock()
-
- async_mock.assert_awaited_once()
- message.ack.assert_awaited_once()
-
-
-@pytest.mark.asyncio()
-async def test_push_back_watcher(async_mock: AsyncMock, message) -> None:
- watcher = CounterWatcher(3)
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async_mock.side_effect = ValueError("Ooops!")
-
- while not message.reject.called:
- with pytest.raises(ValueError): # noqa: PT011
- async with context:
- await async_mock()
-
- assert not message.ack.await_count
- assert message.nack.await_count == 3
- message.reject.assert_awaited_once()
-
-
-@pytest.mark.asyncio()
-async def test_push_endless_back_watcher(async_mock: AsyncMock, message) -> None:
- watcher = EndlessWatcher()
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async_mock.side_effect = ValueError("Ooops!")
-
- while message.nack.await_count < 10:
- with pytest.raises(ValueError): # noqa: PT011
- async with context:
- await async_mock()
-
- assert not message.ack.called
- assert not message.reject.called
- assert message.nack.await_count == 10
-
-
-@pytest.mark.asyncio()
-async def test_ignore_skip(async_mock: AsyncMock, message) -> NoReturn:
- watcher = CounterWatcher(3)
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- raise SkipMessage
-
- assert not message.nack.called
- assert not message.reject.called
- assert not message.ack.called
-
-
-@pytest.mark.asyncio()
-async def test_additional_params_with_handler_exception(
- async_mock: AsyncMock, message
-) -> NoReturn:
- watcher = EndlessWatcher()
-
- context = WatcherContext(
- message=message,
- watcher=watcher,
- )
-
- async with context:
- raise NackMessage(delay=5)
-
- message.nack.assert_called_with(delay=5)
diff --git a/tests/brokers/test_response.py b/tests/brokers/test_response.py
index 710706d1c3..a8b669cc52 100644
--- a/tests/brokers/test_response.py
+++ b/tests/brokers/test_response.py
@@ -1,4 +1,5 @@
-from faststream.response import Response, ensure_response
+from faststream.response import ensure_response
+from faststream.response.response import Response
def test_raw_data() -> None:
@@ -13,13 +14,13 @@ def test_response_with_response_instance() -> None:
assert resp.headers == {"some": 1}
-def test_headers_override() -> None:
- resp = Response(1, headers={"some": 1})
- resp.add_headers({"some": 2})
- assert resp.headers == {"some": 2}
+def test_add_headers_not_overrides() -> None:
+ publish_cmd = Response(1, headers={1: 1, 2: 2}).as_publish_command()
+ publish_cmd.add_headers({1: "ignored", 3: 3}, override=False)
+ assert publish_cmd.headers == {1: 1, 2: 2, 3: 3}
-def test_headers_with_default() -> None:
- resp = Response(1, headers={"some": 1})
- resp.add_headers({"some": 2}, override=False)
- assert resp.headers == {"some": 1}
+def test_add_headers_overrides() -> None:
+ publish_cmd = Response(1, headers={1: "ignored", 2: 2}).as_publish_command()
+ publish_cmd.add_headers({1: 1, 3: 3}, override=True)
+ assert publish_cmd.headers == {1: 1, 2: 2, 3: 3}
diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py
index 8398f4a286..21900fdd09 100644
--- a/tests/cli/conftest.py
+++ b/tests/cli/conftest.py
@@ -13,12 +13,7 @@ def broker():
@pytest.fixture()
def app_without_logger(broker):
- return FastStream(broker, None)
-
-
-@pytest.fixture()
-def app_without_broker():
- return FastStream()
+ return FastStream(broker, logger=None)
@pytest.fixture()
diff --git a/tests/cli/rabbit/test_app.py b/tests/cli/rabbit/test_app.py
index c44d5b05d9..c21e26d996 100644
--- a/tests/cli/rabbit/test_app.py
+++ b/tests/cli/rabbit/test_app.py
@@ -2,7 +2,6 @@
import os
import signal
from contextlib import asynccontextmanager
-from typing import NoReturn
from unittest.mock import AsyncMock, Mock, patch
import anyio
@@ -18,20 +17,6 @@ def test_init(app: FastStream, broker) -> None:
assert app.logger is logger
-def test_init_without_broker(app_without_broker: FastStream) -> None:
- assert app_without_broker.broker is None
-
-
-def test_init_without_logger(app_without_logger: FastStream) -> None:
- assert app_without_logger.logger is None
-
-
-def test_set_broker(broker, app_without_broker: FastStream) -> None:
- assert app_without_broker.broker is None
- app_without_broker.set_broker(broker)
- assert app_without_broker.broker is broker
-
-
def test_log(app: FastStream, app_without_logger: FastStream) -> None:
app._log(logging.INFO, "test")
app_without_logger._log(logging.INFO, "test")
@@ -47,7 +32,7 @@ async def call2() -> None:
await async_mock.call_start2()
assert mock.call_start1.call_count == 1
- test_app = FastStream(on_startup=[call1, call2])
+ test_app = FastStream(AsyncMock(), on_startup=[call1, call2])
await test_app.start()
@@ -57,7 +42,9 @@ async def call2() -> None:
@pytest.mark.asyncio()
async def test_startup_calls_lifespans(
- mock: Mock, app_without_broker: FastStream
+ mock: Mock,
+ app: FastStream,
+ async_mock: AsyncMock,
) -> None:
def call1() -> None:
mock.call_start1()
@@ -67,10 +54,11 @@ def call2() -> None:
mock.call_start2()
assert mock.call_start1.call_count == 1
- app_without_broker.on_startup(call1)
- app_without_broker.on_startup(call2)
+ app.on_startup(call1)
+ app.on_startup(call2)
- await app_without_broker.start()
+ with patch.object(app.broker, "start", async_mock):
+ await app.start()
mock.call_start1.assert_called_once()
mock.call_start2.assert_called_once()
@@ -86,7 +74,7 @@ async def call2() -> None:
await async_mock.call_stop2()
assert mock.call_stop1.call_count == 1
- test_app = FastStream(on_shutdown=[call1, call2])
+ test_app = FastStream(AsyncMock(), on_shutdown=[call1, call2])
await test_app.stop()
@@ -95,9 +83,9 @@ async def call2() -> None:
@pytest.mark.asyncio()
-async def test_shutdown_calls_lifespans(
- mock: Mock, app_without_broker: FastStream
-) -> None:
+async def test_shutdown_calls_lifespans(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
+
def call1() -> None:
mock.call_stop1()
assert not mock.call_stop2.called
@@ -106,10 +94,10 @@ def call2() -> None:
mock.call_stop2()
assert mock.call_stop1.call_count == 1
- app_without_broker.on_shutdown(call1)
- app_without_broker.on_shutdown(call2)
+ app.on_shutdown(call1)
+ app.on_shutdown(call2)
- await app_without_broker.stop()
+ await app.stop()
mock.call_stop1.assert_called_once()
mock.call_stop2.assert_called_once()
@@ -127,14 +115,7 @@ async def call2() -> None:
test_app = FastStream(broker, after_startup=[call1, call2])
- with (
- patch.object(test_app.broker, "start", async_mock.broker_start),
- patch.object(
- test_app.broker,
- "connect",
- async_mock.broker_connect,
- ),
- ):
+ with patch.object(test_app.broker, "start", async_mock.broker_start):
await test_app.start()
mock.after_startup1.assert_called_once()
@@ -228,16 +209,10 @@ async def test_running(async_mock: AsyncMock, app: FastStream) -> None:
with (
patch.object(app.broker, "start", async_mock.broker_run),
- patch.object(
- app.broker,
- "connect",
- async_mock.broker_connect,
- ),
patch.object(app.broker, "close", async_mock.broker_stopped),
):
await app.run()
- async_mock.broker_connect.assert_called_once()
async_mock.broker_run.assert_called_once()
async_mock.broker_stopped.assert_called_once()
@@ -266,23 +241,13 @@ async def lifespan(env: str):
yield
mock.off()
- app = FastStream(app.broker, lifespan=lifespan)
+ app = FastStream(async_mock, lifespan=lifespan)
app.exit()
- with (
- patch.object(app.broker, "start", async_mock.broker_run),
- patch.object(
- app.broker,
- "connect",
- async_mock.broker_connect,
- ),
- patch.object(app.broker, "close", async_mock.broker_stopped),
- ):
- await app.run(run_extra_options={"env": "test"})
+ await app.run(run_extra_options={"env": "test"})
- async_mock.broker_connect.assert_called_once()
- async_mock.broker_run.assert_called_once()
- async_mock.broker_stopped.assert_called_once()
+ async_mock.start.assert_called_once()
+ async_mock.close.assert_called_once()
mock.on.assert_called_once_with("test")
mock.off.assert_called_once()
@@ -290,7 +255,7 @@ async def lifespan(env: str):
@pytest.mark.asyncio()
async def test_test_app(mock: Mock) -> None:
- app = FastStream()
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
@@ -303,8 +268,8 @@ async def test_test_app(mock: Mock) -> None:
@pytest.mark.asyncio()
-async def test_test_app_with_excp(mock: Mock) -> NoReturn:
- app = FastStream()
+async def test_test_app_with_excp(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
@@ -318,7 +283,7 @@ async def test_test_app_with_excp(mock: Mock) -> NoReturn:
def test_sync_test_app(mock: Mock) -> None:
- app = FastStream()
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
@@ -330,8 +295,8 @@ def test_sync_test_app(mock: Mock) -> None:
mock.off.assert_called_once()
-def test_sync_test_app_with_excp(mock: Mock) -> NoReturn:
- app = FastStream()
+def test_sync_test_app_with_excp(mock: Mock) -> None:
+ app = FastStream(AsyncMock())
app.on_startup(mock.on)
app.on_shutdown(mock.off)
@@ -355,11 +320,6 @@ async def lifespan(env: str):
with (
patch.object(app.broker, "start", async_mock.broker_run),
- patch.object(
- app.broker,
- "connect",
- async_mock.broker_connect,
- ),
patch.object(app.broker, "close", async_mock.broker_stopped),
):
async with TestApp(app, {"env": "test"}):
@@ -368,7 +328,6 @@ async def lifespan(env: str):
async_mock.on.assert_awaited_once_with("test")
async_mock.off.assert_awaited_once()
async_mock.broker_run.assert_called_once()
- async_mock.broker_connect.assert_called_once()
async_mock.broker_stopped.assert_called_once()
@@ -388,7 +347,6 @@ async def lifespan(env: str):
"close",
async_mock.broker_stopped,
),
- patch.object(app.broker, "connect", async_mock.broker_connect),
TestApp(
app,
{"env": "test"},
@@ -399,7 +357,6 @@ async def lifespan(env: str):
async_mock.on.assert_awaited_once_with("test")
async_mock.off.assert_awaited_once()
async_mock.broker_run.assert_called_once()
- async_mock.broker_connect.assert_called_once()
async_mock.broker_stopped.assert_called_once()
@@ -408,11 +365,6 @@ async def lifespan(env: str):
async def test_stop_with_sigint(async_mock, app: FastStream) -> None:
with (
patch.object(app.broker, "start", async_mock.broker_run_sigint),
- patch.object(
- app.broker,
- "connect",
- async_mock.broker_connect,
- ),
patch.object(app.broker, "close", async_mock.broker_stopped_sigint),
):
async with anyio.create_task_group() as tg:
@@ -420,7 +372,6 @@ async def test_stop_with_sigint(async_mock, app: FastStream) -> None:
tg.start_soon(_kill, signal.SIGINT)
async_mock.broker_run_sigint.assert_called_once()
- async_mock.broker_connect.assert_called_once()
async_mock.broker_stopped_sigint.assert_called_once()
@@ -429,11 +380,6 @@ async def test_stop_with_sigint(async_mock, app: FastStream) -> None:
async def test_stop_with_sigterm(async_mock, app: FastStream) -> None:
with (
patch.object(app.broker, "start", async_mock.broker_run_sigterm),
- patch.object(
- app.broker,
- "connect",
- async_mock.broker_connect,
- ),
patch.object(app.broker, "close", async_mock.broker_stopped_sigterm),
):
async with anyio.create_task_group() as tg:
@@ -441,7 +387,6 @@ async def test_stop_with_sigterm(async_mock, app: FastStream) -> None:
tg.start_soon(_kill, signal.SIGTERM)
async_mock.broker_run_sigterm.assert_called_once()
- async_mock.broker_connect.assert_called_once()
async_mock.broker_stopped_sigterm.assert_called_once()
@@ -461,11 +406,6 @@ async def test_run_asgi(async_mock: AsyncMock, app: FastStream) -> None:
with (
patch.object(app.broker, "start", async_mock.broker_run),
- patch.object(
- app.broker,
- "connect",
- async_mock.broker_connect,
- ),
patch.object(app.broker, "close", async_mock.broker_stopped),
):
async with anyio.create_task_group() as tg:
@@ -473,7 +413,6 @@ async def test_run_asgi(async_mock: AsyncMock, app: FastStream) -> None:
tg.start_soon(_kill, signal.SIGINT)
async_mock.broker_run.assert_called_once()
- async_mock.broker_connect.assert_called_once()
async_mock.broker_stopped.assert_called_once()
diff --git a/tests/cli/rabbit/test_logs.py b/tests/cli/rabbit/test_logs.py
index 50a6187911..4ac67ae728 100644
--- a/tests/cli/rabbit/test_logs.py
+++ b/tests/cli/rabbit/test_logs.py
@@ -20,20 +20,16 @@
)
def test_set_level(level, app: FastStream) -> None:
level = get_log_level(level)
- app._setup()
set_log_level(level, app)
- broker_logger = app.broker._state.logger_state.logger.logger
+ broker_state = app.broker._state.get()
+ broker_state._setup_logger_state()
+ broker_logger = broker_state.logger_state.logger.logger
assert app.logger.level is broker_logger.level is level
@pytest.mark.parametrize(
("level", "app"),
(
- pytest.param(
- logging.CRITICAL,
- FastStream(),
- id="empty app",
- ),
pytest.param(
logging.CRITICAL,
FastStream(RabbitBroker(), logger=None),
@@ -56,8 +52,8 @@ def test_set_level_to_none(level, app: FastStream) -> None:
set_log_level(get_log_level(level), app)
-def test_set_default() -> None:
- app = FastStream()
+def test_set_default(broker) -> None:
+ app = FastStream(broker)
level = "wrong_level"
set_log_level(get_log_level(level), app)
assert app.logger.level is logging.INFO
diff --git a/tests/cli/test_asyncapi_docs.py b/tests/cli/test_asyncapi_docs.py
index 42d321ceaa..9deb1877b9 100644
--- a/tests/cli/test_asyncapi_docs.py
+++ b/tests/cli/test_asyncapi_docs.py
@@ -75,7 +75,7 @@ def test_serve_asyncapi_docs(
m.setattr(HTTPServer, "serve_forever", mock)
r = runner.invoke(cli, SERVE_CMD + [kafka_ascynapi_project]) # noqa: RUF005
- assert r.exit_code == 0
+ assert r.exit_code == 0, r.exc_info
mock.assert_called_once()
@@ -94,7 +94,7 @@ def test_serve_asyncapi_json_schema(
m.setattr(HTTPServer, "serve_forever", mock)
r = runner.invoke(cli, SERVE_CMD + [str(schema_path)]) # noqa: RUF005
- assert r.exit_code == 0
+ assert r.exit_code == 0, r.exc_info
mock.assert_called_once()
schema_path.unlink()
@@ -115,7 +115,7 @@ def test_serve_asyncapi_yaml_schema(
m.setattr(HTTPServer, "serve_forever", mock)
r = runner.invoke(cli, SERVE_CMD + [str(schema_path)]) # noqa: RUF005
- assert r.exit_code == 0
+ assert r.exit_code == 0, r.exc_info
mock.assert_called_once()
schema_path.unlink()
diff --git a/tests/cli/test_publish.py b/tests/cli/test_publish.py
index 383b89c8d1..3f55bd6e82 100644
--- a/tests/cli/test_publish.py
+++ b/tests/cli/test_publish.py
@@ -1,10 +1,11 @@
+from typing import TYPE_CHECKING
from unittest.mock import AsyncMock, patch
-from dirty_equals import IsPartialDict
from typer.testing import CliRunner
from faststream import FastStream
from faststream._internal.cli.main import cli as faststream_app
+from faststream.response.publish_type import PublishType
from tests.marks import (
require_aiokafka,
require_aiopika,
@@ -13,6 +14,15 @@
require_redis,
)
+if TYPE_CHECKING:
+ from faststream.confluent.response import (
+ KafkaPublishCommand as ConfluentPublishCommand,
+ )
+ from faststream.kafka.response import KafkaPublishCommand
+ from faststream.nats.response import NatsPublishCommand
+ from faststream.rabbit.response import RabbitPublishCommand
+ from faststream.redis.response import RedisPublishCommand
+
def get_mock_app(broker_type, producer_type) -> tuple[FastStream, AsyncMock]:
broker = broker_type()
@@ -21,7 +31,7 @@ def get_mock_app(broker_type, producer_type) -> tuple[FastStream, AsyncMock]:
mock_producer.publish = AsyncMock()
mock_producer._parser = AsyncMock()
mock_producer._decoder = AsyncMock()
- broker._producer = mock_producer
+ broker._state.patch_value(producer=mock_producer)
return FastStream(broker), mock_producer
@@ -46,10 +56,6 @@ def test_publish_command_with_redis_options(runner) -> None:
"channelname",
"--reply_to",
"tester",
- "--list",
- "listname",
- "--stream",
- "streamname",
"--correlation_id",
"someId",
],
@@ -57,14 +63,11 @@ def test_publish_command_with_redis_options(runner) -> None:
assert result.exit_code == 0
- assert producer_mock.publish.call_args.args[0] == "hello world"
- assert producer_mock.publish.call_args.kwargs == IsPartialDict(
- reply_to="tester",
- stream="streamname",
- list="listname",
- channel="channelname",
- correlation_id="someId",
- )
+ cmd: RedisPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.reply_to == "tester"
+ assert cmd.destination == "channelname"
+ assert cmd.correlation_id == "someId"
@require_confluent
@@ -93,11 +96,10 @@ def test_publish_command_with_confluent_options(runner) -> None:
assert result.exit_code == 0
- assert producer_mock.publish.call_args.args[0] == "hello world"
- assert producer_mock.publish.call_args.kwargs == IsPartialDict(
- topic="topicname",
- correlation_id="someId",
- )
+ cmd: ConfluentPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "topicname"
+ assert cmd.correlation_id == "someId"
@require_aiokafka
@@ -125,11 +127,11 @@ def test_publish_command_with_kafka_options(runner) -> None:
)
assert result.exit_code == 0
- assert producer_mock.publish.call_args.args[0] == "hello world"
- assert producer_mock.publish.call_args.kwargs == IsPartialDict(
- topic="topicname",
- correlation_id="someId",
- )
+
+ cmd: KafkaPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "topicname"
+ assert cmd.correlation_id == "someId"
@require_nats
@@ -160,12 +162,11 @@ def test_publish_command_with_nats_options(runner) -> None:
assert result.exit_code == 0
- assert producer_mock.publish.call_args.args[0] == "hello world"
- assert producer_mock.publish.call_args.kwargs == IsPartialDict(
- subject="subjectname",
- reply_to="tester",
- correlation_id="someId",
- )
+ cmd: NatsPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "subjectname"
+ assert cmd.reply_to == "tester"
+ assert cmd.correlation_id == "someId"
@require_aiopika
@@ -185,6 +186,8 @@ def test_publish_command_with_rabbit_options(runner) -> None:
"publish",
"fastream:app",
"hello world",
+ "--queue",
+ "queuename",
"--correlation_id",
"someId",
],
@@ -192,12 +195,10 @@ def test_publish_command_with_rabbit_options(runner) -> None:
assert result.exit_code == 0
- assert producer_mock.publish.call_args.args[0] == "hello world"
- assert producer_mock.publish.call_args.kwargs == IsPartialDict(
- {
- "correlation_id": "someId",
- },
- )
+ cmd: RabbitPublishCommand = producer_mock.publish.call_args.args[0]
+ assert cmd.body == "hello world"
+ assert cmd.destination == "queuename"
+ assert cmd.correlation_id == "someId"
@require_nats
@@ -225,8 +226,8 @@ def test_publish_nats_request_command(runner: CliRunner) -> None:
],
)
- assert producer_mock.request.call_args.args[0] == "hello world"
- assert producer_mock.request.call_args.kwargs == IsPartialDict(
- subject="subjectname",
- timeout=1.0,
- )
+ cmd: NatsPublishCommand = producer_mock.request.call_args.args[0]
+
+ assert cmd.destination == "subjectname"
+ assert cmd.timeout == 1.0
+ assert cmd.publish_type is PublishType.REQUEST
diff --git a/tests/cli/test_run.py b/tests/cli/test_run.py
index e696389070..7edf3152ce 100644
--- a/tests/cli/test_run.py
+++ b/tests/cli/test_run.py
@@ -9,7 +9,7 @@
def test_run(runner: CliRunner) -> None:
- app = FastStream()
+ app = FastStream(MagicMock())
app.run = AsyncMock()
with patch(
@@ -35,7 +35,7 @@ def test_run(runner: CliRunner) -> None:
def test_run_factory(runner: CliRunner) -> None:
- app = FastStream()
+ app = FastStream(MagicMock())
app.run = AsyncMock()
app_factory = MagicMock(return_value=app)
@@ -58,7 +58,7 @@ def test_run_factory(runner: CliRunner) -> None:
def test_run_workers(runner: CliRunner) -> None:
- app = FastStream()
+ app = FastStream(MagicMock())
app.run = AsyncMock()
with (
@@ -85,7 +85,7 @@ def test_run_workers(runner: CliRunner) -> None:
def test_run_factory_with_workers(runner: CliRunner) -> None:
- app = FastStream()
+ app = FastStream(MagicMock())
app.run = AsyncMock()
app_factory = MagicMock(return_value=app)
@@ -113,7 +113,7 @@ def test_run_factory_with_workers(runner: CliRunner) -> None:
def test_run_reloader(runner: CliRunner) -> None:
- app = FastStream()
+ app = FastStream(MagicMock())
app.run = AsyncMock()
with (
@@ -150,7 +150,7 @@ def test_run_reloader(runner: CliRunner) -> None:
def test_run_reloader_with_factory(runner: CliRunner) -> None:
- app = FastStream()
+ app = FastStream(MagicMock())
app.run = AsyncMock()
app_factory = MagicMock(return_value=app)
diff --git a/tests/cli/test_run_asgi.py b/tests/cli/test_run_asgi.py
index 16073d8a08..49825f932b 100644
--- a/tests/cli/test_run_asgi.py
+++ b/tests/cli/test_run_asgi.py
@@ -9,7 +9,7 @@
def test_run_as_asgi(runner: CliRunner) -> None:
- app = AsgiFastStream()
+ app = AsgiFastStream(AsyncMock())
app.run = AsyncMock()
with patch(
@@ -36,7 +36,7 @@ def test_run_as_asgi(runner: CliRunner) -> None:
@pytest.mark.parametrize("workers", (pytest.param(1), pytest.param(2), pytest.param(5)))
def test_run_as_asgi_with_workers(runner: CliRunner, workers: int) -> None:
- app = AsgiFastStream()
+ app = AsgiFastStream(AsyncMock())
app.run = AsyncMock()
with patch(
@@ -66,7 +66,7 @@ def test_run_as_asgi_with_workers(runner: CliRunner, workers: int) -> None:
def test_run_as_asgi_callable(runner: CliRunner) -> None:
- app = AsgiFastStream()
+ app = AsgiFastStream(AsyncMock())
app.run = AsyncMock()
app_factory = Mock(return_value=app)
diff --git a/tests/conftest.py b/tests/conftest.py
index ce7cfbd8f0..02f1c26724 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -6,10 +6,7 @@
from typer.testing import CliRunner
from faststream.__about__ import __version__
-from faststream._internal.context import (
- ContextRepo,
- context as global_context,
-)
+from faststream._internal.context import ContextRepo
@pytest.hookimpl(tryfirst=True)
@@ -58,8 +55,7 @@ def version() -> str:
@pytest.fixture()
def context() -> ContextRepo:
- yield global_context
- global_context.clear()
+ return ContextRepo()
@pytest.fixture()
diff --git a/tests/opentelemetry/basic.py b/tests/opentelemetry/basic.py
index 12f00dfbfd..9a8f4dd176 100644
--- a/tests/opentelemetry/basic.py
+++ b/tests/opentelemetry/basic.py
@@ -4,6 +4,8 @@
import pytest
from dirty_equals import IsFloat, IsUUID
+from opentelemetry import baggage, context
+from opentelemetry.baggage.propagation import W3CBaggagePropagator
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics._internal.point import Metric
from opentelemetry.sdk.metrics.export import InMemoryMetricReader
@@ -166,12 +168,13 @@ def assert_metrics(
async def test_subscriber_create_publish_process_span(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
broker = self.get_broker(middlewares=(mid,))
@@ -205,12 +208,13 @@ async def handler(m) -> None:
async def test_chain_subscriber_publisher(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
broker = self.get_broker(middlewares=(mid,))
@@ -265,12 +269,13 @@ async def handler2(m) -> None:
async def test_no_trace_context_create_process_span(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
broker = self.get_broker(middlewares=(mid,))
@@ -286,7 +291,7 @@ async def handler(m) -> None:
async with broker:
await broker.start()
- broker._middlewares = ()
+ broker.middlewares = ()
tasks = (
asyncio.create_task(broker.publish(msg, queue)),
asyncio.create_task(event.wait()),
@@ -304,12 +309,13 @@ async def handler(m) -> None:
async def test_metrics(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(meter_provider=meter_provider)
broker = self.get_broker(middlewares=(mid,))
@@ -340,12 +346,13 @@ async def handler(m) -> None:
async def test_error_metrics(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
metric_reader: InMemoryMetricReader,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(meter_provider=meter_provider)
broker = self.get_broker(middlewares=(mid,))
expected_value_type = "ValueError"
@@ -380,12 +387,13 @@ async def handler(m) -> None:
async def test_span_in_context(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(tracer_provider=tracer_provider)
broker = self.get_broker(middlewares=(mid,), apply_types=True)
@@ -413,10 +421,11 @@ async def handler(m, span: CurrentSpan) -> None:
async def test_get_baggage(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_baggage = {"foo": "bar"}
@@ -454,10 +463,11 @@ async def handler1(m, baggage: CurrentBaggage) -> None:
async def test_clear_baggage(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
broker = self.get_broker(middlewares=(mid,), apply_types=True)
@@ -503,10 +513,11 @@ async def handler2(m, baggage: CurrentBaggage) -> None:
async def test_modify_baggage(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class()
broker = self.get_broker(middlewares=(mid,), apply_types=True)
expected_baggage = {"baz": "bar", "bar": "baz"}
@@ -551,3 +562,46 @@ async def handler2(m, baggage: CurrentBaggage) -> None:
assert event.is_set()
mock.assert_called_once_with(msg)
+
+ async def test_get_baggage_from_headers(
+ self,
+ queue: str,
+ ) -> None:
+ event = asyncio.Event()
+
+ mid = self.telemetry_middleware_class()
+ broker = self.get_broker(middlewares=(mid,), apply_types=True)
+
+ args, kwargs = self.get_subscriber_params(queue)
+
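+ # Build a context carrying baggage and serialize it into headers via the W3C
+ # propagator, mirroring what an upstream service would send.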
+ expected_baggage = {"foo": "bar", "bar": "baz"}
+
+ ctx = context.Context()
+ for key, value in expected_baggage.items():
+ ctx = baggage.set_baggage(key, value, context=ctx)
+
+ propagator = W3CBaggagePropagator()
+ headers = {}
+ propagator.inject(headers, context=ctx)
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler():
+ baggage_instance = Baggage.from_headers(headers)
+ extracted_baggage = baggage_instance.get_all()
+ assert extracted_baggage == expected_baggage
+ event.set()
+
+ broker = self.patch_broker(broker)
+ msg = "start"
+
+ async with broker:
+ await broker.start()
+ tasks = (
+ asyncio.create_task(broker.publish(msg, queue, headers=headers)),
+ asyncio.create_task(event.wait()),
+ )
+ await asyncio.wait(tasks, timeout=self.timeout)
+
+ assert event.is_set()
diff --git a/tests/opentelemetry/confluent/test_confluent.py b/tests/opentelemetry/confluent/test_confluent.py
index e3f6a697bb..088e1d551c 100644
--- a/tests/opentelemetry/confluent/test_confluent.py
+++ b/tests/opentelemetry/confluent/test_confluent.py
@@ -64,7 +64,6 @@ def assert_span(
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
@@ -72,6 +71,8 @@ async def test_batch(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
@@ -198,7 +199,6 @@ async def handler(msg, baggage: CurrentBaggage) -> None:
async def test_single_publish_with_batch_consume(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
@@ -206,6 +206,8 @@ async def test_single_publish_with_batch_consume(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
diff --git a/tests/opentelemetry/kafka/test_kafka.py b/tests/opentelemetry/kafka/test_kafka.py
index 3f05cda5b1..79e93b82df 100644
--- a/tests/opentelemetry/kafka/test_kafka.py
+++ b/tests/opentelemetry/kafka/test_kafka.py
@@ -65,7 +65,6 @@ def assert_span(
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
@@ -73,6 +72,8 @@ async def test_batch(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
@@ -199,7 +200,6 @@ async def handler(msg, baggage: CurrentBaggage) -> None:
async def test_single_publish_with_batch_consume(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
@@ -207,6 +207,8 @@ async def test_single_publish_with_batch_consume(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
diff --git a/tests/opentelemetry/nats/test_nats.py b/tests/opentelemetry/nats/test_nats.py
index 8dd8238e15..efa5153a50 100644
--- a/tests/opentelemetry/nats/test_nats.py
+++ b/tests/opentelemetry/nats/test_nats.py
@@ -32,7 +32,6 @@ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> NatsBroker:
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
stream: JStream,
@@ -41,6 +40,8 @@ async def test_batch(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
diff --git a/tests/opentelemetry/redis/test_redis.py b/tests/opentelemetry/redis/test_redis.py
index cbe1a107b8..bdfc49ceb1 100644
--- a/tests/opentelemetry/redis/test_redis.py
+++ b/tests/opentelemetry/redis/test_redis.py
@@ -32,7 +32,6 @@ def get_broker(self, apply_types: bool = False, **kwargs: Any) -> RedisBroker:
async def test_batch(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
@@ -40,6 +39,8 @@ async def test_batch(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
@@ -151,7 +152,6 @@ async def handler(msg, baggage: CurrentBaggage) -> None:
async def test_single_publish_with_batch_consume(
self,
- event: asyncio.Event,
queue: str,
mock: Mock,
meter_provider: MeterProvider,
@@ -159,6 +159,8 @@ async def test_single_publish_with_batch_consume(
tracer_provider: TracerProvider,
trace_exporter: InMemorySpanExporter,
) -> None:
+ event = asyncio.Event()
+
mid = self.telemetry_middleware_class(
meter_provider=meter_provider,
tracer_provider=tracer_provider,
diff --git a/tests/prometheus/__init__.py b/tests/prometheus/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/prometheus/basic.py b/tests/prometheus/basic.py
new file mode 100644
index 0000000000..6a5f0e303e
--- /dev/null
+++ b/tests/prometheus/basic.py
@@ -0,0 +1,268 @@
+import asyncio
+from typing import Any, Optional
+from unittest.mock import ANY, Mock, call
+
+import pytest
+from prometheus_client import CollectorRegistry
+
+from faststream import Context
+from faststream.exceptions import RejectMessage
+from faststream.message import AckStatus
+from faststream.prometheus import MetricsSettingsProvider
+from faststream.prometheus.middleware import (
+ PROCESSING_STATUS_BY_ACK_STATUS,
+ PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP,
+)
+from faststream.prometheus.types import ProcessingStatus
+from tests.brokers.base.basic import BaseTestcaseConfig
+
+
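+# Shared Prometheus testcase: concrete broker suites supply get_broker() and
+# get_middleware(), and the assert_* helpers verify every metrics-manager call.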
+@pytest.mark.asyncio()
+class LocalPrometheusTestcase(BaseTestcaseConfig):
+ def get_broker(self, apply_types=False, **kwargs):
+ raise NotImplementedError
+
+ def get_middleware(self, **kwargs):
+ raise NotImplementedError
+
+ @staticmethod
+ def consume_destination_name(queue: str) -> str:
+ return queue
+
+ @property
+ def settings_provider_factory(self):
+ return self.get_middleware(
+ registry=CollectorRegistry()
+ )._settings_provider_factory
+
+ @pytest.mark.parametrize(
+ (
+ "status",
+ "exception_class",
+ ),
+ (
+ pytest.param(
+ AckStatus.ACKED,
+ RejectMessage,
+ id="acked status with reject message exception",
+ ),
+ pytest.param(
+ AckStatus.ACKED,
+ Exception,
+ id="acked status with not handler exception",
+ ),
+ pytest.param(AckStatus.ACKED, None, id="acked status without exception"),
+ pytest.param(AckStatus.NACKED, None, id="nacked status without exception"),
+ pytest.param(
+ AckStatus.REJECTED,
+ None,
+ id="rejected status without exception",
+ ),
+ ),
+ )
+ async def test_metrics(
+ self,
+ queue: str,
+ status: AckStatus,
+ exception_class: Optional[type[Exception]],
+ ):
+ event = asyncio.Event()
+
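+ # Swap the middleware's metrics manager for a Mock so the test can assert
+ # exact call arguments instead of scraping the CollectorRegistry.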
+ middleware = self.get_middleware(registry=CollectorRegistry())
+ metrics_manager_mock = Mock()
+ middleware._metrics_manager = metrics_manager_mock
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ args, kwargs = self.get_subscriber_params(queue)
+
+ message = None
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler(m=Context("message")):
+ event.set()
+
+ nonlocal message
+ message = m
+
+ if exception_class:
+ raise exception_class
+
+ if status == AckStatus.ACKED:
+ await message.ack()
+ elif status == AckStatus.NACKED:
+ await message.nack()
+ elif status == AckStatus.REJECTED:
+ await message.reject()
+
+ async with broker:
+ await broker.start()
+ tasks = (
+ asyncio.create_task(broker.publish("hello", queue)),
+ asyncio.create_task(event.wait()),
+ )
+ await asyncio.wait(tasks, timeout=self.timeout)
+
+ assert event.is_set()
+ self.assert_consume_metrics(
+ metrics_manager=metrics_manager_mock,
+ message=message,
+ exception_class=exception_class,
+ )
+ self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
+
+ def assert_consume_metrics(
+ self,
+ *,
+ metrics_manager: Any,
+ message: Any,
+ exception_class: Optional[type[Exception]],
+ ):
+ settings_provider = self.settings_provider_factory(message.raw_message)
+ consume_attrs = settings_provider.get_consume_attrs_from_message(message)
+ assert metrics_manager.add_received_message.mock_calls == [
+ call(
+ amount=consume_attrs["messages_count"],
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ ),
+ ]
+
+ assert metrics_manager.observe_received_messages_size.mock_calls == [
+ call(
+ size=consume_attrs["message_size"],
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ ),
+ ]
+
+ assert metrics_manager.add_received_message_in_process.mock_calls == [
+ call(
+ amount=consume_attrs["messages_count"],
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ ),
+ ]
+ assert metrics_manager.remove_received_message_in_process.mock_calls == [
+ call(
+ amount=consume_attrs["messages_count"],
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ )
+ ]
+
+ assert (
+ metrics_manager.observe_received_processed_message_duration.mock_calls
+ == [
+ call(
+ duration=ANY,
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ ),
+ ]
+ )
+
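+ # Derive the expected processing status: handler exceptions map through the
+ # exception table (falling back to error); otherwise the ack state decides.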
+ status = ProcessingStatus.acked
+
+ if exception_class:
+ status = (
+ PROCESSING_STATUS_BY_HANDLER_EXCEPTION_MAP.get(exception_class)
+ or ProcessingStatus.error
+ )
+ elif message.committed:
+ status = PROCESSING_STATUS_BY_ACK_STATUS[message.committed]
+
+ assert metrics_manager.add_received_processed_message.mock_calls == [
+ call(
+ amount=consume_attrs["messages_count"],
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ status=status.value,
+ ),
+ ]
+
+ if status == ProcessingStatus.error:
+ assert (
+ metrics_manager.add_received_processed_message_exception.mock_calls
+ == [
+ call(
+ broker=settings_provider.messaging_system,
+ handler=consume_attrs["destination_name"],
+ exception_type=exception_class.__name__,
+ ),
+ ]
+ )
+
+ def assert_publish_metrics(self, metrics_manager: Any):
+ settings_provider = self.settings_provider_factory(None)
+ assert metrics_manager.observe_published_message_duration.mock_calls == [
+ call(
+ duration=ANY, broker=settings_provider.messaging_system, destination=ANY
+ ),
+ ]
+ assert metrics_manager.add_published_message.mock_calls == [
+ call(
+ amount=ANY,
+ broker=settings_provider.messaging_system,
+ destination=ANY,
+ status="success",
+ ),
+ ]
+
+
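+# An RPC round trip publishes and consumes in a single request(), so the received-
+# and published-message counters must each tick exactly once.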
+class LocalRPCPrometheusTestcase:
+ @pytest.mark.asyncio()
+ async def test_rpc_request(
+ self,
+ queue: str,
+ ) -> None:
+ event = asyncio.Event()
+
+ middleware = self.get_middleware(registry=CollectorRegistry())
+ metrics_manager_mock = Mock()
+ middleware._metrics_manager = metrics_manager_mock
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ @broker.subscriber(queue)
+ async def handle():
+ event.set()
+ return ""
+
+ async with self.patch_broker(broker) as br:
+ await br.start()
+
+ await asyncio.wait_for(
+ br.request("", queue),
+ timeout=3,
+ )
+
+ assert event.is_set()
+ metrics_manager_mock.add_received_message.assert_called_once()
+ metrics_manager_mock.add_published_message.assert_called_once()
+
+
+class LocalMetricsSettingsProviderTestcase:
+ messaging_system: str
+
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ raise NotImplementedError
+
+ def test_messaging_system(self) -> None:
+ provider = self.get_provider()
+ assert provider.messaging_system == self.messaging_system
+
+ def test_get_consume_attrs_from_message(self, *args, **kwargs) -> None:
+ raise NotImplementedError
+
+ def test_get_publish_destination_name_from_cmd(self, *args, **kwargs) -> None:
+ raise NotImplementedError
diff --git a/tests/prometheus/confluent/__init__.py b/tests/prometheus/confluent/__init__.py
new file mode 100644
index 0000000000..c4a1803708
--- /dev/null
+++ b/tests/prometheus/confluent/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("confluent_kafka")
diff --git a/tests/prometheus/confluent/test_confluent.py b/tests/prometheus/confluent/test_confluent.py
new file mode 100644
index 0000000000..84714bd280
--- /dev/null
+++ b/tests/prometheus/confluent/test_confluent.py
@@ -0,0 +1,80 @@
+import asyncio
+from unittest.mock import Mock
+
+import pytest
+from prometheus_client import CollectorRegistry
+
+from faststream import Context
+from faststream.confluent import KafkaBroker
+from faststream.confluent.prometheus.middleware import KafkaPrometheusMiddleware
+from tests.brokers.confluent.basic import ConfluentTestcaseConfig
+from tests.brokers.confluent.test_consume import TestConsume
+from tests.brokers.confluent.test_publish import TestPublish
+from tests.prometheus.basic import LocalPrometheusTestcase
+
+
+@pytest.mark.confluent()
+class TestPrometheus(ConfluentTestcaseConfig, LocalPrometheusTestcase):
+ def get_broker(self, apply_types=False, **kwargs):
+ return KafkaBroker(apply_types=apply_types, **kwargs)
+
+ def get_middleware(self, **kwargs):
+ return KafkaPrometheusMiddleware(**kwargs)
+
+ async def test_metrics_batch(
+ self,
+ queue: str,
+ ):
+ event = asyncio.Event()
+
+ middleware = self.get_middleware(registry=CollectorRegistry())
+ metrics_manager_mock = Mock()
+ middleware._metrics_manager = metrics_manager_mock
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ args, kwargs = self.get_subscriber_params(queue, batch=True)
+ message = None
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler(m=Context("message")):
+ event.set()
+
+ nonlocal message
+ message = m
+
+ async with broker:
+ await broker.start()
+ tasks = (
+ asyncio.create_task(
+ broker.publish_batch("hello", "world", topic=queue)
+ ),
+ asyncio.create_task(event.wait()),
+ )
+ await asyncio.wait(tasks, timeout=self.timeout)
+
+ assert event.is_set()
+ self.assert_consume_metrics(
+ metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ )
+ self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
+
+
+@pytest.mark.confluent()
+class TestPublishWithPrometheus(TestPublish):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return KafkaBroker(
+ middlewares=(KafkaPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
+
+
+@pytest.mark.confluent()
+class TestConsumeWithPrometheus(TestConsume):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return KafkaBroker(
+ middlewares=(KafkaPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
diff --git a/tests/prometheus/confluent/test_provider.py b/tests/prometheus/confluent/test_provider.py
new file mode 100644
index 0000000000..6949a1ff26
--- /dev/null
+++ b/tests/prometheus/confluent/test_provider.py
@@ -0,0 +1,110 @@
+import random
+from types import SimpleNamespace
+
+import pytest
+
+from faststream.confluent.prometheus.provider import (
+ BatchConfluentMetricsSettingsProvider,
+ ConfluentMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from faststream.prometheus import MetricsSettingsProvider
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+
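+# SimpleNamespace stands in for real messages and publish commands here: the
+# providers only read the attributes set below, so no live broker is required.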
+class LocalBaseConfluentMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase
+):
+ messaging_system = "kafka"
+
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ provider = self.get_provider()
+ command = SimpleNamespace(destination=queue)
+
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestKafkaMetricsSettingsProvider(
+ LocalBaseConfluentMetricsSettingsProviderTestcase
+):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return ConfluentMetricsSettingsProvider()
+
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue,
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+
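+ # confluent-kafka exposes Message.topic() as a method, hence the lambda;
+ # the aiokafka ConsumerRecord equivalent is a plain attribute.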
+ message = SimpleNamespace(
+ body=body, raw_message=SimpleNamespace(topic=lambda: queue)
+ )
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchConfluentMetricsSettingsProvider(
+ LocalBaseConfluentMetricsSettingsProviderTestcase
+):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return BatchConfluentMetricsSettingsProvider()
+
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = [b"Hi ", b"again, ", b"FastStream!"]
+ message = SimpleNamespace(
+ body=body,
+ raw_message=[
+ SimpleNamespace(topic=lambda: queue)
+ for _ in range(random.randint(a=2, b=10))
+ ],
+ )
+ expected_attrs = {
+ "destination_name": message.raw_message[0].topic(),
+ "message_size": len(bytearray().join(body)),
+ "messages_count": len(message.raw_message),
+ }
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ (SimpleNamespace(), SimpleNamespace()),
+ BatchConfluentMetricsSettingsProvider(),
+ id="batch message",
+ ),
+ pytest.param(
+ SimpleNamespace(),
+ ConfluentMetricsSettingsProvider(),
+ id="single message",
+ ),
+ pytest.param(
+ None,
+ ConfluentMetricsSettingsProvider(),
+ id="None message",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/kafka/__init__.py b/tests/prometheus/kafka/__init__.py
new file mode 100644
index 0000000000..bd6bc708fc
--- /dev/null
+++ b/tests/prometheus/kafka/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("aiokafka")
diff --git a/tests/prometheus/kafka/test_kafka.py b/tests/prometheus/kafka/test_kafka.py
new file mode 100644
index 0000000000..7ba5ba6f82
--- /dev/null
+++ b/tests/prometheus/kafka/test_kafka.py
@@ -0,0 +1,85 @@
+import asyncio
+from unittest.mock import Mock
+
+import pytest
+from prometheus_client import CollectorRegistry
+
+from faststream import Context
+from faststream.kafka import KafkaBroker
+from faststream.kafka.prometheus.middleware import KafkaPrometheusMiddleware
+from tests.brokers.kafka.test_consume import TestConsume
+from tests.brokers.kafka.test_publish import TestPublish
+from tests.prometheus.basic import LocalPrometheusTestcase
+
+
+@pytest.mark.kafka()
+class TestPrometheus(LocalPrometheusTestcase):
+ def get_broker(self, apply_types=False, **kwargs):
+ return KafkaBroker(apply_types=apply_types, **kwargs)
+
+ def get_middleware(self, **kwargs):
+ return KafkaPrometheusMiddleware(**kwargs)
+
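+ # Batch variant: publish_batch() sends two messages consumed as one batched
+ # delivery, exercising the batch metrics settings provider.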
+ async def test_metrics_batch(
+ self,
+ queue: str,
+ ):
+ event = asyncio.Event()
+
+ middleware = self.get_middleware(registry=CollectorRegistry())
+ metrics_manager_mock = Mock()
+ middleware._metrics_manager = metrics_manager_mock
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ args, kwargs = self.get_subscriber_params(queue, batch=True)
+ message = None
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler(m=Context("message")):
+ event.set()
+
+ nonlocal message
+ message = m
+
+ async with broker:
+ await broker.start()
+ tasks = (
+ asyncio.create_task(
+ broker.publish_batch("hello", "world", topic=queue)
+ ),
+ asyncio.create_task(event.wait()),
+ )
+ await asyncio.wait(tasks, timeout=self.timeout)
+
+ assert event.is_set()
+ self.assert_consume_metrics(
+ metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ )
+ self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
+
+
+@pytest.mark.kafka()
+class TestPublishWithPrometheus(TestPublish):
+ def get_broker(
+ self,
+ apply_types: bool = False,
+ **kwargs,
+ ):
+ return KafkaBroker(
+ middlewares=(KafkaPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
+
+
+@pytest.mark.kafka()
+class TestConsumeWithPrometheus(TestConsume):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return KafkaBroker(
+ middlewares=(KafkaPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
diff --git a/tests/prometheus/kafka/test_provider.py b/tests/prometheus/kafka/test_provider.py
new file mode 100644
index 0000000000..1e0c980981
--- /dev/null
+++ b/tests/prometheus/kafka/test_provider.py
@@ -0,0 +1,101 @@
+import random
+from types import SimpleNamespace
+
+import pytest
+
+from faststream.kafka.prometheus.provider import (
+ BatchKafkaMetricsSettingsProvider,
+ KafkaMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from faststream.prometheus import MetricsSettingsProvider
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+
+class LocalBaseKafkaMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase
+):
+ messaging_system = "kafka"
+
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ provider = self.get_provider()
+ command = SimpleNamespace(destination=queue)
+
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestKafkaMetricsSettingsProvider(LocalBaseKafkaMetricsSettingsProviderTestcase):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return KafkaMetricsSettingsProvider()
+
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue,
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+
+ message = SimpleNamespace(body=body, raw_message=SimpleNamespace(topic=queue))
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchKafkaMetricsSettingsProvider(
+ LocalBaseKafkaMetricsSettingsProviderTestcase
+):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return BatchKafkaMetricsSettingsProvider()
+
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = [b"Hi ", b"again, ", b"FastStream!"]
+ message = SimpleNamespace(
+ body=body,
+ raw_message=[
+ SimpleNamespace(topic=queue) for _ in range(random.randint(a=2, b=10))
+ ],
+ )
+ expected_attrs = {
+ "destination_name": message.raw_message[0].topic,
+ "message_size": len(bytearray().join(body)),
+ "messages_count": len(message.raw_message),
+ }
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ (SimpleNamespace(), SimpleNamespace()),
+ BatchKafkaMetricsSettingsProvider(),
+ id="batch message",
+ ),
+ pytest.param(
+ SimpleNamespace(),
+ KafkaMetricsSettingsProvider(),
+ id="single message",
+ ),
+ pytest.param(
+ None,
+ KafkaMetricsSettingsProvider(),
+ id="None message",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/nats/__init__.py b/tests/prometheus/nats/__init__.py
new file mode 100644
index 0000000000..87ead90ee6
--- /dev/null
+++ b/tests/prometheus/nats/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("nats")
diff --git a/tests/prometheus/nats/test_nats.py b/tests/prometheus/nats/test_nats.py
new file mode 100644
index 0000000000..117b696922
--- /dev/null
+++ b/tests/prometheus/nats/test_nats.py
@@ -0,0 +1,89 @@
+import asyncio
+from unittest.mock import Mock
+
+import pytest
+from prometheus_client import CollectorRegistry
+
+from faststream import Context
+from faststream.nats import JStream, NatsBroker, PullSub
+from faststream.nats.prometheus.middleware import NatsPrometheusMiddleware
+from tests.brokers.nats.test_consume import TestConsume
+from tests.brokers.nats.test_publish import TestPublish
+from tests.prometheus.basic import LocalPrometheusTestcase, LocalRPCPrometheusTestcase
+
+
+@pytest.fixture()
+def stream(queue):
+ return JStream(queue)
+
+
+@pytest.mark.nats()
+class TestPrometheus(LocalPrometheusTestcase, LocalRPCPrometheusTestcase):
+    def get_broker(self, apply_types: bool = False, **kwargs):
+ return NatsBroker(apply_types=apply_types, **kwargs)
+
+ def get_middleware(self, **kwargs):
+ return NatsPrometheusMiddleware(**kwargs)
+
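+    # The middleware's metrics manager is swapped for a Mock so the test can
+    # assert on the recorded calls instead of scraping the Prometheus registry.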
+ async def test_metrics_batch(
+ self,
+ queue: str,
+ stream: JStream,
+ ):
+ event = asyncio.Event()
+
+ middleware = self.get_middleware(registry=CollectorRegistry())
+ metrics_manager_mock = Mock()
+ middleware._metrics_manager = metrics_manager_mock
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ args, kwargs = self.get_subscriber_params(
+ queue,
+ stream=stream,
+ pull_sub=PullSub(1, batch=True, timeout=self.timeout),
+ )
+ message = None
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler(m=Context("message")):
+ event.set()
+
+ nonlocal message
+ message = m
+
+ async with broker:
+ await broker.start()
+ tasks = (
+ asyncio.create_task(broker.publish("hello", queue)),
+ asyncio.create_task(event.wait()),
+ )
+ await asyncio.wait(tasks, timeout=self.timeout)
+
+ assert event.is_set()
+ self.assert_consume_metrics(
+ metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ )
+ self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
+
+
+@pytest.mark.nats()
+class TestPublishWithPrometheus(TestPublish):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return NatsBroker(
+ middlewares=(NatsPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
+
+
+@pytest.mark.nats()
+class TestConsumeWithPrometheus(TestConsume):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return NatsBroker(
+ middlewares=(NatsPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
diff --git a/tests/prometheus/nats/test_provider.py b/tests/prometheus/nats/test_provider.py
new file mode 100644
index 0000000000..817a37142d
--- /dev/null
+++ b/tests/prometheus/nats/test_provider.py
@@ -0,0 +1,107 @@
+import random
+from types import SimpleNamespace
+
+import pytest
+from nats.aio.msg import Msg
+
+from faststream.nats.prometheus.provider import (
+ BatchNatsMetricsSettingsProvider,
+ NatsMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from faststream.prometheus import MetricsSettingsProvider
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+
+class LocalBaseNatsMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase
+):
+ messaging_system = "nats"
+
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ command = SimpleNamespace(destination=queue)
+
+ provider = self.get_provider()
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestNatsMetricsSettingsProvider(LocalBaseNatsMetricsSettingsProviderTestcase):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return NatsMetricsSettingsProvider()
+
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue,
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+ message = SimpleNamespace(body=body, raw_message=SimpleNamespace(subject=queue))
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchNatsMetricsSettingsProvider(
+ LocalBaseNatsMetricsSettingsProviderTestcase
+):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return BatchNatsMetricsSettingsProvider()
+
+ def test_get_consume_attrs_from_message(self, queue: str) -> None:
+ body = b"Hello"
+ raw_messages = [
+            SimpleNamespace(subject=queue) for _ in range(random.randint(2, 10))
+ ]
+
+ expected_attrs = {
+ "destination_name": raw_messages[0].subject,
+ "message_size": len(body),
+ "messages_count": len(raw_messages),
+ }
+ message = SimpleNamespace(body=body, raw_message=raw_messages)
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ (Msg(SimpleNamespace()), Msg(SimpleNamespace())),
+ BatchNatsMetricsSettingsProvider(),
+ id="message is sequence",
+ ),
+ pytest.param(
+            Msg(SimpleNamespace()),
+ NatsMetricsSettingsProvider(),
+ id="single message",
+ ),
+ pytest.param(
+ None,
+ NatsMetricsSettingsProvider(),
+ id="message is None",
+ ),
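+        # expected_provider=None relies on isinstance(None, type(None)) being
+        # True: the factory returns None for objects that are not Msg instances.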
+ pytest.param(
+ SimpleNamespace(),
+ None,
+ id="message is not Msg instance",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/rabbit/__init__.py b/tests/prometheus/rabbit/__init__.py
new file mode 100644
index 0000000000..ebec43fcd5
--- /dev/null
+++ b/tests/prometheus/rabbit/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("aio_pika")
diff --git a/tests/prometheus/rabbit/test_provider.py b/tests/prometheus/rabbit/test_provider.py
new file mode 100644
index 0000000000..71d47a781b
--- /dev/null
+++ b/tests/prometheus/rabbit/test_provider.py
@@ -0,0 +1,65 @@
+from types import SimpleNamespace
+from typing import Union
+
+import pytest
+
+from faststream.prometheus import MetricsSettingsProvider
+from faststream.rabbit.prometheus.provider import RabbitMetricsSettingsProvider
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+
+class TestRabbitMetricsSettingsProvider(LocalMetricsSettingsProviderTestcase):
+ messaging_system = "rabbitmq"
+
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return RabbitMetricsSettingsProvider()
+
+ @pytest.mark.parametrize(
+ "exchange",
+ (
+ pytest.param("my_exchange", id="with exchange"),
+ pytest.param(None, id="without exchange"),
+ ),
+ )
+ def test_get_consume_attrs_from_message(
+ self,
+ exchange: Union[str, None],
+ queue: str,
+ ) -> None:
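+        # Rabbit destinations are reported as "<exchange>.<routing_key>", with
+        # the nameless default exchange rendered as "default".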
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": f"{exchange or 'default'}.{queue}",
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+ message = SimpleNamespace(
+ body=body, raw_message=SimpleNamespace(exchange=exchange, routing_key=queue)
+ )
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+ @pytest.mark.parametrize(
+ "exchange",
+ (
+ pytest.param("my_exchange", id="with exchange"),
+ pytest.param(None, id="without exchange"),
+ ),
+ )
+ def test_get_publish_destination_name_from_cmd(
+ self,
+ exchange: Union[str, None],
+ queue: str,
+ ) -> None:
+ expected_destination_name = f"{exchange or 'default'}.{queue}"
+ command = SimpleNamespace(
+ exchange=SimpleNamespace(name=exchange), destination=queue
+ )
+
+ provider = self.get_provider()
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
diff --git a/tests/prometheus/rabbit/test_rabbit.py b/tests/prometheus/rabbit/test_rabbit.py
new file mode 100644
index 0000000000..f64786fc4f
--- /dev/null
+++ b/tests/prometheus/rabbit/test_rabbit.py
@@ -0,0 +1,42 @@
+import pytest
+from prometheus_client import CollectorRegistry
+
+from faststream.rabbit import RabbitBroker, RabbitExchange
+from faststream.rabbit.prometheus.middleware import RabbitPrometheusMiddleware
+from tests.brokers.rabbit.test_consume import TestConsume
+from tests.brokers.rabbit.test_publish import TestPublish
+from tests.prometheus.basic import LocalPrometheusTestcase, LocalRPCPrometheusTestcase
+
+
+@pytest.fixture()
+def exchange(queue):
+ return RabbitExchange(name=queue)
+
+
+@pytest.mark.rabbit()
+class TestPrometheus(LocalPrometheusTestcase, LocalRPCPrometheusTestcase):
+    def get_broker(self, apply_types: bool = False, **kwargs):
+ return RabbitBroker(apply_types=apply_types, **kwargs)
+
+ def get_middleware(self, **kwargs):
+ return RabbitPrometheusMiddleware(**kwargs)
+
+
+@pytest.mark.rabbit()
+class TestPublishWithPrometheus(TestPublish):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return RabbitBroker(
+ middlewares=(RabbitPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
+
+
+@pytest.mark.rabbit()
+class TestConsumeWithPrometheus(TestConsume):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return RabbitBroker(
+ middlewares=(RabbitPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
diff --git a/tests/prometheus/redis/__init__.py b/tests/prometheus/redis/__init__.py
new file mode 100644
index 0000000000..4752ef19b1
--- /dev/null
+++ b/tests/prometheus/redis/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("redis")
diff --git a/tests/prometheus/redis/test_provider.py b/tests/prometheus/redis/test_provider.py
new file mode 100644
index 0000000000..c1b593b545
--- /dev/null
+++ b/tests/prometheus/redis/test_provider.py
@@ -0,0 +1,160 @@
+from types import SimpleNamespace
+
+import pytest
+
+from faststream.prometheus import MetricsSettingsProvider
+from faststream.redis.message import (
+ BatchListMessage,
+ BatchStreamMessage,
+ DefaultListMessage,
+ DefaultStreamMessage,
+ PubSubMessage,
+)
+from faststream.redis.prometheus.provider import (
+ BatchRedisMetricsSettingsProvider,
+ RedisMetricsSettingsProvider,
+ settings_provider_factory,
+)
+from tests.prometheus.basic import LocalMetricsSettingsProviderTestcase
+
+
+class LocalBaseRedisMetricsSettingsProviderTestcase(
+ LocalMetricsSettingsProviderTestcase
+):
+ messaging_system = "redis"
+
+ def test_get_publish_destination_name_from_cmd(self, queue: str) -> None:
+ expected_destination_name = queue
+ provider = self.get_provider()
+ command = SimpleNamespace(destination=queue)
+
+ destination_name = provider.get_publish_destination_name_from_cmd(command)
+
+ assert destination_name == expected_destination_name
+
+
+class TestRedisMetricsSettingsProvider(LocalBaseRedisMetricsSettingsProviderTestcase):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return RedisMetricsSettingsProvider()
+
+ @pytest.mark.parametrize(
+ "destination",
+ (
+ pytest.param("channel", id="destination is channel"),
+ pytest.param("list", id="destination is list"),
+ pytest.param("stream", id="destination is stream"),
+ pytest.param("", id="destination is blank"),
+ ),
+ )
+ def test_get_consume_attrs_from_message(self, queue: str, destination: str) -> None:
+ body = b"Hello"
+ expected_attrs = {
+ "destination_name": queue if destination else "",
+ "message_size": len(body),
+ "messages_count": 1,
+ }
+
+ raw_message = {"data": body}
+ if destination:
+ raw_message[destination] = queue
+
+ message = SimpleNamespace(body=body, raw_message=raw_message)
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+class TestBatchRedisMetricsSettingsProvider(
+ LocalBaseRedisMetricsSettingsProviderTestcase
+):
+ @staticmethod
+ def get_provider() -> MetricsSettingsProvider:
+ return BatchRedisMetricsSettingsProvider()
+
+ @pytest.mark.parametrize(
+ "destination",
+ (
+ pytest.param("channel", id="destination is channel"),
+ pytest.param("list", id="destination is list"),
+ pytest.param("stream", id="destination is stream"),
+ pytest.param("", id="destination is blank"),
+ ),
+ )
+ def test_get_consume_attrs_from_message(self, queue: str, destination: str) -> None:
+ decoded_body = ["Hi ", "again, ", "FastStream!"]
+ body = str(decoded_body).encode()
+
+ expected_attrs = {
+ "destination_name": queue if destination else "",
+ "message_size": len(body),
+ "messages_count": len(decoded_body),
+ }
+
+ raw_message = {"data": decoded_body}
+
+ if destination:
+ raw_message[destination] = queue
+
+ message = SimpleNamespace(
+ body=body,
+ raw_message=raw_message,
+ )
+
+ provider = self.get_provider()
+ attrs = provider.get_consume_attrs_from_message(message)
+
+ assert attrs == expected_attrs
+
+
+@pytest.mark.parametrize(
+ ("msg", "expected_provider"),
+ (
+ pytest.param(
+ PubSubMessage(
+ type="message",
+ channel="test-channel",
+ data=b"",
+ pattern=None,
+ ),
+ RedisMetricsSettingsProvider(),
+ id="PubSub message",
+ ),
+ pytest.param(
+ DefaultListMessage(type="list", channel="test-list", data=b""),
+ RedisMetricsSettingsProvider(),
+ id="Single List message",
+ ),
+ pytest.param(
+ BatchListMessage(type="blist", channel="test-list", data=[b"", b""]),
+ BatchRedisMetricsSettingsProvider(),
+ id="Batch List message",
+ ),
+ pytest.param(
+ DefaultStreamMessage(
+ type="stream",
+ channel="test-stream",
+ data=b"",
+ message_ids=[],
+ ),
+ RedisMetricsSettingsProvider(),
+ id="Single Stream message",
+ ),
+ pytest.param(
+ BatchStreamMessage(
+ type="bstream",
+ channel="test-stream",
+ data=[{b"": b""}, {b"": b""}],
+ message_ids=[],
+ ),
+ BatchRedisMetricsSettingsProvider(),
+ id="Batch Stream message",
+ ),
+ ),
+)
+def test_settings_provider_factory(msg, expected_provider) -> None:
+ provider = settings_provider_factory(msg)
+
+ assert isinstance(provider, type(expected_provider))
diff --git a/tests/prometheus/redis/test_redis.py b/tests/prometheus/redis/test_redis.py
new file mode 100644
index 0000000000..ee7f62cfd2
--- /dev/null
+++ b/tests/prometheus/redis/test_redis.py
@@ -0,0 +1,78 @@
+import asyncio
+from unittest.mock import Mock
+
+import pytest
+from prometheus_client import CollectorRegistry
+
+from faststream import Context
+from faststream.redis import ListSub, RedisBroker
+from faststream.redis.prometheus.middleware import RedisPrometheusMiddleware
+from tests.brokers.redis.test_consume import TestConsume
+from tests.brokers.redis.test_publish import TestPublish
+from tests.prometheus.basic import LocalPrometheusTestcase, LocalRPCPrometheusTestcase
+
+
+@pytest.mark.redis()
+class TestPrometheus(LocalPrometheusTestcase, LocalRPCPrometheusTestcase):
+    def get_broker(self, apply_types: bool = False, **kwargs):
+ return RedisBroker(apply_types=apply_types, **kwargs)
+
+ def get_middleware(self, **kwargs):
+ return RedisPrometheusMiddleware(**kwargs)
+
+ async def test_metrics_batch(
+ self,
+ queue: str,
+ ):
+ event = asyncio.Event()
+
+ middleware = self.get_middleware(registry=CollectorRegistry())
+ metrics_manager_mock = Mock()
+ middleware._metrics_manager = metrics_manager_mock
+
+ broker = self.get_broker(apply_types=True, middlewares=(middleware,))
+
+ args, kwargs = self.get_subscriber_params(list=ListSub(queue, batch=True))
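+        # ListSub(batch=True) delivers both publish_batch() messages as one
+        # batch, exercising the batch branch of the consume metrics.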
+
+ message = None
+
+ @broker.subscriber(*args, **kwargs)
+ async def handler(m=Context("message")):
+ event.set()
+
+ nonlocal message
+ message = m
+
+ async with broker:
+ await broker.start()
+ tasks = (
+ asyncio.create_task(broker.publish_batch("hello", "world", list=queue)),
+ asyncio.create_task(event.wait()),
+ )
+ await asyncio.wait(tasks, timeout=self.timeout)
+
+ assert event.is_set()
+ self.assert_consume_metrics(
+ metrics_manager=metrics_manager_mock, message=message, exception_class=None
+ )
+ self.assert_publish_metrics(metrics_manager=metrics_manager_mock)
+
+
+@pytest.mark.redis()
+class TestPublishWithPrometheus(TestPublish):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return RedisBroker(
+ middlewares=(RedisPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
+
+
+@pytest.mark.redis()
+class TestConsumeWithPrometheus(TestConsume):
+ def get_broker(self, apply_types: bool = False, **kwargs):
+ return RedisBroker(
+ middlewares=(RedisPrometheusMiddleware(registry=CollectorRegistry()),),
+ apply_types=apply_types,
+ **kwargs,
+ )
diff --git a/tests/prometheus/test_metrics.py b/tests/prometheus/test_metrics.py
new file mode 100644
index 0000000000..6be01b42c2
--- /dev/null
+++ b/tests/prometheus/test_metrics.py
@@ -0,0 +1,644 @@
+import random
+from typing import Optional
+from unittest.mock import ANY
+
+import pytest
+from dirty_equals import IsPositiveFloat, IsStr
+from prometheus_client import CollectorRegistry, Histogram, Metric
+from prometheus_client.samples import Sample
+
+from faststream.prometheus.container import MetricsContainer
+from faststream.prometheus.manager import MetricsManager
+from faststream.prometheus.types import ProcessingStatus, PublishingStatus
+
+
+class TestCaseMetrics:
+ @staticmethod
+ def create_metrics_manager(
+ app_name: Optional[str] = None,
+ metrics_prefix: Optional[str] = None,
+ received_messages_size_buckets: Optional[list[float]] = None,
+ ) -> MetricsManager:
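+        # Each test builds its own CollectorRegistry so metric state cannot
+        # leak between test cases.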
+ registry = CollectorRegistry()
+ container = MetricsContainer(
+ registry,
+ metrics_prefix=metrics_prefix,
+ received_messages_size_buckets=received_messages_size_buckets,
+ )
+ return MetricsManager(container, app_name=app_name)
+
+ @pytest.fixture()
+    def app_name(self) -> str:
+ return "youtube"
+
+ @pytest.fixture()
+    def metrics_prefix(self) -> str:
+ return "fs"
+
+ @pytest.fixture()
+ def broker(self) -> str:
+ return "rabbit"
+
+ @pytest.fixture()
+ def queue(self) -> str:
+ return "default.test"
+
+ @pytest.fixture()
+ def messages_amount(self) -> int:
+ return random.randint(1, 10)
+
+ @pytest.fixture()
+ def exception_type(self) -> str:
+ return Exception.__name__
+
+ def test_add_received_message(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ messages_amount: int,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ expected = Metric(
+ name=f"{metrics_prefix}_received_messages",
+ documentation="Count of received messages by broker and handler",
+ unit="",
+ typ="counter",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_messages_total",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_messages_created",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=IsPositiveFloat,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_received_message(
+ amount=messages_amount, broker=broker, handler=queue
+ )
+
+ metric_values = manager._container.received_messages_total.collect()
+
+ assert metric_values == [expected]
+
+ @pytest.mark.parametrize(
+ "is_default_buckets",
+ (
+ pytest.param(True, id="with default buckets"),
+ pytest.param(False, id="with custom buckets"),
+ ),
+ )
+ def test_observe_received_messages_size(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ is_default_buckets: bool,
+ ) -> None:
+ manager_kwargs = {
+ "app_name": app_name,
+ "metrics_prefix": metrics_prefix,
+ }
+
+ custom_buckets = [1.0, 2.0, 3.0, float("inf")]
+
+ if not is_default_buckets:
+ manager_kwargs["received_messages_size_buckets"] = custom_buckets
+
+ manager = self.create_metrics_manager(**manager_kwargs)
+
+ size = 1
+ buckets = (
+ MetricsContainer.DEFAULT_SIZE_BUCKETS
+ if is_default_buckets
+ else custom_buckets
+ )
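+        # Histogram buckets are cumulative and every bucket bound in play here
+        # is at least 1, so one observation of size=1 yields 1.0 per bucket.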
+
+ expected = Metric(
+ name=f"{metrics_prefix}_received_messages_size_bytes",
+ documentation="Histogram of received messages size in bytes by broker and handler",
+ unit="",
+ typ="histogram",
+ )
+ expected.samples = [
+ *[
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_bucket",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "le": IsStr,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ )
+ for _ in buckets
+ ],
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_count",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_sum",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=size,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_messages_size_bytes_created",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=ANY,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.observe_received_messages_size(size=size, broker=broker, handler=queue)
+
+ metric_values = manager._container.received_messages_size_bytes.collect()
+
+ assert metric_values == [expected]
+
+ def test_add_received_message_in_process(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ messages_amount: int,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ expected = Metric(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ documentation="Gauge of received messages in process by broker and handler",
+ unit="",
+ typ="gauge",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_received_message_in_process(
+ amount=messages_amount, broker=broker, handler=queue
+ )
+
+ metric_values = manager._container.received_messages_in_process.collect()
+
+ assert metric_values == [expected]
+
+ def test_remove_received_message_in_process(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ messages_amount: int,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
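+        # The gauge is raised by messages_amount and then lowered by one, so
+        # the collected value should land at messages_amount - 1.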
+ expected = Metric(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ documentation="Gauge of received messages in process by broker and handler",
+ unit="",
+ typ="gauge",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_messages_in_process",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=float(messages_amount - 1),
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_received_message_in_process(
+ amount=messages_amount, broker=broker, handler=queue
+ )
+ manager.remove_received_message_in_process(
+ amount=1, broker=broker, handler=queue
+ )
+
+ metric_values = manager._container.received_messages_in_process.collect()
+
+ assert metric_values == [expected]
+
+ @pytest.mark.parametrize(
+ "status",
+ (
+ pytest.param(ProcessingStatus.acked, id="acked status"),
+ pytest.param(ProcessingStatus.nacked, id="nacked status"),
+ pytest.param(ProcessingStatus.rejected, id="rejected status"),
+ pytest.param(ProcessingStatus.skipped, id="skipped status"),
+ pytest.param(ProcessingStatus.error, id="error status"),
+ ),
+ )
+ def test_add_received_processed_message(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ messages_amount: int,
+ status: ProcessingStatus,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ expected = Metric(
+ name=f"{metrics_prefix}_received_processed_messages",
+ documentation="Count of received processed messages by broker, handler and status",
+ unit="",
+ typ="counter",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "status": status.value,
+ },
+ value=float(messages_amount),
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "status": status.value,
+ },
+ value=IsPositiveFloat,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_received_processed_message(
+ amount=messages_amount,
+ status=status,
+ broker=broker,
+ handler=queue,
+ )
+
+ metric_values = manager._container.received_processed_messages_total.collect()
+
+ assert metric_values == [expected]
+
+ def test_observe_received_processed_message_duration(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ duration = 0.001
+
+ expected = Metric(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds",
+ documentation="Histogram of received processed messages duration in seconds by broker and handler",
+ unit="",
+ typ="histogram",
+ )
+ expected.samples = [
+ *[
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_bucket",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "le": IsStr,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ )
+ for _ in Histogram.DEFAULT_BUCKETS
+ ],
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_count",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_sum",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=duration,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_duration_seconds_created",
+ labels={"app_name": app_name, "broker": broker, "handler": queue},
+ value=ANY,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.observe_received_processed_message_duration(
+ duration=duration,
+ broker=broker,
+ handler=queue,
+ )
+
+ metric_values = (
+ manager._container.received_processed_messages_duration_seconds.collect()
+ )
+
+ assert metric_values == [expected]
+
+ def test_add_received_processed_message_exception(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ exception_type: str,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ expected = Metric(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions",
+ documentation="Count of received processed messages exceptions by broker, handler and exception_type",
+ unit="",
+ typ="counter",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "exception_type": exception_type,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_received_processed_messages_exceptions_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "handler": queue,
+ "exception_type": exception_type,
+ },
+ value=IsPositiveFloat,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_received_processed_message_exception(
+ exception_type=exception_type,
+ broker=broker,
+ handler=queue,
+ )
+
+ metric_values = (
+ manager._container.received_processed_messages_exceptions_total.collect()
+ )
+
+ assert metric_values == [expected]
+
+ @pytest.mark.parametrize(
+ "status",
+ (
+ pytest.param(PublishingStatus.success, id="success status"),
+ pytest.param(PublishingStatus.error, id="error status"),
+ ),
+ )
+ def test_add_published_message(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ status: PublishingStatus,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ expected = Metric(
+ name=f"{metrics_prefix}_published_messages",
+ documentation="Count of published messages by destination and status",
+ unit="",
+ typ="counter",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_published_messages_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "status": status.value,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "status": status.value,
+ },
+ value=IsPositiveFloat,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_published_message(
+ status=status,
+ broker=broker,
+ destination=queue,
+ )
+
+ metric_values = manager._container.published_messages_total.collect()
+
+ assert metric_values == [expected]
+
+ def test_observe_published_message_duration(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ duration = 0.001
+
+ expected = Metric(
+ name=f"{metrics_prefix}_published_messages_duration_seconds",
+ documentation="Histogram of published messages duration in seconds by broker and destination",
+ unit="",
+ typ="histogram",
+ )
+ expected.samples = [
+ *[
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_bucket",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "le": IsStr,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ )
+ for _ in Histogram.DEFAULT_BUCKETS
+ ],
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_count",
+ labels={"app_name": app_name, "broker": broker, "destination": queue},
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_sum",
+ labels={"app_name": app_name, "broker": broker, "destination": queue},
+ value=duration,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_duration_seconds_created",
+ labels={"app_name": app_name, "broker": broker, "destination": queue},
+ value=IsPositiveFloat,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.observe_published_message_duration(
+ duration=duration,
+ broker=broker,
+ destination=queue,
+ )
+
+ metric_values = manager._container.published_messages_duration_seconds.collect()
+
+ assert metric_values == [expected]
+
+ def test_add_published_message_exception(
+ self,
+ app_name: str,
+ metrics_prefix: str,
+ queue: str,
+ broker: str,
+ exception_type: str,
+ ) -> None:
+ manager = self.create_metrics_manager(
+ app_name=app_name,
+ metrics_prefix=metrics_prefix,
+ )
+
+ expected = Metric(
+ name=f"{metrics_prefix}_published_messages_exceptions",
+ documentation="Count of published messages exceptions by broker, destination and exception_type",
+ unit="",
+ typ="counter",
+ )
+ expected.samples = [
+ Sample(
+ name=f"{metrics_prefix}_published_messages_exceptions_total",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "exception_type": exception_type,
+ },
+ value=1.0,
+ timestamp=None,
+ exemplar=None,
+ ),
+ Sample(
+ name=f"{metrics_prefix}_published_messages_exceptions_created",
+ labels={
+ "app_name": app_name,
+ "broker": broker,
+ "destination": queue,
+ "exception_type": exception_type,
+ },
+ value=IsPositiveFloat,
+ timestamp=None,
+ exemplar=None,
+ ),
+ ]
+
+ manager.add_published_message_exception(
+ exception_type=exception_type,
+ broker=broker,
+ destination=queue,
+ )
+
+ metric_values = manager._container.published_messages_exceptions_total.collect()
+
+ assert metric_values == [expected]
diff --git a/tests/tools.py b/tests/tools.py
index 55d4405c9d..c2682f2455 100644
--- a/tests/tools.py
+++ b/tests/tools.py
@@ -1,8 +1,10 @@
import inspect
from functools import wraps
-from typing import Callable, ParamSpec, Protocol, TypeVar
+from typing import Callable, Protocol, TypeVar
from unittest.mock import MagicMock
+from typing_extensions import ParamSpec
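+# typing.ParamSpec only exists on Python 3.10+, so it is imported from
+# typing_extensions to keep the Python 3.9 floor supported.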
+
P = ParamSpec("P")
T = TypeVar("T")
diff --git a/tests/utils/context/test_alias.py b/tests/utils/context/test_alias.py
index e01fbde325..a84e475a03 100644
--- a/tests/utils/context/test_alias.py
+++ b/tests/utils/context/test_alias.py
@@ -11,7 +11,7 @@ async def test_base_context_alias(context: ContextRepo) -> None:
key = 1000
context.set_global("key", key)
- @apply_types
+ @apply_types(context__=context)
async def func(k=Context("key")):
return k is key
@@ -23,7 +23,7 @@ async def test_context_cast(context: ContextRepo) -> None:
key = 1000
context.set_global("key", key)
- @apply_types
+ @apply_types(context__=context)
async def func(k: float = Context("key", cast=True)):
return isinstance(k, float)
@@ -35,7 +35,7 @@ async def test_nested_context_alias(context: ContextRepo) -> None:
model = SomeModel(field=SomeModel(field=1000))
context.set_global("model", model)
- @apply_types
+ @apply_types(context__=context)
async def func(
m=Context("model.field.field"),
m2=Context("model.not_existed", default=None),
@@ -59,7 +59,7 @@ async def test_annotated_alias(context: ContextRepo) -> None:
model = SomeModel(field=SomeModel(field=1000))
context.set_global("model", model)
- @apply_types
+ @apply_types(context__=context)
async def func(m: Annotated[int, Context("model.field.field")]):
return m is model.field.field
diff --git a/tests/utils/context/test_main.py b/tests/utils/context/test_main.py
index 006a478061..c34317a879 100644
--- a/tests/utils/context/test_main.py
+++ b/tests/utils/context/test_main.py
@@ -1,5 +1,5 @@
import pytest
-from pydantic import ValidationError
+from fast_depends.exceptions import ValidationError
from faststream import Context, ContextRepo
from faststream._internal.utils import apply_types
@@ -18,7 +18,7 @@ async def test_context_apply(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context()):
return key is a
@@ -30,7 +30,7 @@ async def test_context_ignore(context: ContextRepo) -> None:
a = 3
context.set_global("key", a)
- @apply_types
+ @apply_types(context__=context)
async def use() -> None:
return None
@@ -45,19 +45,19 @@ async def test_context_apply_multi(context: ContextRepo) -> None:
b = 1000
context.set_global("key_b", b)
- @apply_types
+ @apply_types(context__=context)
async def use1(key_a=Context()):
return key_a is a
assert await use1()
- @apply_types
+ @apply_types(context__=context)
async def use2(key_b=Context()):
return key_b is b
assert await use2()
- @apply_types
+ @apply_types(context__=context)
async def use3(key_a=Context(), key_b=Context()):
return key_a is a and key_b is b
@@ -72,7 +72,7 @@ async def test_context_overrides(context: ContextRepo) -> None:
b = 1000
context.set_global("test", b)
- @apply_types
+ @apply_types(context__=context)
async def use(test=Context()):
return test is b
@@ -84,11 +84,11 @@ async def test_context_nested_apply(context: ContextRepo) -> None:
a = 1000
context.set_global("key", a)
- @apply_types
+ @apply_types(context__=context)
def use_nested(key=Context()):
return key
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context()):
return key is use_nested() is a
@@ -101,7 +101,7 @@ async def test_reset_global(context: ContextRepo) -> None:
context.set_global("key", a)
context.reset_global("key")
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context()) -> None: ...
with pytest.raises(ValidationError):
@@ -114,7 +114,7 @@ async def test_clear_context(context: ContextRepo) -> None:
context.set_global("key", a)
context.clear()
- @apply_types
+ @apply_types(context__=context)
async def use(key=Context(default=None)):
return key is None
@@ -122,7 +122,7 @@ async def use(key=Context(default=None)):
def test_scope(context: ContextRepo) -> None:
- @apply_types
+ @apply_types(context__=context)
def use(key=Context(), key2=Context()) -> None:
assert key == 1
assert key2 == 1
@@ -135,7 +135,7 @@ def use(key=Context(), key2=Context()) -> None:
def test_default(context: ContextRepo) -> None:
- @apply_types
+ @apply_types(context__=context)
def use(
key=Context(),
key2=Context(),
@@ -169,8 +169,8 @@ def test_local_default(context: ContextRepo) -> None:
assert context.get_local(key, 1) == 1
-def test_initial() -> None:
- @apply_types
+def test_initial(context: ContextRepo) -> None:
+ @apply_types(context__=context)
def use(
a,
key=Context(initial=list),
@@ -201,7 +201,7 @@ def __ne__(self, other):
user2 = User(user_id=2)
user3 = User(user_id=3)
- @apply_types
+ @apply_types(context__=context)
async def use(
key1=Context("user1"),
key2=Context("user2", default=user2),
diff --git a/tests/utils/context/test_path.py b/tests/utils/context/test_path.py
index f4bb59ed5d..ff135cb1a3 100644
--- a/tests/utils/context/test_path.py
+++ b/tests/utils/context/test_path.py
@@ -70,9 +70,10 @@ async def h(
@require_nats
async def test_nats_kv_path(
queue: str,
- event: asyncio.Event,
mock: Mock,
) -> None:
+ event = asyncio.Event()
+
from faststream.nats import NatsBroker
broker = NatsBroker()
diff --git a/tests/utils/test_ast.py b/tests/utils/test_ast.py
index a92c6fcd87..d57d29e88f 100644
--- a/tests/utils/test_ast.py
+++ b/tests/utils/test_ast.py
@@ -1,5 +1,3 @@
-from typing import NoReturn
-
import pytest
from faststream._internal.testing.ast import is_contains_context_name
@@ -75,7 +73,7 @@ def test_nested_invalid() -> None:
assert not a.contains
-def test_not_broken() -> NoReturn:
+def test_not_broken() -> None:
with A() as a, B():
assert a.contains