diff --git a/docs/create_api_docs.py b/docs/create_api_docs.py index e22a25d236..883d3ef787 100644 --- a/docs/create_api_docs.py +++ b/docs/create_api_docs.py @@ -78,7 +78,7 @@ def _import_module(name: str) -> Optional[ModuleType]: public_api_modules = [ m for m in modules - if m.__file__.replace(str(repo_path) + "/", "") in PUBLIC_API_FILES + if m and m.__file__.replace(str(repo_path) + "/", "") in PUBLIC_API_FILES ] return public_api_modules diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 61f070b7bf..e09163b08a 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -143,10 +143,12 @@ search: - [DiscardPolicy](public_api/faststream/nats/DiscardPolicy.md) - [ExternalStream](public_api/faststream/nats/ExternalStream.md) - [JStream](public_api/faststream/nats/JStream.md) + - [KvWatch](public_api/faststream/nats/KvWatch.md) - [NatsBroker](public_api/faststream/nats/NatsBroker.md) - [NatsPublisher](public_api/faststream/nats/NatsPublisher.md) - [NatsRoute](public_api/faststream/nats/NatsRoute.md) - [NatsRouter](public_api/faststream/nats/NatsRouter.md) + - [ObjWatch](public_api/faststream/nats/ObjWatch.md) - [Placement](public_api/faststream/nats/Placement.md) - [PullSub](public_api/faststream/nats/PullSub.md) - [RePublish](public_api/faststream/nats/RePublish.md) @@ -476,6 +478,8 @@ search: - [AsyncAPIBatchSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIBatchSubscriber.md) - [AsyncAPIDefaultSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md) - [AsyncAPISubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/confluent/subscriber/factory/create_subscriber.md) - usecase - [BatchSubscriber](api/faststream/confluent/subscriber/usecase/BatchSubscriber.md) - [DefaultSubscriber](api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md) @@ -560,6 +564,8 @@ search: - 
[AsyncAPIBatchSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIBatchSubscriber.md) - [AsyncAPIDefaultSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md) - [AsyncAPISubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/kafka/subscriber/factory/create_subscriber.md) - usecase - [BatchSubscriber](api/faststream/kafka/subscriber/usecase/BatchSubscriber.md) - [DefaultSubscriber](api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md) @@ -583,10 +589,12 @@ search: - [DiscardPolicy](api/faststream/nats/DiscardPolicy.md) - [ExternalStream](api/faststream/nats/ExternalStream.md) - [JStream](api/faststream/nats/JStream.md) + - [KvWatch](api/faststream/nats/KvWatch.md) - [NatsBroker](api/faststream/nats/NatsBroker.md) - [NatsPublisher](api/faststream/nats/NatsPublisher.md) - [NatsRoute](api/faststream/nats/NatsRoute.md) - [NatsRouter](api/faststream/nats/NatsRouter.md) + - [ObjWatch](api/faststream/nats/ObjWatch.md) - [Placement](api/faststream/nats/Placement.md) - [PullSub](api/faststream/nats/PullSub.md) - [RePublish](api/faststream/nats/RePublish.md) @@ -611,10 +619,20 @@ search: - fastapi - [NatsRouter](api/faststream/nats/fastapi/fastapi/NatsRouter.md) - helpers + - [KVBucketDeclarer](api/faststream/nats/helpers/KVBucketDeclarer.md) + - [OSBucketDeclarer](api/faststream/nats/helpers/OSBucketDeclarer.md) - [StreamBuilder](api/faststream/nats/helpers/StreamBuilder.md) + - bucket_declarer + - [KVBucketDeclarer](api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md) + - obj_storage_declarer + - [OSBucketDeclarer](api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md) + - object_builder + - [StreamBuilder](api/faststream/nats/helpers/object_builder/StreamBuilder.md) - message - [NatsBatchMessage](api/faststream/nats/message/NatsBatchMessage.md) + - [NatsKvMessage](api/faststream/nats/message/NatsKvMessage.md) - 
[NatsMessage](api/faststream/nats/message/NatsMessage.md) + - [NatsObjMessage](api/faststream/nats/message/NatsObjMessage.md) - opentelemetry - [NatsTelemetryMiddleware](api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md) - middleware @@ -627,8 +645,10 @@ search: - parser - [BatchParser](api/faststream/nats/parser/BatchParser.md) - [JsParser](api/faststream/nats/parser/JsParser.md) + - [KvParser](api/faststream/nats/parser/KvParser.md) - [NatsBaseParser](api/faststream/nats/parser/NatsBaseParser.md) - [NatsParser](api/faststream/nats/parser/NatsParser.md) + - [ObjParser](api/faststream/nats/parser/ObjParser.md) - publisher - asyncapi - [AsyncAPIPublisher](api/faststream/nats/publisher/asyncapi/AsyncAPIPublisher.md) @@ -643,24 +663,50 @@ search: - [NatsRouter](api/faststream/nats/router/NatsRouter.md) - schemas - [JStream](api/faststream/nats/schemas/JStream.md) + - [KvWatch](api/faststream/nats/schemas/KvWatch.md) + - [ObjWatch](api/faststream/nats/schemas/ObjWatch.md) - [PullSub](api/faststream/nats/schemas/PullSub.md) - js_stream - [JStream](api/faststream/nats/schemas/js_stream/JStream.md) - [compile_nats_wildcard](api/faststream/nats/schemas/js_stream/compile_nats_wildcard.md) - [is_subject_match_wildcard](api/faststream/nats/schemas/js_stream/is_subject_match_wildcard.md) + - kv_watch + - [KvWatch](api/faststream/nats/schemas/kv_watch/KvWatch.md) + - obj_watch + - [ObjWatch](api/faststream/nats/schemas/obj_watch/ObjWatch.md) - pull_sub - [PullSub](api/faststream/nats/schemas/pull_sub/PullSub.md) - security - [parse_security](api/faststream/nats/security/parse_security.md) - subscriber - asyncapi - - [AsyncAPIBatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md) - - [AsyncAPIDefaultSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md) + - [AsyncAPIBatchPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md) + - 
[AsyncAPIConcurrentCoreSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md) + - [AsyncAPIConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md) + - [AsyncAPIConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md) + - [AsyncAPICoreSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md) + - [AsyncAPIKeyValueWatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md) + - [AsyncAPIObjStoreWatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md) + - [AsyncAPIPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md) + - [AsyncAPIStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md) - [AsyncAPISubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/nats/subscriber/factory/create_subscriber.md) + - subscription + - [UnsubscribeAdapter](api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md) + - [Unsubscriptable](api/faststream/nats/subscriber/subscription/Unsubscriptable.md) + - [Watchable](api/faststream/nats/subscriber/subscription/Watchable.md) - usecase - - [BatchHandler](api/faststream/nats/subscriber/usecase/BatchHandler.md) - - [DefaultHandler](api/faststream/nats/subscriber/usecase/DefaultHandler.md) + - [BatchPullStreamSubscriber](api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md) + - [ConcurrentCoreSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md) + - [ConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md) + - [ConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md) + - 
[CoreSubscriber](api/faststream/nats/subscriber/usecase/CoreSubscriber.md) + - [KeyValueWatchSubscriber](api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md) - [LogicSubscriber](api/faststream/nats/subscriber/usecase/LogicSubscriber.md) + - [ObjStoreWatchSubscriber](api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md) + - [PullStreamSubscriber](api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md) + - [PushStreamSubscription](api/faststream/nats/subscriber/usecase/PushStreamSubscription.md) - testing - [FakeProducer](api/faststream/nats/testing/FakeProducer.md) - [PatchedMessage](api/faststream/nats/testing/PatchedMessage.md) @@ -743,6 +789,8 @@ search: - subscriber - asyncapi - [AsyncAPISubscriber](api/faststream/rabbit/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/rabbit/subscriber/factory/create_subscriber.md) - usecase - [LogicSubscriber](api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md) - testing @@ -848,6 +896,8 @@ search: - [AsyncAPIStreamBatchSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamBatchSubscriber.md) - [AsyncAPIStreamSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamSubscriber.md) - [AsyncAPISubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/redis/subscriber/factory/create_subscriber.md) - usecase - [BatchListSubscriber](api/faststream/redis/subscriber/usecase/BatchListSubscriber.md) - [BatchStreamSubscriber](api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md) diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md similarity index 63% rename from docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md rename to docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md 
index 5702a67c39..ce811a99d9 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md +++ b/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.asyncapi.AsyncAPIBatchSubscriber +::: faststream.confluent.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..d9e5fcb4a4 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchHandler.md b/docs/docs/en/api/faststream/nats/KvWatch.md similarity index 67% rename from docs/docs/en/api/faststream/nats/subscriber/usecase/BatchHandler.md rename to docs/docs/en/api/faststream/nats/KvWatch.md index ae5d6858db..1527be51fd 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchHandler.md +++ b/docs/docs/en/api/faststream/nats/KvWatch.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.usecase.BatchHandler +::: faststream.nats.KvWatch diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/DefaultHandler.md b/docs/docs/en/api/faststream/nats/ObjWatch.md similarity index 67% rename from docs/docs/en/api/faststream/nats/subscriber/usecase/DefaultHandler.md rename to docs/docs/en/api/faststream/nats/ObjWatch.md index ebe246e5f6..50102ecf31 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/usecase/DefaultHandler.md +++ b/docs/docs/en/api/faststream/nats/ObjWatch.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.usecase.DefaultHandler +::: 
faststream.nats.ObjWatch diff --git a/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md new file mode 100644 index 0000000000..b24feaada6 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.KVBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md new file mode 100644 index 0000000000..3ee16a3f24 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.OSBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md new file mode 100644 index 0000000000..fe0eaec17f --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.bucket_declarer.KVBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md b/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md similarity index 63% rename from docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md rename to docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md index 92b674793d..b7663051c8 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md +++ 
b/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.asyncapi.AsyncAPIDefaultSubscriber +::: faststream.nats.helpers.obj_storage_declarer.OSBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md b/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md new file mode 100644 index 0000000000..024daf2d14 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.object_builder.StreamBuilder diff --git a/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md b/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md new file mode 100644 index 0000000000..5ac6ed9f41 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.message.NatsKvMessage diff --git a/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md b/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md new file mode 100644 index 0000000000..3671628da4 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.message.NatsObjMessage diff --git a/docs/docs/en/api/faststream/nats/parser/KvParser.md b/docs/docs/en/api/faststream/nats/parser/KvParser.md new file mode 100644 index 0000000000..acba65e133 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/parser/KvParser.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + 
boost: 0.5 +--- + +::: faststream.nats.parser.KvParser diff --git a/docs/docs/en/api/faststream/nats/parser/ObjParser.md b/docs/docs/en/api/faststream/nats/parser/ObjParser.md new file mode 100644 index 0000000000..50ff5d0e18 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/parser/ObjParser.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.parser.ObjParser diff --git a/docs/docs/en/api/faststream/nats/schemas/KvWatch.md b/docs/docs/en/api/faststream/nats/schemas/KvWatch.md new file mode 100644 index 0000000000..ce99738043 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/KvWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.KvWatch diff --git a/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md b/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md new file mode 100644 index 0000000000..51c3628e5e --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.ObjWatch diff --git a/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md b/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md new file mode 100644 index 0000000000..ce07fa305d --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.kv_watch.KvWatch diff --git a/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md b/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md new file mode 100644 index 0000000000..55831b8a6a --- /dev/null +++ 
b/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.obj_watch.ObjWatch diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md new file mode 100644 index 0000000000..15bceeedbc --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIBatchPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md new file mode 100644 index 0000000000..f88e14f817 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentCoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md new file mode 100644 index 0000000000..b5ebf86f93 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentPullStreamSubscriber diff --git 
a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md new file mode 100644 index 0000000000..7bb4a6e088 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentPushStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md new file mode 100644 index 0000000000..8819adebab --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPICoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md new file mode 100644 index 0000000000..b006854b0b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIKeyValueWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md new file mode 100644 index 0000000000..0a9157ed55 --- /dev/null +++ 
b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIObjStoreWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md new file mode 100644 index 0000000000..e9650bef94 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md new file mode 100644 index 0000000000..6d448d3af5 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..0e132c3394 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md 
b/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md new file mode 100644 index 0000000000..455885671f --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.subscription.UnsubscribeAdapter diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md new file mode 100644 index 0000000000..c94cb1b731 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.subscription.Unsubscriptable diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md new file mode 100644 index 0000000000..67638258ea --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.subscription.Watchable diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md new file mode 100644 index 0000000000..dfb1c43575 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.BatchPullStreamSubscriber diff --git 
a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md new file mode 100644 index 0000000000..e1f100c043 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ConcurrentCoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md new file mode 100644 index 0000000000..c1b7207285 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ConcurrentPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md new file mode 100644 index 0000000000..ffa2e0c37b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ConcurrentPushStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md new file mode 100644 index 0000000000..8ddb0b8c04 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template 
Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.CoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md new file mode 100644 index 0000000000..778557ee2b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.KeyValueWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md new file mode 100644 index 0000000000..ad15f32931 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ObjStoreWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md new file mode 100644 index 0000000000..30f30a893f --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.PullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md new file mode 100644 index 0000000000..bb29bbb9c2 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md @@ -0,0 +1,11 @@ +--- 
+# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.PushStreamSubscription diff --git a/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..79c7082931 --- /dev/null +++ b/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..d5cf7eadc8 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.subscriber.factory.create_subscriber diff --git a/docs/docs/en/nats/jetstream/key-value.md b/docs/docs/en/nats/jetstream/key-value.md index a5bbc0f1ef..2ca8d70add 100644 --- a/docs/docs/en/nats/jetstream/key-value.md +++ b/docs/docs/en/nats/jetstream/key-value.md @@ -24,42 +24,34 @@ This interface provides you with rich abilities to use it like a regular *KV* st ## FastStream Details -**FastStream** has no native interfaces to this *NatsJS* functionality (yet), but it allows you to get access into the inner `JetStream` object to create it manually. +**FastStream** has some useful methods to help you with **Key-Value NATS** feature interacting. 
-First of all, you need to create a *Key-Value* storage object and pass it into the context: +First of all, you need to create a *Key-Value* storage object and put some value to it: -```python linenums="1" hl_lines="12-13" -{! docs_src/nats/js/key_value.py [ln:5-8,11-13,22-27] !} +```python linenums="1" hl_lines="9-10" +{! docs_src/nats/js/key_value.py [ln:1-5,12-16] !} ``` !!! tip - We placed this code in `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook. - - Also, we call `#!python await broker.connect()` method manually to establish the connection to be able to create a storage. + `#!python broker.key_value(bucket="bucket")` is an idempotent method. It means that it stores all already created storages in memory and does not make a new request to **NATS** if you are trying to call it for the same bucket. --- -Next, we are ready to use this object right in our handlers. - -Let's create an annotated object to shorten context object access: - -```python linenums="1" hl_lines="4" -{! docs_src/nats/js/key_value.py [ln:1-3,9] !} -``` - -And just use it in a handler: +Then we are able to use the returned `key_value` object as a regular NATS one. But, if you want to watch for any changes of some key in the bucket, **FastStream** allows you to make it via the regular `@broker.subscriber` interface: -```python linenums="1" hl_lines="4 6-8" -{! docs_src/nats/js/key_value.py [ln:4,14-19] !} +```python linenums="1" hl_lines="1" +{! docs_src/nats/js/key_value.py [ln:8-10] !} ``` -Finally, let's test our code behavior by putting something into the KV storage and sending a message: +Also, if you want more detailed settings for your **Key Value Storage**, we have the `KvWatch` object for it: -```python linenums="1" hl_lines="3-4" -{! 
docs_src/nats/js/key_value.py [ln:30-33] !} -``` +```python linenums="1" hl_lines="5" +from faststream.nats import NatsBroker, KvWatch -??? example "Full listing" - ```python linenums="1" - {!> docs_src/nats/js/key_value.py !} - ``` +@broker.subscriber( + "key", + kv_watch=KvWatch("bucket", declare=False), +) +async def handler(msg: str): + ... +``` \ No newline at end of file diff --git a/docs/docs/en/nats/jetstream/object.md b/docs/docs/en/nats/jetstream/object.md index 2e1501f53a..33aa1055df 100644 --- a/docs/docs/en/nats/jetstream/object.md +++ b/docs/docs/en/nats/jetstream/object.md @@ -22,45 +22,47 @@ The main difference between *KV* and *Object* storages is that in the *Object* s ## FastStream Details -**FastStream** has no native interfaces to this *NatsJS* functionality (yet), but it allows you to access the inner `JetStream` object to create in manually. +**FastStream** has some useful methods to help you interact with the **Object Storage NATS** feature. -First of all, you need to create an *Object* storage object and pass in to the context: +First of all, you need to create an *Object Storage* object and put some value to it: -```python linenums="1" hl_lines="12-13" -{! docs_src/nats/js/object.py [ln:7-10,13-15,24-29] !} +```python linenums="1" hl_lines="11-12" +{! docs_src/nats/js/object.py [ln:1-2,3,5,7-10,23-26] !} ``` !!! tip - We placed this code in the `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook. + * [`BytesIO`](https://docs.python.org/3/library/io.html#binary-i-o){.external-link target="_blank"} - is a *Readable* object used to emulate a file opened for reading. - Also, we call `#!python await broker.connect()` method manually to establish the connection to be able to create a storage. 
+ * `#!python broker.object_storage(bucket="example-bucket")` is an idempotent method. It means that it stores all already created storages in memory and does not make a new request to **NATS** if you are trying to call it for the same bucket. --- -Next, we are ready to use this object right in the our handlers. +Then we are able to use the returned `object_storage` object as a regular NATS one. But, if you want to watch for any new files in the bucket, **FastStream** allows you to do it via the regular `@broker.subscriber` interface: -Let's create an Annotated object to shorten `Context` object access: - -```python linenums="1" hl_lines="4" -{! docs_src/nats/js/object.py [ln:3-5,11] !} +```python linenums="1" hl_lines="1" +@broker.subscriber("example-bucket", obj_watch=True) +async def handler(filename: str): + assert filename == "file.txt" ``` -And just use it in a handler: +**NATS** delivers you just a filename (and some more meta-information you can access via `message.raw_message`) because files can be of any size. The framework should protect your service from memory overflow, so we can't upload the whole file content right into memory. But you can do it manually the following way: -```python linenums="1" hl_lines="6 8-9" -{! docs_src/nats/js/object.py [ln:1-2,6,16-21] !} +```python linenums="1" hl_lines="1 6 10-11" +{! docs_src/nats/js/object.py [ln:6-7,12-20] !} ``` -Finally, let's test our code behavior by putting something into the *Object storage* and sending a message: +!!! note + `faststream.nats.annotations.ObjectStorage` is your current bucket, so you don't need to put it into the context manually. -```python linenums="1" hl_lines="3-4" -{! docs_src/nats/js/object.py [ln:32-35] !} -``` +Also, if you want more detailed settings for your **Object Storage**, we have the `ObjWatch` object for it: -!!! tip - [`BytesIO`](https://docs.python.org/3/library/io.html#binary-i-o){.external-link target="_blank"} - is a *Readable* object used to emulate a file opened for reading. 
+```python linenums="1" hl_lines="5" +from faststream.nats import NatsBroker, ObjWatch -@broker.subscriber(
- - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - kv = await broker.stream.create_key_value(bucket="bucket") - context.set_global("kv", kv) +@broker.subscriber("key", kv_watch="bucket") +async def handler(msg: str): + assert msg == "Hello!" @app.after_startup -async def test_send(kv: KeyValue): - await kv.put("key", b"Hello!") - await broker.publish("Hi!", "subject") +async def setup_broker(): + key_value = await broker.key_value(bucket="bucket") + await key_value.put("key", b"Hello!") diff --git a/docs/docs_src/nats/js/object.py b/docs/docs_src/nats/js/object.py index d072aac7fb..916d068dab 100644 --- a/docs/docs_src/nats/js/object.py +++ b/docs/docs_src/nats/js/object.py @@ -1,35 +1,26 @@ from io import BytesIO -from nats.js.object_store import ObjectStore as OS -from typing_extensions import Annotated - +from faststream import FastStream from faststream import Logger -from faststream import Context, FastStream from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -ObjectStorage = Annotated[OS, Context("OS")] +from faststream.nats.annotations import ObjectStorage broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, os: ObjectStorage, logger: Logger): - logger.info(msg) - obj = await os.get("file") - assert obj.data == b"File mock" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - os = await broker.stream.create_object_store("bucket") - context.set_global("OS", os) +@broker.subscriber("example-bucket", obj_watch=True) +async def handler( + filename: str, + storage: ObjectStorage, + logger: Logger, +): + assert filename == "file.txt" + file = await storage.get(filename) + logger.info(file.data) @app.after_startup -async def test_send(os: ObjectStorage): - await os.put("file", BytesIO(b"File mock")) - await broker.publish("Hi!", "subject") +async def test_send(): + object_storage = 
await broker.object_storage("example-bucket") + await object_storage.put("file.txt", BytesIO(b"File mock")) diff --git a/examples/nats/e06_key_value.py b/examples/nats/e06_key_value.py index 60df373cc4..23d7a0d734 100644 --- a/examples/nats/e06_key_value.py +++ b/examples/nats/e06_key_value.py @@ -1,32 +1,16 @@ -from nats.js.kv import KeyValue as KV -from typing_extensions import Annotated - -from faststream import Context, FastStream, Logger +from faststream import FastStream from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -KeyValue = Annotated[KV, Context("kv")] broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, kv: KeyValue, logger: Logger): - logger.info(msg) - kv_data = await kv.get("key") - assert kv_data.value == b"Hello!" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - kv = await broker.stream.create_key_value(bucket="bucket") - context.set_global("kv", kv) +@broker.subscriber("key", kv_watch="bucket") +async def handler(msg: str): + assert msg == "Hello!" 
@app.after_startup -async def test_send(kv: KeyValue): +async def setup_broker(): + kv = await broker.key_value(bucket="bucket") await kv.put("key", b"Hello!") - await broker.publish("Hi!", "subject") diff --git a/examples/nats/e07_object_storage.py b/examples/nats/e07_object_storage.py index 079409d52c..55e801928e 100644 --- a/examples/nats/e07_object_storage.py +++ b/examples/nats/e07_object_storage.py @@ -1,35 +1,21 @@ from io import BytesIO -from nats.js.object_store import ObjectStore as OS -from typing_extensions import Annotated - -from faststream import Context, FastStream, Logger +from faststream import FastStream, Logger from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -ObjectStorage = Annotated[OS, Context("OS")] +from faststream.nats.annotations import ObjectStorage broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, os: ObjectStorage, logger: Logger): - logger.info(msg) - obj = await os.get("file") - assert obj.data == b"File mock" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - os = await broker.stream.create_object_store("bucket") - context.set_global("OS", os) +@broker.subscriber("example-bucket", obj_watch=True) +async def handler(filename: str, storage: ObjectStorage, logger: Logger): + assert filename == "file.txt" + file = await storage.get(filename) + logger.info(file.data) @app.after_startup -async def test_send(os: ObjectStorage): - await os.put("file", BytesIO(b"File mock")) - - await broker.publish("Hi!", "subject") +async def test_send(): + os = await broker.object_storage("example-bucket") + await os.put("file.txt", BytesIO(b"File mock")) diff --git a/faststream/__about__.py b/faststream/__about__.py index 6a9efa082f..7a4bf92d8e 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" 
-__version__ = "0.5.7" +__version__ = "0.5.8" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/broker/core/usecase.py b/faststream/broker/core/usecase.py index 439230ad8a..c226850ace 100644 --- a/faststream/broker/core/usecase.py +++ b/faststream/broker/core/usecase.py @@ -172,6 +172,7 @@ def __init__( self._connection = None self._producer = None + # TODO: remove useless middleware filter if not is_test_env(): self._middlewares = ( CriticalLogMiddleware(self.logger, log_level), diff --git a/faststream/broker/subscriber/proto.py b/faststream/broker/subscriber/proto.py index 545c5fc169..47bd42b44d 100644 --- a/faststream/broker/subscriber/proto.py +++ b/faststream/broker/subscriber/proto.py @@ -38,12 +38,6 @@ class SubscriberProto( @abstractmethod def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ... - @staticmethod - @abstractmethod - def create() -> "SubscriberProto[MsgType]": - """Abstract factory to create a real Subscriber.""" - ... - @abstractmethod def get_log_context( self, diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py index 6306d10bd9..6d71a21046 100644 --- a/faststream/confluent/broker/registrator.py +++ b/faststream/confluent/broker/registrator.py @@ -18,7 +18,7 @@ from faststream.broker.core.abc import ABCBroker from faststream.broker.utils import default_filter from faststream.confluent.publisher.asyncapi import AsyncAPIPublisher -from faststream.confluent.subscriber.asyncapi import AsyncAPISubscriber +from faststream.confluent.subscriber.factory import create_subscriber from faststream.exceptions import SetupError if TYPE_CHECKING: @@ -1234,7 +1234,7 @@ def subscriber( raise SetupError("You should install `group_id` with manual commit mode") subscriber = super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( *topics, batch=batch, batch_timeout_ms=batch_timeout_ms, diff --git a/faststream/confluent/subscriber/asyncapi.py 
b/faststream/confluent/subscriber/asyncapi.py index d31bfa05f2..7ec3ffb965 100644 --- a/faststream/confluent/subscriber/asyncapi.py +++ b/faststream/confluent/subscriber/asyncapi.py @@ -1,16 +1,9 @@ from typing import ( TYPE_CHECKING, Dict, - Iterable, - Literal, - Optional, Tuple, - Union, - overload, ) -from typing_extensions import override - from faststream.asyncapi.schema import ( Channel, ChannelBinding, @@ -29,10 +22,6 @@ if TYPE_CHECKING: from confluent_kafka import Message as ConfluentMsg - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.types import AnyDict class AsyncAPISubscriber(LogicSubscriber[MsgType]): @@ -67,134 +56,6 @@ def get_schema(self) -> Dict[str, Channel]: return channels - @overload # type: ignore[override] - @staticmethod - def create( - *topics: str, - batch: Literal[True], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIBatchSubscriber": ... - - @overload - @staticmethod - def create( - *topics: str, - batch: Literal[False], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIDefaultSubscriber": ... 
- - @overload - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: ... - - @override - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: - if batch: - return AsyncAPIBatchSubscriber( - *topics, - batch_timeout_ms=batch_timeout_ms, - max_records=max_records, - group_id=group_id, - connection_data=connection_data, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIDefaultSubscriber( - *topics, - group_id=group_id, - connection_data=connection_data, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - class AsyncAPIDefaultSubscriber( 
DefaultSubscriber, diff --git a/faststream/confluent/subscriber/factory.py b/faststream/confluent/subscriber/factory.py new file mode 100644 index 0000000000..b7b6b6ca61 --- /dev/null +++ b/faststream/confluent/subscriber/factory.py @@ -0,0 +1,148 @@ +from typing import ( + TYPE_CHECKING, + Iterable, + Literal, + Optional, + Tuple, + Union, + overload, +) + +from faststream.confluent.subscriber.asyncapi import ( + AsyncAPIBatchSubscriber, + AsyncAPIDefaultSubscriber, +) + +if TYPE_CHECKING: + from confluent_kafka import Message as ConfluentMsg + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.types import AnyDict + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[True], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIBatchSubscriber": ... + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[False], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIDefaultSubscriber": ... 
+ + +@overload +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: ... + + +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: + if batch: + return AsyncAPIBatchSubscriber( + *topics, + batch_timeout_ms=batch_timeout_ms, + max_records=max_records, + group_id=group_id, + connection_data=connection_data, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + else: + return AsyncAPIDefaultSubscriber( + *topics, + group_id=group_id, + connection_data=connection_data, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/kafka/broker/registrator.py 
b/faststream/kafka/broker/registrator.py index afc69a459c..0633032c06 100644 --- a/faststream/kafka/broker/registrator.py +++ b/faststream/kafka/broker/registrator.py @@ -20,7 +20,7 @@ from faststream.broker.core.abc import ABCBroker from faststream.broker.utils import default_filter from faststream.kafka.publisher.asyncapi import AsyncAPIPublisher -from faststream.kafka.subscriber.asyncapi import AsyncAPISubscriber +from faststream.kafka.subscriber.factory import create_subscriber if TYPE_CHECKING: from aiokafka import ConsumerRecord, TopicPartition @@ -1394,7 +1394,7 @@ def subscriber( "AsyncAPIBatchSubscriber", ]: subscriber = super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( *topics, batch=batch, batch_timeout_ms=batch_timeout_ms, diff --git a/faststream/kafka/subscriber/asyncapi.py b/faststream/kafka/subscriber/asyncapi.py index ec31001633..9adb8dad3c 100644 --- a/faststream/kafka/subscriber/asyncapi.py +++ b/faststream/kafka/subscriber/asyncapi.py @@ -1,16 +1,9 @@ from typing import ( TYPE_CHECKING, Dict, - Iterable, - Literal, - Optional, Tuple, - Union, - overload, ) -from typing_extensions import override - from faststream.asyncapi.schema import ( Channel, ChannelBinding, @@ -21,7 +14,6 @@ from faststream.asyncapi.schema.bindings import kafka from faststream.asyncapi.utils import resolve_payloads from faststream.broker.types import MsgType -from faststream.exceptions import SetupError from faststream.kafka.subscriber.usecase import ( BatchSubscriber, DefaultSubscriber, @@ -29,12 +21,7 @@ ) if TYPE_CHECKING: - from aiokafka import ConsumerRecord, TopicPartition - from aiokafka.abc import ConsumerRebalanceListener - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.types import AnyDict + from aiokafka import ConsumerRecord class AsyncAPISubscriber(LogicSubscriber[MsgType]): @@ -69,166 +56,6 @@ def get_schema(self) -> Dict[str, Channel]: return channels - @overload # 
type: ignore[override] - @staticmethod - def create( - *topics: str, - batch: Literal[True], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIBatchSubscriber": ... - - @overload - @staticmethod - def create( - *topics: str, - batch: Literal[False], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIDefaultSubscriber": ... 
- - @overload - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: ... - - @override - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: - if is_manual and not group_id: - raise SetupError("You should install `group_id` with manual commit mode") - - if not topics and not partitions and not pattern: - raise SetupError( - "You should provide either `topics` or `partitions` or `pattern`." 
- ) - elif topics and partitions: - raise SetupError("You can't provide both `topics` and `partitions`.") - elif topics and pattern: - raise SetupError("You can't provide both `topics` and `pattern`.") - elif pattern and partitions: - raise SetupError("You can't provide both `pattern` and `partitions`.") - - if batch: - return AsyncAPIBatchSubscriber( - *topics, - batch_timeout_ms=batch_timeout_ms, - max_records=max_records, - group_id=group_id, - listener=listener, - pattern=pattern, - connection_args=connection_args, - partitions=partitions, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIDefaultSubscriber( - *topics, - group_id=group_id, - listener=listener, - pattern=pattern, - connection_args=connection_args, - partitions=partitions, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - class AsyncAPIDefaultSubscriber( DefaultSubscriber, diff --git a/faststream/kafka/subscriber/factory.py b/faststream/kafka/subscriber/factory.py new file mode 100644 index 0000000000..fb5de4bf1a --- /dev/null +++ b/faststream/kafka/subscriber/factory.py @@ -0,0 +1,183 @@ +from typing import ( + TYPE_CHECKING, + Iterable, + Literal, + Optional, + Tuple, + Union, + overload, +) + +from faststream.exceptions import SetupError +from faststream.kafka.subscriber.asyncapi import ( + AsyncAPIBatchSubscriber, + AsyncAPIDefaultSubscriber, +) + +if TYPE_CHECKING: + from aiokafka import ConsumerRecord, TopicPartition + from aiokafka.abc import ConsumerRebalanceListener + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.types import AnyDict + + 
+@overload +def create_subscriber( + *topics: str, + batch: Literal[True], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIBatchSubscriber": ... + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[False], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIDefaultSubscriber": ... 
+ + +@overload +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: ... + + +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: + if is_manual and not group_id: + raise SetupError("You must use `group_id` with manual commit mode.") + + if not topics and not partitions and not pattern: + raise SetupError( + "You should provide either `topics` or `partitions` or `pattern`." 
+ ) + elif topics and partitions: + raise SetupError("You can't provide both `topics` and `partitions`.") + elif topics and pattern: + raise SetupError("You can't provide both `topics` and `pattern`.") + elif partitions and pattern: + raise SetupError("You can't provide both `partitions` and `pattern`.") + + if batch: + return AsyncAPIBatchSubscriber( + *topics, + batch_timeout_ms=batch_timeout_ms, + max_records=max_records, + group_id=group_id, + listener=listener, + pattern=pattern, + connection_args=connection_args, + partitions=partitions, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIDefaultSubscriber( + *topics, + group_id=group_id, + listener=listener, + pattern=pattern, + connection_args=connection_args, + partitions=partitions, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/nats/__init__.py b/faststream/nats/__init__.py index 2c41d1bb04..72ba1a2876 100644 --- a/faststream/nats/__init__.py +++ b/faststream/nats/__init__.py @@ -16,7 +16,7 @@ from faststream.nats.annotations import NatsMessage from faststream.nats.broker.broker import NatsBroker from faststream.nats.router import NatsPublisher, NatsRoute, NatsRouter -from faststream.nats.schemas import JStream, PullSub +from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.nats.testing import TestNatsBroker from faststream.testing.app import TestApp @@ -25,6 +25,8 @@ "NatsBroker", "JStream", "PullSub", + "KvWatch", + "ObjWatch", "NatsRoute", "NatsRouter", "NatsPublisher", diff --git a/faststream/nats/annotations.py b/faststream/nats/annotations.py index 
11a4bd5fb5..dabbcaa39a 100644 --- a/faststream/nats/annotations.py +++ b/faststream/nats/annotations.py @@ -1,11 +1,14 @@ -from nats.aio.client import Client as NatsClient -from nats.js.client import JetStreamContext +from nats.aio.client import Client as _NatsClient +from nats.js.client import JetStreamContext as _JetStream +from nats.js.object_store import ObjectStore as _ObjectStore from typing_extensions import Annotated from faststream.annotations import ContextRepo, Logger, NoCast -from faststream.nats.broker import NatsBroker as NB -from faststream.nats.message import NatsMessage as NM -from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer +from faststream.nats.broker import NatsBroker as _Broker +from faststream.nats.message import NatsMessage as _Message +from faststream.nats.publisher.producer import NatsFastProducer as _CoreProducer +from faststream.nats.publisher.producer import NatsJSFastProducer as _JsProducer +from faststream.nats.subscriber.usecase import OBJECT_STORAGE_CONTEXT_KEY from faststream.utils.context import Context __all__ = ( @@ -16,11 +19,13 @@ "NatsBroker", "Client", "JsClient", + "ObjectStorage", ) -NatsMessage = Annotated[NM, Context("message")] -NatsBroker = Annotated[NB, Context("broker")] -Client = Annotated[NatsClient, Context("broker._connection")] -JsClient = Annotated[JetStreamContext, Context("broker._stream")] -NatsProducer = Annotated[NatsFastProducer, Context("broker._producer")] -NatsJsProducer = Annotated[NatsJSFastProducer, Context("broker._js_producer")] +ObjectStorage = Annotated[_ObjectStore, Context(OBJECT_STORAGE_CONTEXT_KEY)] +NatsMessage = Annotated[_Message, Context("message")] +NatsBroker = Annotated[_Broker, Context("broker")] +Client = Annotated[_NatsClient, Context("broker._connection")] +JsClient = Annotated[_JetStream, Context("broker._stream")] +NatsProducer = Annotated[_CoreProducer, Context("broker._producer")] +NatsJsProducer = Annotated[_JsProducer, 
Context("broker._js_producer")] diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py index a2e196a535..e6806172c3 100644 --- a/faststream/nats/broker/broker.py +++ b/faststream/nats/broker/broker.py @@ -33,6 +33,7 @@ from faststream.broker.message import gen_cor_id from faststream.nats.broker.logging import NatsLoggingBroker from faststream.nats.broker.registrator import NatsRegistrator +from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer from faststream.nats.security import parse_security from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber @@ -51,7 +52,10 @@ SignatureCallback, ) from nats.aio.msg import Msg + from nats.js.api import Placement, RePublish, StorageType from nats.js.client import JetStreamContext + from nats.js.kv import KeyValue + from nats.js.object_store import ObjectStore from typing_extensions import TypedDict, Unpack from faststream.asyncapi import schema as asyncapi @@ -218,6 +222,8 @@ class NatsBroker( _producer: Optional["NatsFastProducer"] _js_producer: Optional["NatsJSFastProducer"] + _kv_declarer: Optional["KVBucketDeclarer"] + _os_declarer: Optional["OSBucketDeclarer"] def __init__( self, @@ -541,6 +547,8 @@ def __init__( # JS options self.stream = None self._js_producer = None + self._kv_declarer = None + self._os_declarer = None @override async def connect( # type: ignore[override] @@ -583,6 +591,9 @@ async def _connect(self, **kwargs: Any) -> "Client": parser=self._parser, ) + self._kv_declarer = KVBucketDeclarer(stream) + self._os_declarer = OSBucketDeclarer(stream) + return connection async def _close( @@ -609,44 +620,50 @@ async def start(self) -> None: assert self.stream, "Broker should be started already" # nosec B101 assert self._producer, "Broker should be started already" # nosec B101 - # TODO: filter by already running handlers after TestClient refactor - for handler in 
self._subscribers.values(): - stream = handler.stream + for stream in filter( + lambda x: x.declare, + self._stream_builder.objects.values(), + ): + try: + await self.stream.add_stream( + config=stream.config, + subjects=stream.subjects, + ) - log_context = handler.get_log_context(None) + except BadRequestError as e: # noqa: PERF203 + log_context = AsyncAPISubscriber.build_log_context( + message=None, + subject="", + queue="", + stream=stream.name, + ) + + if ( + e.description + == "stream name already in use with a different configuration" + ): + old_config = (await self.stream.stream_info(stream.name)).config - if stream is not None and stream.declare: - try: # pragma: no branch - await self.stream.add_stream( + self._log(str(e), logging.WARNING, log_context) + await self.stream.update_stream( config=stream.config, - subjects=stream.subjects, + subjects=tuple( + set(old_config.subjects or ()).union(stream.subjects) + ), ) - except BadRequestError as e: - if ( - e.description - == "stream name already in use with a different configuration" - ): - old_config = (await self.stream.stream_info(stream.name)).config - - self._log(str(e), logging.WARNING, log_context) - await self.stream.update_stream( - config=stream.config, - subjects=tuple( - set(old_config.subjects or ()).union(stream.subjects) - ), - ) + else: # pragma: no cover + self._log(str(e), logging.ERROR, log_context, exc_info=e) - else: # pragma: no cover - self._log(str(e), logging.ERROR, log_context, exc_info=e) - - finally: - # prevent from double declaration - stream.declare = False + finally: + # prevent from double declaration + stream.declare = False + # TODO: filter by already running handlers after TestClient refactor + for handler in self._subscribers.values(): self._log( f"`{handler.call_name}` waiting for messages", - extra=log_context, + extra=handler.get_log_context(None), ) await handler.start() @@ -750,11 +767,30 @@ def setup_subscriber( # type: ignore[override] self, subscriber: 
"AsyncAPISubscriber", ) -> None: - connection: Union["Client", "JetStreamContext", None] = None + connection: Union[ + "Client", + "JetStreamContext", + KVBucketDeclarer, + OSBucketDeclarer, + None, + ] = None + + if getattr(subscriber, "kv_watch", None): + connection = self._kv_declarer + + elif getattr(subscriber, "obj_watch", None): + connection = self._os_declarer + + elif getattr(subscriber, "stream", None): + connection = self.stream - connection = self._connection if subscriber.stream is None else self.stream + else: + connection = self._connection - return super().setup_subscriber(subscriber, connection=connection) + return super().setup_subscriber( + subscriber, + connection=connection, + ) @override def setup_publisher( # type: ignore[override] @@ -772,6 +808,66 @@ def setup_publisher( # type: ignore[override] super().setup_publisher(publisher, producer=producer) + async def key_value( + self, + bucket: str, + *, + description: Optional[str] = None, + max_value_size: Optional[int] = None, + history: int = 1, + ttl: Optional[float] = None, # in seconds + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + republish: Optional["RePublish"] = None, + direct: Optional[bool] = None, + # custom + declare: bool = True, + ) -> "KeyValue": + assert self._kv_declarer, "Broker should be connected already." 
# nosec B101 + + return await self._kv_declarer.create_key_value( + bucket=bucket, + description=description, + max_value_size=max_value_size, + history=history, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + republish=republish, + direct=direct, + declare=declare, + ) + + async def object_storage( + self, + bucket: str, + *, + description: Optional[str] = None, + ttl: Optional[float] = None, + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + # custom + declare: bool = True, + ) -> "ObjectStore": + assert self._os_declarer, "Broker should be connected already." # nosec B101 + + return await self._os_declarer.create_object_store( + bucket=bucket, + description=description, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + declare=declare, + ) + def _log_connection_broken( self, error_cb: Optional["ErrorCallback"] = None, diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py index c0670e12c1..9fe73a3386 100644 --- a/faststream/nats/broker/registrator.py +++ b/faststream/nats/broker/registrator.py @@ -1,12 +1,17 @@ from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union, cast +from fast_depends.dependencies import Depends from nats.js import api from typing_extensions import Annotated, Doc, deprecated, override from faststream.broker.core.abc import ABCBroker +from faststream.broker.types import CustomCallable from faststream.broker.utils import default_filter +from faststream.nats.helpers import StreamBuilder from faststream.nats.publisher.asyncapi import AsyncAPIPublisher +from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber +from faststream.nats.subscriber.factory import create_subscriber if TYPE_CHECKING: from fast_depends.dependencies 
import Depends @@ -19,7 +24,6 @@ SubscriberMiddleware, ) from faststream.nats.message import NatsBatchMessage, NatsMessage - from faststream.nats.schemas import JStream, PullSub class NatsRegistrator(ABCBroker["Msg"]): @@ -28,6 +32,11 @@ class NatsRegistrator(ABCBroker["Msg"]): _subscribers: Dict[int, "AsyncAPISubscriber"] _publishers: Dict[int, "AsyncAPIPublisher"] + def __init__(self, **kwargs: Any) -> None: + self._stream_builder = StreamBuilder() + + super().__init__(**kwargs) + @override def subscriber( # type: ignore[override] self, @@ -102,12 +111,20 @@ def subscriber( # type: ignore[override] ] = None, # pull arguments pull_sub: Annotated[ - Optional["PullSub"], + Union[bool, "PullSub"], Doc( "NATS Pull consumer parameters container. " "Should be used with `stream` only." ), + ] = False, + kv_watch: Annotated[ + Union[str, "KvWatch", None], + Doc("KeyValue watch parameters container."), ] = None, + obj_watch: Annotated[ + Union[bool, "ObjWatch"], + Doc("ObjecStore watch parameters container."), + ] = False, inbox_prefix: Annotated[ bytes, Doc( @@ -187,14 +204,19 @@ def subscriber( # type: ignore[override] You can use it as a handler decorator `@broker.subscriber(...)`. """ + if stream := self._stream_builder.create(stream): + stream.add_subject(subject) + subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( # type: ignore[arg-type] + create_subscriber( subject=subject, queue=queue, stream=stream, - pull_sub=pull_sub, + pull_sub=PullSub.validate(pull_sub), + kv_watch=KvWatch.validate(kv_watch), + obj_watch=ObjWatch.validate(obj_watch), max_workers=max_workers, # extra args pending_msgs_limit=pending_msgs_limit, @@ -295,6 +317,9 @@ def publisher( # type: ignore[override] Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`. 
""" + if stream := self._stream_builder.create(stream): + stream.add_subject(subject) + publisher = cast( AsyncAPIPublisher, super().publisher( diff --git a/faststream/nats/fastapi/fastapi.py b/faststream/nats/fastapi/fastapi.py index 4da5f51889..7ea3a2a5df 100644 --- a/faststream/nats/fastapi/fastapi.py +++ b/faststream/nats/fastapi/fastapi.py @@ -66,7 +66,7 @@ SubscriberMiddleware, ) from faststream.nats.message import NatsBatchMessage, NatsMessage - from faststream.nats.schemas import JStream, PullSub + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.security import BaseSecurity from faststream.types import AnyDict, LoggerProto @@ -651,6 +651,14 @@ def subscriber( # type: ignore[override] "Should be used with `stream` only." ), ] = None, + kv_watch: Annotated[ + Union[str, "KvWatch", None], + Doc("KeyValue watch parameters container."), + ] = None, + obj_watch: Annotated[ + Union[bool, "ObjWatch"], + Doc("ObjecStore watch parameters container."), + ] = False, inbox_prefix: Annotated[ bytes, Doc( @@ -866,6 +874,8 @@ def subscriber( # type: ignore[override] deliver_policy=deliver_policy, headers_only=headers_only, pull_sub=pull_sub, + kv_watch=kv_watch, + obj_watch=obj_watch, inbox_prefix=inbox_prefix, ack_first=ack_first, stream=stream, diff --git a/faststream/nats/helpers.py b/faststream/nats/helpers.py deleted file mode 100644 index c77dd16c5c..0000000000 --- a/faststream/nats/helpers.py +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Dict, Optional, Union - -from faststream.nats.schemas.js_stream import JStream - - -class StreamBuilder: - """A class to build streams.""" - - streams: Dict[str, JStream] - - def __init__(self) -> None: - """Initialize the stream builder.""" - self.streams = {} - - def stream( - self, - name: Union[str, JStream, None], - ) -> Optional[JStream]: - """Get a stream. - - Args: - *args: The arguments. - name: The stream name. - declare: Whether to declare the stream. 
- **kwargs: The keyword arguments. - """ - stream = JStream.validate(name) - - if stream is not None: - stream = self.streams[stream.name] = self.streams.get(stream.name, stream) - - return stream - - -stream_builder = StreamBuilder() diff --git a/faststream/nats/helpers/__init__.py b/faststream/nats/helpers/__init__.py new file mode 100644 index 0000000000..28b3479a7b --- /dev/null +++ b/faststream/nats/helpers/__init__.py @@ -0,0 +1,9 @@ +from faststream.nats.helpers.bucket_declarer import KVBucketDeclarer +from faststream.nats.helpers.obj_storage_declarer import OSBucketDeclarer +from faststream.nats.helpers.object_builder import StreamBuilder + +__all__ = ( + "KVBucketDeclarer", + "StreamBuilder", + "OSBucketDeclarer", +) diff --git a/faststream/nats/helpers/bucket_declarer.py b/faststream/nats/helpers/bucket_declarer.py new file mode 100644 index 0000000000..916b706254 --- /dev/null +++ b/faststream/nats/helpers/bucket_declarer.py @@ -0,0 +1,57 @@ +from typing import TYPE_CHECKING, Dict, Optional + +from nats.js.api import KeyValueConfig + +if TYPE_CHECKING: + from nats.js import JetStreamContext + from nats.js.api import Placement, RePublish, StorageType + from nats.js.kv import KeyValue + + +class KVBucketDeclarer: + buckets: Dict[str, "KeyValue"] + + def __init__(self, connection: "JetStreamContext") -> None: + self._connection = connection + self.buckets = {} + + async def create_key_value( + self, + bucket: str, + *, + description: Optional[str] = None, + max_value_size: Optional[int] = None, + history: int = 1, + ttl: Optional[float] = None, # in seconds + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + republish: Optional["RePublish"] = None, + direct: Optional[bool] = None, + # custom + declare: bool = True, + ) -> "KeyValue": + if (key_value := self.buckets.get(bucket)) is None: + if declare: + key_value = await self._connection.create_key_value( + 
config=KeyValueConfig( + bucket=bucket, + description=description, + max_value_size=max_value_size, + history=history, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + republish=republish, + direct=direct, + ) + ) + else: + key_value = await self._connection.key_value(bucket) + + self.buckets[bucket] = key_value + + return key_value diff --git a/faststream/nats/helpers/obj_storage_declarer.py b/faststream/nats/helpers/obj_storage_declarer.py new file mode 100644 index 0000000000..1d2ae50715 --- /dev/null +++ b/faststream/nats/helpers/obj_storage_declarer.py @@ -0,0 +1,50 @@ +from typing import TYPE_CHECKING, Dict, Optional + +from nats.js.api import ObjectStoreConfig + +if TYPE_CHECKING: + from nats.js import JetStreamContext + from nats.js.api import Placement, StorageType + from nats.js.object_store import ObjectStore + + +class OSBucketDeclarer: + buckets: Dict[str, "ObjectStore"] + + def __init__(self, connection: "JetStreamContext") -> None: + self._connection = connection + self.buckets = {} + + async def create_object_store( + self, + bucket: str, + *, + description: Optional[str] = None, + ttl: Optional[float] = None, + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + # custom + declare: bool = True, + ) -> "ObjectStore": + if (object_store := self.buckets.get(bucket)) is None: + if declare: + object_store = await self._connection.create_object_store( + bucket=bucket, + config=ObjectStoreConfig( + bucket=bucket, + description=description, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + ), + ) + else: + object_store = await self._connection.object_store(bucket) + + self.buckets[bucket] = object_store + + return object_store diff --git a/faststream/nats/helpers/object_builder.py b/faststream/nats/helpers/object_builder.py new file mode 100644 index 
0000000000..5d40a44da6 --- /dev/null +++ b/faststream/nats/helpers/object_builder.py @@ -0,0 +1,27 @@ +from typing import Dict, Optional, Union + +from faststream.nats.schemas import JStream + + +class StreamBuilder: + """A class to build streams.""" + + __slots__ = ("objects",) + + objects: Dict[str, "JStream"] + + def __init__(self) -> None: + """Initialize the builder.""" + self.objects = {} + + def create( + self, + name: Union[str, "JStream", None], + ) -> Optional["JStream"]: + """Get an object.""" + stream = JStream.validate(name) + + if stream is not None: + stream = self.objects[stream.name] = self.objects.get(stream.name, stream) + + return stream diff --git a/faststream/nats/message.py b/faststream/nats/message.py index 821cebbb71..ee54ef2caa 100644 --- a/faststream/nats/message.py +++ b/faststream/nats/message.py @@ -1,12 +1,13 @@ -from typing import TYPE_CHECKING, List, Union +from typing import List, Union -from faststream.broker.message import StreamMessage +from nats.aio.msg import Msg +from nats.js.api import ObjectInfo +from nats.js.kv import KeyValue -if TYPE_CHECKING: - from nats.aio.msg import Msg +from faststream.broker.message import StreamMessage -class NatsMessage(StreamMessage["Msg"]): +class NatsMessage(StreamMessage[Msg]): """A class to represent a NATS message.""" async def ack(self) -> None: @@ -34,7 +35,7 @@ async def in_progress(self) -> None: await self.raw_message.in_progress() -class NatsBatchMessage(StreamMessage[List["Msg"]]): +class NatsBatchMessage(StreamMessage[List[Msg]]): """A class to represent a NATS batch message.""" async def ack(self) -> None: @@ -73,3 +74,11 @@ async def in_progress(self) -> None: self.raw_message, ): await m.in_progress() + + +class NatsKvMessage(StreamMessage[KeyValue.Entry]): + pass + + +class NatsObjMessage(StreamMessage[ObjectInfo]): + pass diff --git a/faststream/nats/opentelemetry/provider.py b/faststream/nats/opentelemetry/provider.py index 7aefafed2c..7c33a7d76b 100644 --- 
a/faststream/nats/opentelemetry/provider.py +++ b/faststream/nats/opentelemetry/provider.py @@ -1,5 +1,6 @@ from typing import TYPE_CHECKING, List, Optional, Sequence, Union, overload +from nats.aio.msg import Msg from opentelemetry.semconv.trace import SpanAttributes from faststream.__about__ import SERVICE_NAME @@ -8,8 +9,6 @@ from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME if TYPE_CHECKING: - from nats.aio.msg import Msg - from faststream.broker.message import StreamMessage from faststream.types import AnyDict @@ -107,8 +106,12 @@ def telemetry_attributes_provider_factory( ) -> Union[ NatsTelemetrySettingsProvider, NatsBatchTelemetrySettingsProvider, + None, ]: if isinstance(msg, Sequence): return NatsBatchTelemetrySettingsProvider() - else: + elif isinstance(msg, Msg) or msg is None: return NatsTelemetrySettingsProvider() + else: + # KeyValue and Object Storage watch cases + return None diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py index 940ae70426..ef18834eca 100644 --- a/faststream/nats/parser.py +++ b/faststream/nats/parser.py @@ -1,11 +1,18 @@ -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Optional from faststream.broker.message import StreamMessage, decode_message, gen_cor_id -from faststream.nats.message import NatsBatchMessage, NatsMessage +from faststream.nats.message import ( + NatsBatchMessage, + NatsKvMessage, + NatsMessage, + NatsObjMessage, +) from faststream.nats.schemas.js_stream import compile_nats_wildcard if TYPE_CHECKING: from nats.aio.msg import Msg + from nats.js.api import ObjectInfo + from nats.js.kv import KeyValue from faststream.types import AnyDict, DecodedMessage @@ -36,7 +43,7 @@ def get_path( @staticmethod async def decode_message( - msg: "StreamMessage[Msg]", + msg: "StreamMessage[Any]", ) -> "DecodedMessage": return decode_message(msg) @@ -139,3 +146,22 @@ async def decode_batch( data.append(decode_message(one_msg)) 
return data + + +class KvParser(NatsBaseParser): + async def parse_message( + self, msg: "KeyValue.Entry" + ) -> StreamMessage["KeyValue.Entry"]: + return NatsKvMessage( + raw_message=msg, + body=msg.value, + path=self.get_path(msg.key) or {}, + ) + + +class ObjParser(NatsBaseParser): + async def parse_message(self, msg: "ObjectInfo") -> StreamMessage["ObjectInfo"]: + return NatsObjMessage( + raw_message=msg, + body=msg.name, + ) diff --git a/faststream/nats/publisher/asyncapi.py b/faststream/nats/publisher/asyncapi.py index 094e1eebb4..1546b675f8 100644 --- a/faststream/nats/publisher/asyncapi.py +++ b/faststream/nats/publisher/asyncapi.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional from typing_extensions import override @@ -11,7 +11,6 @@ ) from faststream.asyncapi.schema.bindings import nats from faststream.asyncapi.utils import resolve_payloads -from faststream.nats.helpers import stream_builder from faststream.nats.publisher.usecase import LogicPublisher if TYPE_CHECKING: @@ -58,7 +57,7 @@ def create( # type: ignore[override] subject: str, reply_to: str, headers: Optional[Dict[str, str]], - stream: Union[str, "JStream", None], + stream: Optional["JStream"], timeout: Optional[float], # Publisher args broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -69,9 +68,6 @@ def create( # type: ignore[override] description_: Optional[str], include_in_schema: bool, ) -> "AsyncAPIPublisher": - if stream := stream_builder.stream(stream): - stream.add_subject(subject) - return cls( subject=subject, reply_to=reply_to, diff --git a/faststream/nats/router.py b/faststream/nats/router.py index 65f07947a9..74215d3e78 100644 --- a/faststream/nats/router.py +++ b/faststream/nats/router.py @@ -28,7 +28,7 @@ SubscriberMiddleware, ) from faststream.nats.message import NatsBatchMessage, NatsMessage - from faststream.nats.schemas import JStream, PullSub + from 
faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.types import SendableMessage @@ -205,6 +205,14 @@ def __init__( "Should be used with `stream` only." ), ] = None, + kv_watch: Annotated[ + Union[str, "KvWatch", None], + Doc("KeyValue watch parameters container."), + ] = None, + obj_watch: Annotated[ + Union[bool, "ObjWatch"], + Doc("ObjecStore watch parameters container."), + ] = False, inbox_prefix: Annotated[ bytes, Doc( @@ -295,6 +303,8 @@ def __init__( deliver_policy=deliver_policy, headers_only=headers_only, pull_sub=pull_sub, + kv_watch=kv_watch, + obj_watch=obj_watch, inbox_prefix=inbox_prefix, ack_first=ack_first, stream=stream, diff --git a/faststream/nats/schemas/__init__.py b/faststream/nats/schemas/__init__.py index 24ca18db99..ca9f56f48d 100644 --- a/faststream/nats/schemas/__init__.py +++ b/faststream/nats/schemas/__init__.py @@ -1,7 +1,11 @@ from faststream.nats.schemas.js_stream import JStream +from faststream.nats.schemas.kv_watch import KvWatch +from faststream.nats.schemas.obj_watch import ObjWatch from faststream.nats.schemas.pull_sub import PullSub __all__ = ( "JStream", "PullSub", + "KvWatch", + "ObjWatch", ) diff --git a/faststream/nats/schemas/kv_watch.py b/faststream/nats/schemas/kv_watch.py new file mode 100644 index 0000000000..a1f50fce96 --- /dev/null +++ b/faststream/nats/schemas/kv_watch.py @@ -0,0 +1,70 @@ +from typing import Optional + +from typing_extensions import Annotated, Doc + +from faststream.broker.schemas import NameRequired + + +class KvWatch(NameRequired): + """A class to represent a NATS kv watch subscription.""" + + __slots__ = ( + "bucket", + "headers_only", + "include_history", + "ignore_deletes", + "meta_only", + "inactive_threshold", + "timeout", + "declare", + ) + + def __init__( + self, + bucket: Annotated[ + str, + Doc("Bucket name."), + ], + headers_only: Annotated[ + bool, + Doc("Whether to receive only headers."), + ] = False, + include_history: Annotated[ + bool, + Doc("Whether 
to include history."), + ] = False, + ignore_deletes: Annotated[ + bool, + Doc("Whether to ignore deletes."), + ] = False, + meta_only: Annotated[ + bool, + Doc("Whether to receive only metadata."), + ] = False, + inactive_threshold: Annotated[ + Optional[float], + Doc("Inactivity threshold."), + ] = None, + timeout: Annotated[ + Optional[float], + Doc("Timeout in seconds."), + ] = 5.0, + # custom + declare: Annotated[ + bool, + Doc("Whether to create bucket automatically or just connect to it."), + ] = True, + ) -> None: + super().__init__(bucket) + + self.headers_only = headers_only + self.include_history = include_history + self.ignore_deletes = ignore_deletes + self.meta_only = meta_only + self.inactive_threshold = inactive_threshold + self.timeout = timeout + + self.declare = declare + + def __hash__(self) -> int: + return hash(self.name) diff --git a/faststream/nats/schemas/obj_watch.py b/faststream/nats/schemas/obj_watch.py new file mode 100644 index 0000000000..998bef9482 --- /dev/null +++ b/faststream/nats/schemas/obj_watch.py @@ -0,0 +1,73 @@ +from typing import Literal, Optional, Union, overload + +from typing_extensions import Annotated, Doc + + +class ObjWatch: + """A class to represent a NATS object storage watch subscription.""" + + __slots__ = ( + "ignore_deletes", + "include_history", + "meta_only", + "timeout", + "declare", + ) + + def __init__( + self, + ignore_deletes: Annotated[ + bool, + Doc("Ignore delete events."), + ] = False, + include_history: Annotated[ + bool, + Doc("Include history."), + ] = False, + meta_only: Annotated[ + bool, + Doc("Only metadata."), + ] = False, + timeout: Annotated[ + float, + Doc("The timeout for the watch."), + ] = 5.0, + # custom + declare: Annotated[ + bool, + Doc( + "Whether to create object storage automatically or just connect to it." 
+ ), + ] = True, + ) -> None: + self.ignore_deletes = ignore_deletes + self.include_history = include_history + self.meta_only = meta_only + self.timeout = timeout + + self.declare = declare + + @overload + @classmethod + def validate(cls, value: Literal[True]) -> "ObjWatch": ... + + @overload + @classmethod + def validate(cls, value: Literal[False]) -> None: ... + + @overload + @classmethod + def validate(cls, value: "ObjWatch") -> "ObjWatch": ... + + @overload + @classmethod + def validate(cls, value: Union[bool, "ObjWatch"]) -> Optional["ObjWatch"]: ... + + @classmethod + def validate(cls, value: Union[bool, "ObjWatch"]) -> Optional["ObjWatch"]: + if value is True: + return ObjWatch() + elif value is False: + return None + else: + return value diff --git a/faststream/nats/schemas/pull_sub.py b/faststream/nats/schemas/pull_sub.py index fbe12b0a8a..7544d17b74 100644 --- a/faststream/nats/schemas/pull_sub.py +++ b/faststream/nats/schemas/pull_sub.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Literal, Optional, Union, overload from typing_extensions import Annotated, Doc @@ -34,3 +34,28 @@ def __init__( self.batch_size = batch_size self.batch = batch self.timeout = timeout + + @overload + @classmethod + def validate(cls, value: Literal[True]) -> "PullSub": ... + + @overload + @classmethod + def validate(cls, value: Literal[False]) -> None: ... + + @overload + @classmethod + def validate(cls, value: "PullSub") -> "PullSub": ... + + @overload + @classmethod + def validate(cls, value: Union[bool, "PullSub"]) -> Optional["PullSub"]: ... 
+ + @classmethod + def validate(cls, value: Union[bool, "PullSub"]) -> Optional["PullSub"]: + if value is True: + return PullSub() + elif value is False: + return None + else: + return value diff --git a/faststream/nats/subscriber/asyncapi.py b/faststream/nats/subscriber/asyncapi.py index 7e5a6d4312..ad0edb0bca 100644 --- a/faststream/nats/subscriber/asyncapi.py +++ b/faststream/nats/subscriber/asyncapi.py @@ -1,13 +1,5 @@ -from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union +from typing import Any, Dict -from nats.aio.subscription import ( - DEFAULT_SUB_PENDING_BYTES_LIMIT, - DEFAULT_SUB_PENDING_MSGS_LIMIT, -) -from nats.js.client import ( - DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, - DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, -) from typing_extensions import override from faststream.asyncapi.schema import ( @@ -19,22 +11,19 @@ ) from faststream.asyncapi.schema.bindings import nats from faststream.asyncapi.utils import resolve_payloads -from faststream.exceptions import SetupError -from faststream.nats.helpers import stream_builder from faststream.nats.subscriber.usecase import ( - BatchHandler, - DefaultHandler, + BatchPullStreamSubscriber, + ConcurrentCoreSubscriber, + ConcurrentPullStreamSubscriber, + ConcurrentPushStreamSubscriber, + CoreSubscriber, + KeyValueWatchSubscriber, LogicSubscriber, + ObjStoreWatchSubscriber, + PullStreamSubscriber, + PushStreamSubscription, ) -if TYPE_CHECKING: - from fast_depends.dependencies import Depends - from nats.js import api - - from faststream.broker.types import BrokerMiddleware - from faststream.nats.schemas import JStream, PullSub - from faststream.types import AnyDict - class AsyncAPISubscriber(LogicSubscriber[Any]): """A class to represent a NATS handler.""" @@ -60,137 +49,64 @@ def get_schema(self) -> Dict[str, Channel]: bindings=ChannelBinding( nats=nats.ChannelBinding( subject=self.subject, - queue=self.queue or None, + queue=getattr(self, "queue", "") or None, ) ), ) } + +class 
AsyncAPICoreSubscriber(AsyncAPISubscriber, CoreSubscriber): + """One-message core consumer with AsyncAPI methods.""" + + +class AsyncAPIConcurrentCoreSubscriber(AsyncAPISubscriber, ConcurrentCoreSubscriber): + """One-message core concurrent consumer with AsyncAPI methods.""" + + +class AsyncAPIStreamSubscriber(AsyncAPISubscriber, PushStreamSubscription): + """One-message JS Push consumer with AsyncAPI methods.""" + + +class AsyncAPIConcurrentPushStreamSubscriber( + AsyncAPISubscriber, ConcurrentPushStreamSubscriber +): + """One-message JS Push concurrent consumer with AsyncAPI methods.""" + + +class AsyncAPIPullStreamSubscriber(AsyncAPISubscriber, PullStreamSubscriber): + """One-message JS Pull consumer with AsyncAPI methods.""" + + +class AsyncAPIConcurrentPullStreamSubscriber( + AsyncAPISubscriber, ConcurrentPullStreamSubscriber +): + """One-message JS Pull concurrent consumer with AsyncAPI methods.""" + + +class AsyncAPIBatchPullStreamSubscriber(AsyncAPISubscriber, BatchPullStreamSubscriber): + """Batch-message Pull consumer with AsyncAPI methods.""" + + +class AsyncAPIKeyValueWatchSubscriber(AsyncAPISubscriber, KeyValueWatchSubscriber): + """KeyValueWatch consumer with AsyncAPI methods.""" + @override - @staticmethod - def create( # type: ignore[override] - *, - subject: str, - queue: str, - pending_msgs_limit: Optional[int], - pending_bytes_limit: Optional[int], - # Core args - max_msgs: int, - # JS args - durable: Optional[str], - config: Optional["api.ConsumerConfig"], - ordered_consumer: bool, - idle_heartbeat: Optional[float], - flow_control: bool, - deliver_policy: Optional["api.DeliverPolicy"], - headers_only: Optional[bool], - # pull args - pull_sub: Optional["PullSub"], - inbox_prefix: bytes, - # custom args - ack_first: bool, - max_workers: int, - stream: Union[str, "JStream", None], - # Subscriber args - no_ack: bool, - retry: Union[bool, int], - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[Any]"], - # 
AsyncAPI information - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: - if stream := stream_builder.stream(stream): - stream.add_subject(subject) - - if pull_sub is not None and stream is None: - raise SetupError("Pull subscriber can be used only with a stream") - - if stream: - # TODO: pull & queue warning - # TODO: push & durable warning - - extra_options: AnyDict = { - "pending_msgs_limit": pending_msgs_limit - or DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, - "pending_bytes_limit": pending_bytes_limit - or DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, - "durable": durable, - "stream": stream.name, - "config": config, - } - - if pull_sub is not None: - extra_options.update({"inbox_prefix": inbox_prefix}) - - else: - extra_options.update( - { - "ordered_consumer": ordered_consumer, - "idle_heartbeat": idle_heartbeat, - "flow_control": flow_control, - "deliver_policy": deliver_policy, - "headers_only": headers_only, - "manual_ack": not ack_first, - } - ) - - else: - extra_options = { - "pending_msgs_limit": pending_msgs_limit - or DEFAULT_SUB_PENDING_MSGS_LIMIT, - "pending_bytes_limit": pending_bytes_limit - or DEFAULT_SUB_PENDING_BYTES_LIMIT, - "max_msgs": max_msgs, - } - - if getattr(pull_sub, "batch", False): - return AsyncAPIBatchSubscriber( - extra_options=extra_options, - # basic args - pull_sub=pull_sub, - subject=subject, - queue=queue, - stream=stream, - # Subscriber args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI information - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) + def get_name(self) -> str: + return "" - else: - return AsyncAPIDefaultSubscriber( - max_workers=max_workers, - extra_options=extra_options, - # basic args - pull_sub=pull_sub, - subject=subject, - queue=queue, - stream=stream, - # Subscriber args - no_ack=no_ack, - 
retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI information - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) + @override + def get_schema(self) -> Dict[str, Channel]: + return {} -class AsyncAPIDefaultSubscriber(DefaultHandler, AsyncAPISubscriber): - """One-message consumer with AsyncAPI methods.""" +class AsyncAPIObjStoreWatchSubscriber(AsyncAPISubscriber, ObjStoreWatchSubscriber): + """ObjStoreWatch consumer with AsyncAPI methods.""" + @override + def get_name(self) -> str: + return "" -class AsyncAPIBatchSubscriber(BatchHandler, AsyncAPISubscriber): - """Batch-message consumer with AsyncAPI methods.""" + @override + def get_schema(self) -> Dict[str, Channel]: + return {} diff --git a/faststream/nats/subscriber/factory.py b/faststream/nats/subscriber/factory.py new file mode 100644 index 0000000000..590598a2dd --- /dev/null +++ b/faststream/nats/subscriber/factory.py @@ -0,0 +1,271 @@ +from typing import TYPE_CHECKING, Any, Iterable, Optional, Union + +from nats.aio.subscription import ( + DEFAULT_SUB_PENDING_BYTES_LIMIT, + DEFAULT_SUB_PENDING_MSGS_LIMIT, +) +from nats.js.client import ( + DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, + DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, +) + +from faststream.exceptions import SetupError +from faststream.nats.subscriber.asyncapi import ( + AsyncAPIBatchPullStreamSubscriber, + AsyncAPIConcurrentCoreSubscriber, + AsyncAPIConcurrentPullStreamSubscriber, + AsyncAPIConcurrentPushStreamSubscriber, + AsyncAPICoreSubscriber, + AsyncAPIKeyValueWatchSubscriber, + AsyncAPIObjStoreWatchSubscriber, + AsyncAPIPullStreamSubscriber, + AsyncAPIStreamSubscriber, +) + +if TYPE_CHECKING: + from fast_depends.dependencies import Depends + from nats.js import api + + from faststream.broker.types import BrokerMiddleware + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub + from faststream.types import AnyDict + + +def create_subscriber( + *, 
+ subject: str, + queue: str, + pending_msgs_limit: Optional[int], + pending_bytes_limit: Optional[int], + # Core args + max_msgs: int, + # JS args + durable: Optional[str], + config: Optional["api.ConsumerConfig"], + ordered_consumer: bool, + idle_heartbeat: Optional[float], + flow_control: bool, + deliver_policy: Optional["api.DeliverPolicy"], + headers_only: Optional[bool], + # pull args + pull_sub: Optional["PullSub"], + kv_watch: Optional["KvWatch"], + obj_watch: Optional["ObjWatch"], + inbox_prefix: bytes, + # custom args + ack_first: bool, + max_workers: int, + stream: Optional["JStream"], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[Any]"], + # AsyncAPI information + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPICoreSubscriber", + "AsyncAPIConcurrentCoreSubscriber", + "AsyncAPIStreamSubscriber", + "AsyncAPIConcurrentPushStreamSubscriber", + "AsyncAPIPullStreamSubscriber", + "AsyncAPIConcurrentPullStreamSubscriber", + "AsyncAPIBatchPullStreamSubscriber", + "AsyncAPIKeyValueWatchSubscriber", + "AsyncAPIObjStoreWatchSubscriber", +]: + if pull_sub is not None and stream is None: + raise SetupError("Pull subscriber can be used only with a stream") + + if stream: + # TODO: pull & queue warning + # TODO: push & durable warning + + extra_options: AnyDict = { + "pending_msgs_limit": pending_msgs_limit + or DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, + "pending_bytes_limit": pending_bytes_limit + or DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, + "durable": durable, + "stream": stream.name, + "config": config, + } + + if pull_sub is not None: + extra_options.update({"inbox_prefix": inbox_prefix}) + + else: + extra_options.update( + { + "ordered_consumer": ordered_consumer, + "idle_heartbeat": idle_heartbeat, + "flow_control": flow_control, + "deliver_policy": deliver_policy, + "headers_only": headers_only, + 
"manual_ack": not ack_first, + } + ) + + else: + extra_options = { + "pending_msgs_limit": pending_msgs_limit or DEFAULT_SUB_PENDING_MSGS_LIMIT, + "pending_bytes_limit": pending_bytes_limit + or DEFAULT_SUB_PENDING_BYTES_LIMIT, + "max_msgs": max_msgs, + } + + if obj_watch is not None: + return AsyncAPIObjStoreWatchSubscriber( + subject=subject, + obj_watch=obj_watch, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + if kv_watch is not None: + return AsyncAPIKeyValueWatchSubscriber( + subject=subject, + kv_watch=kv_watch, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + elif stream is None: + if max_workers > 1: + return AsyncAPIConcurrentCoreSubscriber( + max_workers=max_workers, + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPICoreSubscriber( + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + if max_workers > 1: + if pull_sub is not None: + return AsyncAPIConcurrentPullStreamSubscriber( + max_workers=max_workers, + pull_sub=pull_sub, + stream=stream, + subject=subject, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + 
broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIConcurrentPushStreamSubscriber( + max_workers=max_workers, + stream=stream, + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + if pull_sub is not None: + if pull_sub.batch: + return AsyncAPIBatchPullStreamSubscriber( + pull_sub=pull_sub, + stream=stream, + subject=subject, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIPullStreamSubscriber( + pull_sub=pull_sub, + stream=stream, + subject=subject, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIStreamSubscriber( + stream=stream, + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/nats/subscriber/subscription.py b/faststream/nats/subscriber/subscription.py new file mode 100644 index 0000000000..4bc994842b --- /dev/null 
+++ b/faststream/nats/subscriber/subscription.py @@ -0,0 +1,26 @@ +from typing import Any, Generic, Optional, Protocol, TypeVar + + +class Unsubscriptable(Protocol): + async def unsubscribe(self) -> None: ... + + +class Watchable(Protocol): + async def stop(self) -> None: ... + + async def updates(self, timeout: float) -> Optional[Any]: ... + + +WatchableT = TypeVar("WatchableT", bound=Watchable) + + +class UnsubscribeAdapter(Unsubscriptable, Generic[WatchableT]): + __slots__ = ("obj",) + + obj: WatchableT + + def __init__(self, subscription: WatchableT) -> None: + self.obj = subscription + + async def unsubscribe(self) -> None: + await self.obj.stop() diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index 55a8340a7d..f668ac387e 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -1,11 +1,13 @@ import asyncio from abc import abstractmethod from contextlib import suppress +from functools import cached_property from typing import ( TYPE_CHECKING, Any, Awaitable, Callable, + Coroutine, Dict, Iterable, List, @@ -18,16 +20,29 @@ import anyio from fast_depends.dependencies import Depends from nats.errors import ConnectionClosedError, TimeoutError +from nats.js.api import ObjectInfo +from nats.js.kv import KeyValue from typing_extensions import Annotated, Doc, override from faststream.broker.message import StreamMessage from faststream.broker.publisher.fake import FakePublisher from faststream.broker.subscriber.usecase import SubscriberUsecase from faststream.broker.types import CustomCallable, MsgType -from faststream.exceptions import NOT_CONNECTED_YET, SetupError -from faststream.nats.parser import BatchParser, JsParser, NatsParser +from faststream.exceptions import NOT_CONNECTED_YET +from faststream.nats.parser import ( + BatchParser, + JsParser, + KvParser, + NatsParser, + ObjParser, +) from faststream.nats.schemas.js_stream import compile_nats_wildcard +from 
faststream.nats.subscriber.subscription import ( + UnsubscribeAdapter, + Unsubscriptable, +) from faststream.types import AnyDict, LoggerProto, SendableMessage +from faststream.utils.context.repository import context if TYPE_CHECKING: from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream @@ -35,6 +50,7 @@ from nats.aio.msg import Msg from nats.aio.subscription import Subscription from nats.js import JetStreamContext + from nats.js.object_store import ObjectStore from faststream.broker.message import StreamMessage from faststream.broker.publisher.proto import ProducerProto @@ -42,19 +58,15 @@ AsyncCallable, BrokerMiddleware, ) - from faststream.nats.schemas import JStream, PullSub + from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.types import Decorator class LogicSubscriber(SubscriberUsecase[MsgType]): """A class to represent a NATS handler.""" - subscription: Union[ - None, - "Subscription", - "JetStreamContext.PushSubscription", - "JetStreamContext.PullSubscription", - ] + subscription: Optional[Unsubscriptable] producer: Optional["ProducerProto"] _connection: Union["Client", "JetStreamContext", None] @@ -63,9 +75,6 @@ def __init__( *, subject: str, extra_options: Optional[AnyDict], - queue: str, - stream: Optional["JStream"], - pull_sub: Optional["PullSub"], # Subscriber args default_parser: "AsyncCallable", default_decoder: "AsyncCallable", @@ -78,13 +87,8 @@ def __init__( description_: Optional[str], include_in_schema: bool, ) -> None: - _, path = compile_nats_wildcard(subject) - - self.subject = path - self.queue = queue + self.subject = subject - self.stream = stream - self.pull_sub = pull_sub self.extra_options = extra_options or {} super().__init__( @@ -104,7 +108,6 @@ def __init__( self._connection = None self.subscription = None self.producer = None - self.tasks: List["asyncio.Task[Any]"] = [] @override def setup( # 
type: ignore[override] @@ -140,6 +143,12 @@ def setup( # type: ignore[override] _call_decorators=_call_decorators, ) + @cached_property + def clear_subject(self) -> str: + """Compile `test.{name}` to `test.*` subject.""" + _, path = compile_nats_wildcard(self.subject) + return path + async def start(self) -> None: """Create NATS subscription and start consume tasks.""" assert self._connection, NOT_CONNECTED_YET # nosec B101 @@ -154,60 +163,21 @@ async def close(self) -> None: await self.subscription.unsubscribe() self.subscription = None - for task in self.tasks: - if not task.done(): - task.cancel() - self.tasks = [] - @abstractmethod async def _create_subscription( self, *, - connection: Union["Client", "JetStreamContext"], + connection: Union[ + "Client", "JetStreamContext", "KVBucketDeclarer", "OSBucketDeclarer" + ], ) -> None: """Create NATS subscription object to consume messages.""" raise NotImplementedError() - def _make_response_publisher( - self, - message: Annotated[ - "StreamMessage[Any]", - Doc("Message requiring reply"), - ], - ) -> Sequence[FakePublisher]: - """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" - if not message.reply_to or self._producer is None: - return () - - return ( - FakePublisher( - self._producer.publish, - publish_kwargs={ - "subject": message.reply_to, - }, - ), - ) - - def __hash__(self) -> int: - return self.get_routing_hash(self.subject) - - @staticmethod - def get_routing_hash( - subject: Annotated[ - str, - Doc("NATS subject to consume messages"), - ], - ) -> int: - """Get handler hash by outer data. - - Using to find handler in `broker.handlers` dictionary. 
- """ - return hash(subject) - @staticmethod def build_log_context( message: Annotated[ - Optional["StreamMessage[Any]"], + Optional["StreamMessage[MsgType]"], Doc("Message which we are building context for"), ], subject: Annotated[ @@ -220,77 +190,64 @@ def build_log_context( Doc("Using queue group name"), ] = "", stream: Annotated[ - Optional["JStream"], + str, Doc("Stream object we are listening"), - ] = None, + ] = "", ) -> Dict[str, str]: """Static method to build log context out of `self.consume` scope.""" return { "subject": subject, "queue": queue, - "stream": getattr(stream, "name", ""), + "stream": stream, "message_id": getattr(message, "message_id", ""), } - def get_log_context( - self, - message: Annotated[ - Optional["StreamMessage[Any]"], - Doc("Message which we are building context for"), - ], - ) -> Dict[str, str]: - """Log context factory using in `self.consume` scope.""" - return self.build_log_context( - message=message, - subject=self.subject, - queue=self.queue, - stream=self.stream, - ) - def add_prefix(self, prefix: str) -> None: """Include Subscriber in router.""" self.subject = "".join((prefix, self.subject)) + def __hash__(self) -> int: + return self.get_routing_hash(self.subject) -class DefaultHandler(LogicSubscriber["Msg"]): - """One-message consumer class.""" + @staticmethod + def get_routing_hash( + subject: Annotated[ + str, + Doc("NATS subject to consume messages"), + ], + ) -> int: + """Get handler hash by outer data. - send_stream: "MemoryObjectSendStream[Msg]" - receive_stream: "MemoryObjectReceiveStream[Msg]" + Using to find handler in `broker.handlers` dictionary. 
+ """ + return hash(subject) + +class _DefaultSubscriber(LogicSubscriber[MsgType]): def __init__( self, *, - max_workers: int, - # default args subject: str, - queue: str, - stream: Optional["JStream"], - pull_sub: Optional["PullSub"], + # default args extra_options: Optional[AnyDict], # Subscriber args + default_parser: "AsyncCallable", + default_decoder: "AsyncCallable", no_ack: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], - broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], # AsyncAPI args title_: Optional[str], description_: Optional[str], include_in_schema: bool, ) -> None: - parser_: Union[NatsParser, JsParser] = ( - NatsParser(pattern=subject) if stream is None else JsParser(pattern=subject) - ) - super().__init__( subject=subject, - queue=queue, - stream=stream, - pull_sub=pull_sub, extra_options=extra_options, # subscriber args - default_parser=parser_.parse_message, - default_decoder=parser_.decode_message, + default_parser=default_parser, + default_decoder=default_decoder, # Propagated args no_ack=no_ack, retry=retry, @@ -302,6 +259,70 @@ def __init__( include_in_schema=include_in_schema, ) + def _make_response_publisher( + self, + message: Annotated[ + "StreamMessage[Any]", + Doc("Message requiring reply"), + ], + ) -> Sequence[FakePublisher]: + """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" + if not message.reply_to or self._producer is None: + return () + + return ( + FakePublisher( + self._producer.publish, + publish_kwargs={ + "subject": message.reply_to, + }, + ), + ) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[MsgType]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + ) + + +class 
_TasksMixin(LogicSubscriber[Any]): + def __init__(self, **kwargs: Any) -> None: + self.tasks: List["asyncio.Task[Any]"] = [] + + super().__init__(**kwargs) + + def add_task(self, coro: Coroutine[Any, Any, Any]) -> None: + self.tasks.append(asyncio.create_task(coro)) + + async def close(self) -> None: + """Clean up handler subscription, cancel consume task in graceful mode.""" + await super().close() + + for task in self.tasks: + if not task.done(): + task.cancel() + + self.tasks = [] + + +class _ConcurrentMixin(_TasksMixin): + send_stream: "MemoryObjectSendStream[Msg]" + receive_stream: "MemoryObjectReceiveStream[Msg]" + + def __init__( + self, + *, + max_workers: int, + **kwargs: Any, + ) -> None: self.max_workers = max_workers self.send_stream, self.receive_stream = anyio.create_memory_object_stream( @@ -309,38 +330,10 @@ def __init__( ) self.limiter = anyio.Semaphore(max_workers) - async def _create_subscription( - self, - *, - connection: Union["Client", "JetStreamContext"], - ) -> None: - """Create NATS subscription and start consume task.""" - cb: Callable[["Msg"], Awaitable[Any]] - if self.max_workers > 1: - self.tasks.append(asyncio.create_task(self._serve_consume_queue())) - cb = self.__put_msg - else: - cb = self.consume - - if self.pull_sub is not None: - connection = cast("JetStreamContext", connection) - - if self.stream is None: - raise SetupError("Pull subscriber can be used only with a stream") - - self.subscription = await connection.pull_subscribe( - subject=self.subject, - **self.extra_options, - ) - self.tasks.append(asyncio.create_task(self._consume_pull(cb=cb))) - - else: - self.subscription = await connection.subscribe( - subject=self.subject, - queue=self.queue, - cb=cb, # type: ignore[arg-type] - **self.extra_options, - ) + super().__init__(**kwargs) + + def start_consume_task(self) -> None: + self.add_task(self._serve_consume_queue()) async def _serve_consume_queue( self, @@ -351,31 +344,9 @@ async def _serve_consume_queue( """ async with 
anyio.create_task_group() as tg: async for msg in self.receive_stream: - tg.start_soon(self.__consume_msg, msg) - - async def _consume_pull( - self, - cb: Callable[["Msg"], Awaitable[SendableMessage]], - ) -> None: - """Endless task consuming messages using NATS Pull subscriber.""" - assert self.pull_sub # nosec B101 - - sub = cast("JetStreamContext.PullSubscription", self.subscription) - - while self.running: # pragma: no branch - messages = [] - with suppress(TimeoutError, ConnectionClosedError): - messages = await sub.fetch( - batch=self.pull_sub.batch_size, - timeout=self.pull_sub.timeout, - ) - - if messages: - async with anyio.create_task_group() as tg: - for msg in messages: - tg.start_soon(cb, msg) + tg.start_soon(self._consume_msg, msg) - async def __consume_msg( + async def _consume_msg( self, msg: "Msg", ) -> None: @@ -383,17 +354,14 @@ async def __consume_msg( async with self.limiter: await self.consume(msg) - async def __put_msg(self, msg: "Msg") -> None: + async def _put_msg(self, msg: "Msg") -> None: """Proxy method to put msg into in-memory queue with semaphore block.""" async with self.limiter: await self.send_stream.send(msg) -class BatchHandler(LogicSubscriber[List["Msg"]]): - """Batch-message consumer class.""" - - pull_sub: "PullSub" - stream: "JStream" +class CoreSubscriber(_DefaultSubscriber["Msg"]): + subscription: Optional["Subscription"] def __init__( self, @@ -401,30 +369,27 @@ def __init__( # default args subject: str, queue: str, - stream: Optional["JStream"], - pull_sub: Optional["PullSub"], extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], - broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], # AsyncAPI args title_: Optional[str], description_: Optional[str], include_in_schema: bool, ) -> None: - parser = BatchParser(pattern=subject) + parser_ = NatsParser(pattern=subject) + + self.queue = 
queue super().__init__( subject=subject, - queue=queue, - stream=stream, - pull_sub=pull_sub, extra_options=extra_options, # subscriber args - default_parser=parser.parse_batch, - default_decoder=parser.decode_batch, + default_parser=parser_.parse_message, + default_decoder=parser_.decode_message, # Propagated args no_ack=no_ack, retry=retry, @@ -440,27 +405,613 @@ def __init__( async def _create_subscription( # type: ignore[override] self, *, - connection: "JetStreamContext", + connection: "Client", ) -> None: """Create NATS subscription and start consume task.""" - self.subscription = await connection.pull_subscribe( - subject=self.subject, + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self.consume, **self.extra_options, ) - self.tasks.append(asyncio.create_task(self._consume_pull())) - async def _consume_pull(self) -> None: - """Endless task consuming messages using NATS Pull subscriber.""" - assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[Msg]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + queue=self.queue, + ) - sub = cast("JetStreamContext.PullSubscription", self.subscription) - while self.running: # pragma: no branch - with suppress(TimeoutError, ConnectionClosedError): - messages = await sub.fetch( - batch=self.pull_sub.batch_size, - timeout=self.pull_sub.timeout, - ) +class ConcurrentCoreSubscriber(_ConcurrentMixin, CoreSubscriber): + def __init__( + self, + *, + max_workers: int, + # default args + subject: str, + queue: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + super().__init__( + max_workers=max_workers, + # basic args + subject=subject, + queue=queue, + extra_options=extra_options, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) - if messages: - await self.consume(messages) + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "Client", + ) -> None: + """Create NATS subscription and start consume task.""" + self.start_consume_task() + + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self._put_msg, + **self.extra_options, + ) + + +class _StreamSubscriber(_DefaultSubscriber["Msg"]): + def __init__( + self, + *, + stream: "JStream", + # default args + subject: str, + 
queue: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser_ = JsParser(pattern=subject) + + self.queue = queue + self.stream = stream + + super().__init__( + subject=subject, + extra_options=extra_options, + # subscriber args + default_parser=parser_.parse_message, + default_decoder=parser_.decode_message, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[Msg]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + queue=self.queue, + stream=self.stream.name, + ) + + +class PushStreamSubscription(_StreamSubscriber): + subscription: Optional["JetStreamContext.PushSubscription"] + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self.consume, + **self.extra_options, + ) + + +class ConcurrentPushStreamSubscriber(_ConcurrentMixin, _StreamSubscriber): + subscription: Optional["JetStreamContext.PushSubscription"] + + def __init__( + self, + *, + max_workers: int, + stream: "JStream", + # default args + subject: str, + queue: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + 
broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + super().__init__( + max_workers=max_workers, + # basic args + stream=stream, + subject=subject, + queue=queue, + extra_options=extra_options, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.start_consume_task() + + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self._put_msg, + **self.extra_options, + ) + + +class PullStreamSubscriber(_TasksMixin, _StreamSubscriber): + subscription: Optional["JetStreamContext.PullSubscription"] + + def __init__( + self, + *, + pull_sub: "PullSub", + stream: "JStream", + # default args + subject: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + self.pull_sub = pull_sub + + super().__init__( + # basic args + stream=stream, + subject=subject, + extra_options=extra_options, + queue="", + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: 
"JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.subscription = await connection.pull_subscribe( + subject=self.clear_subject, + **self.extra_options, + ) + self.add_task(self._consume_pull(cb=self.consume)) + + async def _consume_pull( + self, + cb: Callable[["Msg"], Awaitable[SendableMessage]], + ) -> None: + """Endless task consuming messages using NATS Pull subscriber.""" + assert self.subscription # nosec B101 + + while self.running: # pragma: no branch + messages = [] + with suppress(TimeoutError, ConnectionClosedError): + messages = await self.subscription.fetch( + batch=self.pull_sub.batch_size, + timeout=self.pull_sub.timeout, + ) + + if messages: + async with anyio.create_task_group() as tg: + for msg in messages: + tg.start_soon(cb, msg) + + +class ConcurrentPullStreamSubscriber(_ConcurrentMixin, PullStreamSubscriber): + def __init__( + self, + *, + max_workers: int, + # default args + pull_sub: "PullSub", + stream: "JStream", + subject: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + super().__init__( + max_workers=max_workers, + # basic args + pull_sub=pull_sub, + stream=stream, + subject=subject, + extra_options=extra_options, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.start_consume_task() + + self.subscription = await connection.pull_subscribe( + 
subject=self.clear_subject, + **self.extra_options, + ) + self.add_task(self._consume_pull(cb=self._put_msg)) + + +class BatchPullStreamSubscriber(_TasksMixin, _DefaultSubscriber[List["Msg"]]): + """Batch-message consumer class.""" + + subscription: Optional["JetStreamContext.PullSubscription"] + + def __init__( + self, + *, + # default args + subject: str, + stream: "JStream", + pull_sub: "PullSub", + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser = BatchParser(pattern=subject) + + self.stream = stream + self.pull_sub = pull_sub + + super().__init__( + subject=subject, + extra_options=extra_options, + # subscriber args + default_parser=parser.parse_batch, + default_decoder=parser.decode_batch, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.subscription = await connection.pull_subscribe( + subject=self.clear_subject, + **self.extra_options, + ) + self.add_task(self._consume_pull()) + + async def _consume_pull(self) -> None: + """Endless task consuming messages using NATS Pull subscriber.""" + assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + + while self.running: # pragma: no branch + with suppress(TimeoutError, ConnectionClosedError): + messages = await self.subscription.fetch( + batch=self.pull_sub.batch_size, + timeout=self.pull_sub.timeout, + ) + + if messages: + await self.consume(messages) + + +class KeyValueWatchSubscriber(_TasksMixin, LogicSubscriber[KeyValue.Entry]): + subscription: Optional["UnsubscribeAdapter[KeyValue.KeyWatcher]"] + + def __init__( + self, + *, + subject: str, + kv_watch: "KvWatch", + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[KeyValue.Entry]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser = KvParser(pattern=subject) + self.kv_watch = kv_watch + + super().__init__( + subject=subject, + extra_options=None, + no_ack=True, + retry=False, + default_parser=parser.parse_message, + default_decoder=parser.decode_message, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "KVBucketDeclarer", + ) -> None: + bucket = await connection.create_key_value( + bucket=self.kv_watch.name, + declare=self.kv_watch.declare, + ) + + self.subscription = UnsubscribeAdapter["KeyValue.KeyWatcher"]( + await bucket.watch( + keys=self.clear_subject, + headers_only=self.kv_watch.headers_only, + include_history=self.kv_watch.include_history, + ignore_deletes=self.kv_watch.ignore_deletes, + meta_only=self.kv_watch.meta_only, + # inactive_threshold=self.kv_watch.inactive_threshold + ) + ) + + self.add_task(self._consume_watch()) + + async def _consume_watch(self) -> None: + assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + + key_watcher = self.subscription.obj + + while self.running: + with suppress(ConnectionClosedError, TimeoutError): + message = cast( + Optional["KeyValue.Entry"], + await key_watcher.updates(self.kv_watch.timeout), # type: ignore[no-untyped-call] + ) + + if message: + await self.consume(message) + + def _make_response_publisher( + self, + message: Annotated[ + "StreamMessage[KeyValue.Entry]", + Doc("Message requiring reply"), + ], + ) -> Sequence[FakePublisher]: + """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" + return () + + def __hash__(self) -> int: + return hash(self.kv_watch) + hash(self.subject) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[KeyValue.Entry]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + stream=self.kv_watch.name, + ) + + +OBJECT_STORAGE_CONTEXT_KEY = "__object_storage" + + +class ObjStoreWatchSubscriber(_TasksMixin, LogicSubscriber[ObjectInfo]): + subscription: Optional["UnsubscribeAdapter[ObjectStore.ObjectWatcher]"] + + def __init__( + self, + *, + subject: str, + obj_watch: "ObjWatch", + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser = ObjParser(pattern="") + + self.obj_watch = obj_watch + + super().__init__( + subject=subject, + extra_options=None, + no_ack=True, + retry=False, + default_parser=parser.parse_message, + default_decoder=parser.decode_message, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: 
ignore[override] + self, + *, + connection: "OSBucketDeclarer", + ) -> None: + self.bucket = await connection.create_object_store( + bucket=self.subject, + declare=self.obj_watch.declare, + ) + + self.subscription = UnsubscribeAdapter["ObjectStore.ObjectWatcher"]( + await self.bucket.watch( + ignore_deletes=self.obj_watch.ignore_deletes, + include_history=self.obj_watch.include_history, + meta_only=self.obj_watch.meta_only, + ) + ) + + self.add_task(self._consume_watch()) + + async def _consume_watch(self) -> None: + assert self.subscription, "You should call `create_subscription` at first." # nosec B101 + + obj_watch = self.subscription.obj + + while self.running: + with suppress(TimeoutError): + message = cast( + Optional["ObjectInfo"], + await obj_watch.updates(self.obj_watch.timeout), # type: ignore[no-untyped-call] + ) + + if message: + with context.scope(OBJECT_STORAGE_CONTEXT_KEY, self.bucket): + await self.consume(message) + + def _make_response_publisher( + self, + message: Annotated[ + "StreamMessage[ObjectInfo]", + Doc("Message requiring reply"), + ], + ) -> Sequence[FakePublisher]: + """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" + return () + + def __hash__(self) -> int: + return hash(self.subject) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[ObjectInfo]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + ) diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index 6681ba5b14..5a9190dfd7 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -9,7 +9,7 @@ from faststream.nats.broker import NatsBroker from faststream.nats.publisher.producer import NatsFastProducer from faststream.nats.schemas.js_stream import is_subject_match_wildcard -from 
faststream.nats.subscriber.asyncapi import AsyncAPISubscriber +from faststream.nats.subscriber.usecase import LogicSubscriber from faststream.testing.broker import TestBroker, call_handler if TYPE_CHECKING: @@ -55,7 +55,7 @@ def remove_publisher_fake_subscriber( broker: NatsBroker, publisher: "AsyncAPIPublisher" ) -> None: broker._subscribers.pop( - AsyncAPISubscriber.get_routing_hash(publisher.subject), None + LogicSubscriber.get_routing_hash(publisher.subject), None ) @@ -91,12 +91,15 @@ async def publish( # type: ignore[override] ) for handler in self.broker._subscribers.values(): # pragma: no branch - if stream and getattr(handler.stream, "name", None) != stream: + if stream and ( + not (handler_stream := getattr(handler, "stream", None)) + or stream != handler_stream.name + ): continue - if is_subject_match_wildcard(subject, handler.subject): + if is_subject_match_wildcard(subject, handler.clear_subject): msg: Union[List[PatchedMessage], PatchedMessage] - if getattr(handler.pull_sub, "batch", False): + if (pull := getattr(handler, "pull_sub", None)) and pull.batch: msg = [incoming] else: msg = incoming diff --git a/faststream/opentelemetry/middleware.py b/faststream/opentelemetry/middleware.py index 9a4ad34c10..7bb0519c68 100644 --- a/faststream/opentelemetry/middleware.py +++ b/faststream/opentelemetry/middleware.py @@ -102,7 +102,9 @@ def __init__( self, *, tracer: "Tracer", - settings_provider_factory: Callable[[Any], TelemetrySettingsProvider[Any]], + settings_provider_factory: Callable[ + [Any], Optional[TelemetrySettingsProvider[Any]] + ], metrics_container: _MetricsContainer, msg: Optional[Any] = None, ) -> None: @@ -121,7 +123,8 @@ async def publish_scope( *args: Any, **kwargs: Any, ) -> Any: - provider = self.__settings_provider + if (provider := self.__settings_provider) is None: + return await call_next(msg, *args, **kwargs) headers = kwargs.pop("headers", {}) or {} current_context = context.get_current() @@ -182,7 +185,8 @@ async def consume_scope( 
call_next: "AsyncFuncAny", msg: "StreamMessage[Any]", ) -> Any: - provider = self.__settings_provider + if (provider := self.__settings_provider) is None: + return await call_next(msg) current_context = propagate.extract(msg.headers) destination_name = provider.get_consume_destination_name(msg) @@ -258,7 +262,9 @@ class TelemetryMiddleware: def __init__( self, *, - settings_provider_factory: Callable[[Any], TelemetrySettingsProvider[Any]], + settings_provider_factory: Callable[ + [Any], Optional[TelemetrySettingsProvider[Any]] + ], tracer_provider: Optional["TracerProvider"] = None, meter_provider: Optional["MeterProvider"] = None, meter: Optional["Meter"] = None, diff --git a/faststream/rabbit/broker/registrator.py b/faststream/rabbit/broker/registrator.py index e55d06a30a..e13b7b5261 100644 --- a/faststream/rabbit/broker/registrator.py +++ b/faststream/rabbit/broker/registrator.py @@ -11,6 +11,7 @@ RabbitQueue, ) from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber +from faststream.rabbit.subscriber.factory import create_subscriber if TYPE_CHECKING: from aio_pika import IncomingMessage # noqa: F401 @@ -117,7 +118,7 @@ def subscriber( # type: ignore[override] subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( queue=RabbitQueue.validate(queue), exchange=RabbitExchange.validate(exchange), consume_args=consume_args, diff --git a/faststream/rabbit/subscriber/asyncapi.py b/faststream/rabbit/subscriber/asyncapi.py index 8e8ddcb5db..158d343dd1 100644 --- a/faststream/rabbit/subscriber/asyncapi.py +++ b/faststream/rabbit/subscriber/asyncapi.py @@ -1,6 +1,4 @@ -from typing import TYPE_CHECKING, Dict, Iterable, Optional, Union - -from typing_extensions import override +from typing import Dict from faststream.asyncapi.schema import ( Channel, @@ -12,17 +10,9 @@ ) from faststream.asyncapi.schema.bindings import amqp from faststream.asyncapi.utils import resolve_payloads -from faststream.rabbit.schemas 
import RabbitExchange, RabbitQueue, ReplyConfig from faststream.rabbit.subscriber.usecase import LogicSubscriber from faststream.rabbit.utils import is_routing_exchange -if TYPE_CHECKING: - from aio_pika import IncomingMessage - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.types import AnyDict - class AsyncAPISubscriber(LogicSubscriber): """AsyncAPI-compatible Rabbit Subscriber class.""" @@ -83,36 +73,3 @@ def get_schema(self) -> Dict[str, Channel]: ), ) } - - @override - @classmethod - def create( # type: ignore[override] - cls, - *, - queue: RabbitQueue, - exchange: Optional["RabbitExchange"], - consume_args: Optional["AnyDict"], - reply_config: Optional["ReplyConfig"], - # Subscriber args - no_ack: bool, - retry: Union[bool, int], - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPISubscriber": - return cls( - queue=queue, - exchange=exchange, - consume_args=consume_args, - reply_config=reply_config, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) diff --git a/faststream/rabbit/subscriber/factory.py b/faststream/rabbit/subscriber/factory.py new file mode 100644 index 0000000000..f0ee6b752a --- /dev/null +++ b/faststream/rabbit/subscriber/factory.py @@ -0,0 +1,42 @@ +from typing import TYPE_CHECKING, Iterable, Optional, Union + +from faststream.rabbit.schemas import RabbitExchange, RabbitQueue, ReplyConfig +from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber + +if TYPE_CHECKING: + from aio_pika import IncomingMessage + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.types 
import AnyDict + + +def create_subscriber( + *, + queue: RabbitQueue, + exchange: Optional["RabbitExchange"], + consume_args: Optional["AnyDict"], + reply_config: Optional["ReplyConfig"], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> AsyncAPISubscriber: + return AsyncAPISubscriber( + queue=queue, + exchange=exchange, + consume_args=consume_args, + reply_config=reply_config, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/redis/broker/registrator.py b/faststream/redis/broker/registrator.py index 4e06b3872b..7a643d189a 100644 --- a/faststream/redis/broker/registrator.py +++ b/faststream/redis/broker/registrator.py @@ -7,6 +7,7 @@ from faststream.redis.message import UnifyRedisDict from faststream.redis.publisher.asyncapi import AsyncAPIPublisher from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber +from faststream.redis.subscriber.factory import SubsciberType, create_subscriber if TYPE_CHECKING: from fast_depends.dependencies import Depends @@ -20,7 +21,6 @@ from faststream.redis.message import UnifyRedisMessage from faststream.redis.publisher.asyncapi import PublisherType from faststream.redis.schemas import ListSub, PubSub, StreamSub - from faststream.redis.subscriber.asyncapi import SubsciberType from faststream.types import AnyDict @@ -104,7 +104,7 @@ def subscriber( # type: ignore[override] subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( channel=channel, list=list, stream=stream, diff --git a/faststream/redis/schemas/proto.py b/faststream/redis/schemas/proto.py index 
28e50cf9ba..2521a1a0a3 100644 --- a/faststream/redis/schemas/proto.py +++ b/faststream/redis/schemas/proto.py @@ -17,10 +17,6 @@ def channel_binding(self) -> "redis.ChannelBinding": ... @abstractmethod def get_payloads(self) -> Any: ... - @staticmethod - @abstractmethod - def create() -> Any: ... - def validate_options( *, diff --git a/faststream/redis/subscriber/asyncapi.py b/faststream/redis/subscriber/asyncapi.py index dd2fbe7e56..36171b247b 100644 --- a/faststream/redis/subscriber/asyncapi.py +++ b/faststream/redis/subscriber/asyncapi.py @@ -1,6 +1,4 @@ -from typing import TYPE_CHECKING, Dict, Iterable, Optional, Union - -from typing_extensions import TypeAlias, override +from typing import Dict from faststream.asyncapi.schema import ( Channel, @@ -11,9 +9,8 @@ ) from faststream.asyncapi.schema.bindings import redis from faststream.asyncapi.utils import resolve_payloads -from faststream.exceptions import SetupError -from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub -from faststream.redis.schemas.proto import RedisAsyncAPIProtocol, validate_options +from faststream.redis.schemas import ListSub, StreamSub +from faststream.redis.schemas.proto import RedisAsyncAPIProtocol from faststream.redis.subscriber.usecase import ( BatchListSubscriber, BatchStreamSubscriber, @@ -23,20 +20,6 @@ StreamSubscriber, ) -if TYPE_CHECKING: - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.redis.message import UnifyRedisDict - -SubsciberType: TypeAlias = Union[ - "AsyncAPIChannelSubscriber", - "AsyncAPIStreamBatchSubscriber", - "AsyncAPIStreamSubscriber", - "AsyncAPIListBatchSubscriber", - "AsyncAPIListSubscriber", -] - class AsyncAPISubscriber(LogicSubscriber, RedisAsyncAPIProtocol): """A class to represent a Redis handler.""" @@ -62,98 +45,6 @@ def get_schema(self) -> Dict[str, Channel]: ) } - @override - @staticmethod - def create( # type: ignore[override] - *, - channel: 
Union["PubSub", str, None], - list: Union["ListSub", str, None], - stream: Union["StreamSub", str, None], - # Subscriber args - no_ack: bool = False, - retry: bool = False, - broker_dependencies: Iterable["Depends"] = (), - broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"] = (), - # AsyncAPI args - title_: Optional[str] = None, - description_: Optional[str] = None, - include_in_schema: bool = True, - ) -> SubsciberType: - validate_options(channel=channel, list=list, stream=stream) - - if (channel_sub := PubSub.validate(channel)) is not None: - return AsyncAPIChannelSubscriber( - channel=channel_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - - elif (stream_sub := StreamSub.validate(stream)) is not None: - if stream_sub.batch: - return AsyncAPIStreamBatchSubscriber( - stream=stream_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIStreamSubscriber( - stream=stream_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - - elif (list_sub := ListSub.validate(list)) is not None: - if list_sub.batch: - return AsyncAPIListBatchSubscriber( - list=list_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIListSubscriber( - list=list_sub, - # basic args - 
no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - - else: - raise SetupError(INCORRECT_SETUP_MSG) - class AsyncAPIChannelSubscriber(ChannelSubscriber, AsyncAPISubscriber): def get_name(self) -> str: diff --git a/faststream/redis/subscriber/factory.py b/faststream/redis/subscriber/factory.py new file mode 100644 index 0000000000..da5fe02898 --- /dev/null +++ b/faststream/redis/subscriber/factory.py @@ -0,0 +1,119 @@ +from typing import TYPE_CHECKING, Iterable, Optional, Union + +from typing_extensions import TypeAlias + +from faststream.exceptions import SetupError +from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub +from faststream.redis.schemas.proto import validate_options +from faststream.redis.subscriber.asyncapi import ( + AsyncAPIChannelSubscriber, + AsyncAPIListBatchSubscriber, + AsyncAPIListSubscriber, + AsyncAPIStreamBatchSubscriber, + AsyncAPIStreamSubscriber, +) + +if TYPE_CHECKING: + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.redis.message import UnifyRedisDict + +SubsciberType: TypeAlias = Union[ + "AsyncAPIChannelSubscriber", + "AsyncAPIStreamBatchSubscriber", + "AsyncAPIStreamSubscriber", + "AsyncAPIListBatchSubscriber", + "AsyncAPIListSubscriber", +] + + +def create_subscriber( + *, + channel: Union["PubSub", str, None], + list: Union["ListSub", str, None], + stream: Union["StreamSub", str, None], + # Subscriber args + no_ack: bool = False, + retry: bool = False, + broker_dependencies: Iterable["Depends"] = (), + broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"] = (), + # AsyncAPI args + title_: Optional[str] = None, + description_: Optional[str] = None, + include_in_schema: bool = True, +) -> SubsciberType: + validate_options(channel=channel, list=list, 
stream=stream) + + if (channel_sub := PubSub.validate(channel)) is not None: + return AsyncAPIChannelSubscriber( + channel=channel_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + elif (stream_sub := StreamSub.validate(stream)) is not None: + if stream_sub.batch: + return AsyncAPIStreamBatchSubscriber( + stream=stream_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + else: + return AsyncAPIStreamSubscriber( + stream=stream_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + elif (list_sub := ListSub.validate(list)) is not None: + if list_sub.batch: + return AsyncAPIListBatchSubscriber( + list=list_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + else: + return AsyncAPIListSubscriber( + list=list_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + raise SetupError(INCORRECT_SETUP_MSG) diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py index 58b2745b14..7919f384f7 100644 --- a/faststream/redis/subscriber/usecase.py +++ 
b/faststream/redis/subscriber/usecase.py @@ -1,5 +1,5 @@ import asyncio -from abc import ABC, abstractmethod +from abc import abstractmethod from contextlib import suppress from copy import deepcopy from typing import ( @@ -57,7 +57,7 @@ Offset: TypeAlias = bytes -class LogicSubscriber(ABC, SubscriberUsecase[UnifyRedisDict]): +class LogicSubscriber(SubscriberUsecase[UnifyRedisDict]): """A class to represent a Redis handler.""" _client: Optional["Redis[bytes]"] diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py index 1beedc4dc0..2931bf76e2 100644 --- a/faststream/redis/testing.py +++ b/faststream/redis/testing.py @@ -18,7 +18,7 @@ from faststream.redis.parser import RawMessage from faststream.redis.publisher.producer import RedisFastProducer from faststream.redis.schemas import INCORRECT_SETUP_MSG -from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber +from faststream.redis.subscriber.factory import create_subscriber from faststream.testing.broker import TestBroker, call_handler if TYPE_CHECKING: @@ -66,7 +66,7 @@ def remove_publisher_fake_subscriber( publisher: "AsyncAPIPublisher", ) -> None: broker._subscribers.pop( - hash(AsyncAPISubscriber.create(**publisher.subscriber_property)), + hash(create_subscriber(**publisher.subscriber_property)), None, ) diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py index f8925210a4..249e5c6846 100644 --- a/faststream/testing/broker.py +++ b/faststream/testing/broker.py @@ -69,6 +69,8 @@ def __init__( self.connect_only = connect_only async def __aenter__(self) -> Broker: + # TODO: remove useless middlewares filter + middlewares = tuple( filter( lambda x: not isinstance(x, CriticalLogMiddleware), @@ -90,6 +92,7 @@ async def __aenter__(self) -> Broker: async def __aexit__(self, *args: Any) -> None: await self._ctx.__aexit__(*args) + # TODO: remove useless middlewares filter middlewares: Tuple["BrokerMiddleware[Any]", ...] 
= ( CriticalLogMiddleware( # type: ignore[arg-type] logger=self.broker.logger, diff --git a/pyproject.toml b/pyproject.toml index 0ef08d8502..17a90227f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ otel = ["opentelemetry-sdk>=1.24.0,<2.0.0"] optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel]"] devdocs = [ - "mkdocs-material==9.5.21", + "mkdocs-material==9.5.24", "mkdocs-static-i18n==1.2.3", "mdx-include==1.4.2", "mkdocstrings[python]==0.25.1", @@ -88,7 +88,7 @@ devdocs = [ "mike==2.1.1", # versioning "mkdocs-minify-plugin==0.8.0", "mkdocs-macros-plugin==1.0.5", # includes with variables - "mkdocs-glightbox==0.3.7", # img zoom + "mkdocs-glightbox==0.4.0", # img zoom "pillow", # required for mkdocs-glightbo "cairosvg", # required for mkdocs-glightbo "requests", # using in CI, do not pin it @@ -99,6 +99,7 @@ types = [ "faststream[optionals]", "mypy==1.10.0", # mypy extensions + "types-Deprecated", "types-PyYAML", "types-setuptools", "types-ujson", @@ -112,14 +113,14 @@ lint = [ "faststream[types]", "ruff==0.4.4", "bandit==1.7.8", - "semgrep==1.70.0", + "semgrep==1.73.0", "codespell==2.2.6", ] test-core = [ "coverage[toml]==7.5.1", - "pytest==8.2.0", - "pytest-asyncio==0.23.6", + "pytest==8.2.1", + "pytest-asyncio==0.23.7", "dirty-equals==0.7.1.post0", ] diff --git a/tests/asyncapi/test_app.py b/tests/asyncapi/kafka/test_app.py similarity index 100% rename from tests/asyncapi/test_app.py rename to tests/asyncapi/kafka/test_app.py diff --git a/tests/asyncapi/nats/test_kv_schema.py b/tests/asyncapi/nats/test_kv_schema.py new file mode 100644 index 0000000000..4b0edc1847 --- /dev/null +++ b/tests/asyncapi/nats/test_kv_schema.py @@ -0,0 +1,14 @@ +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.nats import NatsBroker + + +def test_kv_schema(): + broker = NatsBroker() + + @broker.subscriber("test", kv_watch="test") + async def handle(): ... 
+ + schema = get_app_schema(FastStream(broker)).to_jsonable() + + assert schema["channels"] == {} diff --git a/tests/asyncapi/nats/test_obj_schema.py b/tests/asyncapi/nats/test_obj_schema.py new file mode 100644 index 0000000000..f7546cbc22 --- /dev/null +++ b/tests/asyncapi/nats/test_obj_schema.py @@ -0,0 +1,14 @@ +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.nats import NatsBroker + + +def test_obj_schema(): + broker = NatsBroker() + + @broker.subscriber("test", obj_watch=True) + async def handle(): ... + + schema = get_app_schema(FastStream(broker)).to_jsonable() + + assert schema["channels"] == {} diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py index 5318fb2a69..60ac90a7f3 100644 --- a/tests/brokers/nats/test_consume.py +++ b/tests/brokers/nats/test_consume.py @@ -276,5 +276,73 @@ def subscriber(m, msg: NatsMessage): timeout=3, ) - assert event.is_set() - mock.assert_called_once_with(True) + assert event.is_set() + mock.assert_called_once_with(True) + + @pytest.mark.asyncio() + async def test_consume_kv( + self, + queue: str, + event: asyncio.Event, + mock, + ): + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, kv_watch=queue + "1") + async def handler(m): + mock(m) + event.set() + + async with self.patch_broker(consume_broker) as br: + await br.start() + bucket = await br.key_value(queue + "1") + + await asyncio.wait( + ( + asyncio.create_task( + bucket.put( + queue, + b"world", + ) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_with(b"world") + + @pytest.mark.asyncio() + async def test_consume_os( + self, + queue: str, + event: asyncio.Event, + mock, + ): + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, obj_watch=True) + async def handler(filename: str): + event.set() + mock(filename) + + async with 
self.patch_broker(consume_broker) as br: + await br.start() + bucket = await br.object_storage(queue) + + await asyncio.wait( + ( + asyncio.create_task( + bucket.put( + "hello", + b"world", + ) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with("hello") diff --git a/tests/brokers/nats/test_kv_declarer_cache.py b/tests/brokers/nats/test_kv_declarer_cache.py new file mode 100644 index 0000000000..1c85be3a5a --- /dev/null +++ b/tests/brokers/nats/test_kv_declarer_cache.py @@ -0,0 +1,23 @@ +from unittest.mock import patch + +import pytest +from nats.js import JetStreamContext + +from faststream.nats import NatsBroker +from tests.tools import spy_decorator + + +@pytest.mark.asyncio() +@pytest.mark.nats() +async def test_kv_storage_cache(): + broker = NatsBroker() + await broker.connect() + with patch.object( + JetStreamContext, + "create_key_value", + spy_decorator(JetStreamContext.create_key_value), + ) as m: + await broker.key_value("test") + await broker.key_value("test") + assert broker._kv_declarer.buckets["test"] + m.mock.assert_called_once() diff --git a/tests/brokers/nats/test_os_declarer_cache.py b/tests/brokers/nats/test_os_declarer_cache.py new file mode 100644 index 0000000000..d7107e27e0 --- /dev/null +++ b/tests/brokers/nats/test_os_declarer_cache.py @@ -0,0 +1,24 @@ +from unittest.mock import patch + +import pytest +from nats.js import JetStreamContext + +from faststream.nats import NatsBroker +from tests.tools import spy_decorator + + +@pytest.mark.asyncio() +@pytest.mark.nats() +async def test_object_storage_cache(): + broker = NatsBroker() + await broker.connect() + + with patch.object( + JetStreamContext, + "create_object_store", + spy_decorator(JetStreamContext.create_object_store), + ) as m: + await broker.object_storage("test") + await broker.object_storage("test") + assert broker._os_declarer.buckets["test"] + m.mock.assert_called_once() diff --git a/tests/cli/rabbit/__init__.py 
b/tests/cli/rabbit/__init__.py new file mode 100644 index 0000000000..ebec43fcd5 --- /dev/null +++ b/tests/cli/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/cli/test_app.py b/tests/cli/rabbit/test_app.py similarity index 97% rename from tests/cli/test_app.py rename to tests/cli/rabbit/test_app.py index 6795d8fd94..00944b7bb5 100644 --- a/tests/cli/test_app.py +++ b/tests/cli/rabbit/test_app.py @@ -10,10 +10,9 @@ from faststream import FastStream, TestApp from faststream._compat import IS_WINDOWS from faststream.log import logger -from faststream.rabbit import RabbitBroker -def test_init(app: FastStream, broker: RabbitBroker): +def test_init(app: FastStream, broker): assert app.broker is broker assert app.logger is logger @@ -26,7 +25,7 @@ def test_init_without_logger(app_without_logger: FastStream): assert app_without_logger.logger is None -def test_set_broker(broker: RabbitBroker, app_without_broker: FastStream): +def test_set_broker(broker, app_without_broker: FastStream): assert app_without_broker.broker is None app_without_broker.set_broker(broker) assert app_without_broker.broker is broker diff --git a/tests/cli/utils/test_logs.py b/tests/cli/rabbit/test_logs.py similarity index 83% rename from tests/cli/utils/test_logs.py rename to tests/cli/rabbit/test_logs.py index 384e71954d..79e140da99 100644 --- a/tests/cli/utils/test_logs.py +++ b/tests/cli/rabbit/test_logs.py @@ -8,15 +8,12 @@ @pytest.mark.parametrize( - ("level", "broker"), + "level", ( # noqa: PT007 - pytest.param(logging.ERROR, RabbitBroker(), id=str(logging.ERROR)), - *( - pytest.param(level, RabbitBroker(), id=level) - for level in LogLevels.__members__ - ), + pytest.param(logging.ERROR, id=str(logging.ERROR)), + *(pytest.param(level, id=level) for level in LogLevels.__members__), *( - pytest.param(level, RabbitBroker(), id=str(level)) + pytest.param(level, id=str(level)) for level in LogLevels.__members__.values() ), ), diff --git 
a/tests/cli/test_asyncapi_docs.py b/tests/cli/test_asyncapi_docs.py index 7ae5eb625d..816710c9ad 100644 --- a/tests/cli/test_asyncapi_docs.py +++ b/tests/cli/test_asyncapi_docs.py @@ -14,12 +14,14 @@ serve_cmd, ) from faststream.cli.main import cli +from tests.marks import require_aiokafka GEN_JSON_CMD = gen_json_cmd.split(" ")[1:-1] GEN_YAML_CMD = gen_yaml_cmd.split(" ")[1:-1] SERVE_CMD = serve_cmd.split(" ")[1:-1] +@require_aiokafka def test_gen_asyncapi_json_for_kafka_app(runner: CliRunner, kafka_basic_project: Path): r = runner.invoke( cli, [*GEN_JSON_CMD, "--out", "schema.json", str(kafka_basic_project)] @@ -36,6 +38,7 @@ def test_gen_asyncapi_json_for_kafka_app(runner: CliRunner, kafka_basic_project: schema_path.unlink() +@require_aiokafka def test_gen_asyncapi_yaml_for_kafka_app(runner: CliRunner, kafka_basic_project: Path): r = runner.invoke(cli, GEN_YAML_CMD + [str(kafka_basic_project)]) # noqa: RUF005 assert r.exit_code == 0 @@ -56,6 +59,7 @@ def test_gen_wrong_path(runner: CliRunner): assert "No such file or directory" in r.stdout +@require_aiokafka def test_serve_asyncapi_docs( runner: CliRunner, kafka_basic_project: Path, @@ -70,6 +74,7 @@ def test_serve_asyncapi_docs( mock.assert_called_once() +@require_aiokafka @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_serve_asyncapi_json_schema( runner: CliRunner, @@ -90,6 +95,7 @@ def test_serve_asyncapi_json_schema( schema_path.unlink() +@require_aiokafka @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_serve_asyncapi_yaml_schema( runner: CliRunner, diff --git a/tests/cli/test_publish.py b/tests/cli/test_publish.py index 980dfb4a8a..7e2aa367ea 100644 --- a/tests/cli/test_publish.py +++ b/tests/cli/test_publish.py @@ -1,55 +1,38 @@ from unittest.mock import AsyncMock, patch -import pytest from dirty_equals import IsPartialDict -from typer.testing import CliRunner from faststream import FastStream from faststream.cli.main import cli 
as faststream_app -from faststream.confluent import KafkaBroker as ConfluentBroker -from faststream.confluent.publisher.producer import AsyncConfluentFastProducer -from faststream.kafka import KafkaBroker -from faststream.kafka.publisher.producer import AioKafkaFastProducer -from faststream.nats import NatsBroker -from faststream.nats.publisher.producer import NatsFastProducer -from faststream.rabbit import RabbitBroker -from faststream.rabbit.publisher.producer import AioPikaFastProducer -from faststream.redis import RedisBroker -from faststream.redis.publisher.producer import RedisFastProducer - -# Initialize the CLI runner -runner = CliRunner() - - -@pytest.fixture() -def mock_app(request): - app = FastStream() - broker_type = request.param["broker_type"] - producer_type = request.param["producer_type"] +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) + +def get_mock_app(broker_type, producer_type) -> FastStream: broker = broker_type() broker.connect = AsyncMock() - mock_producer = AsyncMock(spec=producer_type) mock_producer.publish = AsyncMock() broker._producer = mock_producer + return FastStream(broker) - app.broker = broker - return app +@require_redis +def test_publish_command_with_redis_options(runner): + from faststream.redis import RedisBroker + from faststream.redis.publisher.producer import RedisFastProducer -@pytest.mark.parametrize( - "mock_app", - [ - { - "broker_type": RedisBroker, - "producer_type": RedisFastProducer, - } - ], - indirect=True, -) -def test_publish_command_with_redis_options(mock_app): - with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): + mock_app = get_mock_app(RedisBroker, RedisFastProducer) + + with patch( + "faststream.cli.main.import_from_string", + return_value=(None, mock_app), + ): result = runner.invoke( faststream_app, [ @@ -82,17 +65,13 @@ def test_publish_command_with_redis_options(mock_app): ) 
-@pytest.mark.parametrize( - "mock_app", - [ - { - "broker_type": ConfluentBroker, - "producer_type": AsyncConfluentFastProducer, - } - ], - indirect=True, -) -def test_publish_command_with_confluent_options(mock_app): +@require_confluent +def test_publish_command_with_confluent_options(runner): + from faststream.confluent import KafkaBroker as ConfluentBroker + from faststream.confluent.publisher.producer import AsyncConfluentFastProducer + + mock_app = get_mock_app(ConfluentBroker, AsyncConfluentFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, @@ -116,12 +95,13 @@ def test_publish_command_with_confluent_options(mock_app): ) -@pytest.mark.parametrize( - "mock_app", - [{"broker_type": KafkaBroker, "producer_type": AioKafkaFastProducer}], - indirect=True, -) -def test_publish_command_with_kafka_options(mock_app): +@require_aiokafka +def test_publish_command_with_kafka_options(runner): + from faststream.kafka import KafkaBroker + from faststream.kafka.publisher.producer import AioKafkaFastProducer + + mock_app = get_mock_app(KafkaBroker, AioKafkaFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, @@ -145,12 +125,13 @@ def test_publish_command_with_kafka_options(mock_app): ) -@pytest.mark.parametrize( - "mock_app", - [{"broker_type": NatsBroker, "producer_type": NatsFastProducer}], - indirect=True, -) -def test_publish_command_with_nats_options(mock_app): +@require_nats +def test_publish_command_with_nats_options(runner): + from faststream.nats import NatsBroker + from faststream.nats.publisher.producer import NatsFastProducer + + mock_app = get_mock_app(NatsBroker, NatsFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, @@ -178,12 +159,13 @@ def test_publish_command_with_nats_options(mock_app): ) 
-@pytest.mark.parametrize( - "mock_app", - [{"broker_type": RabbitBroker, "producer_type": AioPikaFastProducer}], - indirect=True, -) -def test_publish_command_with_rabbit_options(mock_app: AsyncMock): +@require_aiopika +def test_publish_command_with_rabbit_options(runner): + from faststream.rabbit import RabbitBroker + from faststream.rabbit.publisher.producer import AioPikaFastProducer + + mock_app = get_mock_app(RabbitBroker, AioPikaFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, diff --git a/tests/cli/utils/test_imports.py b/tests/cli/utils/test_imports.py index 3c7597c951..f97e26c0ff 100644 --- a/tests/cli/utils/test_imports.py +++ b/tests/cli/utils/test_imports.py @@ -5,6 +5,7 @@ from faststream.app import FastStream from faststream.cli.utils.imports import get_app_path, import_from_string, import_object +from tests.marks import require_aiokafka, require_aiopika, require_nats def test_import_wrong(): @@ -54,6 +55,9 @@ def test_import_from_string_import_wrong(): pytest.param("examples.rabbit.topic:app", "examples/rabbit/topic.py"), ), ) +@require_nats +@require_aiopika +@require_aiokafka def test_import_from_string(test_input, exp_module): module, app = import_from_string(test_input) assert isinstance(app, FastStream) @@ -80,6 +84,9 @@ def test_import_from_string(test_input, exp_module): ), ), ) +@require_nats +@require_aiopika +@require_aiokafka def test_import_module(test_input, exp_module): module, app = import_from_string(test_input) assert isinstance(app, FastStream) diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py +++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py @@ -0,0 +1,3 @@ +import pytest + 
+pytest.importorskip("aiokafka") diff --git a/tests/docs/getting_started/cli/confluent/__init__.py b/tests/docs/getting_started/cli/confluent/__init__.py new file mode 100644 index 0000000000..c4a1803708 --- /dev/null +++ b/tests/docs/getting_started/cli/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/docs/getting_started/cli/test_confluent_context.py b/tests/docs/getting_started/cli/confluent/test_confluent_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_confluent_context.py rename to tests/docs/getting_started/cli/confluent/test_confluent_context.py diff --git a/tests/docs/getting_started/cli/kafka/__init__.py b/tests/docs/getting_started/cli/kafka/__init__.py new file mode 100644 index 0000000000..bd6bc708fc --- /dev/null +++ b/tests/docs/getting_started/cli/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/docs/getting_started/cli/test_kafka_context.py b/tests/docs/getting_started/cli/kafka/test_kafka_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_kafka_context.py rename to tests/docs/getting_started/cli/kafka/test_kafka_context.py diff --git a/tests/docs/getting_started/cli/nats/__init__.py b/tests/docs/getting_started/cli/nats/__init__.py new file mode 100644 index 0000000000..87ead90ee6 --- /dev/null +++ b/tests/docs/getting_started/cli/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/docs/getting_started/cli/test_nats_context.py b/tests/docs/getting_started/cli/nats/test_nats_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_nats_context.py rename to tests/docs/getting_started/cli/nats/test_nats_context.py diff --git a/tests/docs/getting_started/cli/rabbit/__init__.py b/tests/docs/getting_started/cli/rabbit/__init__.py new file mode 100644 index 0000000000..ebec43fcd5 --- /dev/null +++ 
b/tests/docs/getting_started/cli/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/docs/getting_started/cli/test_rabbit_context.py b/tests/docs/getting_started/cli/rabbit/test_rabbit_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_rabbit_context.py rename to tests/docs/getting_started/cli/rabbit/test_rabbit_context.py diff --git a/tests/docs/getting_started/cli/redis/__init__.py b/tests/docs/getting_started/cli/redis/__init__.py new file mode 100644 index 0000000000..4752ef19b1 --- /dev/null +++ b/tests/docs/getting_started/cli/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/docs/getting_started/cli/test_redis_context.py b/tests/docs/getting_started/cli/redis/test_redis_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_redis_context.py rename to tests/docs/getting_started/cli/redis/test_redis_context.py diff --git a/tests/docs/getting_started/config/test_usage.py b/tests/docs/getting_started/config/test_usage.py index cdceaf9e8d..2ae34dda33 100644 --- a/tests/docs/getting_started/config/test_usage.py +++ b/tests/docs/getting_started/config/test_usage.py @@ -1,8 +1,9 @@ -from tests.marks import pydantic_v2 +from tests.marks import pydantic_v2, require_aiopika from tests.mocks import mock_pydantic_settings_env @pydantic_v2 +@require_aiopika def test_exists_and_valid(): with mock_pydantic_settings_env({"url": "localhost:9092"}): from docs.docs_src.getting_started.config.usage import settings diff --git a/tests/docs/getting_started/context/test_annotated.py b/tests/docs/getting_started/context/test_annotated.py index b800ad1956..b85bc816bc 100644 --- a/tests/docs/getting_started/context/test_annotated.py +++ b/tests/docs/getting_started/context/test_annotated.py @@ -1,20 +1,24 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import 
TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker -from tests.marks import python39 +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @python39 @pytest.mark.asyncio() +@require_aiokafka async def test_annotated_kafka(): from docs.docs_src.getting_started.context.kafka.annotated import ( base_handler, broker, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -24,11 +28,13 @@ async def test_annotated_kafka(): @python39 @pytest.mark.asyncio() +@require_confluent async def test_annotated_confluent(): from docs.docs_src.getting_started.context.confluent.annotated import ( base_handler, broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -38,11 +44,13 @@ async def test_annotated_confluent(): @python39 @pytest.mark.asyncio() +@require_aiopika async def test_annotated_rabbit(): from docs.docs_src.getting_started.context.rabbit.annotated import ( base_handler, broker, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") @@ -52,11 +60,13 @@ async def test_annotated_rabbit(): @python39 @pytest.mark.asyncio() +@require_nats async def test_annotated_nats(): from docs.docs_src.getting_started.context.nats.annotated import ( base_handler, broker, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test") @@ -66,11 +76,13 @@ async def test_annotated_nats(): @python39 @pytest.mark.asyncio() +@require_redis async def test_annotated_redis(): from docs.docs_src.getting_started.context.redis.annotated import ( base_handler, broker, ) + from faststream.redis import 
TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test") diff --git a/tests/docs/getting_started/context/test_base.py b/tests/docs/getting_started/context/test_base.py index 03e9984f24..5b24fdd827 100644 --- a/tests/docs/getting_started/context/test_base.py +++ b/tests/docs/getting_started/context/test_base.py @@ -1,15 +1,19 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_base_kafka(): from docs.docs_src.getting_started.context.kafka.base import base_handler, broker + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -18,11 +22,13 @@ async def test_base_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_base_confluent(): from docs.docs_src.getting_started.context.confluent.base import ( base_handler, broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -31,8 +37,10 @@ async def test_base_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_base_rabbit(): from docs.docs_src.getting_started.context.rabbit.base import base_handler, broker + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") @@ -41,8 +49,10 @@ async def test_base_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_base_nats(): from docs.docs_src.getting_started.context.nats.base import base_handler, broker + from faststream.nats import TestNatsBroker async with 
TestNatsBroker(broker) as br: await br.publish("Hi!", "test") @@ -51,8 +61,10 @@ async def test_base_nats(): @pytest.mark.asyncio() +@require_redis async def test_base_redis(): from docs.docs_src.getting_started.context.redis.base import base_handler, broker + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test") diff --git a/tests/docs/getting_started/context/test_cast.py b/tests/docs/getting_started/context/test_cast.py index 6ade96b33e..f99769603e 100644 --- a/tests/docs/getting_started/context/test_cast.py +++ b/tests/docs/getting_started/context/test_cast.py @@ -1,19 +1,23 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_cast_kafka(): from docs.docs_src.getting_started.context.kafka.cast import ( broker, handle, handle_int, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -26,12 +30,14 @@ async def test_cast_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_cast_confluent(): from docs.docs_src.getting_started.context.confluent.cast import ( broker, handle, handle_int, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -44,12 +50,14 @@ async def test_cast_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_cast_rabbit(): from docs.docs_src.getting_started.context.rabbit.cast import ( broker, handle, handle_int, ) + from faststream.rabbit import TestRabbitBroker 
async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -62,12 +70,14 @@ async def test_cast_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_cast_nats(): from docs.docs_src.getting_started.context.nats.cast import ( broker, handle, handle_int, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -80,12 +90,14 @@ async def test_cast_nats(): @pytest.mark.asyncio() +@require_redis async def test_cast_redis(): from docs.docs_src.getting_started.context.redis.cast import ( broker, handle, handle_int, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_custom_global.py b/tests/docs/getting_started/context/test_custom_global.py index b9ec555cfe..0541980245 100644 --- a/tests/docs/getting_started/context/test_custom_global.py +++ b/tests/docs/getting_started/context/test_custom_global.py @@ -1,20 +1,24 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_custom_global_context_kafka(): from docs.docs_src.getting_started.context.kafka.custom_global_context import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-topic") @@ -23,12 +27,14 @@ async def test_custom_global_context_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_custom_global_context_confluent(): from 
docs.docs_src.getting_started.context.confluent.custom_global_context import ( app, broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-topic") @@ -37,12 +43,14 @@ async def test_custom_global_context_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_custom_global_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.custom_global_context import ( app, broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-queue") @@ -51,12 +59,14 @@ async def test_custom_global_context_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_custom_global_context_nats(): from docs.docs_src.getting_started.context.nats.custom_global_context import ( app, broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-subject") @@ -65,12 +75,14 @@ async def test_custom_global_context_nats(): @pytest.mark.asyncio() +@require_redis async def test_custom_global_context_redis(): from docs.docs_src.getting_started.context.redis.custom_global_context import ( app, broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_custom_local.py b/tests/docs/getting_started/context/test_custom_local.py index 8672c6dd65..e2cf2421c0 100644 --- a/tests/docs/getting_started/context/test_custom_local.py +++ b/tests/docs/getting_started/context/test_custom_local.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import 
TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_custom_local_context_kafka(): from docs.docs_src.getting_started.context.kafka.custom_local_context import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -21,11 +25,13 @@ async def test_custom_local_context_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_custom_local_context_confluent(): from docs.docs_src.getting_started.context.confluent.custom_local_context import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -34,11 +40,13 @@ async def test_custom_local_context_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_custom_local_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.custom_local_context import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -47,11 +55,13 @@ async def test_custom_local_context_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_custom_local_context_nats(): from docs.docs_src.getting_started.context.nats.custom_local_context import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -60,11 +70,13 @@ async def test_custom_local_context_nats(): @pytest.mark.asyncio() +@require_redis async def test_custom_local_context_redis(): from docs.docs_src.getting_started.context.redis.custom_local_context import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with 
TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_default_arguments.py b/tests/docs/getting_started/context/test_default_arguments.py index f583f09f9b..969baf881f 100644 --- a/tests/docs/getting_started/context/test_default_arguments.py +++ b/tests/docs/getting_started/context/test_default_arguments.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_default_arguments_kafka(): from docs.docs_src.getting_started.context.kafka.default_arguments import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -21,11 +25,13 @@ async def test_default_arguments_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_default_arguments_confluent(): from docs.docs_src.getting_started.context.confluent.default_arguments import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -34,11 +40,13 @@ async def test_default_arguments_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_default_arguments_rabbit(): from docs.docs_src.getting_started.context.rabbit.default_arguments import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -47,11 +55,13 @@ async def test_default_arguments_rabbit(): @pytest.mark.asyncio() +@require_nats async 
def test_default_arguments_nats(): from docs.docs_src.getting_started.context.nats.default_arguments import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -60,11 +70,13 @@ async def test_default_arguments_nats(): @pytest.mark.asyncio() +@require_redis async def test_default_arguments_redis(): from docs.docs_src.getting_started.context.redis.default_arguments import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_existed_context.py b/tests/docs/getting_started/context/test_existed_context.py index 5b13128849..e0b0eebafc 100644 --- a/tests/docs/getting_started/context/test_existed_context.py +++ b/tests/docs/getting_started/context/test_existed_context.py @@ -1,17 +1,21 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_existed_context_kafka(): from docs.docs_src.getting_started.context.kafka.existed_context import ( broker_object, ) + from faststream.kafka import TestKafkaBroker @broker_object.subscriber("response") async def resp(): ... @@ -24,10 +28,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_confluent async def test_existed_context_confluent(): from docs.docs_src.getting_started.context.confluent.existed_context import ( broker_object, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @broker_object.subscriber("response") async def resp(): ... 
@@ -40,10 +46,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_aiopika async def test_existed_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.existed_context import ( broker_object, ) + from faststream.rabbit import TestRabbitBroker @broker_object.subscriber("response") async def resp(): ... @@ -56,10 +64,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_nats async def test_existed_context_nats(): from docs.docs_src.getting_started.context.nats.existed_context import ( broker_object, ) + from faststream.nats import TestNatsBroker @broker_object.subscriber("response") async def resp(): ... @@ -72,10 +82,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_redis async def test_existed_context_redis(): from docs.docs_src.getting_started.context.redis.existed_context import ( broker_object, ) + from faststream.redis import TestRedisBroker @broker_object.subscriber("response") async def resp(): ... diff --git a/tests/docs/getting_started/context/test_fields_access.py b/tests/docs/getting_started/context/test_fields_access.py index ab8a47d65b..f584a86089 100644 --- a/tests/docs/getting_started/context/test_fields_access.py +++ b/tests/docs/getting_started/context/test_fields_access.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fields_access_kafka(): from docs.docs_src.getting_started.context.kafka.fields_access import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic", headers={"user": "John"}) @@ -21,11 
+25,13 @@ async def test_fields_access_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_fields_access_confluent(): from docs.docs_src.getting_started.context.confluent.fields_access import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic", headers={"user": "John"}) @@ -34,11 +40,13 @@ async def test_fields_access_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_fields_access_rabbit(): from docs.docs_src.getting_started.context.rabbit.fields_access import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue", headers={"user": "John"}) @@ -47,11 +55,13 @@ async def test_fields_access_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_fields_access_nats(): from docs.docs_src.getting_started.context.nats.fields_access import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject", headers={"user": "John"}) @@ -60,11 +70,13 @@ async def test_fields_access_nats(): @pytest.mark.asyncio() +@require_redis async def test_fields_access_redis(): from docs.docs_src.getting_started.context.redis.fields_access import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel", headers={"user": "John"}) diff --git a/tests/docs/getting_started/context/test_initial.py b/tests/docs/getting_started/context/test_initial.py index b4a4a9dbbe..2ea77e2ebd 100644 --- a/tests/docs/getting_started/context/test_initial.py +++ b/tests/docs/getting_started/context/test_initial.py @@ -1,11 +1,19 @@ import pytest from faststream import context -from tests.marks import python39 +from tests.marks import ( + python39, + require_aiokafka, + 
require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() @python39 +@require_aiokafka async def test_kafka(): from docs.docs_src.getting_started.context.kafka.initial import broker from faststream.kafka import TestKafkaBroker @@ -20,6 +28,7 @@ async def test_kafka(): @pytest.mark.asyncio() @python39 +@require_confluent async def test_confluent(): from docs.docs_src.getting_started.context.confluent.initial import broker from faststream.confluent import TestKafkaBroker @@ -34,6 +43,7 @@ async def test_confluent(): @pytest.mark.asyncio() @python39 +@require_aiopika async def test_rabbit(): from docs.docs_src.getting_started.context.rabbit.initial import broker from faststream.rabbit import TestRabbitBroker @@ -48,6 +58,7 @@ async def test_rabbit(): @pytest.mark.asyncio() @python39 +@require_nats async def test_nats(): from docs.docs_src.getting_started.context.nats.initial import broker from faststream.nats import TestNatsBroker @@ -62,6 +73,7 @@ async def test_nats(): @pytest.mark.asyncio() @python39 +@require_redis async def test_redis(): from docs.docs_src.getting_started.context.redis.initial import broker from faststream.redis import TestRedisBroker diff --git a/tests/docs/getting_started/context/test_manual_local_context.py b/tests/docs/getting_started/context/test_manual_local_context.py index 9442d1166c..f7829575b9 100644 --- a/tests/docs/getting_started/context/test_manual_local_context.py +++ b/tests/docs/getting_started/context/test_manual_local_context.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def 
test_manual_local_context_kafka(): from docs.docs_src.getting_started.context.kafka.manual_local_context import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -21,11 +25,13 @@ async def test_manual_local_context_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_manual_local_context_confluent(): from docs.docs_src.getting_started.context.confluent.manual_local_context import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -34,11 +40,13 @@ async def test_manual_local_context_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_manual_local_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.manual_local_context import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -47,11 +55,13 @@ async def test_manual_local_context_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_manual_local_context_nats(): from docs.docs_src.getting_started.context.nats.manual_local_context import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -60,11 +70,13 @@ async def test_manual_local_context_nats(): @pytest.mark.asyncio() +@require_redis async def test_manual_local_context_redis(): from docs.docs_src.getting_started.context.redis.manual_local_context import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_nested.py b/tests/docs/getting_started/context/test_nested.py index 11e6932084..9cf13d0252 100644 --- 
a/tests/docs/getting_started/context/test_nested.py +++ b/tests/docs/getting_started/context/test_nested.py @@ -1,11 +1,13 @@ import pytest -from faststream.rabbit import TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test(): from docs.docs_src.getting_started.context.nested import broker, handler + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") diff --git a/tests/docs/getting_started/dependencies/basic/test_depends.py b/tests/docs/getting_started/dependencies/basic/test_depends.py index 724fab875c..0c46d6a967 100644 --- a/tests/docs/getting_started/dependencies/basic/test_depends.py +++ b/tests/docs/getting_started/dependencies/basic/test_depends.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_depends_kafka(): from docs.docs_src.getting_started.dependencies.basic.kafka.depends import ( broker, handler, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker): await broker.publish({}, "test") @@ -20,11 +24,13 @@ async def test_depends_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_depends_confluent(): from docs.docs_src.getting_started.dependencies.basic.confluent.depends import ( broker, handler, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker): await broker.publish({}, "test") @@ -32,11 +38,13 @@ async def test_depends_confluent(): @pytest.mark.asyncio() +@require_aiopika async def 
test_depends_rabbit(): from docs.docs_src.getting_started.dependencies.basic.rabbit.depends import ( broker, handler, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker): await broker.publish({}, "test") @@ -44,11 +52,13 @@ async def test_depends_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_depends_nats(): from docs.docs_src.getting_started.dependencies.basic.nats.depends import ( broker, handler, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker): await broker.publish({}, "test") @@ -56,11 +66,13 @@ async def test_depends_nats(): @pytest.mark.asyncio() +@require_redis async def test_depends_redis(): from docs.docs_src.getting_started.dependencies.basic.redis.depends import ( broker, handler, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker): await broker.publish({}, "test") diff --git a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py index 51578c24a9..e2ffabd2f1 100644 --- a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py +++ b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_nested_depends_kafka(): from docs.docs_src.getting_started.dependencies.basic.kafka.nested_depends import ( broker, handler, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker): await broker.publish({}, "test") @@ -20,11 +24,13 @@ async def 
test_nested_depends_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_nested_depends_confluent(): from docs.docs_src.getting_started.dependencies.basic.confluent.nested_depends import ( broker, handler, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker): await broker.publish({}, "test") @@ -32,11 +38,13 @@ async def test_nested_depends_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_nested_depends_rabbit(): from docs.docs_src.getting_started.dependencies.basic.rabbit.nested_depends import ( broker, handler, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker): await broker.publish({}, "test") @@ -44,11 +52,13 @@ async def test_nested_depends_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_nested_depends_nats(): from docs.docs_src.getting_started.dependencies.basic.nats.nested_depends import ( broker, handler, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker): await broker.publish({}, "test") @@ -56,11 +66,13 @@ async def test_nested_depends_nats(): @pytest.mark.asyncio() +@require_redis async def test_nested_depends_redis(): from docs.docs_src.getting_started.dependencies.basic.redis.nested_depends import ( broker, handler, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker): await broker.publish({}, "test") diff --git a/tests/docs/getting_started/dependencies/test_basic.py b/tests/docs/getting_started/dependencies/test_basic.py index 81ff933677..a1daf5538e 100644 --- a/tests/docs/getting_started/dependencies/test_basic.py +++ b/tests/docs/getting_started/dependencies/test_basic.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_basic_kafka(): from 
docs.docs_src.getting_started.dependencies.basic_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_class.py b/tests/docs/getting_started/dependencies/test_class.py index 1ee3207650..a9e3e0517a 100644 --- a/tests/docs/getting_started/dependencies/test_class.py +++ b/tests/docs/getting_started/dependencies/test_class.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_basic_kafka(): from docs.docs_src.getting_started.dependencies.class_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_global.py b/tests/docs/getting_started/dependencies/test_global.py index 05e10097ce..8da64084b1 100644 --- a/tests/docs/getting_started/dependencies/test_global.py +++ b/tests/docs/getting_started/dependencies/test_global.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_global_kafka(): from docs.docs_src.getting_started.dependencies.global_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_global_broker.py b/tests/docs/getting_started/dependencies/test_global_broker.py index 406e359cf1..c944070513 100644 --- a/tests/docs/getting_started/dependencies/test_global_broker.py +++ b/tests/docs/getting_started/dependencies/test_global_broker.py @@ 
-1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_global_broker_kafka(): from docs.docs_src.getting_started.dependencies.global_broker_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_sub_dep.py b/tests/docs/getting_started/dependencies/test_sub_dep.py index d3fc3b591a..8847ab9064 100644 --- a/tests/docs/getting_started/dependencies/test_sub_dep.py +++ b/tests/docs/getting_started/dependencies/test_sub_dep.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_sub_dep_kafka(): from docs.docs_src.getting_started.dependencies.sub_dep_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_yield.py b/tests/docs/getting_started/dependencies/test_yield.py index 5ca173d924..84886692d6 100644 --- a/tests/docs/getting_started/dependencies/test_yield.py +++ b/tests/docs/getting_started/dependencies/test_yield.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_yield_kafka(): from docs.docs_src.getting_started.dependencies.yield_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/index/test_basic.py 
b/tests/docs/getting_started/index/test_basic.py index 945ae94971..5341408c8d 100644 --- a/tests/docs/getting_started/index/test_basic.py +++ b/tests/docs/getting_started/index/test_basic.py @@ -1,15 +1,19 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_quickstart_index_kafka(): from docs.docs_src.getting_started.index.base_kafka import base_handler, broker + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("", "test") @@ -18,8 +22,10 @@ async def test_quickstart_index_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_quickstart_index_confluent(): from docs.docs_src.getting_started.index.base_confluent import base_handler, broker + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("", "test") @@ -28,8 +34,10 @@ async def test_quickstart_index_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_quickstart_index_rabbit(): from docs.docs_src.getting_started.index.base_rabbit import base_handler, broker + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("", "test") @@ -38,8 +46,10 @@ async def test_quickstart_index_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_quickstart_index_nats(): from docs.docs_src.getting_started.index.base_nats import base_handler, broker + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("", "test") @@ -48,8 +58,10 @@ async def 
test_quickstart_index_nats(): @pytest.mark.asyncio() +@require_redis async def test_quickstart_index_redis(): from docs.docs_src.getting_started.index.base_redis import base_handler, broker + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("", "test") diff --git a/tests/docs/getting_started/lifespan/test_basic.py b/tests/docs/getting_started/lifespan/test_basic.py index 21ba14a72e..82f0227e59 100644 --- a/tests/docs/getting_started/lifespan/test_basic.py +++ b/tests/docs/getting_started/lifespan/test_basic.py @@ -1,18 +1,23 @@ import pytest from faststream import TestApp, context -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker -from tests.marks import pydantic_v2 +from tests.marks import ( + pydantic_v2, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) from tests.mocks import mock_pydantic_settings_env @pydantic_v2 @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_basic_lifespan(): + from faststream.rabbit import TestRabbitBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.rabbit.basic import app, broker @@ -22,7 +27,10 @@ async def test_rabbit_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_basic_lifespan(): + from faststream.kafka import TestKafkaBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.kafka.basic import app, broker @@ -32,7 +40,10 @@ async def test_kafka_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_confluent async def test_confluent_basic_lifespan(): + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker + with 
mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.confluent.basic import app, broker @@ -42,7 +53,10 @@ async def test_confluent_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_nats async def test_nats_basic_lifespan(): + from faststream.nats import TestNatsBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.nats.basic import app, broker @@ -52,7 +66,10 @@ async def test_nats_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_redis async def test_redis_basic_lifespan(): + from faststream.redis import TestRedisBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.redis.basic import app, broker diff --git a/tests/docs/getting_started/lifespan/test_ml.py b/tests/docs/getting_started/lifespan/test_ml.py index 8c4502c856..f63d6bbed7 100644 --- a/tests/docs/getting_started/lifespan/test_ml.py +++ b/tests/docs/getting_started/lifespan/test_ml.py @@ -1,16 +1,20 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_ml_lifespan(): from docs.docs_src.getting_started.lifespan.rabbit.ml import app, broker, predict + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -19,8 +23,10 @@ async def test_rabbit_ml_lifespan(): @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_ml_lifespan(): from 
docs.docs_src.getting_started.lifespan.kafka.ml import app, broker, predict + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -29,8 +35,10 @@ async def test_kafka_ml_lifespan(): @pytest.mark.asyncio() +@require_confluent async def test_confluent_ml_lifespan(): from docs.docs_src.getting_started.lifespan.confluent.ml import app, broker, predict + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -39,8 +47,10 @@ async def test_confluent_ml_lifespan(): @pytest.mark.asyncio() +@require_nats async def test_nats_ml_lifespan(): from docs.docs_src.getting_started.lifespan.nats.ml import app, broker, predict + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -49,8 +59,10 @@ async def test_nats_ml_lifespan(): @pytest.mark.asyncio() +@require_redis async def test_redis_ml_lifespan(): from docs.docs_src.getting_started.lifespan.redis.ml import app, broker, predict + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) diff --git a/tests/docs/getting_started/lifespan/test_ml_context.py b/tests/docs/getting_started/lifespan/test_ml_context.py index 612fcd2c91..dc7cb57d6a 100644 --- a/tests/docs/getting_started/lifespan/test_ml_context.py +++ b/tests/docs/getting_started/lifespan/test_ml_context.py @@ -1,20 +1,24 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from 
faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_ml_lifespan(): from docs.docs_src.getting_started.lifespan.rabbit.ml_context import ( app, broker, predict, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -23,12 +27,14 @@ async def test_rabbit_ml_lifespan(): @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_ml_lifespan(): from docs.docs_src.getting_started.lifespan.kafka.ml_context import ( app, broker, predict, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -37,12 +43,14 @@ async def test_kafka_ml_lifespan(): @pytest.mark.asyncio() +@require_confluent async def test_confluent_ml_lifespan(): from docs.docs_src.getting_started.lifespan.confluent.ml_context import ( app, broker, predict, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -51,12 +59,14 @@ async def test_confluent_ml_lifespan(): @pytest.mark.asyncio() +@require_nats async def test_nats_ml_lifespan(): from docs.docs_src.getting_started.lifespan.nats.ml_context import ( app, broker, predict, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -65,12 +75,14 @@ async def test_nats_ml_lifespan(): @pytest.mark.asyncio() +@require_redis async def test_redis_ml_lifespan(): from docs.docs_src.getting_started.lifespan.redis.ml_context import ( app, broker, predict, ) + from faststream.redis import 
TestRedisBroker async with TestRedisBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) diff --git a/tests/docs/getting_started/lifespan/test_testing.py b/tests/docs/getting_started/lifespan/test_testing.py index cfe19fca37..42dbf0dd58 100644 --- a/tests/docs/getting_started/lifespan/test_testing.py +++ b/tests/docs/getting_started/lifespan/test_testing.py @@ -1,22 +1,65 @@ -from docs.docs_src.getting_started.lifespan.confluent.testing import ( - test_lifespan as _test_lifespan_confluent, -) -from docs.docs_src.getting_started.lifespan.kafka.testing import ( - test_lifespan as _test_lifespan_k, -) -from docs.docs_src.getting_started.lifespan.nats.testing import ( - test_lifespan as _test_lifespan_n, -) -from docs.docs_src.getting_started.lifespan.rabbit.testing import ( - test_lifespan as _test_lifespan_r, -) -from docs.docs_src.getting_started.lifespan.redis.testing import ( - test_lifespan as _test_lifespan_red, +import pytest + +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) -from tests.marks import python39 -test_lifespan_red = python39(_test_lifespan_red) -test_lifespan_r = python39(_test_lifespan_r) -test_lifespan_n = python39(_test_lifespan_n) -test_lifespan_k = python39(_test_lifespan_k) -test_lifespan_confluent = python39(_test_lifespan_confluent) + +@pytest.mark.asyncio() +@python39 +@require_redis +async def test_lifespan_redis(): + from docs.docs_src.getting_started.lifespan.redis.testing import ( + test_lifespan as _test_lifespan_red, + ) + + await _test_lifespan_red() + + +@pytest.mark.asyncio() +@python39 +@require_confluent +async def test_lifespan_confluent(): + from docs.docs_src.getting_started.lifespan.confluent.testing import ( + test_lifespan as _test_lifespan_confluent, + ) + + await _test_lifespan_confluent() + + +@pytest.mark.asyncio() +@python39 +@require_aiokafka +async def test_lifespan_kafka(): + from 
docs.docs_src.getting_started.lifespan.kafka.testing import ( + test_lifespan as _test_lifespan_k, + ) + + await _test_lifespan_k() + + +@pytest.mark.asyncio() +@python39 +@require_aiopika +async def test_lifespan_rabbit(): + from docs.docs_src.getting_started.lifespan.rabbit.testing import ( + test_lifespan as _test_lifespan_r, + ) + + await _test_lifespan_r() + + +@pytest.mark.asyncio() +@python39 +@require_nats +async def test_lifespan_nats(): + from docs.docs_src.getting_started.lifespan.nats.testing import ( + test_lifespan as _test_lifespan_n, + ) + + await _test_lifespan_n() diff --git a/tests/docs/getting_started/publishing/test_broker.py b/tests/docs/getting_started/publishing/test_broker.py index 2b28895b92..794564df71 100644 --- a/tests/docs/getting_started/publishing/test_broker.py +++ b/tests/docs/getting_started/publishing/test_broker.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_broker_kafka(): from docs.docs_src.getting_started.publishing.kafka.broker import ( app, @@ -16,6 +19,7 @@ async def test_broker_kafka(): handle, handle_next, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -23,6 +27,7 @@ async def test_broker_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_broker_confluent(): from docs.docs_src.getting_started.publishing.confluent.broker import ( app, @@ -30,6 +35,7 @@ async def test_broker_confluent(): handle, handle_next, ) + from faststream.confluent import TestKafkaBroker 
as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -37,6 +43,7 @@ async def test_broker_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_broker_rabbit(): from docs.docs_src.getting_started.publishing.rabbit.broker import ( app, @@ -44,6 +51,7 @@ async def test_broker_rabbit(): handle, handle_next, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -51,6 +59,7 @@ async def test_broker_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_broker_nats(): from docs.docs_src.getting_started.publishing.nats.broker import ( app, @@ -58,6 +67,7 @@ async def test_broker_nats(): handle, handle_next, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -65,6 +75,7 @@ async def test_broker_nats(): @pytest.mark.asyncio() +@require_redis async def test_broker_redis(): from docs.docs_src.getting_started.publishing.redis.broker import ( app, @@ -72,6 +83,7 @@ async def test_broker_redis(): handle, handle_next, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/publishing/test_broker_context.py b/tests/docs/getting_started/publishing/test_broker_context.py index 60deb460e9..aa8d0f194b 100644 --- a/tests/docs/getting_started/publishing/test_broker_context.py +++ b/tests/docs/getting_started/publishing/test_broker_context.py @@ -1,21 +1,25 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + 
require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() @pytest.mark.kafka() +@require_aiokafka async def test_broker_context_kafka(): from docs.docs_src.getting_started.publishing.kafka.broker_context import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) @@ -24,12 +28,14 @@ async def test_broker_context_kafka(): @pytest.mark.asyncio() @pytest.mark.confluent() +@require_confluent async def test_broker_context_confluent(): from docs.docs_src.getting_started.publishing.confluent.broker_context import ( app, broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker, with_real=True), TestApp(app): await handle.wait_call(5) @@ -38,12 +44,14 @@ async def test_broker_context_confluent(): @pytest.mark.asyncio() @pytest.mark.nats() +@require_nats async def test_broker_context_nats(): from docs.docs_src.getting_started.publishing.nats.broker_context import ( app, broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) @@ -52,12 +60,14 @@ async def test_broker_context_nats(): @pytest.mark.asyncio() @pytest.mark.rabbit() +@require_aiopika async def test_broker_context_rabbit(): from docs.docs_src.getting_started.publishing.rabbit.broker_context import ( app, broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) @@ -66,12 +76,14 @@ async def test_broker_context_rabbit(): @pytest.mark.asyncio() @pytest.mark.redis() +@require_redis async def test_broker_context_redis(): from docs.docs_src.getting_started.publishing.redis.broker_context import ( app, broker, handle, ) + from faststream.redis import TestRedisBroker async with 
TestRedisBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) diff --git a/tests/docs/getting_started/publishing/test_decorator.py b/tests/docs/getting_started/publishing/test_decorator.py index e4ad61195c..66e860e234 100644 --- a/tests/docs/getting_started/publishing/test_decorator.py +++ b/tests/docs/getting_started/publishing/test_decorator.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_decorator_kafka(): from docs.docs_src.getting_started.publishing.kafka.decorator import ( app, @@ -16,6 +19,7 @@ async def test_decorator_kafka(): handle, handle_next, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -24,6 +28,7 @@ async def test_decorator_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_decorator_confluent(): from docs.docs_src.getting_started.publishing.confluent.decorator import ( app, @@ -31,6 +36,7 @@ async def test_decorator_confluent(): handle, handle_next, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -39,6 +45,7 @@ async def test_decorator_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_decorator_rabbit(): from docs.docs_src.getting_started.publishing.rabbit.decorator import ( app, @@ -46,6 +53,7 @@ async def test_decorator_rabbit(): handle, handle_next, ) + from faststream.rabbit import TestRabbitBroker async with 
TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -54,6 +62,7 @@ async def test_decorator_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_decorator_nats(): from docs.docs_src.getting_started.publishing.nats.decorator import ( app, @@ -61,6 +70,7 @@ async def test_decorator_nats(): handle, handle_next, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -69,6 +79,7 @@ async def test_decorator_nats(): @pytest.mark.asyncio() +@require_redis async def test_decorator_redis(): from docs.docs_src.getting_started.publishing.redis.decorator import ( app, @@ -76,6 +87,7 @@ async def test_decorator_redis(): handle, handle_next, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/publishing/test_direct.py b/tests/docs/getting_started/publishing/test_direct.py index 535a52c1c8..1bbb8e142a 100644 --- a/tests/docs/getting_started/publishing/test_direct.py +++ b/tests/docs/getting_started/publishing/test_direct.py @@ -1,23 +1,59 @@ -from docs.docs_src.getting_started.publishing.confluent.direct_testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.publishing.kafka.direct_testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.publishing.nats.direct_testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.publishing.rabbit.direct_testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.publishing.redis.direct_testing import ( - test_handle as test_handle_red, -) +import pytest -__all__ = ( - "test_handle_r", - "test_handle_k", - "test_handle_n", - "test_handle_red", - "test_handle_confluent", +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + 
require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.publishing.kafka.direct_testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.publishing.confluent.direct_testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.publishing.rabbit.direct_testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.publishing.nats.direct_testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.publishing.redis.direct_testing import ( + test_handle as test_handle_red, + ) + + await test_handle_red() diff --git a/tests/docs/getting_started/publishing/test_object.py b/tests/docs/getting_started/publishing/test_object.py index d2bdc6b07c..d270cac965 100644 --- a/tests/docs/getting_started/publishing/test_object.py +++ b/tests/docs/getting_started/publishing/test_object.py @@ -1,23 +1,59 @@ -from docs.docs_src.getting_started.publishing.confluent.object_testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.publishing.kafka.object_testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.publishing.nats.object_testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.publishing.rabbit.object_testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.publishing.redis.object_testing import ( - test_handle as 
test_handle_red, -) +import pytest -__all__ = ( - "test_handle_k", - "test_handle_r", - "test_handle_n", - "test_handle_red", - "test_handle_confluent", +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.publishing.kafka.object_testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.publishing.confluent.object_testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.publishing.rabbit.object_testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.publishing.nats.object_testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.publishing.redis.object_testing import ( + test_handle as test_handle_red, + ) + + await test_handle_red() diff --git a/tests/docs/getting_started/routers/test_base.py b/tests/docs/getting_started/routers/test_base.py index e23e788d0e..24004e71e0 100644 --- a/tests/docs/getting_started/routers/test_base.py +++ b/tests/docs/getting_started/routers/test_base.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + 
require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_base_router_kafka(): from docs.docs_src.getting_started.routers.kafka.router import ( app, @@ -16,6 +19,7 @@ async def test_base_router_kafka(): handle, handle_response, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -23,6 +27,7 @@ async def test_base_router_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_base_router_confluent(): from docs.docs_src.getting_started.routers.confluent.router import ( app, @@ -30,6 +35,7 @@ async def test_base_router_confluent(): handle, handle_response, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -37,6 +43,7 @@ async def test_base_router_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_base_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.router import ( app, @@ -44,6 +51,7 @@ async def test_base_router_rabbit(): handle, handle_response, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -51,6 +59,7 @@ async def test_base_router_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_base_router_nats(): from docs.docs_src.getting_started.routers.nats.router import ( app, @@ -58,6 +67,7 @@ async def test_base_router_nats(): handle, handle_response, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -65,6 +75,7 @@ async def test_base_router_nats(): @pytest.mark.asyncio() +@require_redis async def 
test_base_router_redis(): from docs.docs_src.getting_started.routers.redis.router import ( app, @@ -72,6 +83,7 @@ async def test_base_router_redis(): handle, handle_response, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) diff --git a/tests/docs/getting_started/routers/test_delay.py b/tests/docs/getting_started/routers/test_delay.py index 171aaaef4c..a951584837 100644 --- a/tests/docs/getting_started/routers/test_delay.py +++ b/tests/docs/getting_started/routers/test_delay.py @@ -1,19 +1,23 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_delay_router_kafka(): from docs.docs_src.getting_started.routers.kafka.router_delay import ( app, broker, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -24,11 +28,13 @@ async def test_delay_router_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_delay_router_confluent(): from docs.docs_src.getting_started.routers.confluent.router_delay import ( app, broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -39,11 +45,13 @@ async def test_delay_router_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_delay_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.router_delay import ( app, 
broker, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -54,11 +62,13 @@ async def test_delay_router_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_delay_router_nats(): from docs.docs_src.getting_started.routers.nats.router_delay import ( app, broker, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -69,11 +79,13 @@ async def test_delay_router_nats(): @pytest.mark.asyncio() +@require_redis async def test_delay_router_redis(): from docs.docs_src.getting_started.routers.redis.router_delay import ( app, broker, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ diff --git a/tests/docs/getting_started/routers/test_delay_equal.py b/tests/docs/getting_started/routers/test_delay_equal.py index b0337d5cce..9f68b9edc5 100644 --- a/tests/docs/getting_started/routers/test_delay_equal.py +++ b/tests/docs/getting_started/routers/test_delay_equal.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_delay_router_kafka(): from docs.docs_src.getting_started.routers.kafka.delay_equal import ( app, @@ -17,6 +20,7 @@ async def test_delay_router_kafka(): from docs.docs_src.getting_started.routers.kafka.router_delay import ( broker as control_broker, ) + from faststream.kafka import TestKafkaBroker assert 
broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -30,6 +34,7 @@ async def test_delay_router_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_delay_router_confluent(): from docs.docs_src.getting_started.routers.confluent.delay_equal import ( app, @@ -38,6 +43,7 @@ async def test_delay_router_confluent(): from docs.docs_src.getting_started.routers.confluent.router_delay import ( broker as control_broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -51,6 +57,7 @@ async def test_delay_router_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_delay_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.delay_equal import ( app, @@ -59,6 +66,7 @@ async def test_delay_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.router_delay import ( broker as control_broker, ) + from faststream.rabbit import TestRabbitBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -72,6 +80,7 @@ async def test_delay_router_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_delay_router_nats(): from docs.docs_src.getting_started.routers.nats.delay_equal import ( app, @@ -80,6 +89,7 @@ async def test_delay_router_nats(): from docs.docs_src.getting_started.routers.nats.router_delay import ( broker as control_broker, ) + from faststream.nats import TestNatsBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -93,6 +103,7 @@ async def test_delay_router_nats(): @pytest.mark.asyncio() +@require_redis async def test_delay_router_redis(): from 
docs.docs_src.getting_started.routers.redis.delay_equal import ( app, @@ -101,6 +112,7 @@ async def test_delay_router_redis(): from docs.docs_src.getting_started.routers.redis.router_delay import ( broker as control_broker, ) + from faststream.redis import TestRedisBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() diff --git a/tests/docs/getting_started/serialization/test_parser.py b/tests/docs/getting_started/serialization/test_parser.py index 371558cc41..5285d7ee9b 100644 --- a/tests/docs/getting_started/serialization/test_parser.py +++ b/tests/docs/getting_started/serialization/test_parser.py @@ -1,68 +1,80 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_nats async def test_parser_nats(): from docs.docs_src.getting_started.serialization.parser_nats import ( app, broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_aiokafka async def test_parser_kafka(): from docs.docs_src.getting_started.serialization.parser_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_confluent async def test_parser_confluent(): from docs.docs_src.getting_started.serialization.parser_confluent import ( app, broker, handle, ) + from faststream.confluent import TestKafkaBroker as 
TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_aiopika async def test_parser_rabbit(): from docs.docs_src.getting_started.serialization.parser_rabbit import ( app, broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_redis async def test_parser_redis(): from docs.docs_src.getting_started.serialization.parser_redis import ( app, broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/subscription/test_annotated.py b/tests/docs/getting_started/subscription/test_annotated.py index 0511ccf3b3..21c1ed758d 100644 --- a/tests/docs/getting_started/subscription/test_annotated.py +++ b/tests/docs/getting_started/subscription/test_annotated.py @@ -1,96 +1,98 @@ import pytest from pydantic import ValidationError -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker -from tests.marks import python39 +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() @python39 class BaseCase: async def test_handle(self, setup): - broker, handle = setup + broker, handle, test_class = setup - async with self.test_class(broker) as br: + async with test_class(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test") handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) assert handle.mock is None async def test_validation_error(self, setup): - broker, handle = setup + broker, handle, 
test_class = setup - async with self.test_class(broker) as br: + async with test_class(broker) as br: with pytest.raises(ValidationError): await br.publish("wrong message", "test") handle.mock.assert_called_once_with("wrong message") +@require_aiokafka class TestKafka(BaseCase): - test_class = TestKafkaBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.kafka.pydantic_annotated_fields import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker - return (broker, handle) + return (broker, handle, TestKafkaBroker) +@require_confluent class TestConfluent(BaseCase): - test_class = TestConfluentKafkaBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.confluent.pydantic_annotated_fields import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker - return (broker, handle) + return (broker, handle, TestConfluentKafkaBroker) +@require_aiopika class TestRabbit(BaseCase): - test_class = TestRabbitBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.rabbit.pydantic_annotated_fields import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker - return (broker, handle) + return (broker, handle, TestRabbitBroker) +@require_nats class TestNats(BaseCase): - test_class = TestNatsBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.nats.pydantic_annotated_fields import ( broker, handle, ) + from faststream.nats import TestNatsBroker - return (broker, handle) + return (broker, handle, TestNatsBroker) +@require_redis class TestRedis(BaseCase): - test_class = TestRedisBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.redis.pydantic_annotated_fields import ( broker, handle, ) + from faststream.redis import TestRedisBroker - return (broker, handle) + 
return (broker, handle, TestRedisBroker) diff --git a/tests/docs/getting_started/subscription/test_filter.py b/tests/docs/getting_started/subscription/test_filter.py index d7c5475af4..1cd9588f55 100644 --- a/tests/docs/getting_started/subscription/test_filter.py +++ b/tests/docs/getting_started/subscription/test_filter.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_filtering(): from docs.docs_src.getting_started.subscription.kafka.filter import ( app, @@ -16,6 +19,7 @@ async def test_kafka_filtering(): default_handler, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -23,6 +27,7 @@ async def test_kafka_filtering(): @pytest.mark.asyncio() +@require_confluent async def test_confluent_filtering(): from docs.docs_src.getting_started.subscription.confluent.filter import ( app, @@ -30,6 +35,7 @@ async def test_confluent_filtering(): default_handler, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -37,6 +43,7 @@ async def test_confluent_filtering(): @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_filtering(): from docs.docs_src.getting_started.subscription.rabbit.filter import ( app, @@ -44,6 +51,7 @@ async def test_rabbit_filtering(): default_handler, handle, ) + from faststream.rabbit import 
TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -51,6 +59,7 @@ async def test_rabbit_filtering(): @pytest.mark.asyncio() +@require_nats async def test_nats_filtering(): from docs.docs_src.getting_started.subscription.nats.filter import ( app, @@ -58,6 +67,7 @@ async def test_nats_filtering(): default_handler, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -65,6 +75,7 @@ async def test_nats_filtering(): @pytest.mark.asyncio() +@require_redis async def test_redis_filtering(): from docs.docs_src.getting_started.subscription.redis.filter import ( app, @@ -72,6 +83,7 @@ async def test_redis_filtering(): default_handler, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) diff --git a/tests/docs/getting_started/subscription/test_pydantic.py b/tests/docs/getting_started/subscription/test_pydantic.py index c17a4bc4c4..51344f3ffe 100644 --- a/tests/docs/getting_started/subscription/test_pydantic.py +++ b/tests/docs/getting_started/subscription/test_pydantic.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiopika async def test_pydantic_model_rabbit(): from docs.docs_src.getting_started.subscription.rabbit.pydantic_model import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: 
await br.publish({"name": "John", "user_id": 1}, "test-queue") @@ -20,11 +24,13 @@ async def test_pydantic_model_rabbit(): @pytest.mark.asyncio() +@require_aiokafka async def test_pydantic_model_kafka(): from docs.docs_src.getting_started.subscription.kafka.pydantic_model import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-topic") @@ -32,11 +38,13 @@ async def test_pydantic_model_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_pydantic_model_confluent(): from docs.docs_src.getting_started.subscription.confluent.pydantic_model import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-topic") @@ -44,11 +52,13 @@ async def test_pydantic_model_confluent(): @pytest.mark.asyncio() +@require_nats async def test_pydantic_model_nats(): from docs.docs_src.getting_started.subscription.nats.pydantic_model import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-subject") @@ -56,11 +66,13 @@ async def test_pydantic_model_nats(): @pytest.mark.asyncio() +@require_redis async def test_pydantic_model_redis(): from docs.docs_src.getting_started.subscription.redis.pydantic_model import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-channel") diff --git a/tests/docs/getting_started/subscription/test_real.py b/tests/docs/getting_started/subscription/test_real.py index 415bde53c2..74a93869f7 100644 --- a/tests/docs/getting_started/subscription/test_real.py +++ b/tests/docs/getting_started/subscription/test_real.py @@ -1,47 +1,119 @@ import pytest -from 
docs.docs_src.getting_started.subscription.confluent.real_testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.subscription.confluent.real_testing import ( - test_validation_error as test_validation_error_confluent, -) -from docs.docs_src.getting_started.subscription.kafka.real_testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.subscription.kafka.real_testing import ( - test_validation_error as test_validation_error_k, -) -from docs.docs_src.getting_started.subscription.nats.real_testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.subscription.nats.real_testing import ( - test_validation_error as test_validation_error_n, -) -from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( - test_validation_error as test_validation_error_r, -) -from docs.docs_src.getting_started.subscription.redis.real_testing import ( - test_handle as test_handle_red, -) -from docs.docs_src.getting_started.subscription.redis.real_testing import ( - test_validation_error as test_validation_error_red, +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) -pytest.mark.kafka(test_handle_k) -pytest.mark.kafka(test_validation_error_k) -pytest.mark.confluent(test_handle_confluent) -pytest.mark.confluent(test_validation_error_confluent) +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.subscription.kafka.real_testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_validate_kafka(): + from docs.docs_src.getting_started.subscription.kafka.real_testing import ( + test_validation_error as 
test_validation_error_k, + ) + + await test_validation_error_k() + + +@pytest.mark.confluent() +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.subscription.confluent.real_testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.confluent() +@require_confluent +async def test_validate_confluent(): + from docs.docs_src.getting_started.subscription.confluent.real_testing import ( + test_validation_error as test_validation_error_confluent, + ) + + await test_validation_error_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_validate_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( + test_validation_error as test_validation_error_r, + ) + + await test_validation_error_r() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.subscription.nats.real_testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_validate_nats(): + from docs.docs_src.getting_started.subscription.nats.real_testing import ( + test_validation_error as test_validation_error_n, + ) + + await test_validation_error_n() + + +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.subscription.redis.real_testing import ( + test_handle as test_handle_red, + ) + + await test_handle_red() -pytest.mark.rabbit(test_handle_r) -pytest.mark.rabbit(test_validation_error_r) 
-pytest.mark.nats(test_handle_n) -pytest.mark.nats(test_validation_error_n) +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_validate_redis(): + from docs.docs_src.getting_started.subscription.redis.real_testing import ( + test_validation_error as test_validation_error_red, + ) -pytest.mark.redis(test_handle_red) -pytest.mark.redis(test_validation_error_red) + await test_validation_error_red() diff --git a/tests/docs/getting_started/subscription/test_testing.py b/tests/docs/getting_started/subscription/test_testing.py index a1f87099af..5f46d5561e 100644 --- a/tests/docs/getting_started/subscription/test_testing.py +++ b/tests/docs/getting_started/subscription/test_testing.py @@ -1,43 +1,119 @@ -from docs.docs_src.getting_started.subscription.confluent.testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.subscription.confluent.testing import ( - test_validation_error as test_validation_error_confluent, -) -from docs.docs_src.getting_started.subscription.kafka.testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.subscription.kafka.testing import ( - test_validation_error as test_validation_error_k, -) -from docs.docs_src.getting_started.subscription.nats.testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.subscription.nats.testing import ( - test_validation_error as test_validation_error_n, -) -from docs.docs_src.getting_started.subscription.rabbit.testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.subscription.rabbit.testing import ( - test_validation_error as test_validation_error_r, -) -from docs.docs_src.getting_started.subscription.redis.testing import ( - test_handle as test_handle_rd, -) -from docs.docs_src.getting_started.subscription.redis.testing import ( - test_validation_error as test_validation_error_rd, -) +import pytest -__all__ = ( - "test_handle_r", - 
"test_validation_error_r", - "test_handle_rd", - "test_validation_error_rd", - "test_handle_k", - "test_validation_error_k", - "test_handle_confluent", - "test_validation_error_confluent", - "test_handle_n", - "test_validation_error_n", +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.subscription.kafka.testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_validate_kafka(): + from docs.docs_src.getting_started.subscription.kafka.testing import ( + test_validation_error as test_validation_error_k, + ) + + await test_validation_error_k() + + +@pytest.mark.confluent() +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.subscription.confluent.testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.confluent() +@require_confluent +async def test_validate_confluent(): + from docs.docs_src.getting_started.subscription.confluent.testing import ( + test_validation_error as test_validation_error_confluent, + ) + + await test_validation_error_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_validate_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.testing import ( + test_validation_error as test_validation_error_r, + ) + + await test_validation_error_r() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats 
+async def test_handle_nats(): + from docs.docs_src.getting_started.subscription.nats.testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_validate_nats(): + from docs.docs_src.getting_started.subscription.nats.testing import ( + test_validation_error as test_validation_error_n, + ) + + await test_validation_error_n() + + +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.subscription.redis.testing import ( + test_handle as test_handle_rd, + ) + + await test_handle_rd() + + +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_validate_redis(): + from docs.docs_src.getting_started.subscription.redis.testing import ( + test_validation_error as test_validation_error_rd, + ) + + await test_validation_error_rd() diff --git a/tests/docs/index/test_basic.py b/tests/docs/index/test_basic.py index da4d6d246e..b495a0384f 100644 --- a/tests/docs/index/test_basic.py +++ b/tests/docs/index/test_basic.py @@ -1,15 +1,19 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_index_kafka_base(): from docs.docs_src.index.kafka.basic import broker, handle_msg + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-topic") @@ -22,8 +26,10 @@ async def test_index_kafka_base(): @pytest.mark.asyncio() +@require_confluent async def test_index_confluent_base(): from docs.docs_src.index.confluent.basic 
import broker, handle_msg + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-topic") @@ -36,8 +42,10 @@ async def test_index_confluent_base(): @pytest.mark.asyncio() +@require_aiopika async def test_index_rabbit_base(): from docs.docs_src.index.rabbit.basic import broker, handle_msg + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-queue") @@ -50,8 +58,10 @@ async def test_index_rabbit_base(): @pytest.mark.asyncio() +@require_nats async def test_index_nats_base(): from docs.docs_src.index.nats.basic import broker, handle_msg + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-subject") @@ -64,8 +74,10 @@ async def test_index_nats_base(): @pytest.mark.asyncio() +@require_redis async def test_index_redis_base(): from docs.docs_src.index.redis.basic import broker, handle_msg + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-channel") diff --git a/tests/docs/index/test_dependencies.py b/tests/docs/index/test_dependencies.py index 81cd4d9bb5..b2494469f6 100644 --- a/tests/docs/index/test_dependencies.py +++ b/tests/docs/index/test_dependencies.py @@ -1,11 +1,13 @@ import pytest -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_index_dep(): from docs.docs_src.index.dependencies import base_handler, broker + from faststream.kafka import TestKafkaBroker data = { "user": "John", diff --git a/tests/docs/index/test_pydantic.py b/tests/docs/index/test_pydantic.py index 7a8e2a6251..426a104d5d 100644 --- a/tests/docs/index/test_pydantic.py +++ b/tests/docs/index/test_pydantic.py @@ -1,23 
+1,93 @@ -from docs.docs_src.index.confluent.test import test_correct as test_confluent_correct -from docs.docs_src.index.confluent.test import test_invalid as test_confluent_invalid -from docs.docs_src.index.kafka.test import test_correct as test_k_correct -from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid -from docs.docs_src.index.nats.test import test_correct as test_n_correct -from docs.docs_src.index.nats.test import test_invalid as test_n_invalid -from docs.docs_src.index.rabbit.test import test_correct as test_r_correct -from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid -from docs.docs_src.index.redis.test import test_correct as test_red_correct -from docs.docs_src.index.redis.test import test_invalid as test_red_invalid - -__all__ = ( - "test_k_correct", - "test_k_invalid", - "test_confluent_correct", - "test_confluent_invalid", - "test_r_correct", - "test_r_invalid", - "test_n_correct", - "test_n_invalid", - "test_red_correct", - "test_red_invalid", +import pytest + +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_kafka_correct(): + from docs.docs_src.index.kafka.test import test_correct as test_k_correct + + await test_k_correct() + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_kafka_invalid(): + from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid + + await test_k_invalid() + + +@pytest.mark.asyncio() +@require_confluent +async def test_confluent_correct(): + from docs.docs_src.index.confluent.test import ( + test_correct as test_confluent_correct, + ) + + await test_confluent_correct() + + +@pytest.mark.asyncio() +@require_confluent +async def test_confluent_invalid(): + from docs.docs_src.index.confluent.test import ( + test_invalid as test_confluent_invalid, + ) + + await test_confluent_invalid() + + +@pytest.mark.asyncio() 
+@require_aiopika +async def test_rabbit_correct(): + from docs.docs_src.index.rabbit.test import test_correct as test_r_correct + + await test_r_correct() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_rabbit_invalid(): + from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid + + await test_r_invalid() + + +@pytest.mark.asyncio() +@require_nats +async def test_nats_correct(): + from docs.docs_src.index.nats.test import test_correct as test_n_correct + + await test_n_correct() + + +@pytest.mark.asyncio() +@require_nats +async def test_nats_invalid(): + from docs.docs_src.index.nats.test import test_invalid as test_n_invalid + + await test_n_invalid() + + +@pytest.mark.asyncio() +@require_redis +async def test_redis_correct(): + from docs.docs_src.index.redis.test import test_correct as test_red_correct + + await test_red_correct() + + +@pytest.mark.asyncio() +@require_redis +async def test_redis_invalid(): + from docs.docs_src.index.redis.test import test_invalid as test_red_invalid + + await test_red_invalid() diff --git a/tests/docs/integration/fastapi/test_base.py b/tests/docs/integration/fastapi/test_base.py index 982cdc378e..d6871fe04a 100644 --- a/tests/docs/integration/fastapi/test_base.py +++ b/tests/docs/integration/fastapi/test_base.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_base(): from docs.docs_src.integrations.fastapi.kafka.base import app, hello, router + from faststream.kafka import TestKafkaBroker async with 
TestKafkaBroker(router.broker) as br: with TestClient(app) as client: @@ -26,8 +30,10 @@ async def test_fastapi_kafka_base(): @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_base(): from docs.docs_src.integrations.fastapi.confluent.base import app, hello, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(router.broker) as br: with TestClient(app) as client: @@ -43,8 +49,10 @@ async def test_fastapi_confluent_base(): @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_base(): from docs.docs_src.integrations.fastapi.rabbit.base import app, hello, router + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(router.broker) as br: with TestClient(app) as client: @@ -60,8 +68,10 @@ async def test_fastapi_rabbit_base(): @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_base(): from docs.docs_src.integrations.fastapi.nats.base import app, hello, router + from faststream.nats import TestNatsBroker async with TestNatsBroker(router.broker) as br: with TestClient(app) as client: @@ -77,8 +87,10 @@ async def test_fastapi_nats_base(): @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_base(): from docs.docs_src.integrations.fastapi.redis.base import app, hello, router + from faststream.redis import TestRedisBroker async with TestRedisBroker(router.broker) as br: with TestClient(app) as client: diff --git a/tests/docs/integration/fastapi/test_depends.py b/tests/docs/integration/fastapi/test_depends.py index ae160bb622..2a7b917e8a 100644 --- a/tests/docs/integration/fastapi/test_depends.py +++ b/tests/docs/integration/fastapi/test_depends.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import 
TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_depends(): from docs.docs_src.integrations.fastapi.kafka.depends import app, router + from faststream.kafka import TestKafkaBroker @router.subscriber("test") async def handler(): ... @@ -23,8 +27,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_depends(): from docs.docs_src.integrations.fastapi.confluent.depends import app, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @router.subscriber("test") async def handler(): ... @@ -37,8 +43,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_depends(): from docs.docs_src.integrations.fastapi.rabbit.depends import app, router + from faststream.rabbit import TestRabbitBroker @router.subscriber("test") async def handler(): ... @@ -51,8 +59,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_depends(): from docs.docs_src.integrations.fastapi.nats.depends import app, router + from faststream.nats import TestNatsBroker @router.subscriber("test") async def handler(): ... @@ -65,8 +75,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_depends(): from docs.docs_src.integrations.fastapi.redis.depends import app, router + from faststream.redis import TestRedisBroker @router.subscriber("test") async def handler(): ... 
diff --git a/tests/docs/integration/fastapi/test_multiple.py b/tests/docs/integration/fastapi/test_multiple.py index c3252682fe..8077d7b9fa 100644 --- a/tests/docs/integration/fastapi/test_multiple.py +++ b/tests/docs/integration/fastapi/test_multiple.py @@ -1,6 +1,14 @@ import pytest from fastapi.testclient import TestClient +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) + class BaseCase: def test_running(self, data): @@ -18,6 +26,7 @@ def test_running(self, data): @pytest.mark.kafka() +@require_aiokafka class TestKafka(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -27,6 +36,7 @@ def data(self): @pytest.mark.confluent() +@require_confluent class TestConfluent(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -39,6 +49,7 @@ def data(self): @pytest.mark.nats() +@require_nats class TestNats(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -48,6 +59,7 @@ def data(self): @pytest.mark.rabbit() +@require_aiopika class TestRabbit(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -57,6 +69,7 @@ def data(self): @pytest.mark.redis() +@require_redis class TestRedis(BaseCase): @pytest.fixture(scope="class") def data(self): diff --git a/tests/docs/integration/fastapi/test_multiple_lifespan.py b/tests/docs/integration/fastapi/test_multiple_lifespan.py index 48099a376f..72dc782c51 100644 --- a/tests/docs/integration/fastapi/test_multiple_lifespan.py +++ b/tests/docs/integration/fastapi/test_multiple_lifespan.py @@ -1,6 +1,14 @@ import pytest from fastapi.testclient import TestClient +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) + class BaseCase: def test_running(self, data): @@ -27,6 +35,7 @@ async def handler2(): ... 
@pytest.mark.kafka() +@require_aiokafka class TestKafka(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -40,6 +49,7 @@ def data(self): @pytest.mark.confluent() +@require_confluent class TestConfluent(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -53,6 +63,7 @@ def data(self): @pytest.mark.nats() +@require_nats class TestNats(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -66,6 +77,7 @@ def data(self): @pytest.mark.rabbit() +@require_aiopika class TestRabbit(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -79,6 +91,7 @@ def data(self): @pytest.mark.redis() +@require_redis class TestRedis(BaseCase): @pytest.fixture(scope="class") def data(self): diff --git a/tests/docs/integration/fastapi/test_send.py b/tests/docs/integration/fastapi/test_send.py index b8f1fca7d8..f1b3dde966 100644 --- a/tests/docs/integration/fastapi/test_send.py +++ b/tests/docs/integration/fastapi/test_send.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_send(): from docs.docs_src.integrations.fastapi.kafka.send import app, router + from faststream.kafka import TestKafkaBroker @router.subscriber("test") async def handler(): ... @@ -23,8 +27,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_send(): from docs.docs_src.integrations.fastapi.confluent.send import app, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @router.subscriber("test") async def handler(): ... 
@@ -37,8 +43,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_send(): from docs.docs_src.integrations.fastapi.rabbit.send import app, router + from faststream.rabbit import TestRabbitBroker @router.subscriber("test") async def handler(): ... @@ -51,8 +59,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_send(): from docs.docs_src.integrations.fastapi.nats.send import app, router + from faststream.nats import TestNatsBroker @router.subscriber("test") async def handler(): ... @@ -65,8 +75,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_send(): from docs.docs_src.integrations.fastapi.redis.send import app, router + from faststream.redis import TestRedisBroker @router.subscriber("test") async def handler(): ... diff --git a/tests/docs/integration/fastapi/test_startup.py b/tests/docs/integration/fastapi/test_startup.py index d4e80b8851..3c68863171 100644 --- a/tests/docs/integration/fastapi/test_startup.py +++ b/tests/docs/integration/fastapi/test_startup.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_startup(): from docs.docs_src.integrations.fastapi.kafka.startup import app, hello, router + from faststream.kafka import TestKafkaBroker @router.subscriber("test") async def handler(): ... @@ -21,8 +25,10 @@ async def handler(): ... 
@pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_startup(): from docs.docs_src.integrations.fastapi.confluent.startup import app, hello, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @router.subscriber("test") async def handler(): ... @@ -33,8 +39,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_startup(): from docs.docs_src.integrations.fastapi.rabbit.startup import app, hello, router + from faststream.rabbit import TestRabbitBroker @router.subscriber("test") async def handler(): ... @@ -45,8 +53,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_startup(): from docs.docs_src.integrations.fastapi.nats.startup import app, hello, router + from faststream.nats import TestNatsBroker @router.subscriber("test") async def handler(): ... @@ -57,8 +67,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_startup(): from docs.docs_src.integrations.fastapi.redis.startup import app, hello, router + from faststream.redis import TestRedisBroker @router.subscriber("test") async def handler(): ... 
diff --git a/tests/docs/integration/fastapi/test_test.py b/tests/docs/integration/fastapi/test_test.py index 0d5544e8d4..992335ff77 100644 --- a/tests/docs/integration/fastapi/test_test.py +++ b/tests/docs/integration/fastapi/test_test.py @@ -1,15 +1,49 @@ -from docs.docs_src.integrations.fastapi.confluent.test import ( - test_router as test_confluent, -) -from docs.docs_src.integrations.fastapi.kafka.test import test_router as test_k -from docs.docs_src.integrations.fastapi.nats.test import test_router as test_n -from docs.docs_src.integrations.fastapi.rabbit.test import test_router as test_r -from docs.docs_src.integrations.fastapi.redis.test import test_router as test_red - -__all__ = ( - "test_k", - "test_r", - "test_n", - "test_red", - "test_confluent", +import pytest + +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_kafka(): + from docs.docs_src.integrations.fastapi.kafka.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_confluent +async def test_confluent(): + from docs.docs_src.integrations.fastapi.confluent.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_rabbit(): + from docs.docs_src.integrations.fastapi.rabbit.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_nats +async def test_nats(): + from docs.docs_src.integrations.fastapi.nats.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_redis +async def test_redis(): + from docs.docs_src.integrations.fastapi.redis.test import test_router + + await test_router() diff --git a/tests/docs/integration/http/test_fastapi.py b/tests/docs/integration/http/test_fastapi.py index 5875810de4..54267dbdb6 100644 --- a/tests/docs/integration/http/test_fastapi.py +++ b/tests/docs/integration/http/test_fastapi.py @@ -1,16 
+1,18 @@ import pytest from fastapi.testclient import TestClient -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_raw_integration(): from docs.docs_src.integrations.http_frameworks_integrations.fastapi import ( app, base_handler, broker, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker): with TestClient(app) as client: diff --git a/tests/docs/nats/js/test_kv.py b/tests/docs/nats/js/test_kv.py index 55a8a4ed4f..2f582a1569 100644 --- a/tests/docs/nats/js/test_kv.py +++ b/tests/docs/nats/js/test_kv.py @@ -11,4 +11,4 @@ async def test_basic(): async with TestNatsBroker(broker, with_real=True), TestApp(app): await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with(b"Hello!") diff --git a/tests/docs/nats/js/test_object.py b/tests/docs/nats/js/test_object.py index 535fae7ff8..b65905d4c6 100644 --- a/tests/docs/nats/js/test_object.py +++ b/tests/docs/nats/js/test_object.py @@ -9,6 +9,23 @@ async def test_basic(): from docs.docs_src.nats.js.object import app, broker, handler - async with TestNatsBroker(broker, with_real=True), TestApp(app): + async with TestNatsBroker(broker, with_real=True): + await broker.start() + + os = await broker.object_storage("example-bucket") + try: + existed_files = await os.list() + except Exception: + existed_files = () + + call = True + for file in existed_files: + if file.name == "file.txt": + call = False + + if call: + async with TestApp(app): + pass + await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with("file.txt") diff --git a/tests/examples/fastapi_integration/test_app.py b/tests/examples/fastapi_integration/test_app.py index 2859777861..c186d046be 100644 --- a/tests/examples/fastapi_integration/test_app.py +++ b/tests/examples/fastapi_integration/test_app.py @@ -1,7 +1,25 @@ -from 
examples.fastapi_integration.testing import broker, test_handler, test_incorrect +import pytest -__all__ = ( - "test_incorrect", - "test_handler", - "broker", -) +from tests.marks import require_aiopika + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handler(): + from examples.fastapi_integration.testing import router + from examples.fastapi_integration.testing import test_handler as test_ + from faststream.rabbit import TestRabbitBroker + + async with TestRabbitBroker(router.broker) as br: + await test_(br) + + +@pytest.mark.asyncio() +@require_aiopika +async def test_incorrect(): + from examples.fastapi_integration.testing import router + from examples.fastapi_integration.testing import test_incorrect as test_ + from faststream.rabbit import TestRabbitBroker + + async with TestRabbitBroker(router.broker) as br: + await test_(br) diff --git a/tests/examples/kafka/__init__.py b/tests/examples/kafka/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/examples/kafka/__init__.py +++ b/tests/examples/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/examples/nats/__init__.py b/tests/examples/nats/__init__.py index e69de29bb2..87ead90ee6 100644 --- a/tests/examples/nats/__init__.py +++ b/tests/examples/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/examples/nats/test_e06_key_value.py b/tests/examples/nats/test_e06_key_value.py index cf7bc25b97..e170569312 100644 --- a/tests/examples/nats/test_e06_key_value.py +++ b/tests/examples/nats/test_e06_key_value.py @@ -11,4 +11,4 @@ async def test_basic(): async with TestNatsBroker(broker, with_real=True), TestApp(app): await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with(b"Hello!") diff --git a/tests/examples/nats/test_e07_object_storage.py b/tests/examples/nats/test_e07_object_storage.py index 7d32d7d8a4..1310f71a12 100644 --- 
a/tests/examples/nats/test_e07_object_storage.py +++ b/tests/examples/nats/test_e07_object_storage.py @@ -9,6 +9,23 @@ async def test_basic(): from examples.nats.e07_object_storage import app, broker, handler - async with TestNatsBroker(broker, with_real=True), TestApp(app): + async with TestNatsBroker(broker, with_real=True): + await broker.start() + + os = await broker.object_storage("example-bucket") + try: + existed_files = await os.list() + except Exception: + existed_files = () + + call = True + for file in existed_files: + if file.name == "file.txt": + call = False + + if call: + async with TestApp(app): + pass + await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with("file.txt") diff --git a/tests/examples/rabbit/__init__.py b/tests/examples/rabbit/__init__.py index e69de29bb2..ebec43fcd5 100644 --- a/tests/examples/rabbit/__init__.py +++ b/tests/examples/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/examples/redis/__init__.py b/tests/examples/redis/__init__.py index e69de29bb2..4752ef19b1 100644 --- a/tests/examples/redis/__init__.py +++ b/tests/examples/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/examples/router/__init__.py b/tests/examples/router/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/examples/router/__init__.py +++ b/tests/examples/router/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/examples/test_e01_basic_consume.py b/tests/examples/test_e01_basic_consume.py index 722034f226..e9dd550bb3 100644 --- a/tests/examples/test_e01_basic_consume.py +++ b/tests/examples/test_e01_basic_consume.py @@ -1,11 +1,14 @@ import pytest -from examples.e01_basic_consume import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika 
async def test_example(): + from examples.e01_basic_consume import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e02_1_basic_publisher.py b/tests/examples/test_e02_1_basic_publisher.py index ff310197db..dee4068ae3 100644 --- a/tests/examples/test_e02_1_basic_publisher.py +++ b/tests/examples/test_e02_1_basic_publisher.py @@ -1,11 +1,14 @@ import pytest -from examples.e02_1_basic_publisher import app, broker, handle, handle_response -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e02_1_basic_publisher import app, broker, handle, handle_response + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response.wait_call(3) diff --git a/tests/examples/test_e02_2_basic_publisher.py b/tests/examples/test_e02_2_basic_publisher.py index c85e245d09..93e1a09579 100644 --- a/tests/examples/test_e02_2_basic_publisher.py +++ b/tests/examples/test_e02_2_basic_publisher.py @@ -1,11 +1,14 @@ import pytest -from examples.e02_2_basic_publisher import app, broker, handle, handle_response -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e02_2_basic_publisher import app, broker, handle, handle_response + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response.wait_call(3) diff --git a/tests/examples/test_e02_3_basic_publisher.py b/tests/examples/test_e02_3_basic_publisher.py index 9e6ce2baec..9e8f40a57d 100644 --- a/tests/examples/test_e02_3_basic_publisher.py +++ 
b/tests/examples/test_e02_3_basic_publisher.py @@ -1,11 +1,14 @@ import pytest -from examples.e02_3_basic_publisher import app, broker, handle, handle_response -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e02_3_basic_publisher import app, broker, handle, handle_response + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response.wait_call(3) diff --git a/tests/examples/test_e03_miltiple_pubsub.py b/tests/examples/test_e03_miltiple_pubsub.py index a7713f5268..65ee6ed165 100644 --- a/tests/examples/test_e03_miltiple_pubsub.py +++ b/tests/examples/test_e03_miltiple_pubsub.py @@ -1,17 +1,20 @@ import pytest -from examples.e03_miltiple_pubsub import ( - app, - broker, - handle, - handle_response_1, - handle_response_2, -) -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e03_miltiple_pubsub import ( + app, + broker, + handle, + handle_response_1, + handle_response_2, + ) + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response_1.wait_call(3) diff --git a/tests/examples/test_e04_msg_filter.py b/tests/examples/test_e04_msg_filter.py index 71ed93d08d..79b5ba1225 100644 --- a/tests/examples/test_e04_msg_filter.py +++ b/tests/examples/test_e04_msg_filter.py @@ -1,11 +1,14 @@ import pytest -from examples.e04_msg_filter import app, broker, handle_json, handle_other_messages -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e04_msg_filter import app, broker, handle_json, 
handle_other_messages + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle_json.wait_call(3) await handle_other_messages.wait_call(3) diff --git a/tests/examples/test_e05_rpc_request.py b/tests/examples/test_e05_rpc_request.py index 8afa9589e3..6a763b5bf6 100644 --- a/tests/examples/test_e05_rpc_request.py +++ b/tests/examples/test_e05_rpc_request.py @@ -1,11 +1,14 @@ import pytest -from examples.e05_rpc_request import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e05_rpc_request import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e06_manual_ack.py b/tests/examples/test_e06_manual_ack.py index 98d8c44589..70d096dbb3 100644 --- a/tests/examples/test_e06_manual_ack.py +++ b/tests/examples/test_e06_manual_ack.py @@ -1,11 +1,14 @@ import pytest -from examples.e06_manual_ack import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e06_manual_ack import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e07_ack_immediately.py b/tests/examples/test_e07_ack_immediately.py index ab97748daf..393c275dff 100644 --- a/tests/examples/test_e07_ack_immediately.py +++ b/tests/examples/test_e07_ack_immediately.py @@ -1,11 +1,14 @@ import pytest -from examples.e07_ack_immediately import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() 
+@require_aiopika async def test_example(): + from examples.e07_ack_immediately import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e08_testing.py b/tests/examples/test_e08_testing.py index e9184d3342..a54ba447c7 100644 --- a/tests/examples/test_e08_testing.py +++ b/tests/examples/test_e08_testing.py @@ -1,3 +1,11 @@ -from examples.e08_testing import test_handle +import pytest -__all__ = ("test_handle",) +from tests.marks import require_aiopika + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle(): + from examples.e08_testing import test_handle as _test + + await _test() diff --git a/tests/examples/test_e09_testing_mocks.py b/tests/examples/test_e09_testing_mocks.py index 83f82b2b37..04718322a6 100644 --- a/tests/examples/test_e09_testing_mocks.py +++ b/tests/examples/test_e09_testing_mocks.py @@ -1,3 +1,11 @@ -from examples.e09_testing_mocks import test_handle +import pytest -__all__ = ("test_handle",) +from tests.marks import require_aiopika + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle(): + from examples.e09_testing_mocks import test_handle as _test + + await _test() diff --git a/tests/examples/test_e10_middlewares.py b/tests/examples/test_e10_middlewares.py index b635260802..4fdb3f15e6 100644 --- a/tests/examples/test_e10_middlewares.py +++ b/tests/examples/test_e10_middlewares.py @@ -1,11 +1,14 @@ import pytest -from examples.e10_middlewares import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e10_middlewares import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e11_settings.py 
b/tests/examples/test_e11_settings.py index 146bab8b64..3483b57597 100644 --- a/tests/examples/test_e11_settings.py +++ b/tests/examples/test_e11_settings.py @@ -1,11 +1,14 @@ import pytest -from examples.e11_settings import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e11_settings import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/marks.py b/tests/marks.py index 80bb1cde5c..07bde035b0 100644 --- a/tests/marks.py +++ b/tests/marks.py @@ -23,3 +23,68 @@ not PYDANTIC_V2, reason="requires PydanticV1", ) + + +try: + from faststream.confluent import KafkaBroker +except ImportError: + HAS_CONFLUENT = False +else: + HAS_CONFLUENT = True + +require_confluent = pytest.mark.skipif( + not HAS_CONFLUENT, + reason="requires confluent-kafka", +) + + +try: + from faststream.kafka import KafkaBroker # noqa: F401 +except ImportError: + HAS_AIOKAFKA = False +else: + HAS_AIOKAFKA = True + +require_aiokafka = pytest.mark.skipif( + not HAS_AIOKAFKA, + reason="requires aiokafka", +) + + +try: + from faststream.rabbit import RabbitBroker # noqa: F401 +except ImportError: + HAS_AIOPIKA = False +else: + HAS_AIOPIKA = True + +require_aiopika = pytest.mark.skipif( + not HAS_AIOPIKA, + reason="requires aio-pika", +) + + +try: + from faststream.redis import RedisBroker # noqa: F401 +except ImportError: + HAS_REDIS = False +else: + HAS_REDIS = True + +require_redis = pytest.mark.skipif( + not HAS_REDIS, + reason="requires redis", +) + + +try: + from faststream.nats import NatsBroker # noqa: F401 +except ImportError: + HAS_NATS = False +else: + HAS_NATS = True + +require_nats = pytest.mark.skipif( + not HAS_NATS, + reason="requires nats-py", +) diff --git a/tests/opentelemetry/__init__.py 
b/tests/opentelemetry/__init__.py index 75763c2fee..20d03f3611 100644 --- a/tests/opentelemetry/__init__.py +++ b/tests/opentelemetry/__init__.py @@ -1,3 +1,3 @@ import pytest -pytest.importorskip("opentelemetry") +pytest.importorskip("opentelemetry.sdk") diff --git a/tests/utils/context/test_headers.py b/tests/utils/context/test_headers.py index 068e8b8bd4..bccf0b0362 100644 --- a/tests/utils/context/test_headers.py +++ b/tests/utils/context/test_headers.py @@ -1,11 +1,14 @@ import pytest from faststream import Header -from faststream.nats import NatsBroker, TestNatsBroker +from tests.marks import require_nats @pytest.mark.asyncio() +@require_nats async def test_nats_headers(): + from faststream.nats import NatsBroker, TestNatsBroker + broker = NatsBroker() @broker.subscriber("in") diff --git a/tests/utils/context/test_path.py b/tests/utils/context/test_path.py index beff946592..babf557b58 100644 --- a/tests/utils/context/test_path.py +++ b/tests/utils/context/test_path.py @@ -1,19 +1,17 @@ +import asyncio +from unittest.mock import Mock + import pytest from faststream import Path -from faststream.nats import NatsBroker, PullSub, TestNatsBroker -from faststream.rabbit import ( - ExchangeType, - RabbitBroker, - RabbitExchange, - RabbitQueue, - TestRabbitBroker, -) -from faststream.redis import RedisBroker, TestRedisBroker +from tests.marks import require_aiopika, require_nats, require_redis @pytest.mark.asyncio() +@require_nats async def test_nats_path(): + from faststream.nats import NatsBroker, TestNatsBroker + broker = NatsBroker() @broker.subscriber("in.{name}.{id}") @@ -38,7 +36,48 @@ async def h( @pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_nats_kv_path( + queue: str, + event: asyncio.Event, + mock: Mock, +): + from faststream.nats import NatsBroker + + broker = NatsBroker() + + @broker.subscriber("in.{name}.{id}", kv_watch=queue) + async def h( + msg: int, + name: str = Path(), + id_: int = Path("id"), + ): + mock(msg == 1 and 
name == "john" and id_ == 1) + event.set() + + async with broker: + await broker.start() + + kv = await broker.key_value(queue) + + await asyncio.wait( + ( + asyncio.create_task(kv.put("in.john.1", b"1")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(True) + + +@pytest.mark.asyncio() +@require_nats async def test_nats_batch_path(): + from faststream.nats import NatsBroker, PullSub, TestNatsBroker + broker = NatsBroker() @broker.subscriber("in.{name}.{id}", stream="test", pull_sub=PullSub(batch=True)) @@ -63,7 +102,10 @@ async def h( @pytest.mark.asyncio() +@require_redis async def test_redis_path(): + from faststream.redis import RedisBroker, TestRedisBroker + broker = RedisBroker() @broker.subscriber("in.{name}.{id}") @@ -88,7 +130,16 @@ async def h( @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_path(): + from faststream.rabbit import ( + ExchangeType, + RabbitBroker, + RabbitExchange, + RabbitQueue, + TestRabbitBroker, + ) + broker = RabbitBroker() @broker.subscriber(