From 7bc34288126951e4bb42ff0e93e59f01045bbb4a Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Fri, 3 May 2024 08:03:09 +0300 Subject: [PATCH 01/43] =?UTF-8?q?fix=20(#1415):=20raise=20SetupError=20if?= =?UTF-8?q?=20rpc=20and=20reply=5Fto=20are=20using=20in=20TestCL=E2=80=A6?= =?UTF-8?q?=20(#1419)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix (#1415): raise SetupError if rpc and reply_to are using in TestCLient in the same time * tests: make RMQ test client test in-memory * tests: do not use already connected fixtures * tests: do not use already connected fixtures --- faststream/nats/testing.py | 4 ++++ faststream/rabbit/testing.py | 4 ++++ faststream/redis/testing.py | 5 ++++- tests/brokers/nats/test_test_client.py | 11 +++++++++++ tests/brokers/rabbit/test_test_client.py | 11 +++++++++++ tests/brokers/redis/test_test_client.py | 11 +++++++++++ 6 files changed, 45 insertions(+), 1 deletion(-) diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index 2fa2bddd1a..f106c93f9d 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -4,6 +4,7 @@ from typing_extensions import override from faststream.broker.message import encode_message, gen_cor_id +from faststream.exceptions import WRONG_PUBLISH_ARGS from faststream.nats.broker import NatsBroker from faststream.nats.publisher.producer import NatsFastProducer from faststream.nats.schemas.js_stream import is_subject_match_wildcard @@ -71,6 +72,9 @@ async def publish( # type: ignore[override] rpc_timeout: Optional[float] = None, raise_timeout: bool = False, ) -> Any: + if rpc and reply_to: + raise WRONG_PUBLISH_ARGS + incoming = build_message( message=message, subject=subject, diff --git a/faststream/rabbit/testing.py b/faststream/rabbit/testing.py index e425ed02d6..e15cbe2cb3 100644 --- a/faststream/rabbit/testing.py +++ b/faststream/rabbit/testing.py @@ -8,6 +8,7 @@ from typing_extensions import override from faststream.broker.message import gen_cor_id +from faststream.exceptions import WRONG_PUBLISH_ARGS from faststream.rabbit.broker.broker import RabbitBroker from faststream.rabbit.parser import AioPikaParser from faststream.rabbit.publisher.asyncapi import AsyncAPIPublisher @@ -197,6 +198,9 @@ async def publish( # type: ignore[override] """Publish a message to a RabbitMQ queue or exchange.""" exch = RabbitExchange.validate(exchange) + if rpc and reply_to: + raise WRONG_PUBLISH_ARGS + incoming = build_message( message=message, exchange=exch, diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py index 54cf908923..74541322f1 100644 --- a/faststream/redis/testing.py +++ b/faststream/redis/testing.py @@ -4,7 +4,7 @@ from typing_extensions import override from faststream.broker.message import gen_cor_id -from faststream.exceptions import SetupError +from faststream.exceptions import WRONG_PUBLISH_ARGS, SetupError from faststream.redis.broker.broker import RedisBroker from faststream.redis.message import ( BatchListMessage, @@ -87,6 +87,9 @@ async def publish( # type: ignore[override] rpc_timeout: Optional[float] = 30.0, raise_timeout: bool = False, ) -> Optional[Any]: + if rpc and reply_to: + raise WRONG_PUBLISH_ARGS + correlation_id = correlation_id or gen_cor_id() body = build_message( diff --git a/tests/brokers/nats/test_test_client.py b/tests/brokers/nats/test_test_client.py index 8190e27509..c4bdaa7b41 100644 --- a/tests/brokers/nats/test_test_client.py +++ b/tests/brokers/nats/test_test_client.py @@ -3,12 +3,23 @@ import pytest from 
faststream import BaseMiddleware +from faststream.exceptions import SetupError from faststream.nats import JStream, NatsBroker, PullSub, TestNatsBroker from tests.brokers.base.testclient import BrokerTestclientTestcase @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + async def test_rpc_conflicts_reply(self, queue): + async with TestNatsBroker(NatsBroker()) as br: + with pytest.raises(SetupError): + await br.publish( + "", + queue, + rpc=True, + reply_to="response", + ) + @pytest.mark.nats() async def test_with_real_testclient( self, diff --git a/tests/brokers/rabbit/test_test_client.py b/tests/brokers/rabbit/test_test_client.py index b5f32f0de6..0bb72286e5 100644 --- a/tests/brokers/rabbit/test_test_client.py +++ b/tests/brokers/rabbit/test_test_client.py @@ -4,6 +4,7 @@ import pytest from faststream import BaseMiddleware +from faststream.exceptions import SetupError from faststream.rabbit import ( ExchangeType, RabbitBroker, @@ -18,6 +19,16 @@ @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + async def test_rpc_conflicts_reply(self, queue): + async with TestRabbitBroker(RabbitBroker()) as br: + with pytest.raises(SetupError): + await br.publish( + "", + queue, + rpc=True, + reply_to="response", + ) + @pytest.mark.rabbit() async def test_with_real_testclient( self, diff --git a/tests/brokers/redis/test_test_client.py b/tests/brokers/redis/test_test_client.py index ba87d4e685..951d071fbe 100644 --- a/tests/brokers/redis/test_test_client.py +++ b/tests/brokers/redis/test_test_client.py @@ -3,12 +3,23 @@ import pytest from faststream import BaseMiddleware +from faststream.exceptions import SetupError from faststream.redis import ListSub, RedisBroker, StreamSub, TestRedisBroker from tests.brokers.base.testclient import BrokerTestclientTestcase @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + async def test_rpc_conflicts_reply(self, queue): + async with TestRedisBroker(RedisBroker()) as br: + with pytest.raises(SetupError): + await br.publish( + "", + queue, + rpc=True, + reply_to="response", + ) + @pytest.mark.redis() async def test_with_real_testclient( self, From 669647f016fa237f73e6a210d5f6dabebaa8d396 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Fri, 3 May 2024 20:46:05 +0300 Subject: [PATCH 02/43] Chore/update deps2 (#1418) * chore: bump dependencies * lint: fix mypy --------- Co-authored-by: Kumaran Rajendhiran --- faststream/broker/wrapper/call.py | 7 +------ pyproject.toml | 10 +++++----- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/faststream/broker/wrapper/call.py b/faststream/broker/wrapper/call.py index f991e1b749..2dda3bf1ea 100644 --- a/faststream/broker/wrapper/call.py +++ b/faststream/broker/wrapper/call.py @@ -1,5 +1,4 @@ import asyncio -from functools import wraps from typing import ( TYPE_CHECKING, Any, @@ -186,13 +185,9 @@ def set_wrapped( def _wrap_decode_message( func: Callable[..., Awaitable[T_HandlerReturn]], params_ln: int, -) -> Callable[ - ["StreamMessage[MsgType]"], - Awaitable[T_HandlerReturn], -]: +) -> Callable[["StreamMessage[MsgType]"], Awaitable[T_HandlerReturn]]: """Wraps a function to decode a message and pass it as an argument to the wrapped function.""" - @wraps(func) async def decode_wrapper(message: "StreamMessage[MsgType]") -> T_HandlerReturn: """A wrapper function to decode and handle a message.""" msg = message.decoded_body diff --git a/pyproject.toml b/pyproject.toml index b8af94fd6a..3f22e04023 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 
+78,7 @@ devdocs = [ "mkdocs-material==9.5.18", "mkdocs-static-i18n==1.2.2", "mdx-include==1.4.2", - "mkdocstrings[python]==0.24.3", + "mkdocstrings[python]==0.25.0", "mkdocs-literate-nav==0.6.1", "mkdocs-git-revision-date-localized-plugin==1.2.4", "mike==2.0.0", # versioning @@ -93,7 +93,7 @@ devdocs = [ types = [ "faststream[rabbit,confluent,kafka,nats,redis]", - "mypy==1.9.0", + "mypy==1.10.0", # mypy extensions "types-PyYAML", "types-setuptools", @@ -106,15 +106,15 @@ types = [ lint = [ "faststream[types]", - "ruff==0.4.1", + "ruff==0.4.2", "bandit==1.7.8", - "semgrep==1.69.0", + "semgrep==1.70.0", "codespell==2.2.6", ] test-core = [ "coverage[toml]==7.4.4", - "pytest==8.1.1", + "pytest==8.2.0", "pytest-asyncio==0.23.6", "dirty-equals==0.7.1.post0", ] From 312dd55fc049b2b97af32546a2ff19613931981a Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Fri, 3 May 2024 20:47:03 +0300 Subject: [PATCH 03/43] refactor: correct security with kwarg params merging (#1417) * refactor: correct security with kwarg params merging * fix: correct filter_by_dict usage --------- Co-authored-by: Kumaran Rajendhiran --- faststream/confluent/broker/broker.py | 7 +++++-- faststream/kafka/broker/broker.py | 7 +++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index a462bb2257..8f9de15b09 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -439,15 +439,18 @@ async def _connect( # type: ignore[override] **kwargs: Any, ) -> ConsumerConnectionParams: security_params = parse_security(self.security) + kwargs.update(security_params) + producer = AsyncConfluentProducer( **kwargs, - **security_params, client_id=client_id, ) + self._producer = AsyncConfluentFastProducer( producer=producer, ) - return filter_by_dict(ConsumerConnectionParams, {**kwargs, **security_params}) + + return filter_by_dict(ConsumerConnectionParams, kwargs) async def start(self) -> None: await super().start() diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py index 16df9c7c8c..59d6e733d6 100644 --- a/faststream/kafka/broker/broker.py +++ b/faststream/kafka/broker/broker.py @@ -581,16 +581,19 @@ async def _connect( # type: ignore[override] **kwargs: Any, ) -> ConsumerConnectionParams: security_params = parse_security(self.security) + kwargs.update(security_params) + producer = aiokafka.AIOKafkaProducer( **kwargs, - **security_params, client_id=client_id, ) + await producer.start() self._producer = AioKafkaFastProducer( producer=producer, ) - return filter_by_dict(ConsumerConnectionParams, {**kwargs, **security_params}) + + return filter_by_dict(ConsumerConnectionParams, kwargs) async def start(self) -> None: """Connect broker to Kafka and startup all subscribers.""" From 8b8a82ee6f4ff911f4bf4d1cda5b7d87546fdc12 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Sat, 4 May 2024 19:42:50 +0300 Subject: [PATCH 04/43] fix (#1414): correct Messag.ack error processing (#1420) * fix (#1414): correct Messag.ack error processing * chore: bump version --- faststream/__about__.py | 2 +- faststream/broker/acknowledgement_watcher.py | 38 +++++++++++++++++--- faststream/broker/utils.py | 1 + 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/faststream/__about__.py b/faststream/__about__.py index 20bfe0ee61..5c3e4f3a29 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based 
microservices.""" -__version__ = "0.5.3" +__version__ = "0.5.4" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/broker/acknowledgement_watcher.py b/faststream/broker/acknowledgement_watcher.py index 4ecb6ad4b3..4f75b1d66e 100644 --- a/faststream/broker/acknowledgement_watcher.py +++ b/faststream/broker/acknowledgement_watcher.py @@ -126,11 +126,13 @@ def __init__( self, message: "StreamMessage[MsgType]", watcher: BaseWatcher, + logger: Optional["LoggerProto"] = None, **extra_options: Any, ) -> None: self.watcher = watcher self.message = message self.extra_options = extra_options + self.logger = logger async def __aenter__(self) -> None: self.watcher.add(self.message.message_id) @@ -172,15 +174,41 @@ async def __aexit__( return not is_test_env() async def __ack(self) -> None: - await self.message.ack(**self.extra_options) - self.watcher.remove(self.message.message_id) + try: + await self.message.ack(**self.extra_options) + except Exception as er: + if self.logger is not None: + self.logger.log( + logging.ERROR, + er, + exc_info=er + ) + else: + self.watcher.remove(self.message.message_id) async def __nack(self) -> None: - await self.message.nack(**self.extra_options) + try: + await self.message.nack(**self.extra_options) + except Exception as er: + if self.logger is not None: + self.logger.log( + logging.ERROR, + er, + exc_info=er + ) async def __reject(self) -> None: - await self.message.reject(**self.extra_options) - self.watcher.remove(self.message.message_id) + try: + await self.message.reject(**self.extra_options) + except Exception as er: + if self.logger is not None: + self.logger.log( + logging.ERROR, + er, + exc_info=er + ) + else: + self.watcher.remove(self.message.message_id) def get_watcher( diff --git a/faststream/broker/utils.py b/faststream/broker/utils.py index 8ca0585a4c..6903f4c94d 100644 --- a/faststream/broker/utils.py +++ b/faststream/broker/utils.py @@ -50,6 +50,7 @@ def get_watcher_context( return partial( WatcherContext, watcher=get_watcher(logger, retry), + logger=logger, **extra_options, ) From a5d8d4fcab1cb2aae2dbe3a5dadfaa592024b3f8 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Sat, 4 May 2024 18:24:26 +0000 Subject: [PATCH 05/43] Update Release Notes for 0.5.4 (#1421) Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> --- docs/docs/en/release.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index d8fe1b2aee..10da34ba0f 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,18 @@ hide: --- # Release Notes +## 0.5.4 + +### What's Changed + +* Update Release Notes for 0.5.3 by @faststream-release-notes-updater in [#1400](https://github.com/airtai/faststream/pull/1400){.external-link target="_blank"} +* fix (#1415): raise SetupError if rpc and reply_to are using in TestCL… by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1419](https://github.com/airtai/faststream/pull/1419){.external-link target="_blank"} +* Chore/update deps2 by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1418](https://github.com/airtai/faststream/pull/1418){.external-link target="_blank"} +* refactor: correct security with kwarg params merging by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1417](https://github.com/airtai/faststream/pull/1417){.external-link 
target="_blank"} +* fix (#1414): correct Messag.ack error processing by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1420](https://github.com/airtai/faststream/pull/1420){.external-link target="_blank"} + +**Full Changelog**: [#0.5.3...0.5.4](https://github.com/airtai/faststream/compare/0.5.3...0.5.4){.external-link target="_blank"} + ## 0.5.3 ### What's Changed From 9fceb62fc28cd9d51aa017eac34763b46f539e02 Mon Sep 17 00:00:00 2001 From: Spataphore <93342746+spataphore1337@users.noreply.github.com> Date: Tue, 7 May 2024 22:34:57 +0300 Subject: [PATCH 06/43] feature: manual partition assignment to Kafka (#1422) * feat: add assign TopicPartitions * make partitions public attr * add partitions for __init__ of Default and Batch Subscriber * add partitions for __init__ in create method (asyncapi) * add listener, pattern and partitions for __init__ of KafkaRouter * add partitions for subscribe method of router FastAPI * feat: support partitions in Kafka TestClient * docs: gen API * feat: add tests for manual partition consume * feat: use lint.sh, add TopicPartitions in create_publisher_fake_subscriber (Kafka) --------- Co-authored-by: Nikita Pastukhov --- docs/docs/SUMMARY.md | 2 + .../en/api/faststream/kafka/TopicPartition.md | 11 +++++ docs/docs/en/release.md | 2 +- faststream/broker/acknowledgement_watcher.py | 18 ++------ faststream/kafka/__init__.py | 3 ++ faststream/kafka/broker/registrator.py | 31 ++++++++++++- faststream/kafka/fastapi/fastapi.py | 30 ++++++++++++ faststream/kafka/router.py | 44 +++++++++++++++++- faststream/kafka/subscriber/asyncapi.py | 20 +++++++- faststream/kafka/subscriber/usecase.py | 46 +++++++++++++++---- faststream/kafka/testing.py | 27 +++++++++-- tests/brokers/kafka/test_consume.py | 26 ++++++++++- tests/brokers/kafka/test_test_client.py | 46 ++++++++++++++++++- 13 files changed, 272 insertions(+), 34 deletions(-) create mode 100644 docs/docs/en/api/faststream/kafka/TopicPartition.md mode change 100644 => 100755 faststream/kafka/testing.py diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 46fe5f4026..4c71af3c71 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -134,6 +134,7 @@ search: - [KafkaRouter](public_api/faststream/kafka/KafkaRouter.md) - [TestApp](public_api/faststream/kafka/TestApp.md) - [TestKafkaBroker](public_api/faststream/kafka/TestKafkaBroker.md) + - [TopicPartition](public_api/faststream/kafka/TopicPartition.md) - nats - [AckPolicy](public_api/faststream/nats/AckPolicy.md) - [ConsumerConfig](public_api/faststream/nats/ConsumerConfig.md) @@ -495,6 +496,7 @@ search: - [KafkaRouter](api/faststream/kafka/KafkaRouter.md) - [TestApp](api/faststream/kafka/TestApp.md) - [TestKafkaBroker](api/faststream/kafka/TestKafkaBroker.md) + - [TopicPartition](api/faststream/kafka/TopicPartition.md) - broker - [KafkaBroker](api/faststream/kafka/broker/KafkaBroker.md) - broker diff --git a/docs/docs/en/api/faststream/kafka/TopicPartition.md b/docs/docs/en/api/faststream/kafka/TopicPartition.md new file mode 100644 index 0000000000..41fbd7f624 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/TopicPartition.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: aiokafka.structs.TopicPartition diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 10da34ba0f..8c97b8dc44 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -20,7 +20,7 @@ hide: * fix (#1415): raise SetupError if rpc and 
reply_to are using in TestCL… by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1419](https://github.com/airtai/faststream/pull/1419){.external-link target="_blank"} * Chore/update deps2 by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1418](https://github.com/airtai/faststream/pull/1418){.external-link target="_blank"} * refactor: correct security with kwarg params merging by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1417](https://github.com/airtai/faststream/pull/1417){.external-link target="_blank"} -* fix (#1414): correct Messag.ack error processing by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1420](https://github.com/airtai/faststream/pull/1420){.external-link target="_blank"} +* fix (#1414): correct Message.ack error processing by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1420](https://github.com/airtai/faststream/pull/1420){.external-link target="_blank"} **Full Changelog**: [#0.5.3...0.5.4](https://github.com/airtai/faststream/compare/0.5.3...0.5.4){.external-link target="_blank"} diff --git a/faststream/broker/acknowledgement_watcher.py b/faststream/broker/acknowledgement_watcher.py index 4f75b1d66e..dabc6eb87f 100644 --- a/faststream/broker/acknowledgement_watcher.py +++ b/faststream/broker/acknowledgement_watcher.py @@ -178,11 +178,7 @@ async def __ack(self) -> None: await self.message.ack(**self.extra_options) except Exception as er: if self.logger is not None: - self.logger.log( - logging.ERROR, - er, - exc_info=er - ) + self.logger.log(logging.ERROR, er, exc_info=er) else: self.watcher.remove(self.message.message_id) @@ -191,22 +187,14 @@ async def __nack(self) -> None: await self.message.nack(**self.extra_options) except Exception as er: if self.logger is not None: - self.logger.log( - logging.ERROR, - er, - exc_info=er - ) + self.logger.log(logging.ERROR, er, exc_info=er) async def __reject(self) -> None: try: await self.message.reject(**self.extra_options) except Exception as er: if self.logger is not None: - self.logger.log( - logging.ERROR, - er, - exc_info=er - ) + self.logger.log(logging.ERROR, er, exc_info=er) else: self.watcher.remove(self.message.message_id) diff --git a/faststream/kafka/__init__.py b/faststream/kafka/__init__.py index eb83bd8b01..c81b617033 100644 --- a/faststream/kafka/__init__.py +++ b/faststream/kafka/__init__.py @@ -1,3 +1,5 @@ +from aiokafka import TopicPartition + from faststream.kafka.annotations import KafkaMessage from faststream.kafka.broker import KafkaBroker from faststream.kafka.router import KafkaPublisher, KafkaRoute, KafkaRouter @@ -12,4 +14,5 @@ "KafkaPublisher", "TestKafkaBroker", "TestApp", + "TopicPartition", ) diff --git a/faststream/kafka/broker/registrator.py b/faststream/kafka/broker/registrator.py index bed606870a..899e5828d5 100644 --- a/faststream/kafka/broker/registrator.py +++ b/faststream/kafka/broker/registrator.py @@ -25,7 +25,7 @@ from faststream.kafka.subscriber.asyncapi import AsyncAPISubscriber if TYPE_CHECKING: - from aiokafka import ConsumerRecord + from aiokafka import ConsumerRecord, TopicPartition from aiokafka.abc import ConsumerRebalanceListener from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor from fast_depends.dependencies import Depends @@ -336,6 +336,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. 
"""), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["Depends"], @@ -660,6 +667,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. """), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["Depends"], @@ -984,6 +998,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. """), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["Depends"], @@ -1311,6 +1332,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. """), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["Depends"], @@ -1402,6 +1430,7 @@ def subscriber( group_id=group_id, listener=listener, pattern=pattern, + partitions=partitions, builder=builder, is_manual=not auto_commit, # subscriber args diff --git a/faststream/kafka/fastapi/fastapi.py b/faststream/kafka/fastapi/fastapi.py index ce988aa329..541940d79e 100644 --- a/faststream/kafka/fastapi/fastapi.py +++ b/faststream/kafka/fastapi/fastapi.py @@ -38,6 +38,7 @@ from asyncio import AbstractEventLoop from enum import Enum + from aiokafka import TopicPartition from aiokafka.abc import AbstractTokenProvider, ConsumerRebalanceListener from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor from fastapi import params @@ -919,6 +920,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. """), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["params.Depends"], @@ -1401,6 +1409,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. """), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["params.Depends"], @@ -1883,6 +1898,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. """), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["params.Depends"], @@ -2368,6 +2390,13 @@ def subscriber( Pattern to match available topics. You must provide either topics or pattern, but not both. 
"""), ] = None, + partitions: Annotated[ + Iterable["TopicPartition"], + Doc(""" + An explicit partitions list to assign. + You can't use 'topics' and 'partitions' in the same time. + """), + ] = (), # broker args dependencies: Annotated[ Iterable["params.Depends"], @@ -2575,6 +2604,7 @@ def subscriber( batch_timeout_ms=batch_timeout_ms, listener=listener, pattern=pattern, + partitions=partitions, # broker args dependencies=dependencies, parser=parser, diff --git a/faststream/kafka/router.py b/faststream/kafka/router.py index 7bcbd1a48d..98383512a5 100644 --- a/faststream/kafka/router.py +++ b/faststream/kafka/router.py @@ -19,7 +19,8 @@ from faststream.kafka.broker.registrator import KafkaRegistrator if TYPE_CHECKING: - from aiokafka import ConsumerRecord + from aiokafka import ConsumerRecord, TopicPartition + from aiokafka.abc import ConsumerRebalanceListener from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor from fast_depends.dependencies import Depends @@ -376,6 +377,44 @@ def __init__( Optional[int], Doc("Number of messages to consume as one batch."), ] = None, + listener: Annotated[ + Optional["ConsumerRebalanceListener"], + Doc(""" + Optionally include listener + callback, which will be called before and after each rebalance + operation. + As part of group management, the consumer will keep track of + the list of consumers that belong to a particular group and + will trigger a rebalance operation if one of the following + events trigger: + + * Number of partitions change for any of the subscribed topics + * Topic is created or deleted + * An existing member of the consumer group dies + * A new member is added to the consumer group + + When any of these events are triggered, the provided listener + will be invoked first to indicate that the consumer's + assignment has been revoked, and then again when the new + assignment has been received. Note that this listener will + immediately override any listener set in a previous call + to subscribe. It is guaranteed, however, that the partitions + revoked/assigned + through this interface are from topics subscribed in this call. + """), + ] = None, + pattern: Annotated[ + Optional[str], + Doc(""" + Pattern to match available topics. You must provide either topics or pattern, but not both. + """), + ] = None, + partitions: Annotated[ + Optional[Iterable["TopicPartition"]], + Doc(""" + A topic and partition tuple. You can't use 'topics' and 'partitions' in the same time. 
+ """), + ] = (), # broker args dependencies: Annotated[ Iterable["Depends"], @@ -456,6 +495,9 @@ def __init__( max_records=max_records, batch_timeout_ms=batch_timeout_ms, batch=batch, + listener=listener, + pattern=pattern, + partitions=partitions, # basic args dependencies=dependencies, parser=parser, diff --git a/faststream/kafka/subscriber/asyncapi.py b/faststream/kafka/subscriber/asyncapi.py index 4453690cc1..f2897d3fdf 100644 --- a/faststream/kafka/subscriber/asyncapi.py +++ b/faststream/kafka/subscriber/asyncapi.py @@ -22,6 +22,7 @@ from faststream.asyncapi.schema.bindings import kafka from faststream.asyncapi.utils import resolve_payloads from faststream.broker.types import MsgType +from faststream.exceptions import SetupError from faststream.kafka.subscriber.usecase import ( BatchSubscriber, DefaultSubscriber, @@ -29,7 +30,7 @@ ) if TYPE_CHECKING: - from aiokafka import AIOKafkaConsumer, ConsumerRecord + from aiokafka import AIOKafkaConsumer, ConsumerRecord, TopicPartition from aiokafka.abc import ConsumerRebalanceListener from fast_depends.dependencies import Depends @@ -79,6 +80,7 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args @@ -103,6 +105,7 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args @@ -127,6 +130,7 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args @@ -156,6 +160,7 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args @@ -173,6 +178,17 @@ def create( "AsyncAPIDefaultSubscriber", "AsyncAPIBatchSubscriber", ]: + if not topics and not partitions and not pattern: + raise SetupError( + "You should provide either `topics` or `partitions` or `pattern`." 
+ ) + elif topics and partitions: + raise SetupError("You can't provide both `topics` and `partitions`.") + elif topics and pattern: + raise SetupError("You can't provide both `topics` and `pattern`.") + elif pattern and partitions: + raise SetupError("You can't provide both `pattern` and `partitions`.") + if batch: return AsyncAPIBatchSubscriber( *topics, @@ -181,6 +197,7 @@ def create( group_id=group_id, listener=listener, pattern=pattern, + partitions=partitions, builder=builder, is_manual=is_manual, no_ack=no_ack, @@ -197,6 +214,7 @@ def create( group_id=group_id, listener=listener, pattern=pattern, + partitions=partitions, builder=builder, is_manual=is_manual, no_ack=no_ack, diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py index 0a99702b98..963f00c524 100644 --- a/faststream/kafka/subscriber/usecase.py +++ b/faststream/kafka/subscriber/usecase.py @@ -7,12 +7,14 @@ Callable, Dict, Iterable, + List, Optional, Sequence, Tuple, ) import anyio +from aiokafka import TopicPartition from aiokafka.errors import ConsumerStoppedError, KafkaError from typing_extensions import override @@ -56,6 +58,7 @@ def __init__( builder: Callable[..., "AIOKafkaConsumer"], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], is_manual: bool, # Subscriber args default_parser: "AsyncCallable", @@ -94,6 +97,7 @@ def __init__( self.client_id = "" self.__pattern = pattern self.__listener = listener + self.partitions = partitions self.__connection_args: "ConsumerConnectionParams" = {} @override @@ -139,13 +143,18 @@ async def start(self) -> None: client_id=self.client_id, **self.__connection_args, ) - consumer.subscribe( - topics=self.topics, - pattern=self.__pattern, - listener=self.__listener, - ) - await consumer.start() + if self.topics: + consumer.subscribe( + topics=self.topics, + pattern=self.__pattern, + listener=self.__listener, + ) + + elif self.partitions: + consumer.assign(partitions=self.partitions) + + await consumer.start() await super().start() self.task = asyncio.create_task(self._consume()) @@ -213,9 +222,18 @@ def get_routing_hash( ) -> int: return hash("".join((*topics, group_id or ""))) + @property + def topic_names(self) -> List[str]: + if self.__pattern: + return [self.__pattern] + elif self.topics: + return list(self.topics) + else: + return [f"{p.topic}-{p.partition}" for p in self.partitions] + def __hash__(self) -> int: return self.get_routing_hash( - topics=(*self.topics, self.__pattern or ""), + topics=self.topic_names, group_id=self.group_id, ) @@ -236,7 +254,7 @@ def get_log_context( message: Optional["StreamMessage[ConsumerRecord]"], ) -> Dict[str, str]: if message is None: - topic = ",".join(self.topics) + topic = ",".join(self.topic_names) elif isinstance(message.raw_message, Sequence): topic = message.raw_message[0].topic else: @@ -251,6 +269,14 @@ def get_log_context( def add_prefix(self, prefix: str) -> None: self.topics = tuple("".join((prefix, t)) for t in self.topics) + self.partitions = [ + TopicPartition( + topic="".join((prefix, p.topic)), + partition=p.partition, + ) + for p in self.partitions + ] + class DefaultSubscriber(LogicSubscriber["ConsumerRecord"]): def __init__( @@ -260,6 +286,7 @@ def __init__( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args @@ -277,6 +304,7 @@ def __init__( 
group_id=group_id, listener=listener, pattern=pattern, + partitions=partitions, builder=builder, is_manual=is_manual, # subscriber args @@ -308,6 +336,7 @@ def __init__( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + partitions: Iterable["TopicPartition"], builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args @@ -330,6 +359,7 @@ def __init__( group_id=group_id, listener=listener, pattern=pattern, + partitions=partitions, builder=builder, is_manual=is_manual, # subscriber args diff --git a/faststream/kafka/testing.py b/faststream/kafka/testing.py old mode 100644 new mode 100755 index fb9e71417f..e28056edf6 --- a/faststream/kafka/testing.py +++ b/faststream/kafka/testing.py @@ -5,6 +5,7 @@ from typing_extensions import override from faststream.broker.message import encode_message, gen_cor_id +from faststream.kafka import TopicPartition from faststream.kafka.broker import KafkaBroker from faststream.kafka.publisher.asyncapi import AsyncAPIBatchPublisher from faststream.kafka.publisher.producer import AioKafkaFastProducer @@ -31,10 +32,17 @@ def create_publisher_fake_subscriber( broker: KafkaBroker, publisher: "AsyncAPIPublisher[Any]", ) -> "HandlerCallWrapper[Any, Any, Any]": - sub = broker.subscriber( - publisher.topic, - batch=isinstance(publisher, AsyncAPIBatchPublisher), - ) + if publisher.partition: + tp = TopicPartition(topic=publisher.topic, partition=publisher.partition) + sub = broker.subscriber( + partitions=[tp], + batch=isinstance(publisher, AsyncAPIBatchPublisher), + ) + else: + sub = broker.subscriber( + publisher.topic, + batch=isinstance(publisher, AsyncAPIBatchPublisher), + ) if not sub.calls: @@ -92,7 +100,16 @@ async def publish( # type: ignore[override] ) for handler in self.broker._subscribers.values(): # pragma: no branch - if topic in handler.topics: + call: bool = False + + for p in handler.partitions: + if p.topic == topic and (partition is None or p.partition == partition): + call = True + + if not call and topic in handler.topics: + call = True + + if call: return await call_handler( handler=handler, message=[incoming] diff --git a/tests/brokers/kafka/test_consume.py b/tests/brokers/kafka/test_consume.py index fdef8a20bc..a50c06d8c4 100644 --- a/tests/brokers/kafka/test_consume.py +++ b/tests/brokers/kafka/test_consume.py @@ -5,7 +5,7 @@ from aiokafka import AIOKafkaConsumer from faststream.exceptions import AckMessage -from faststream.kafka import KafkaBroker +from faststream.kafka import KafkaBroker, TopicPartition from faststream.kafka.annotations import KafkaMessage from tests.brokers.base.consume import BrokerRealConsumeTestcase from tests.tools import spy_decorator @@ -67,6 +67,30 @@ async def handler(msg: KafkaMessage): assert event.is_set() + @pytest.mark.asyncio() + async def test_manual_partition_consume( + self, queue: str, full_broker: KafkaBroker, event: asyncio.Event + ): + tp1 = TopicPartition(queue, partition=0) + + @full_broker.subscriber(partitions=[tp1]) + async def handler_tp1(msg: KafkaMessage): + event.set() + + async with full_broker: + await full_broker.start() + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("hello", queue, partition=0) + ), + asyncio.create_task(event.wait()), + ), + timeout=10, + ) + + assert event.is_set() + @pytest.mark.asyncio() @pytest.mark.slow() async def test_consume_ack_manual( diff --git a/tests/brokers/kafka/test_test_client.py b/tests/brokers/kafka/test_test_client.py index 7c72e6c525..cc128921ed 100644 --- 
a/tests/brokers/kafka/test_test_client.py +++ b/tests/brokers/kafka/test_test_client.py @@ -3,12 +3,56 @@ import pytest from faststream import BaseMiddleware -from faststream.kafka import KafkaBroker, TestKafkaBroker +from faststream.kafka import KafkaBroker, TestKafkaBroker, TopicPartition from tests.brokers.base.testclient import BrokerTestclientTestcase @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + async def test_partition_match( + self, + test_broker: KafkaBroker, + queue: str, + ): + @test_broker.subscriber(partitions=[TopicPartition(queue, 1)]) + async def m(): + pass + + await test_broker.start() + await test_broker.publish("hello", queue) + m.mock.assert_called_once_with("hello") + + async def test_partition_match_exect( + self, + test_broker: KafkaBroker, + queue: str, + ): + @test_broker.subscriber(partitions=[TopicPartition(queue, 1)]) + async def m(): + pass + + await test_broker.start() + await test_broker.publish("hello", queue, partition=1) + m.mock.assert_called_once_with("hello") + + async def test_partition_missmatch( + self, + test_broker: KafkaBroker, + queue: str, + ): + @test_broker.subscriber(partitions=[TopicPartition(queue, 1)]) + async def m(): + pass + + @test_broker.subscriber(queue) + async def m2(): + pass + + await test_broker.start() + await test_broker.publish("hello", queue, partition=2) + assert not m.mock.called + m2.mock.assert_called_once_with("hello") + @pytest.mark.kafka() async def test_with_real_testclient( self, From 48c35f412f7e6662d24bd4b462defa6cac2a7fc3 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Wed, 8 May 2024 07:32:36 +0300 Subject: [PATCH 07/43] Chore/update deps (#1429) * chore: bump dependencies * chore: bump version --------- Co-authored-by: Kumaran Rajendhiran --- pyproject.toml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3f22e04023..c4f419a0a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,13 +75,13 @@ redis = ["redis>=5.0.0,<6.0.0"] # dev dependencies devdocs = [ - "mkdocs-material==9.5.18", + "mkdocs-material==9.5.21", "mkdocs-static-i18n==1.2.2", "mdx-include==1.4.2", "mkdocstrings[python]==0.25.0", "mkdocs-literate-nav==0.6.1", - "mkdocs-git-revision-date-localized-plugin==1.2.4", - "mike==2.0.0", # versioning + "mkdocs-git-revision-date-localized-plugin==1.2.5", + "mike==2.1.1", # versioning "mkdocs-minify-plugin==0.8.0", "mkdocs-macros-plugin==1.0.5", # includes with variables "mkdocs-glightbox==0.3.7", # img zoom @@ -106,7 +106,7 @@ types = [ lint = [ "faststream[types]", - "ruff==0.4.2", + "ruff==0.4.3", "bandit==1.7.8", "semgrep==1.70.0", "codespell==2.2.6", @@ -121,7 +121,7 @@ test-core = [ testing = [ "faststream[test-core]", - "fastapi==0.110.2", + "fastapi==0.111.0", "pydantic-settings>=2.0.0,<3.0.0", "httpx==0.27.0", "PyYAML==6.0.1", From 2a9f4e42827a22ce3023c271bba9d6a380f91492 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Thu, 9 May 2024 14:04:42 +0300 Subject: [PATCH 08/43] Fix/correct dynamic subscriber registration (#1433) * fix: correct NATS dynamic subscriber registration' * tests: fix dynamic test for TestClient * lint: fix deprecation warn * tests: fix reloader test * tests: fix kafka warnings * tests: fix hanging test --------- Co-authored-by: Kumaran Rajendhiran --- .codespell-whitelist.txt | 2 +- faststream/asyncapi/site.py | 2 +- faststream/confluent/broker/broker.py | 20 ++++++-- faststream/confluent/broker/logging.py | 6 +-- faststream/confluent/broker/registrator.py | 47 
+++++++++---------- faststream/confluent/subscriber/asyncapi.py | 15 +++--- faststream/confluent/subscriber/usecase.py | 25 +++++----- faststream/confluent/testing.py | 17 ++++++- faststream/kafka/broker/broker.py | 11 +++-- faststream/kafka/broker/logging.py | 5 +- faststream/kafka/broker/registrator.py | 52 +++++++++------------ faststream/kafka/subscriber/asyncapi.py | 19 ++++---- faststream/kafka/subscriber/usecase.py | 30 ++++++------ faststream/kafka/testing.py | 17 ++++++- faststream/nats/testing.py | 9 +++- faststream/redis/testing.py | 8 +++- tests/brokers/base/consume.py | 29 ++++++++++++ tests/cli/supervisors/test_base_reloader.py | 2 +- 18 files changed, 195 insertions(+), 121 deletions(-) diff --git a/.codespell-whitelist.txt b/.codespell-whitelist.txt index dcfed576bf..6b1a432b87 100644 --- a/.codespell-whitelist.txt +++ b/.codespell-whitelist.txt @@ -1 +1 @@ -dependant +dependant \ No newline at end of file diff --git a/faststream/asyncapi/site.py b/faststream/asyncapi/site.py index 73184f9bb4..fcc0aefea6 100644 --- a/faststream/asyncapi/site.py +++ b/faststream/asyncapi/site.py @@ -102,7 +102,7 @@ def serve_app( ) -> None: """Serve the HTTPServer with AsyncAPI schema.""" logger.info(f"HTTPServer running on http://{host}:{port} (Press CTRL+C to quit)") - logger.warn("Please, do not use it in production.") + logger.warning("Please, do not use it in production.") server.HTTPServer( (host, port), diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index 8f9de15b09..9f31fbbb5e 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -23,7 +23,11 @@ from faststream.broker.message import gen_cor_id from faststream.confluent.broker.logging import KafkaLoggingBroker from faststream.confluent.broker.registrator import KafkaRegistrator -from faststream.confluent.client import AsyncConfluentProducer, _missing +from faststream.confluent.client import ( + AsyncConfluentConsumer, + AsyncConfluentProducer, + _missing, +) from faststream.confluent.publisher.producer import AsyncConfluentFastProducer from faststream.confluent.schemas.params import ConsumerConnectionParams from faststream.confluent.security import parse_security @@ -425,7 +429,7 @@ async def connect( Doc("Kafka addresses to connect."), ] = Parameter.empty, **kwargs: Any, - ) -> ConsumerConnectionParams: + ) -> Callable[..., AsyncConfluentConsumer]: if bootstrap_servers is not Parameter.empty: kwargs["bootstrap_servers"] = bootstrap_servers @@ -437,7 +441,7 @@ async def _connect( # type: ignore[override] *, client_id: str, **kwargs: Any, - ) -> ConsumerConnectionParams: + ) -> Callable[..., AsyncConfluentConsumer]: security_params = parse_security(self.security) kwargs.update(security_params) @@ -450,7 +454,10 @@ async def _connect( # type: ignore[override] producer=producer, ) - return filter_by_dict(ConsumerConnectionParams, kwargs) + return partial( + AsyncConfluentConsumer, + **filter_by_dict(ConsumerConnectionParams, kwargs), + ) async def start(self) -> None: await super().start() @@ -464,7 +471,10 @@ async def start(self) -> None: @property def _subscriber_setup_extra(self) -> "AnyDict": - return {"client_id": self.client_id, "connection_data": self._connection or {}} + return { + "client_id": self.client_id, + "builder": self._connection, + } @override async def publish( # type: ignore[override] diff --git a/faststream/confluent/broker/logging.py b/faststream/confluent/broker/logging.py index 9eebc89461..4fead65305 100644 --- 
a/faststream/confluent/broker/logging.py +++ b/faststream/confluent/broker/logging.py @@ -1,9 +1,9 @@ import logging from inspect import Parameter -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Tuple, Union from faststream.broker.core.usecase import BrokerUsecase -from faststream.confluent.schemas.params import ConsumerConnectionParams +from faststream.confluent.client import AsyncConfluentConsumer from faststream.log.logging import get_broker_logger if TYPE_CHECKING: @@ -15,7 +15,7 @@ class KafkaLoggingBroker( BrokerUsecase[ Union["confluent_kafka.Message", Tuple["confluent_kafka.Message", ...]], - ConsumerConnectionParams, + Callable[..., AsyncConfluentConsumer], ] ): """A class that extends the LoggingMixin class and adds additional functionality for logging Kafka related information.""" diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py index 4fde6249c3..6306d10bd9 100644 --- a/faststream/confluent/broker/registrator.py +++ b/faststream/confluent/broker/registrator.py @@ -1,4 +1,3 @@ -from functools import partial from typing import ( TYPE_CHECKING, Any, @@ -18,7 +17,6 @@ from faststream.broker.core.abc import ABCBroker from faststream.broker.utils import default_filter -from faststream.confluent.client import AsyncConfluentConsumer from faststream.confluent.publisher.asyncapi import AsyncAPIPublisher from faststream.confluent.subscriber.asyncapi import AsyncAPISubscriber from faststream.exceptions import SetupError @@ -1235,29 +1233,6 @@ def subscriber( if not auto_commit and not group_id: raise SetupError("You should install `group_id` with manual commit mode") - builder = partial( - AsyncConfluentConsumer, - key_deserializer=key_deserializer, - value_deserializer=value_deserializer, - fetch_max_wait_ms=fetch_max_wait_ms, - fetch_max_bytes=fetch_max_bytes, - fetch_min_bytes=fetch_min_bytes, - max_partition_fetch_bytes=max_partition_fetch_bytes, - auto_offset_reset=auto_offset_reset, - enable_auto_commit=auto_commit, - auto_commit_interval_ms=auto_commit_interval_ms, - check_crcs=check_crcs, - partition_assignment_strategy=partition_assignment_strategy, - max_poll_interval_ms=max_poll_interval_ms, - rebalance_timeout_ms=rebalance_timeout_ms, - session_timeout_ms=session_timeout_ms, - heartbeat_interval_ms=heartbeat_interval_ms, - consumer_timeout_ms=consumer_timeout_ms, - max_poll_records=max_poll_records, - exclude_internal_topics=exclude_internal_topics, - isolation_level=isolation_level, - ) - subscriber = super().subscriber( AsyncAPISubscriber.create( *topics, @@ -1265,7 +1240,27 @@ def subscriber( batch_timeout_ms=batch_timeout_ms, max_records=max_records, group_id=group_id, - builder=builder, + connection_data={ + "key_deserializer": key_deserializer, + "value_deserializer": value_deserializer, + "fetch_max_wait_ms": fetch_max_wait_ms, + "fetch_max_bytes": fetch_max_bytes, + "fetch_min_bytes": fetch_min_bytes, + "max_partition_fetch_bytes": max_partition_fetch_bytes, + "auto_offset_reset": auto_offset_reset, + "enable_auto_commit": auto_commit, + "auto_commit_interval_ms": auto_commit_interval_ms, + "check_crcs": check_crcs, + "partition_assignment_strategy": partition_assignment_strategy, + "max_poll_interval_ms": max_poll_interval_ms, + "rebalance_timeout_ms": rebalance_timeout_ms, + "session_timeout_ms": session_timeout_ms, + "heartbeat_interval_ms": heartbeat_interval_ms, + "consumer_timeout_ms": consumer_timeout_ms, + 
"max_poll_records": max_poll_records, + "exclude_internal_topics": exclude_internal_topics, + "isolation_level": isolation_level, + }, is_manual=not auto_commit, # subscriber args no_ack=no_ack, diff --git a/faststream/confluent/subscriber/asyncapi.py b/faststream/confluent/subscriber/asyncapi.py index 8da47a800e..d31bfa05f2 100644 --- a/faststream/confluent/subscriber/asyncapi.py +++ b/faststream/confluent/subscriber/asyncapi.py @@ -1,6 +1,5 @@ from typing import ( TYPE_CHECKING, - Callable, Dict, Iterable, Literal, @@ -33,7 +32,7 @@ from fast_depends.dependencies import Depends from faststream.broker.types import BrokerMiddleware - from faststream.confluent.client import AsyncConfluentConsumer + from faststream.types import AnyDict class AsyncAPISubscriber(LogicSubscriber[MsgType]): @@ -77,7 +76,7 @@ def create( max_records: Optional[int], # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args no_ack: bool, @@ -99,7 +98,7 @@ def create( max_records: Optional[int], # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args no_ack: bool, @@ -121,7 +120,7 @@ def create( max_records: Optional[int], # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args no_ack: bool, @@ -148,7 +147,7 @@ def create( max_records: Optional[int], # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args no_ack: bool, @@ -171,7 +170,7 @@ def create( batch_timeout_ms=batch_timeout_ms, max_records=max_records, group_id=group_id, - builder=builder, + connection_data=connection_data, is_manual=is_manual, no_ack=no_ack, retry=retry, @@ -185,7 +184,7 @@ def create( return AsyncAPIDefaultSubscriber( *topics, group_id=group_id, - builder=builder, + connection_data=connection_data, is_manual=is_manual, no_ack=no_ack, retry=retry, diff --git a/faststream/confluent/subscriber/usecase.py b/faststream/confluent/subscriber/usecase.py index d778086bae..e5e23ed710 100644 --- a/faststream/confluent/subscriber/usecase.py +++ b/faststream/confluent/subscriber/usecase.py @@ -19,7 +19,6 @@ from faststream.broker.subscriber.usecase import SubscriberUsecase from faststream.broker.types import MsgType from faststream.confluent.parser import AsyncConfluentParser -from faststream.confluent.schemas.params import ConsumerConnectionParams if TYPE_CHECKING: from fast_depends.dependencies import Depends @@ -41,7 +40,9 @@ class LogicSubscriber(ABC, SubscriberUsecase[MsgType]): topics: Sequence[str] group_id: Optional[str] + builder: Optional[Callable[..., "AsyncConfluentConsumer"]] consumer: Optional["AsyncConfluentConsumer"] + task: Optional["asyncio.Task[None]"] client_id: Optional[str] @@ -50,7 +51,7 @@ def __init__( *topics: str, # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args default_parser: "AsyncCallable", @@ -81,20 +82,20 @@ def __init__( self.group_id = group_id self.topics = topics self.is_manual = is_manual - self.builder = builder + self.builder = None self.consumer = None self.task = None # Setup it later self.client_id = "" - self.__connection_data = ConsumerConnectionParams() + self.__connection_data = 
connection_data @override def setup( # type: ignore[override] self, *, client_id: Optional[str], - connection_data: "ConsumerConnectionParams", + builder: Callable[..., "AsyncConfluentConsumer"], # basic args logger: Optional["LoggerProto"], producer: Optional["ProducerProto"], @@ -110,7 +111,7 @@ def setup( # type: ignore[override] _call_decorators: Iterable["Decorator"], ) -> None: self.client_id = client_id - self.__connection_data = connection_data + self.builder = builder super().setup( logger=logger, @@ -128,6 +129,8 @@ def setup( # type: ignore[override] @override async def start(self) -> None: """Start the consumer.""" + assert self.builder, "You should setup subscriber at first." # nosec B101 + self.consumer = consumer = self.builder( *self.topics, group_id=self.group_id, @@ -172,7 +175,7 @@ async def get_msg(self) -> Optional[MsgType]: raise NotImplementedError() async def _consume(self) -> None: - assert self.consumer, "You need to start handler first" # nosec B101 + assert self.consumer, "You should start subscriber at first." # nosec B101 connected = True while self.running: @@ -219,7 +222,7 @@ def __init__( *topics: str, # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args no_ack: bool, @@ -234,7 +237,7 @@ def __init__( super().__init__( *topics, group_id=group_id, - builder=builder, + connection_data=connection_data, is_manual=is_manual, # subscriber args default_parser=AsyncConfluentParser.parse_message, @@ -278,7 +281,7 @@ def __init__( max_records: Optional[int], # Kafka information group_id: Optional[str], - builder: Callable[..., "AsyncConfluentConsumer"], + connection_data: "AnyDict", is_manual: bool, # Subscriber args no_ack: bool, @@ -296,7 +299,7 @@ def __init__( super().__init__( *topics, group_id=group_id, - builder=builder, + connection_data=connection_data, is_manual=is_manual, # subscriber args default_parser=AsyncConfluentParser.parse_message_batch, diff --git a/faststream/confluent/testing.py b/faststream/confluent/testing.py index 4559cbde8b..9420ff3aa5 100644 --- a/faststream/confluent/testing.py +++ b/faststream/confluent/testing.py @@ -1,5 +1,6 @@ from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple +from unittest.mock import AsyncMock, MagicMock from typing_extensions import override @@ -22,8 +23,13 @@ class TestKafkaBroker(TestBroker[KafkaBroker]): """A class to test Kafka brokers.""" @staticmethod - async def _fake_connect(broker: KafkaBroker, *args: Any, **kwargs: Any) -> None: + async def _fake_connect( # type: ignore[override] + broker: KafkaBroker, + *args: Any, + **kwargs: Any, + ) -> Callable[..., AsyncMock]: broker._producer = FakeProducer(broker) + return _fake_connection @staticmethod def create_publisher_fake_subscriber( @@ -231,3 +237,10 @@ def build_message( timestamp_type=0 + 1, timestamp_ms=timestamp_ms or int(datetime.now().timestamp()), ) + + +def _fake_connection(*args: Any, **kwargs: Any) -> AsyncMock: + mock = AsyncMock() + mock.getone.return_value = MagicMock() + mock.getmany.return_value = [MagicMock()] + return mock diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py index 59d6e733d6..2a29796860 100644 --- a/faststream/kafka/broker/broker.py +++ b/faststream/kafka/broker/broker.py @@ -557,7 +557,7 @@ async def connect( # type: ignore[override] Doc("Kafka addresses to 
connect."), ] = Parameter.empty, **kwargs: "Unpack[KafkaInitKwargs]", - ) -> ConsumerConnectionParams: + ) -> Callable[..., aiokafka.AIOKafkaConsumer]: """Connect to Kafka servers manually. Consumes the same with `KafkaBroker.__init__` arguments and overrides them. @@ -579,7 +579,7 @@ async def _connect( # type: ignore[override] *, client_id: str, **kwargs: Any, - ) -> ConsumerConnectionParams: + ) -> Callable[..., aiokafka.AIOKafkaConsumer]: security_params = parse_security(self.security) kwargs.update(security_params) @@ -593,7 +593,10 @@ async def _connect( # type: ignore[override] producer=producer, ) - return filter_by_dict(ConsumerConnectionParams, kwargs) + return partial( + aiokafka.AIOKafkaConsumer, + **filter_by_dict(ConsumerConnectionParams, kwargs), + ) async def start(self) -> None: """Connect broker to Kafka and startup all subscribers.""" @@ -610,7 +613,7 @@ async def start(self) -> None: def _subscriber_setup_extra(self) -> "AnyDict": return { "client_id": self.client_id, - "connection_args": self._connection or {}, + "builder": self._connection, } @override diff --git a/faststream/kafka/broker/logging.py b/faststream/kafka/broker/logging.py index df828024da..16b1103b83 100644 --- a/faststream/kafka/broker/logging.py +++ b/faststream/kafka/broker/logging.py @@ -1,9 +1,8 @@ import logging from inspect import Parameter -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Tuple, Union from faststream.broker.core.usecase import BrokerUsecase -from faststream.kafka.schemas.params import ConsumerConnectionParams from faststream.log.logging import get_broker_logger if TYPE_CHECKING: @@ -15,7 +14,7 @@ class KafkaLoggingBroker( BrokerUsecase[ Union["aiokafka.ConsumerRecord", Tuple["aiokafka.ConsumerRecord", ...]], - ConsumerConnectionParams, + Callable[..., "aiokafka.AIOKafkaConsumer"], ] ): """A class that extends the LoggingMixin class and adds additional functionality for logging Kafka related information.""" diff --git a/faststream/kafka/broker/registrator.py b/faststream/kafka/broker/registrator.py index 899e5828d5..afc69a459c 100644 --- a/faststream/kafka/broker/registrator.py +++ b/faststream/kafka/broker/registrator.py @@ -1,4 +1,3 @@ -from functools import partial from typing import ( TYPE_CHECKING, Any, @@ -14,13 +13,12 @@ overload, ) -from aiokafka import AIOKafkaConsumer, ConsumerRecord +from aiokafka import ConsumerRecord from aiokafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor from typing_extensions import Annotated, Doc, deprecated, override from faststream.broker.core.abc import ABCBroker from faststream.broker.utils import default_filter -from faststream.exceptions import SetupError from faststream.kafka.publisher.asyncapi import AsyncAPIPublisher from faststream.kafka.subscriber.asyncapi import AsyncAPISubscriber @@ -1395,32 +1393,6 @@ def subscriber( "AsyncAPIDefaultSubscriber", "AsyncAPIBatchSubscriber", ]: - if not auto_commit and not group_id: - raise SetupError("You should install `group_id` with manual commit mode") - - builder = partial( - AIOKafkaConsumer, - key_deserializer=key_deserializer, - value_deserializer=value_deserializer, - fetch_max_wait_ms=fetch_max_wait_ms, - fetch_max_bytes=fetch_max_bytes, - fetch_min_bytes=fetch_min_bytes, - max_partition_fetch_bytes=max_partition_fetch_bytes, - auto_offset_reset=auto_offset_reset, - enable_auto_commit=auto_commit, - auto_commit_interval_ms=auto_commit_interval_ms, - check_crcs=check_crcs, - 
partition_assignment_strategy=partition_assignment_strategy, - max_poll_interval_ms=max_poll_interval_ms, - rebalance_timeout_ms=rebalance_timeout_ms, - session_timeout_ms=session_timeout_ms, - heartbeat_interval_ms=heartbeat_interval_ms, - consumer_timeout_ms=consumer_timeout_ms, - max_poll_records=max_poll_records, - exclude_internal_topics=exclude_internal_topics, - isolation_level=isolation_level, - ) - subscriber = super().subscriber( AsyncAPISubscriber.create( *topics, @@ -1430,8 +1402,28 @@ def subscriber( group_id=group_id, listener=listener, pattern=pattern, + connection_args={ + "key_deserializer": key_deserializer, + "value_deserializer": value_deserializer, + "fetch_max_wait_ms": fetch_max_wait_ms, + "fetch_max_bytes": fetch_max_bytes, + "fetch_min_bytes": fetch_min_bytes, + "max_partition_fetch_bytes": max_partition_fetch_bytes, + "auto_offset_reset": auto_offset_reset, + "enable_auto_commit": auto_commit, + "auto_commit_interval_ms": auto_commit_interval_ms, + "check_crcs": check_crcs, + "partition_assignment_strategy": partition_assignment_strategy, + "max_poll_interval_ms": max_poll_interval_ms, + "rebalance_timeout_ms": rebalance_timeout_ms, + "session_timeout_ms": session_timeout_ms, + "heartbeat_interval_ms": heartbeat_interval_ms, + "consumer_timeout_ms": consumer_timeout_ms, + "max_poll_records": max_poll_records, + "exclude_internal_topics": exclude_internal_topics, + "isolation_level": isolation_level, + }, partitions=partitions, - builder=builder, is_manual=not auto_commit, # subscriber args no_ack=no_ack, diff --git a/faststream/kafka/subscriber/asyncapi.py b/faststream/kafka/subscriber/asyncapi.py index f2897d3fdf..ec31001633 100644 --- a/faststream/kafka/subscriber/asyncapi.py +++ b/faststream/kafka/subscriber/asyncapi.py @@ -1,6 +1,5 @@ from typing import ( TYPE_CHECKING, - Callable, Dict, Iterable, Literal, @@ -30,11 +29,12 @@ ) if TYPE_CHECKING: - from aiokafka import AIOKafkaConsumer, ConsumerRecord, TopicPartition + from aiokafka import ConsumerRecord, TopicPartition from aiokafka.abc import ConsumerRebalanceListener from fast_depends.dependencies import Depends from faststream.broker.types import BrokerMiddleware + from faststream.types import AnyDict class AsyncAPISubscriber(LogicSubscriber[MsgType]): @@ -80,8 +80,8 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + connection_args: "AnyDict", partitions: Iterable["TopicPartition"], - builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args no_ack: bool, @@ -105,8 +105,8 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + connection_args: "AnyDict", partitions: Iterable["TopicPartition"], - builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args no_ack: bool, @@ -130,8 +130,8 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + connection_args: "AnyDict", partitions: Iterable["TopicPartition"], - builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args no_ack: bool, @@ -160,8 +160,8 @@ def create( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + connection_args: "AnyDict", partitions: Iterable["TopicPartition"], - builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args no_ack: bool, @@ -178,6 +178,9 @@ def create( "AsyncAPIDefaultSubscriber", "AsyncAPIBatchSubscriber", ]: + if 
is_manual and not group_id: + raise SetupError("You should install `group_id` with manual commit mode") + if not topics and not partitions and not pattern: raise SetupError( "You should provide either `topics` or `partitions` or `pattern`." @@ -197,8 +200,8 @@ def create( group_id=group_id, listener=listener, pattern=pattern, + connection_args=connection_args, partitions=partitions, - builder=builder, is_manual=is_manual, no_ack=no_ack, retry=retry, @@ -214,8 +217,8 @@ def create( group_id=group_id, listener=listener, pattern=pattern, + connection_args=connection_args, partitions=partitions, - builder=builder, is_manual=is_manual, no_ack=no_ack, retry=retry, diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py index 963f00c524..818922c48e 100644 --- a/faststream/kafka/subscriber/usecase.py +++ b/faststream/kafka/subscriber/usecase.py @@ -35,7 +35,6 @@ from faststream.broker.message import StreamMessage from faststream.broker.publisher.proto import ProducerProto - from faststream.kafka.schemas.params import ConsumerConnectionParams from faststream.types import AnyDict, Decorator, LoggerProto @@ -45,7 +44,9 @@ class LogicSubscriber(ABC, SubscriberUsecase[MsgType]): topics: Sequence[str] group_id: Optional[str] + builder: Optional[Callable[..., "AIOKafkaConsumer"]] consumer: Optional["AIOKafkaConsumer"] + task: Optional["asyncio.Task[None]"] client_id: Optional[str] batch: bool @@ -55,7 +56,7 @@ def __init__( *topics: str, # Kafka information group_id: Optional[str], - builder: Callable[..., "AIOKafkaConsumer"], + connection_args: "AnyDict", listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], partitions: Iterable["TopicPartition"], @@ -86,10 +87,12 @@ def __init__( include_in_schema=include_in_schema, ) - self.group_id = group_id self.topics = topics + self.partitions = partitions + self.group_id = group_id + self.is_manual = is_manual - self.builder = builder + self.builder = None self.consumer = None self.task = None @@ -97,15 +100,14 @@ def __init__( self.client_id = "" self.__pattern = pattern self.__listener = listener - self.partitions = partitions - self.__connection_args: "ConsumerConnectionParams" = {} + self.__connection_args = connection_args @override def setup( # type: ignore[override] self, *, client_id: Optional[str], - connection_args: "ConsumerConnectionParams", + builder: Callable[..., "AIOKafkaConsumer"], # basic args logger: Optional["LoggerProto"], producer: Optional["ProducerProto"], @@ -121,7 +123,7 @@ def setup( # type: ignore[override] _call_decorators: Iterable["Decorator"], ) -> None: self.client_id = client_id - self.__connection_args = connection_args + self.builder = builder super().setup( logger=logger, @@ -138,6 +140,8 @@ def setup( # type: ignore[override] async def start(self) -> None: """Start the consumer.""" + assert self.builder, "You should setup subscriber at first." # nosec B101 + self.consumer = consumer = self.builder( group_id=self.group_id, client_id=self.client_id, @@ -192,7 +196,7 @@ async def get_msg(self) -> MsgType: raise NotImplementedError() async def _consume(self) -> None: - assert self.consumer, "You should setup subscriber at first." # nosec B101 + assert self.consumer, "You should start subscriber at first." 
# nosec B101 connected = True while self.running: @@ -286,8 +290,8 @@ def __init__( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + connection_args: "AnyDict", partitions: Iterable["TopicPartition"], - builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args no_ack: bool, @@ -304,8 +308,8 @@ def __init__( group_id=group_id, listener=listener, pattern=pattern, + connection_args=connection_args, partitions=partitions, - builder=builder, is_manual=is_manual, # subscriber args default_parser=AioKafkaParser.parse_message, @@ -336,8 +340,8 @@ def __init__( group_id: Optional[str], listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], + connection_args: "AnyDict", partitions: Iterable["TopicPartition"], - builder: Callable[..., "AIOKafkaConsumer"], is_manual: bool, # Subscriber args no_ack: bool, @@ -359,8 +363,8 @@ def __init__( group_id=group_id, listener=listener, pattern=pattern, + connection_args=connection_args, partitions=partitions, - builder=builder, is_manual=is_manual, # subscriber args default_parser=AioKafkaParser.parse_message_batch, diff --git a/faststream/kafka/testing.py b/faststream/kafka/testing.py index e28056edf6..fd8b520332 100755 --- a/faststream/kafka/testing.py +++ b/faststream/kafka/testing.py @@ -1,5 +1,6 @@ from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Callable, Dict, Optional +from unittest.mock import AsyncMock, MagicMock from aiokafka import ConsumerRecord from typing_extensions import override @@ -24,8 +25,13 @@ class TestKafkaBroker(TestBroker[KafkaBroker]): """A class to test Kafka brokers.""" @staticmethod - async def _fake_connect(broker: KafkaBroker, *args: Any, **kwargs: Any) -> None: + async def _fake_connect( # type: ignore[override] + broker: KafkaBroker, + *args: Any, + **kwargs: Any, + ) -> Callable[..., AsyncMock]: broker._producer = FakeProducer(broker) + return _fake_connection @staticmethod def create_publisher_fake_subscriber( @@ -201,3 +207,10 @@ def build_message( offset=0, headers=[(i, j.encode()) for i, j in headers.items()], ) + + +def _fake_connection(*args: Any, **kwargs: Any) -> AsyncMock: + mock = AsyncMock() + mock.subscribe = MagicMock + mock.assign = MagicMock + return mock diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index f106c93f9d..6681ba5b14 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -1,4 +1,5 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from unittest.mock import AsyncMock from nats.aio.msg import Msg from typing_extensions import override @@ -40,8 +41,14 @@ def f(msg: Any) -> None: return sub.calls[0].handler @staticmethod - async def _fake_connect(broker: NatsBroker, *args: Any, **kwargs: Any) -> None: + async def _fake_connect( # type: ignore[override] + broker: NatsBroker, + *args: Any, + **kwargs: Any, + ) -> AsyncMock: + broker.stream = AsyncMock() # type: ignore[assignment] broker._js_producer = broker._producer = FakeProducer(broker) # type: ignore[assignment] + return AsyncMock() @staticmethod def remove_publisher_fake_subscriber( diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py index 74541322f1..7d4a60da4e 100644 --- a/faststream/redis/testing.py +++ b/faststream/redis/testing.py @@ -1,5 +1,6 @@ import re from typing import TYPE_CHECKING, Any, Optional, Sequence, Union +from unittest.mock import AsyncMock, MagicMock from typing_extensions import 
override @@ -49,12 +50,15 @@ def f(msg: Any) -> None: return sub.calls[0].handler @staticmethod - async def _fake_connect( + async def _fake_connect( # type: ignore[override] broker: RedisBroker, *args: Any, **kwargs: Any, - ) -> None: + ) -> AsyncMock: broker._producer = FakeProducer(broker) # type: ignore[assignment] + connection = MagicMock() + connection.pubsub.side_effect = AsyncMock + return connection @staticmethod def remove_publisher_fake_subscriber( diff --git a/tests/brokers/base/consume.py b/tests/brokers/base/consume.py index 654d3b19f8..fc3ad0956d 100644 --- a/tests/brokers/base/consume.py +++ b/tests/brokers/base/consume.py @@ -221,6 +221,35 @@ async def handler(m: Foo, dep: int = Depends(dependency), broker=Context()): assert event.is_set() mock.assert_called_once_with({"x": 1}, "100", consume_broker) + async def test_dynamic_sub( + self, + queue: str, + consume_broker: BrokerUsecase, + event: asyncio.Event, + ): + def subscriber(m): + event.set() + + async with consume_broker: + await consume_broker.start() + + sub = consume_broker.subscriber(queue, **self.subscriber_kwargs) + sub(subscriber) + consume_broker.setup_subscriber(sub) + await sub.start() + + await asyncio.wait( + ( + asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=self.timeout, + ) + + await sub.close() + + assert event.is_set() + @pytest.mark.asyncio() class BrokerRealConsumeTestcase(BrokerConsumeTestcase): diff --git a/tests/cli/supervisors/test_base_reloader.py b/tests/cli/supervisors/test_base_reloader.py index c143d39c9f..2a1c2fd6ed 100644 --- a/tests/cli/supervisors/test_base_reloader.py +++ b/tests/cli/supervisors/test_base_reloader.py @@ -14,7 +14,7 @@ def should_restart(self) -> bool: return True -def empty(): +def empty(*args, **kwargs): pass From 1d32af5a7b922dd7f2f30e10287f35c6f410af0d Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Thu, 9 May 2024 14:33:18 +0300 Subject: [PATCH 09/43] chore: bump version (#1435) Co-authored-by: Kumaran Rajendhiran --- faststream/__about__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/faststream/__about__.py b/faststream/__about__.py index 5c3e4f3a29..efa54afcc2 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.4" +__version__ = "0.5.5" SERVICE_NAME = f"faststream-{__version__}" From 9f21e72d91afd34da214678ea24c62982339a4c9 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Thu, 9 May 2024 12:02:51 +0000 Subject: [PATCH 10/43] Update Release Notes for 0.5.5 (#1436) * Update Release Notes for 0.5.5 * Update release.md --------- Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> Co-authored-by: Pastukhov Nikita --- docs/docs/en/release.md | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 8c97b8dc44..474997a668 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,35 @@ hide: --- # Release Notes +## 0.5.5 + +### What's Changed + +Add support for explicit partition assignment in aiokafka `KafkaBroker` (special thanks to @spataphore1337): + +```python +from faststream import FastStream +from faststream.kafka import KafkaBroker, TopicPartition + +broker = KafkaBroker() + +topic_partition_fisrt = 
TopicPartition("my_topic", 1) +topic_partition_second = TopicPartition("my_topic", 2) + +@broker.subscribe(partitions=[topic_partition_fisrt, topic_partition_second]) +async def some_consumer(msg): + ... +``` + +* Update Release Notes for 0.5.4 by @faststream-release-notes-updater in [#1421](https://github.com/airtai/faststream/pull/1421){.external-link target="_blank"} +* feature: manual partition assignment to Kafka by [@spataphore1337](https://github.com/spataphore1337){.external-link target="_blank"} in [#1422](https://github.com/airtai/faststream/pull/1422){.external-link target="_blank"} +* Chore/update deps by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1429](https://github.com/airtai/faststream/pull/1429){.external-link target="_blank"} +* Fix/correct dynamic subscriber registration by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1433](https://github.com/airtai/faststream/pull/1433){.external-link target="_blank"} +* chore: bump version by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1435](https://github.com/airtai/faststream/pull/1435){.external-link target="_blank"} + + +**Full Changelog**: [#0.5.4...0.5.5](https://github.com/airtai/faststream/compare/0.5.4...0.5.5){.external-link target="_blank"} + ## 0.5.4 ### What's Changed From d100d5ff8cfddaffca8831aba0912d58095c8170 Mon Sep 17 00:00:00 2001 From: Sehat1137 <29227141+Sehat1137@users.noreply.github.com> Date: Sat, 11 May 2024 22:41:01 +0300 Subject: [PATCH 11/43] feature: add --factory param (#1440) Co-authored-by: sehat1137 --- faststream/cli/docs/app.py | 19 ++++++++++++++++--- faststream/cli/main.py | 18 +++++++++++++++++- 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/faststream/cli/docs/app.py b/faststream/cli/docs/app.py index 4751222a17..450abdb061 100644 --- a/faststream/cli/docs/app.py +++ b/faststream/cli/docs/app.py @@ -44,6 +44,10 @@ def serve( " Defaults to the current working directory." ), ), + is_factory: bool = typer.Option( + False, + "--factory", help="Treat APP as an application factory" + ), ) -> None: """Serve project AsyncAPI schema.""" if ":" in app: @@ -66,18 +70,18 @@ def serve( except ImportError: warnings.warn(INSTALL_WATCHFILES, category=ImportWarning, stacklevel=1) - _parse_and_serve(app, host, port) + _parse_and_serve(app, host, port, is_factory) else: WatchReloader( target=_parse_and_serve, - args=(app, host, port), + args=(app, host, port, is_factory), reload_dirs=(str(module_parent),), extra_extensions=extra_extensions, ).run() else: - _parse_and_serve(app, host, port) + _parse_and_serve(app, host, port, is_factory) @docs_app.command(name="gen") @@ -104,12 +108,18 @@ def gen( " Defaults to the current working directory." 
), ), + is_factory: bool = typer.Option( + False, + "--factory", help="Treat APP as an application factory" + ), ) -> None: """Generate project AsyncAPI schema.""" if app_dir: # pragma: no branch sys.path.insert(0, app_dir) _, app_obj = import_from_string(app) + if callable(app_obj) and is_factory: + app_obj = app_obj() raw_schema = get_app_schema(app_obj) if yaml: @@ -138,9 +148,12 @@ def _parse_and_serve( app: str, host: str = "localhost", port: int = 8000, + is_factory: bool = False, ) -> None: if ":" in app: _, app_obj = import_from_string(app) + if callable(app_obj) and is_factory: + app_obj = app_obj() raw_schema = get_app_schema(app_obj) else: diff --git a/faststream/cli/main.py b/faststream/cli/main.py index 7c0ec0391f..3ed7afa3e6 100644 --- a/faststream/cli/main.py +++ b/faststream/cli/main.py @@ -94,6 +94,12 @@ def run( " Defaults to the current working directory." ), ), + is_factory: bool = typer.Option( + False, + "--factory", + is_flag=True, + help="Treat APP as an application factory", + ), ) -> None: """Run [MODULE:APP] FastStream application.""" if watch_extensions and not reload: @@ -108,7 +114,7 @@ def run( if app_dir: # pragma: no branch sys.path.insert(0, app_dir) - args = (app, extra, casted_log_level) + args = (app, extra, is_factory, casted_log_level) if reload and workers > 1: raise SetupError("You can't use reload option with multiprocessing") @@ -151,11 +157,14 @@ def _run( # NOTE: we should pass `str` due FastStream is not picklable app: str, extra_options: Dict[str, "SettingField"], + is_factory: bool, log_level: int = logging.INFO, app_level: int = logging.INFO, ) -> None: """Runs the specified application.""" _, app_obj = import_from_string(app) + if is_factory and callable(app_obj): + app_obj = app_obj() if not isinstance(app_obj, FastStream): raise typer.BadParameter( @@ -200,6 +209,10 @@ def publish( app: str = typer.Argument(..., help="FastStream app instance, e.g., main:app"), message: str = typer.Argument(..., help="Message to be published"), rpc: bool = typer.Option(False, help="Enable RPC mode and system output"), + is_factory: bool = typer.Option( + False, + "--factory", help="Treat APP as an application factory" + ), ) -> None: """Publish a message using the specified broker in a FastStream application. 
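To show the intent of the new flag: with `--factory`, the CLI imports the target, calls it if it is callable, and runs the returned app. A minimal factory sketch (module name, queue name, and broker choice are illustrative, not part of this patch):

```python
# app.py, an application factory the CLI can call when --factory is passed
from faststream import FastStream
from faststream.rabbit import RabbitBroker


def create_app() -> FastStream:
    """Build the broker and app lazily instead of at import time."""
    broker = RabbitBroker()

    @broker.subscriber("in-queue")
    async def handle(msg: str) -> None:
        print(msg)

    return FastStream(broker)
```

Such a target would then be started with something like `faststream run app:create_app --factory`; the same flag is wired into `docs serve`, `docs gen`, and `publish` in this patch.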
@@ -218,6 +231,9 @@ def publish( raise ValueError("Message parameter is required.") _, app_obj = import_from_string(app) + if callable(app_obj) and is_factory: + app_obj = app_obj() + if not app_obj.broker: raise ValueError("Broker instance not found in the app.") From 1bcbcf5a59d4da7d027a52534cb181ba8482f2ac Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Thu, 16 May 2024 20:00:17 +0300 Subject: [PATCH 12/43] =?UTF-8?q?feat:=20add=20RMQ=20channels=20options,?= =?UTF-8?q?=20support=20for=20prefix=20for=20routing=5Fkey,=20a=E2=80=A6?= =?UTF-8?q?=20(#1448)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add RMQ channels options, support for prefix for routing_key, add public API for middlewares * tests: fix asyncapi tests * chore: update dependencies * fix: parse old NATS stream config if it exists * feat (#1447): add StreamMessage.batch_headers attr to provide access to whole batch messages headers * fix: add factory is_flag option * feat: add batch_headers for Confluent --- faststream/__about__.py | 2 +- faststream/broker/core/abc.py | 13 +++ faststream/broker/message.py | 2 + faststream/broker/publisher/proto.py | 3 + faststream/broker/publisher/usecase.py | 10 ++- faststream/broker/subscriber/proto.py | 3 + faststream/broker/subscriber/usecase.py | 3 + faststream/cli/docs/app.py | 6 +- faststream/cli/main.py | 4 +- faststream/confluent/parser.py | 41 +++++---- faststream/kafka/parser.py | 17 +++- faststream/nats/broker/broker.py | 4 +- faststream/nats/parser.py | 20 ++++- faststream/rabbit/__init__.py | 1 + faststream/rabbit/annotations.py | 13 +++ faststream/rabbit/broker/broker.py | 62 ++++++++++++- faststream/rabbit/fastapi/router.py | 23 +++++ faststream/rabbit/schemas/queue.py | 11 +++ faststream/rabbit/subscriber/usecase.py | 5 +- faststream/redis/parser.py | 72 +++++++++++---- faststream/types.py | 8 +- pyproject.toml | 10 +-- tests/asyncapi/rabbit/test_router.py | 4 +- tests/brokers/base/middlewares.py | 53 +++++++++++ tests/brokers/base/publish.py | 38 +++++++- tests/brokers/confluent/test_consume.py | 32 +++++++ tests/brokers/kafka/test_consume.py | 32 +++++++ tests/brokers/nats/test_consume.py | 43 +++++++-- tests/brokers/rabbit/test_router.py | 33 +++++++ tests/brokers/redis/test_consume.py | 113 ++++++++++++++++++++---- tests/brokers/redis/test_fastapi.py | 4 +- 31 files changed, 589 insertions(+), 96 deletions(-) diff --git a/faststream/__about__.py b/faststream/__about__.py index efa54afcc2..7aaf590027 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.5" +__version__ = "0.5.6" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/broker/core/abc.py b/faststream/broker/core/abc.py index 1a49e26843..e89f2d5144 100644 --- a/faststream/broker/core/abc.py +++ b/faststream/broker/core/abc.py @@ -46,6 +46,19 @@ def __init__( self._parser = parser self._decoder = decoder + def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: + """Append BrokerMiddleware to the end of middlewares list. + + Current middleware will be used as a most inner of already existed ones. 
+ """ + self._middlewares = (*self._middlewares, middleware) + + for sub in self._subscribers.values(): + sub.add_middleware(middleware) + + for pub in self._publishers.values(): + pub.add_middleware(middleware) + @abstractmethod def subscriber( self, diff --git a/faststream/broker/message.py b/faststream/broker/message.py index a692f12d4f..beec9fe555 100644 --- a/faststream/broker/message.py +++ b/faststream/broker/message.py @@ -6,6 +6,7 @@ TYPE_CHECKING, Any, Generic, + List, Optional, Sequence, Tuple, @@ -38,6 +39,7 @@ class StreamMessage(Generic[MsgType]): body: Union[bytes, Any] headers: "AnyDict" = field(default_factory=dict) + batch_headers: List["AnyDict"] = field(default_factory=list) path: "AnyDict" = field(default_factory=dict) content_type: Optional[str] = None diff --git a/faststream/broker/publisher/proto.py b/faststream/broker/publisher/proto.py index 2233739252..747b29b048 100644 --- a/faststream/broker/publisher/proto.py +++ b/faststream/broker/publisher/proto.py @@ -56,6 +56,9 @@ class PublisherProto( _middlewares: Iterable["PublisherMiddleware"] _producer: Optional["ProducerProto"] + @abstractmethod + def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ... + @staticmethod @abstractmethod def create() -> "PublisherProto[MsgType]": diff --git a/faststream/broker/publisher/usecase.py b/faststream/broker/publisher/usecase.py index 23e8c5586e..46bb96ef2a 100644 --- a/faststream/broker/publisher/usecase.py +++ b/faststream/broker/publisher/usecase.py @@ -19,7 +19,12 @@ from faststream.asyncapi.message import get_response_schema from faststream.asyncapi.utils import to_camelcase from faststream.broker.publisher.proto import PublisherProto -from faststream.broker.types import MsgType, P_HandlerParams, T_HandlerReturn +from faststream.broker.types import ( + BrokerMiddleware, + MsgType, + P_HandlerParams, + T_HandlerReturn, +) from faststream.broker.wrapper.call import HandlerCallWrapper if TYPE_CHECKING: @@ -87,6 +92,9 @@ def __init__( self.include_in_schema = include_in_schema self.schema_ = schema_ + def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: + self._broker_middlewares = (*self._broker_middlewares, middleware) + @override def setup( # type: ignore[override] self, diff --git a/faststream/broker/subscriber/proto.py b/faststream/broker/subscriber/proto.py index 534c795b95..fa19428fde 100644 --- a/faststream/broker/subscriber/proto.py +++ b/faststream/broker/subscriber/proto.py @@ -35,6 +35,9 @@ class SubscriberProto( _broker_middlewares: Iterable["BrokerMiddleware[MsgType]"] _producer: Optional["ProducerProto"] + @abstractmethod + def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ... 
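Because `add_middleware` is now part of the public broker, subscriber, and publisher surface, a short usage sketch may be useful (the middleware body and broker choice are illustrative only):

```python
from faststream import BaseMiddleware
from faststream.redis import RedisBroker


class LoggingMiddleware(BaseMiddleware):
    async def on_receive(self):
        # runs before the handler for every consumed message
        print("received:", self.msg)
        return await super().on_receive()


broker = RedisBroker()


@broker.subscriber("in-channel")
async def handle(msg: str) -> None: ...


# Appended as the most inner middleware: it affects the already registered
# subscriber above as well as anything registered afterwards.
broker.add_middleware(LoggingMiddleware)
```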
+ @staticmethod @abstractmethod def create() -> "SubscriberProto[MsgType]": diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py index 82e6ebce8c..5d0dd886dd 100644 --- a/faststream/broker/subscriber/usecase.py +++ b/faststream/broker/subscriber/usecase.py @@ -131,6 +131,9 @@ def __init__( self.description_ = description_ self.include_in_schema = include_in_schema + def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: + self._broker_middlewares = (*self._broker_middlewares, middleware) + @override def setup( # type: ignore[override] self, diff --git a/faststream/cli/docs/app.py b/faststream/cli/docs/app.py index 450abdb061..c8066a8b9e 100644 --- a/faststream/cli/docs/app.py +++ b/faststream/cli/docs/app.py @@ -45,8 +45,7 @@ def serve( ), ), is_factory: bool = typer.Option( - False, - "--factory", help="Treat APP as an application factory" + False, "--factory", help="Treat APP as an application factory" ), ) -> None: """Serve project AsyncAPI schema.""" @@ -110,7 +109,8 @@ def gen( ), is_factory: bool = typer.Option( False, - "--factory", help="Treat APP as an application factory" + "--factory", + help="Treat APP as an application factory", ), ) -> None: """Generate project AsyncAPI schema.""" diff --git a/faststream/cli/main.py b/faststream/cli/main.py index 3ed7afa3e6..bbbe99aa33 100644 --- a/faststream/cli/main.py +++ b/faststream/cli/main.py @@ -211,7 +211,9 @@ def publish( rpc: bool = typer.Option(False, help="Enable RPC mode and system output"), is_factory: bool = typer.Option( False, - "--factory", help="Treat APP as an application factory" + "--factory", + is_flag=True, + help="Treat APP as an application factory", ), ) -> None: """Publish a message using the specified broker in a FastStream application. 
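The parser changes that follow populate the new `batch_headers` attribute for each broker. A sketch of how a batch handler might read it (topic name and broker choice are illustrative):

```python
from typing import List

from faststream.kafka import KafkaBroker
from faststream.kafka.annotations import KafkaMessage

broker = KafkaBroker()


@broker.subscriber("in-topic", batch=True)
async def handle(body: List[str], msg: KafkaMessage) -> None:
    # msg.headers carries the first message's headers, while
    # msg.batch_headers keeps one header dict per message in the batch
    for headers in msg.batch_headers:
        print(headers.get("correlation_id"))  # per-message ids, when present
```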
diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py index a4858247ac..8541ceb4f0 100644 --- a/faststream/confluent/parser.py +++ b/faststream/confluent/parser.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union from faststream.broker.message import decode_message, gen_cor_id from faststream.confluent.message import FAKE_CONSUMER, KafkaMessage @@ -20,18 +20,14 @@ async def parse_message( message: "Message", ) -> "StreamMessage[Message]": """Parses a Kafka message.""" - headers = {} - if message.headers() is not None: - for i, j in message.headers(): # type: ignore[union-attr] - if isinstance(j, str): - headers[i] = j - else: - headers[i] = j.decode() + headers = _parse_msg_headers(message.headers()) + body = message.value() offset = message.offset() _, timestamp = message.timestamp() handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + return KafkaMessage( body=body, headers=headers, @@ -49,28 +45,29 @@ async def parse_message_batch( message: Tuple["Message", ...], ) -> "StreamMessage[Tuple[Message, ...]]": """Parses a batch of messages from a Kafka consumer.""" + body: List[Any] = [] + batch_headers: List[Dict[str, str]] = [] + first = message[0] last = message[-1] - headers = {} - if first.headers() is not None: - for i, j in first.headers(): # type: ignore[union-attr] - if isinstance(j, str): - headers[i] = j - else: - headers[i] = j.decode() - body = [m.value() for m in message] - first_offset = first.offset() - last_offset = last.offset() + for m in message: + body.append(m.value) + batch_headers.append(_parse_msg_headers(m.headers())) + + headers = next(iter(batch_headers), {}) + _, first_timestamp = first.timestamp() handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + return KafkaMessage( body=body, headers=headers, + batch_headers=batch_headers, reply_to=headers.get("reply_to", ""), content_type=headers.get("content-type"), - message_id=f"{first_offset}-{last_offset}-{first_timestamp}", + message_id=f"{first.offset()}-{last.offset()}-{first_timestamp}", correlation_id=headers.get("correlation_id", gen_cor_id()), raw_message=message, consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER, @@ -91,3 +88,9 @@ async def decode_message_batch( ) -> "DecodedMessage": """Decode a batch of messages.""" return [decode_message(await cls.parse_message(m)) for m in msg.raw_message] + + +def _parse_msg_headers( + headers: Sequence[Tuple[str, Union[bytes, str]]], +) -> Dict[str, str]: + return {i: j if isinstance(j, str) else j.decode() for i, j in headers} diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py index c99bc31c33..8487eb3d0b 100644 --- a/faststream/kafka/parser.py +++ b/faststream/kafka/parser.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from faststream.broker.message import decode_message, gen_cor_id from faststream.kafka.message import FAKE_CONSUMER, KafkaMessage @@ -39,13 +39,24 @@ async def parse_message_batch( message: Tuple["ConsumerRecord", ...], ) -> "StreamMessage[Tuple[ConsumerRecord, ...]]": """Parses a batch of messages from a Kafka consumer.""" + body: List[Any] = [] + batch_headers: List[Dict[str, str]] = [] + first = message[0] last = message[-1] - headers = {i: j.decode() for i, j in first.headers} + + for m in message: + body.append(m.value) + batch_headers.append({i: 
j.decode() for i, j in m.headers}) + + headers = next(iter(batch_headers), {}) + handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + return KafkaMessage( - body=[m.value for m in message], + body=body, headers=headers, + batch_headers=batch_headers, reply_to=headers.get("reply_to", ""), content_type=headers.get("content-type"), message_id=f"{first.offset}-{last.offset}-{first.timestamp}", diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py index 2c6265ac1c..2ccbe47bad 100644 --- a/faststream/nats/broker/broker.py +++ b/faststream/nats/broker/broker.py @@ -623,12 +623,12 @@ async def start(self) -> None: ) except BadRequestError as e: - old_config = (await self.stream.stream_info(stream.name)).config - if ( e.description == "stream name already in use with a different configuration" ): + old_config = (await self.stream.stream_info(stream.name)).config + self._log(str(e), logging.WARNING, log_context) await self.stream.update_stream( config=stream.config, diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py index d843f13f99..940ae70426 100644 --- a/faststream/nats/parser.py +++ b/faststream/nats/parser.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Dict, List, Optional from faststream.broker.message import StreamMessage, decode_message, gen_cor_id from faststream.nats.message import NatsBatchMessage, NatsMessage @@ -102,15 +102,27 @@ async def parse_batch( self, message: List["Msg"], ) -> "StreamMessage[List[Msg]]": - if first_msg := next(iter(message), None): - path = self.get_path(first_msg.subject) + body: List[bytes] = [] + batch_headers: List[Dict[str, str]] = [] + + if message: + path = self.get_path(message[0].subject) + + for m in message: + batch_headers.append(m.headers or {}) + body.append(m.data) + else: path = None + headers = next(iter(batch_headers), {}) + return NatsBatchMessage( raw_message=message, - body=[m.data for m in message], + body=body, path=path or {}, + headers=headers, + batch_headers=batch_headers, ) async def decode_batch( diff --git a/faststream/rabbit/__init__.py b/faststream/rabbit/__init__.py index 11ca1a9373..7c05cb70c8 100644 --- a/faststream/rabbit/__init__.py +++ b/faststream/rabbit/__init__.py @@ -21,5 +21,6 @@ "ReplyConfig", "RabbitExchange", "RabbitQueue", + # Annotations "RabbitMessage", ) diff --git a/faststream/rabbit/annotations.py b/faststream/rabbit/annotations.py index bfb78c6af9..f32654d2cc 100644 --- a/faststream/rabbit/annotations.py +++ b/faststream/rabbit/annotations.py @@ -1,3 +1,4 @@ +from aio_pika import RobustChannel, RobustConnection from typing_extensions import Annotated from faststream.annotations import ContextRepo, Logger, NoCast @@ -13,8 +14,20 @@ "RabbitMessage", "RabbitBroker", "RabbitProducer", + "Channel", + "Connection", ) RabbitMessage = Annotated[RM, Context("message")] RabbitBroker = Annotated[RB, Context("broker")] RabbitProducer = Annotated[AioPikaFastProducer, Context("broker._producer")] + +Channel = Annotated[RobustChannel, Context("broker._channel")] +Connection = Annotated[RobustConnection, Context("broker._connection")] + +# NOTE: transaction is not for the public usage yet +# async def _get_transaction(connection: Connection) -> RabbitTransaction: +# async with connection.channel(publisher_confirms=False) as channel: +# yield channel.transaction() + +# Transaction = Annotated[RabbitTransaction, Depends(_get_transaction)] diff --git a/faststream/rabbit/broker/broker.py 
b/faststream/rabbit/broker/broker.py index fd4ca30d84..b0bc98c42f 100644 --- a/faststream/rabbit/broker/broker.py +++ b/faststream/rabbit/broker/broker.py @@ -100,6 +100,26 @@ def __init__( "TimeoutType", Doc("Connection establishement timeout."), ] = None, + # channel args + channel_number: Annotated[ + Optional[int], + Doc("Specify the channel number explicit."), + ] = None, + publisher_confirms: Annotated[ + bool, + Doc( + "if `True` the `publish` method will " + "return `bool` type after publish is complete." + "Otherwise it will returns `None`." + ), + ] = True, + on_return_raises: Annotated[ + bool, + Doc( + "raise an :class:`aio_pika.exceptions.DeliveryError`" + "when mandatory message will be returned" + ), + ] = False, # broker args max_consumers: Annotated[ Optional[int], @@ -220,6 +240,10 @@ def __init__( url=str(amqp_url), ssl_context=security_args.get("ssl_context"), timeout=timeout, + # channel args + channel_number=channel_number, + publisher_confirms=publisher_confirms, + on_return_raises=on_return_raises, # Basic args graceful_timeout=graceful_timeout, dependencies=dependencies, @@ -303,6 +327,26 @@ async def connect( # type: ignore[override] "TimeoutType", Doc("Connection establishement timeout."), ] = None, + # channel args + channel_number: Annotated[ + Union[int, None, object], + Doc("Specify the channel number explicit."), + ] = Parameter.empty, + publisher_confirms: Annotated[ + Union[bool, object], + Doc( + "if `True` the `publish` method will " + "return `bool` type after publish is complete." + "Otherwise it will returns `None`." + ), + ] = Parameter.empty, + on_return_raises: Annotated[ + Union[bool, object], + Doc( + "raise an :class:`aio_pika.exceptions.DeliveryError`" + "when mandatory message will be returned" + ), + ] = Parameter.empty, ) -> "RobustConnection": """Connect broker object to RabbitMQ. @@ -310,6 +354,15 @@ async def connect( # type: ignore[override] """ kwargs: AnyDict = {} + if channel_number is not Parameter.empty: + kwargs["channel_number"] = channel_number + + if publisher_confirms is not Parameter.empty: + kwargs["publisher_confirms"] = publisher_confirms + + if on_return_raises is not Parameter.empty: + kwargs["on_return_raises"] = on_return_raises + if timeout: kwargs["timeout"] = timeout @@ -346,6 +399,9 @@ async def _connect( # type: ignore[override] *, timeout: "TimeoutType", ssl_context: Optional["SSLContext"], + channel_number: Optional[int], + publisher_confirms: bool, + on_return_raises: bool, ) -> "RobustConnection": connection = cast( "RobustConnection", @@ -360,7 +416,11 @@ async def _connect( # type: ignore[override] max_consumers = self._max_consumers channel = self._channel = cast( "RobustChannel", - await connection.channel(), + await connection.channel( + channel_number=channel_number, + publisher_confirms=publisher_confirms, + on_return_raises=on_return_raises, + ), ) declarer = self.declarer = RabbitDeclarer(channel) diff --git a/faststream/rabbit/fastapi/router.py b/faststream/rabbit/fastapi/router.py index 4cc90b25d9..6d13beabae 100644 --- a/faststream/rabbit/fastapi/router.py +++ b/faststream/rabbit/fastapi/router.py @@ -96,6 +96,26 @@ def __init__( "TimeoutType", Doc("Connection establishement timeout."), ] = None, + # channel args + channel_number: Annotated[ + Optional[int], + Doc("Specify the channel number explicit."), + ] = None, + publisher_confirms: Annotated[ + bool, + Doc( + "if `True` the `publish` method will " + "return `bool` type after publish is complete." + "Otherwise it will returns `None`." 
+ ), + ] = True, + on_return_raises: Annotated[ + bool, + Doc( + "raise an :class:`aio_pika.exceptions.DeliveryError`" + "when mandatory message will be returned" + ), + ] = False, # broker args max_consumers: Annotated[ Optional[int], @@ -408,6 +428,9 @@ def __init__( graceful_timeout=graceful_timeout, decoder=decoder, parser=parser, + channel_number=channel_number, + publisher_confirms=publisher_confirms, + on_return_raises=on_return_raises, middlewares=middlewares, security=security, asyncapi_url=asyncapi_url, diff --git a/faststream/rabbit/schemas/queue.py b/faststream/rabbit/schemas/queue.py index b63685d1a5..a9bccf013d 100644 --- a/faststream/rabbit/schemas/queue.py +++ b/faststream/rabbit/schemas/queue.py @@ -1,3 +1,4 @@ +from copy import deepcopy from typing import TYPE_CHECKING, Optional from typing_extensions import Annotated, Doc @@ -115,3 +116,13 @@ def __init__( self.auto_delete = auto_delete self.arguments = arguments self.timeout = timeout + + def add_prefix(self, prefix: str) -> "RabbitQueue": + new_q: RabbitQueue = deepcopy(self) + + new_q.name = "".join((prefix, new_q.name)) + + if new_q.routing_key: + new_q.routing_key = "".join((prefix, new_q.routing_key)) + + return new_q diff --git a/faststream/rabbit/subscriber/usecase.py b/faststream/rabbit/subscriber/usecase.py index aecac22384..d2ca4480a2 100644 --- a/faststream/rabbit/subscriber/usecase.py +++ b/faststream/rabbit/subscriber/usecase.py @@ -1,4 +1,3 @@ -from copy import deepcopy from typing import ( TYPE_CHECKING, Any, @@ -223,6 +222,4 @@ def get_log_context( def add_prefix(self, prefix: str) -> None: """Include Subscriber in router.""" - new_q = deepcopy(self.queue) - new_q.name = "".join((prefix, new_q.name)) - self.queue = new_q + self.queue = self.queue.add_prefix(prefix) diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py index d47dae603d..52806b7fbd 100644 --- a/faststream/redis/parser.py +++ b/faststream/redis/parser.py @@ -1,6 +1,7 @@ from typing import ( TYPE_CHECKING, Any, + List, Mapping, Optional, Sequence, @@ -135,13 +136,16 @@ async def parse_message( self, message: Mapping[str, Any], ) -> "StreamMessage[Mapping[str, Any]]": - data, headers = self._parse_data(message) + data, headers, batch_headers = self._parse_data(message) + id_ = gen_cor_id() + return self.msg_class( raw_message=message, body=data, path=self.get_path(message), headers=headers, + batch_headers=batch_headers, reply_to=headers.get("reply_to", ""), content_type=headers.get("content-type"), message_id=headers.get("message_id", id_), @@ -149,8 +153,10 @@ async def parse_message( ) @staticmethod - def _parse_data(message: Mapping[str, Any]) -> Tuple[bytes, "AnyDict"]: - return RawMessage.parse(message["data"]) + def _parse_data( + message: Mapping[str, Any], + ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: + return (*RawMessage.parse(message["data"]), []) def get_path(self, message: Mapping[str, Any]) -> "AnyDict": if ( @@ -182,10 +188,26 @@ class RedisBatchListParser(SimpleParser): msg_class = RedisBatchListMessage @staticmethod - def _parse_data(message: Mapping[str, Any]) -> Tuple[bytes, "AnyDict"]: + def _parse_data( + message: Mapping[str, Any], + ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: + body: List[Any] = [] + batch_headers: List["AnyDict"] = [] + + for x in message["data"]: + msg_data, msg_headers = _decode_batch_body_item(x) + body.append(msg_data) + batch_headers.append(msg_headers) + + first_msg_headers = next(iter(batch_headers), {}) + return ( - dump_json(_decode_batch_body_item(x) for x in 
message["data"]), - {"content-type": ContentTypes.json}, + dump_json(body), + { + **first_msg_headers, + "content-type": ContentTypes.json.value, + }, + batch_headers, ) @@ -193,27 +215,43 @@ class RedisStreamParser(SimpleParser): msg_class = RedisStreamMessage @classmethod - def _parse_data(cls, message: Mapping[str, Any]) -> Tuple[bytes, "AnyDict"]: + def _parse_data( + cls, message: Mapping[str, Any] + ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: data = message["data"] - return RawMessage.parse(data.get(bDATA_KEY) or dump_json(data)) + return (*RawMessage.parse(data.get(bDATA_KEY) or dump_json(data)), []) class RedisBatchStreamParser(SimpleParser): msg_class = RedisBatchStreamMessage @staticmethod - def _parse_data(message: Mapping[str, Any]) -> Tuple[bytes, "AnyDict"]: + def _parse_data( + message: Mapping[str, Any], + ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: + body: List[Any] = [] + batch_headers: List["AnyDict"] = [] + + for x in message["data"]: + msg_data, msg_headers = _decode_batch_body_item(x.get(bDATA_KEY, x)) + body.append(msg_data) + batch_headers.append(msg_headers) + + first_msg_headers = next(iter(batch_headers), {}) + return ( - dump_json( - _decode_batch_body_item(x.get(bDATA_KEY, x)) for x in message["data"] - ), - {"content-type": ContentTypes.json}, + dump_json(body), + { + **first_msg_headers, + "content-type": ContentTypes.json.value, + }, + batch_headers, ) -def _decode_batch_body_item(msg_content: bytes) -> Any: - msg_body, _ = RawMessage.parse(msg_content) +def _decode_batch_body_item(msg_content: bytes) -> Tuple[Any, "AnyDict"]: + msg_body, headers = RawMessage.parse(msg_content) try: - return json_loads(msg_body) + return json_loads(msg_body), headers except Exception: - return msg_body + return msg_body, headers diff --git a/faststream/types.py b/faststream/types.py index 9f12fb9d57..681a7a3b18 100644 --- a/faststream/types.py +++ b/faststream/types.py @@ -63,22 +63,16 @@ class StandardDataclass(Protocol): """Protocol to check type is dataclass.""" __dataclass_fields__: ClassVar[Dict[str, Any]] - __dataclass_params__: ClassVar[Any] - __post_init__: ClassVar[Callable[..., None]] - - def __init__(self, *args: object, **kwargs: object) -> None: - """Interface method.""" - ... 
BaseSendableMessage: TypeAlias = Union[ JsonDecodable, Decimal, datetime, - None, StandardDataclass, SendableTable, SendableArray, + None, ] try: diff --git a/pyproject.toml b/pyproject.toml index c4f419a0a6..505e0de0dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,9 +76,9 @@ redis = ["redis>=5.0.0,<6.0.0"] # dev dependencies devdocs = [ "mkdocs-material==9.5.21", - "mkdocs-static-i18n==1.2.2", + "mkdocs-static-i18n==1.2.3", "mdx-include==1.4.2", - "mkdocstrings[python]==0.25.0", + "mkdocstrings[python]==0.25.1", "mkdocs-literate-nav==0.6.1", "mkdocs-git-revision-date-localized-plugin==1.2.5", "mike==2.1.1", # versioning @@ -106,14 +106,14 @@ types = [ lint = [ "faststream[types]", - "ruff==0.4.3", + "ruff==0.4.4", "bandit==1.7.8", "semgrep==1.70.0", "codespell==2.2.6", ] test-core = [ - "coverage[toml]==7.4.4", + "coverage[toml]==7.5.1", "pytest==8.2.0", "pytest-asyncio==0.23.6", "dirty-equals==0.7.1.post0", @@ -133,7 +133,7 @@ dev = [ "faststream[rabbit,kafka,confluent,nats,redis,lint,testing,devdocs]", "pre-commit==3.5.0; python_version < '3.9'", "pre-commit==3.7.0; python_version >= '3.9'", - "detect-secrets==1.4.0", + "detect-secrets==1.5.0", ] [project.urls] diff --git a/tests/asyncapi/rabbit/test_router.py b/tests/asyncapi/rabbit/test_router.py index b878eac005..386f4960f5 100644 --- a/tests/asyncapi/rabbit/test_router.py +++ b/tests/asyncapi/rabbit/test_router.py @@ -63,7 +63,7 @@ async def handle(msg): ... "subscribe": { "bindings": { "amqp": { - "cc": "key", + "cc": "test_key", "ack": True, "bindingVersion": "0.2.0", } @@ -91,7 +91,7 @@ async def handle(msg): ... }, }, } - ) + ), schema class TestRouterArguments(ArgumentsTestcase): diff --git a/tests/brokers/base/middlewares.py b/tests/brokers/base/middlewares.py index 4f89f08411..7ed74522d8 100644 --- a/tests/brokers/base/middlewares.py +++ b/tests/brokers/base/middlewares.py @@ -270,6 +270,59 @@ async def handler(m): mock.start.assert_called_once() mock.end.assert_called_once() + async def test_add_global_middleware( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + raw_broker, + ): + class mid(BaseMiddleware): # noqa: N801 + async def on_receive(self): + mock.start(self.msg) + return await super().on_receive() + + async def after_processed(self, exc_type, exc_val, exc_tb): + mock.end() + return await super().after_processed(exc_type, exc_val, exc_tb) + + broker = self.broker_class() + + # already registered subscriber + @broker.subscriber(queue, **self.subscriber_kwargs) + async def handler(m): + event.set() + return "" + + # should affect to already registered and a new subscriber both + broker.add_middleware(mid) + + event2 = asyncio.Event() + + # new subscriber + @broker.subscriber(f"{queue}1", **self.subscriber_kwargs) + async def handler2(m): + event2.set() + return "" + + broker = self.patch_broker(raw_broker, broker) + + async with broker: + await broker.start() + await asyncio.wait( + ( + asyncio.create_task(broker.publish("", queue)), + asyncio.create_task(broker.publish("", f"{queue}1")), + asyncio.create_task(event.wait()), + asyncio.create_task(event2.wait()), + ), + timeout=self.timeout, + ) + + assert event.is_set() + assert mock.start.call_count == 2 + assert mock.end.call_count == 2 + async def test_patch_publish(self, queue: str, mock: Mock, event, raw_broker): class Mid(BaseMiddleware): async def on_publish(self, msg: str, *args, **kwargs) -> str: diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py index 2ed026c9f7..4deb2a50ae 100644 --- 
a/tests/brokers/base/publish.py +++ b/tests/brokers/base/publish.py @@ -1,4 +1,5 @@ import asyncio +from dataclasses import asdict, dataclass from datetime import datetime from typing import Any, ClassVar, Dict, List, Tuple from unittest.mock import Mock @@ -7,7 +8,7 @@ import pytest from pydantic import BaseModel -from faststream._compat import model_to_json +from faststream._compat import dump_json, model_to_json from faststream.annotations import Logger from faststream.broker.core.usecase import BrokerUsecase @@ -16,6 +17,11 @@ class SimpleModel(BaseModel): r: str +@dataclass +class SimpleDataclass: + r: str + + now = datetime.now() @@ -55,6 +61,12 @@ def pub_broker(self, full_broker): 1.0, id="float->float", ), + pytest.param( + 1, + float, + 1.0, + id="int->float", + ), pytest.param( False, bool, @@ -103,6 +115,30 @@ def pub_broker(self, full_broker): SimpleModel(r="hello!"), id="dict->model", ), + pytest.param( + dump_json(asdict(SimpleDataclass(r="hello!"))), + SimpleDataclass, + SimpleDataclass(r="hello!"), + id="bytes->dataclass", + ), + pytest.param( + SimpleDataclass(r="hello!"), + SimpleDataclass, + SimpleDataclass(r="hello!"), + id="dataclass->dataclass", + ), + pytest.param( + SimpleDataclass(r="hello!"), + dict, + {"r": "hello!"}, + id="dataclass->dict", + ), + pytest.param( + {"r": "hello!"}, + SimpleDataclass, + SimpleDataclass(r="hello!"), + id="dict->dataclass", + ), ), ) async def test_serialize( diff --git a/tests/brokers/confluent/test_consume.py b/tests/brokers/confluent/test_consume.py index fb612d66d0..2c471c6e73 100644 --- a/tests/brokers/confluent/test_consume.py +++ b/tests/brokers/confluent/test_consume.py @@ -39,6 +39,38 @@ async def handler(msg): assert [{1, "hi"}] == [set(r.result()) for r in result] + @pytest.mark.asyncio() + async def test_consume_batch_headers( + self, mock, event: asyncio.Event, queue: str, full_broker: KafkaBroker + ): + @full_broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + def subscriber(m, msg: KafkaMessage): + check = all( + ( + msg.headers, + [msg.headers] == msg.batch_headers, + msg.headers.get("custom") == "1", + ) + ) + mock(check) + event.set() + + async with full_broker: + await full_broker.start() + + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("", queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=self.timeout, + ) + + assert event.is_set() + mock.assert_called_once_with(True) + @pytest.mark.asyncio() @pytest.mark.slow() async def test_consume_ack( diff --git a/tests/brokers/kafka/test_consume.py b/tests/brokers/kafka/test_consume.py index a50c06d8c4..82c3a7d0b8 100644 --- a/tests/brokers/kafka/test_consume.py +++ b/tests/brokers/kafka/test_consume.py @@ -33,6 +33,38 @@ async def handler(msg): assert [{1, "hi"}] == [set(r.result()) for r in result] + @pytest.mark.asyncio() + async def test_consume_batch_headers( + self, mock, event: asyncio.Event, queue: str, full_broker: KafkaBroker + ): + @full_broker.subscriber(queue, batch=True) + def subscriber(m, msg: KafkaMessage): + check = all( + ( + msg.headers, + [msg.headers] == msg.batch_headers, + msg.headers.get("custom") == "1", + ) + ) + mock(check) + event.set() + + async with full_broker: + await full_broker.start() + + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("", queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(True) + @pytest.mark.asyncio() @pytest.mark.slow() 
async def test_consume_ack( diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py index 23a1576287..1e7997526e 100644 --- a/tests/brokers/nats/test_consume.py +++ b/tests/brokers/nats/test_consume.py @@ -96,7 +96,6 @@ def subscriber(m): assert event.is_set() mock.assert_called_once_with([b"hello"]) - @pytest.mark.asyncio() async def test_consume_ack( self, queue: str, @@ -127,7 +126,6 @@ async def handler(msg: NatsMessage): assert event.is_set() - @pytest.mark.asyncio() async def test_consume_ack_manual( self, queue: str, @@ -159,7 +157,6 @@ async def handler(msg: NatsMessage): assert event.is_set() - @pytest.mark.asyncio() async def test_consume_ack_raise( self, queue: str, @@ -191,7 +188,6 @@ async def handler(msg: NatsMessage): assert event.is_set() - @pytest.mark.asyncio() async def test_nack( self, queue: str, @@ -223,7 +219,6 @@ async def handler(msg: NatsMessage): assert event.is_set() - @pytest.mark.asyncio() async def test_consume_no_ack( self, queue: str, full_broker: NatsBroker, event: asyncio.Event ): @@ -248,3 +243,41 @@ async def handler(msg: NatsMessage): m.mock.assert_not_called() assert event.is_set() + + async def test_consume_batch_headers( + self, + queue: str, + full_broker: NatsBroker, + stream: JStream, + event: asyncio.Event, + mock, + ): + @full_broker.subscriber( + queue, + stream=stream, + pull_sub=PullSub(1, batch=True), + ) + def subscriber(m, msg: NatsMessage): + check = all( + ( + msg.headers, + [msg.headers] == msg.batch_headers, + msg.headers.get("custom") == "1", + ) + ) + mock(check) + event.set() + + await full_broker.start() + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("", queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(True) diff --git a/tests/brokers/rabbit/test_router.py b/tests/brokers/rabbit/test_router.py index 50f81a636d..ac14d3372d 100644 --- a/tests/brokers/rabbit/test_router.py +++ b/tests/brokers/rabbit/test_router.py @@ -139,6 +139,39 @@ def subscriber(m): assert event.is_set() + async def test_queue_obj_with_routing_key( + self, + router: RabbitRouter, + broker: RabbitBroker, + queue: str, + event: asyncio.Event, + ): + router.prefix = "test/" + + r_queue = RabbitQueue("useless", routing_key=f"{queue}1") + exchange = RabbitExchange(f"{queue}exch") + + @router.subscriber(r_queue, exchange=exchange) + def subscriber(m): + event.set() + + broker.include_router(router) + + async with broker: + await broker.start() + + await asyncio.wait( + ( + asyncio.create_task( + broker.publish("hello", f"test/{queue}1", exchange=exchange) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + async def test_delayed_handlers_with_queue( self, event: asyncio.Event, diff --git a/tests/brokers/redis/test_consume.py b/tests/brokers/redis/test_consume.py index 176fd0965f..8ddad852c8 100644 --- a/tests/brokers/redis/test_consume.py +++ b/tests/brokers/redis/test_consume.py @@ -138,30 +138,70 @@ async def handler(msg): mock.assert_called_once_with(b"hello") @pytest.mark.slow() - async def test_consume_list_batch_with_one(self, queue: str, broker: RedisBroker): - msgs_queue = asyncio.Queue(maxsize=1) - - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + async def test_consume_list_batch_with_one( + self, event: asyncio.Event, mock, queue: str, broker: RedisBroker + ): + @broker.subscriber( + list=ListSub(queue, batch=True, max_records=1, 
polling_interval=0.01) + ) async def handler(msg): - await msgs_queue.put(msg) + mock(msg) + event.set() async with broker: await broker.start() - await broker.publish("hi", list=queue) - - result, _ = await asyncio.wait( - (asyncio.create_task(msgs_queue.get()),), + await asyncio.wait( + ( + asyncio.create_task(broker.publish("hi", list=queue)), + asyncio.create_task(event.wait()), + ), timeout=3, ) - assert ["hi"] == [r.result()[0] for r in result] + assert event.is_set() + mock.assert_called_once_with(["hi"]) + + @pytest.mark.slow() + async def test_consume_list_batch_headers( + self, + queue: str, + full_broker: RedisBroker, + event: asyncio.Event, + mock, + ): + @full_broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) + def subscriber(m, msg: RedisMessage): + check = all( + ( + msg.headers, + msg.headers["correlation_id"] + == msg.batch_headers[0]["correlation_id"], + msg.headers.get("custom") == "1", + ) + ) + mock(check) + event.set() + + await full_broker.start() + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("", list=queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(True) @pytest.mark.slow() async def test_consume_list_batch(self, queue: str, broker: RedisBroker): msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) async def handler(msg): await msgs_queue.put(msg) @@ -189,7 +229,7 @@ def __hash__(self): msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) async def handler(msg: List[Data]): await msgs_queue.put(msg) @@ -210,7 +250,7 @@ async def handler(msg: List[Data]): async def test_consume_list_batch_native(self, queue: str, broker: RedisBroker): msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=1)) + @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) async def handler(msg): await msgs_queue.put(msg) @@ -238,7 +278,7 @@ async def test_consume_stream( mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=3000)) + @broker.subscriber(stream=StreamSub(queue, polling_interval=10)) async def handler(msg): mock(msg) event.set() @@ -264,7 +304,7 @@ async def test_consume_stream_native( mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=3000)) + @broker.subscriber(stream=StreamSub(queue, polling_interval=10)) async def handler(msg): mock(msg) event.set() @@ -292,7 +332,7 @@ async def test_consume_stream_batch( mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=3000, batch=True)) + @broker.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) async def handler(msg): mock(msg) event.set() @@ -310,6 +350,43 @@ async def handler(msg): mock.assert_called_once_with(["hello"]) + @pytest.mark.slow() + async def test_consume_stream_batch_headers( + self, + queue: str, + full_broker: RedisBroker, + event: asyncio.Event, + mock, + ): + @full_broker.subscriber( + stream=StreamSub(queue, polling_interval=10, batch=True) + ) + def subscriber(m, msg: RedisMessage): + check = all( + ( + msg.headers, + msg.headers["correlation_id"] + == msg.batch_headers[0]["correlation_id"], + 
msg.headers.get("custom") == "1", + ) + ) + mock(check) + event.set() + + await full_broker.start() + await asyncio.wait( + ( + asyncio.create_task( + full_broker.publish("", stream=queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(True) + @pytest.mark.slow() async def test_consume_stream_batch_complex( self, @@ -323,7 +400,7 @@ class Data(BaseModel): msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(stream=StreamSub(queue, polling_interval=3000, batch=True)) + @broker.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) async def handler(msg: List[Data]): await msgs_queue.put(msg) @@ -348,7 +425,7 @@ async def test_consume_stream_batch_native( mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=3000, batch=True)) + @broker.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) async def handler(msg): mock(msg) event.set() diff --git a/tests/brokers/redis/test_fastapi.py b/tests/brokers/redis/test_fastapi.py index c61f88614d..36e95d1a29 100644 --- a/tests/brokers/redis/test_fastapi.py +++ b/tests/brokers/redis/test_fastapi.py @@ -86,7 +86,7 @@ async def test_consume_stream( ): router = RedisRouter() - @router.subscriber(stream=StreamSub(queue, polling_interval=3000)) + @router.subscriber(stream=StreamSub(queue, polling_interval=10)) async def handler(msg): mock(msg) event.set() @@ -114,7 +114,7 @@ async def test_consume_stream_batch( ): router = RedisRouter() - @router.subscriber(stream=StreamSub(queue, polling_interval=3000, batch=True)) + @router.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) async def handler(msg: List[str]): mock(msg) event.set() From 987d0699aff0594602df84d8459c7cc9f0583ab6 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Thu, 16 May 2024 18:01:24 +0000 Subject: [PATCH 13/43] Update Release Notes for 0.5.6 (#1451) Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> --- docs/docs/en/release.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 474997a668..b01f54aa22 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,24 @@ hide: --- # Release Notes +## 0.5.6 + +### What's Changed + +* feature: add --factory param by [@Sehat1137](https://github.com/Sehat1137){.external-link target="_blank"} in [#1440](https://github.com/airtai/faststream/pull/1440){.external-link target="_blank"} +* feat: add RMQ channels options, support for prefix for routing_key, a… by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1448](https://github.com/airtai/faststream/pull/1448){.external-link target="_blank"} +* feature: Add `from faststream.rabbit.annotations import Connection, Channel` shortcuts +* Bugfix: RabbitMQ RabbitRouter prefix now affects to queue routing key as well +* Feature (close #1402): add `broker.add_middleware` public API to append a middleware to already created broker +* Feature: add `RabbitBroker(channel_number: int, publisher_confirms: bool, on_return_raises: bool)` options to setup channel settings +* Feature (close #1447): add `StreamMessage.batch_headers` attribute to provide with access to whole batch messages headers + +### New Contributors + +* [@Sehat1137](https://github.com/Sehat1137){.external-link 
target="_blank"} made their first contribution in [#1440](https://github.com/airtai/faststream/pull/1440){.external-link target="_blank"} + +**Full Changelog**: [#0.5.5...0.5.6](https://github.com/airtai/faststream/compare/0.5.5...0.5.6){.external-link target="_blank"} + ## 0.5.5 ### What's Changed From c555f0f8995447049702bb1d8c3a65d451cfa839 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Thu, 16 May 2024 23:58:30 +0300 Subject: [PATCH 14/43] fix: serialize TestClient rpc output to mock the real message (#1452) --- faststream/broker/core/usecase.py | 2 +- faststream/testing/broker.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/faststream/broker/core/usecase.py b/faststream/broker/core/usecase.py index 10266e152c..ea988a1811 100644 --- a/faststream/broker/core/usecase.py +++ b/faststream/broker/core/usecase.py @@ -333,7 +333,7 @@ async def publish( **kwargs: Any, ) -> Optional[Any]: """Publish message directly.""" - assert producer, NOT_CONNECTED_YET # nosec B101) + assert producer, NOT_CONNECTED_YET # nosec B101 publish: "AsyncFunc" = producer.publish for m in self._middlewares: diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py index d1f89f1958..c7d186d34c 100644 --- a/faststream/testing/broker.py +++ b/faststream/testing/broker.py @@ -16,6 +16,7 @@ from unittest.mock import AsyncMock, MagicMock from faststream.broker.core.usecase import BrokerUsecase +from faststream.broker.message import StreamMessage, decode_message, encode_message from faststream.broker.middlewares.logging import CriticalLogMiddleware from faststream.broker.wrapper.call import HandlerCallWrapper from faststream.testing.app import TestApp @@ -212,6 +213,9 @@ async def call_handler( result = await handler.consume(message) if rpc: - return result + message_body, content_type = encode_message(result) + msg_to_publish = StreamMessage(raw_message=None, body=message_body, content_type=content_type) + consumed_data = decode_message(msg_to_publish) + return consumed_data return None From e0ec055ce2d2d70dd3d24d40514764ca0349e4b8 Mon Sep 17 00:00:00 2001 From: treaditup <97654121+draincoder@users.noreply.github.com> Date: Sun, 19 May 2024 00:53:11 +0300 Subject: [PATCH 15/43] feature (#916): Observability (#1398) * Create TelemetryMiddleware * Refactor TelemetryMiddleware * Optimization TelemetryMiddleware * Refactor TelemetryMiddleware * Change msg type in __call__ * Continue process span while publish * Change nesting and add create span * refactor: mv useless types to TYPE_CHECKING * refactor: add TelemetrySettingsProvider protocol & NATS impl * chore: update distributions * Fixed a typo in PR template * Fixed a typo in ABCBroker: _broker_dependecies to _broker_dependencies * Add TelemetryTestcase * Fix invalid ruff formating * Add telemetry tests, fix kind error * Add metrics test, fix invalid metric error * More specifics in the metrics test * Changed getting a destination to methods * Add Rabbit impl * Ruff format * Remove delivery_tag from publish * Refactor extracting attributes * Ruff format * Add AIOKafka impl * Add Confluent impl * Rename test classes * Add Redis impl * Refactor settings providers import * fix: correct TelemetryMiddlewares batch consuming * Fix spans and metrics ordering in tests, switch Confluent attributes methods * Add MESSAGING_BATCH_MESSAGE_COUNT attribute * Add TestConsume and TestPublish to telemetry tests * refactor: create independant TelemetryMiddleware for each broker * tests: use broker factory instead of fixtures in Consume and 
Publish test cases * tests: refactor RPC and TestClient testcases * tests: refactor RMQ tests * tests: refactor Kafka tests * tests: NATS subscriber tests with patching * tests: RMQ subscriber tests with patching * tests: refactor Redis tests * tests: refactor Confluent tests * chore: run CI * tests: fix Confluent TestClient tests * tests: fix py3.8 compatibility * lint: fix some mypy * chore: fix CI * tests: fix partition tests * tests: fix partition real broker case * Change supported metrics * Separation of metrics for systems with and without batches * Fix _subscriber_setup_extra merge conflict * Refactor and update metrics tests, fix metrics * Refactor metrics, need batches count in publish * feat: count Redis consuming batch size * lint: fix mypy * Add batch tests, metrics ready * chore: solve main conflicts * fix: correct confluent batch body serialization * Fix opentelemetry redis batch test * lint: correct StreamRoute call annotation * Add docs for OpenTelemetry * docs: add otel page links style * chore: bump version * Refactor docs * docs: fix styles --------- Co-authored-by: Nikita Pastukhov Co-authored-by: Pastukhov Nikita Co-authored-by: treaditup --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- .github/workflows/test.yaml | 18 +- docs/docs/SUMMARY.md | 50 +++ docs/docs/assets/img/distributed-trace.png | Bin 0 -> 282097 bytes docs/docs/assets/img/simple-trace.png | Bin 0 -> 188840 bytes .../opentelemetry/KafkaTelemetryMiddleware.md | 11 + .../middleware/KafkaTelemetryMiddleware.md | 11 + .../BaseConfluentTelemetrySettingsProvider.md | 11 + ...BatchConfluentTelemetrySettingsProvider.md | 11 + .../ConfluentTelemetrySettingsProvider.md | 11 + .../telemetry_attributes_provider_factory.md | 11 + .../opentelemetry/KafkaTelemetryMiddleware.md | 11 + .../middleware/KafkaTelemetryMiddleware.md | 11 + .../BaseKafkaTelemetrySettingsProvider.md | 11 + .../BatchKafkaTelemetrySettingsProvider.md | 11 + .../KafkaTelemetrySettingsProvider.md | 11 + .../telemetry_attributes_provider_factory.md | 11 + .../opentelemetry/NatsTelemetryMiddleware.md | 11 + .../middleware/NatsTelemetryMiddleware.md | 11 + .../BaseNatsTelemetrySettingsProvider.md | 11 + .../NatsBatchTelemetrySettingsProvider.md | 11 + .../provider/NatsTelemetrySettingsProvider.md | 11 + .../telemetry_attributes_provider_factory.md | 11 + .../opentelemetry/TelemetryMiddleware.md | 11 + .../TelemetrySettingsProvider.md | 11 + .../opentelemetry/consts/MessageAction.md | 11 + .../middleware/BaseTelemetryMiddleware.md | 11 + .../middleware/TelemetryMiddleware.md | 11 + .../provider/TelemetrySettingsProvider.md | 11 + .../RabbitTelemetryMiddleware.md | 11 + .../middleware/RabbitTelemetryMiddleware.md | 11 + .../RabbitTelemetrySettingsProvider.md | 11 + .../opentelemetry/RedisTelemetryMiddleware.md | 11 + .../middleware/RedisTelemetryMiddleware.md | 11 + .../RedisTelemetrySettingsProvider.md | 11 + .../en/getting-started/opentelemetry/index.md | 114 ++++++ docs/docs/navigation_template.txt | 1 + .../getting_started/opentelemetry/__init__.py | 0 .../opentelemetry/confluent_telemetry.py | 10 + .../opentelemetry/kafka_telemetry.py | 10 + .../opentelemetry/nats_telemetry.py | 10 + .../opentelemetry/rabbit_telemetry.py | 10 + .../opentelemetry/redis_telemetry.py | 10 + .../getting_started/opentelemetry/1.md | 24 ++ faststream/__about__.py | 2 +- faststream/broker/core/abc.py | 4 +- faststream/broker/core/usecase.py | 51 +-- faststream/broker/subscriber/proto.py | 2 +- faststream/broker/subscriber/usecase.py | 10 +- 
faststream/confluent/broker/broker.py | 1 + .../confluent/opentelemetry/__init__.py | 3 + .../confluent/opentelemetry/middleware.py | 26 ++ .../confluent/opentelemetry/provider.py | 114 ++++++ faststream/confluent/parser.py | 2 +- faststream/confluent/router.py | 6 +- faststream/confluent/subscriber/usecase.py | 2 +- faststream/kafka/broker/broker.py | 2 + faststream/kafka/opentelemetry/__init__.py | 3 + faststream/kafka/opentelemetry/middleware.py | 26 ++ faststream/kafka/opentelemetry/provider.py | 115 ++++++ faststream/kafka/router.py | 6 +- faststream/kafka/subscriber/usecase.py | 2 +- faststream/nats/broker/broker.py | 11 +- faststream/nats/broker/registrator.py | 2 +- faststream/nats/opentelemetry/__init__.py | 3 + faststream/nats/opentelemetry/middleware.py | 24 ++ faststream/nats/opentelemetry/provider.py | 114 ++++++ faststream/nats/router.py | 16 +- faststream/nats/subscriber/asyncapi.py | 4 +- faststream/nats/subscriber/usecase.py | 2 +- faststream/opentelemetry/__init__.py | 7 + faststream/opentelemetry/consts.py | 9 + faststream/opentelemetry/middleware.py | 299 +++++++++++++++ faststream/opentelemetry/provider.py | 31 ++ faststream/rabbit/broker/broker.py | 2 + faststream/rabbit/opentelemetry/__init__.py | 3 + faststream/rabbit/opentelemetry/middleware.py | 24 ++ faststream/rabbit/opentelemetry/provider.py | 62 +++ faststream/rabbit/publisher/usecase.py | 3 +- faststream/rabbit/router.py | 7 +- faststream/rabbit/subscriber/usecase.py | 2 +- faststream/redis/broker/broker.py | 3 +- faststream/redis/opentelemetry/__init__.py | 3 + faststream/redis/opentelemetry/middleware.py | 24 ++ faststream/redis/opentelemetry/provider.py | 62 +++ faststream/redis/publisher/producer.py | 3 +- faststream/redis/router.py | 7 +- faststream/redis/subscriber/usecase.py | 2 +- faststream/redis/testing.py | 2 + faststream/testing/broker.py | 4 +- pyproject.toml | 8 +- tests/asyncapi/confluent/__init__.py | 3 + tests/asyncapi/kafka/__init__.py | 3 + tests/asyncapi/nats/__init__.py | 3 + tests/asyncapi/rabbit/__init__.py | 3 + tests/asyncapi/redis/__init__.py | 3 + tests/brokers/base/consume.py | 112 +++--- tests/brokers/base/publish.py | 116 +++--- tests/brokers/base/router.py | 4 +- tests/brokers/base/rpc.py | 69 ++-- tests/brokers/base/testclient.py | 67 ++-- tests/brokers/confluent/__init__.py | 3 + tests/brokers/confluent/conftest.py | 7 - tests/brokers/confluent/test_consume.py | 153 ++++---- tests/brokers/confluent/test_publish.py | 43 ++- tests/brokers/confluent/test_test_client.py | 54 +-- tests/brokers/conftest.py | 8 - tests/brokers/kafka/__init__.py | 3 + tests/brokers/kafka/test_consume.py | 136 ++++--- tests/brokers/kafka/test_publish.py | 43 ++- tests/brokers/kafka/test_test_client.py | 102 ++--- tests/brokers/nats/__init__.py | 3 + tests/brokers/nats/test_consume.py | 193 +++++----- tests/brokers/nats/test_publish.py | 28 +- tests/brokers/nats/test_rpc.py | 7 +- tests/brokers/nats/test_test_client.py | 138 ++++--- tests/brokers/rabbit/__init__.py | 3 + tests/brokers/rabbit/test_consume.py | 165 ++++---- tests/brokers/rabbit/test_publish.py | 18 +- tests/brokers/rabbit/test_rpc.py | 4 +- tests/brokers/rabbit/test_test_client.py | 157 ++++---- tests/brokers/redis/__init__.py | 3 + tests/brokers/redis/test_consume.py | 309 +++++++++------ tests/brokers/redis/test_publish.py | 52 ++- tests/brokers/redis/test_rpc.py | 14 +- tests/brokers/redis/test_test_client.py | 133 ++++--- tests/conftest.py | 6 + tests/docs/confluent/__init__.py | 3 + tests/docs/kafka/__init__.py | 3 + 
tests/docs/nats/__init__.py | 3 + tests/docs/rabbit/__init__.py | 3 + tests/docs/redis/__init__.py | 3 + tests/marks.py | 18 +- tests/opentelemetry/__init__.py | 3 + tests/opentelemetry/basic.py | 357 ++++++++++++++++++ tests/opentelemetry/confluent/__init__.py | 3 + .../opentelemetry/confluent/test_confluent.py | 130 +++++++ tests/opentelemetry/kafka/__init__.py | 3 + tests/opentelemetry/kafka/test_kafka.py | 128 +++++++ tests/opentelemetry/nats/__init__.py | 3 + tests/opentelemetry/nats/test_nats.py | 103 +++++ tests/opentelemetry/rabbit/__init__.py | 3 + tests/opentelemetry/rabbit/test_rabbit.py | 83 ++++ tests/opentelemetry/redis/__init__.py | 3 + tests/opentelemetry/redis/test_redis.py | 112 ++++++ 145 files changed, 3842 insertions(+), 999 deletions(-) create mode 100644 docs/docs/assets/img/distributed-trace.png create mode 100644 docs/docs/assets/img/simple-trace.png create mode 100644 docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md create mode 100644 docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md create mode 100644 docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md create mode 100644 docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md create mode 100644 docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md create mode 100644 
docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md create mode 100644 docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md create mode 100644 docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md create mode 100644 docs/docs/en/getting-started/opentelemetry/index.md create mode 100644 docs/docs_src/getting_started/opentelemetry/__init__.py create mode 100644 docs/docs_src/getting_started/opentelemetry/confluent_telemetry.py create mode 100644 docs/docs_src/getting_started/opentelemetry/kafka_telemetry.py create mode 100644 docs/docs_src/getting_started/opentelemetry/nats_telemetry.py create mode 100644 docs/docs_src/getting_started/opentelemetry/rabbit_telemetry.py create mode 100644 docs/docs_src/getting_started/opentelemetry/redis_telemetry.py create mode 100644 docs/includes/getting_started/opentelemetry/1.md create mode 100644 faststream/confluent/opentelemetry/__init__.py create mode 100644 faststream/confluent/opentelemetry/middleware.py create mode 100644 faststream/confluent/opentelemetry/provider.py create mode 100644 faststream/kafka/opentelemetry/__init__.py create mode 100644 faststream/kafka/opentelemetry/middleware.py create mode 100644 faststream/kafka/opentelemetry/provider.py create mode 100644 faststream/nats/opentelemetry/__init__.py create mode 100644 faststream/nats/opentelemetry/middleware.py create mode 100644 faststream/nats/opentelemetry/provider.py create mode 100644 faststream/opentelemetry/__init__.py create mode 100644 faststream/opentelemetry/consts.py create mode 100644 faststream/opentelemetry/middleware.py create mode 100644 faststream/opentelemetry/provider.py create mode 100644 faststream/rabbit/opentelemetry/__init__.py create mode 100644 faststream/rabbit/opentelemetry/middleware.py create mode 100644 faststream/rabbit/opentelemetry/provider.py create mode 100644 faststream/redis/opentelemetry/__init__.py create mode 100644 faststream/redis/opentelemetry/middleware.py create mode 100644 faststream/redis/opentelemetry/provider.py delete mode 100644 tests/brokers/conftest.py create mode 100644 tests/brokers/nats/__init__.py create mode 100644 tests/opentelemetry/__init__.py create mode 100644 tests/opentelemetry/basic.py create mode 100644 tests/opentelemetry/confluent/__init__.py create mode 100644 tests/opentelemetry/confluent/test_confluent.py create mode 100644 tests/opentelemetry/kafka/__init__.py create mode 100644 tests/opentelemetry/kafka/test_kafka.py create mode 100644 tests/opentelemetry/nats/__init__.py create mode 100644 tests/opentelemetry/nats/test_nats.py create mode 100644 tests/opentelemetry/rabbit/__init__.py create mode 100644 tests/opentelemetry/rabbit/test_rabbit.py create mode 100644 tests/opentelemetry/redis/__init__.py create mode 100644 tests/opentelemetry/redis/test_redis.py diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 93f6f4cabc..e5333e3e48 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -22,5 +22,5 @@ Please delete options that are not relevant. 
- [ ] My changes do not generate any new warnings - [ ] I have added tests to validate the effectiveness of my fix or the functionality of my new feature - [ ] Both new and existing unit tests pass successfully on my local environment by running `scripts/test-cov.sh` -- [ ] I have ensured that static analysis tests are passing by running `scripts/static-anaylysis.sh` +- [ ] I have ensured that static analysis tests are passing by running `scripts/static-analysis.sh` - [ ] I have included code examples to illustrate the modifications diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 6be65fa584..ddf783ded9 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -84,7 +84,7 @@ jobs: key: ${{ runner.os }}-python-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-test-v03 - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,kafka,confluent,nats,redis,testing] + run: pip install .[optionals,testing] - name: Install Pydantic v1 if: matrix.pydantic-version == 'pydantic-v1' run: pip install "pydantic>=1.10.0,<2.0.0" @@ -117,7 +117,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,confluent,rabbit,redis,testing] orjson + run: pip install .[optionals,testing] orjson - run: mkdir coverage - name: Test run: bash scripts/test.sh -m"(slow and (not nats and not kafka and not confluent and not rabbit and not redis)) or (not nats and not kafka and not confluent and not rabbit and not redis)" @@ -144,7 +144,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,kafka,confluent,nats,redis,testing] + run: pip install .[optionals,testing] - name: Test run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not confluent and not rabbit and not redis)) or (not nats and not kafka and not confluent and not rabbit and not redis)" @@ -161,7 +161,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,kafka,confluent,nats,redis,testing] + run: pip install .[optionals,testing] - name: Test run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not confluent and not rabbit and not redis)) or (not nats and not kafka and not confluent and not rabbit and not redis)" @@ -194,7 +194,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,confluent,rabbit,redis,testing] + run: pip install .[optionals,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and kafka) or kafka" @@ -254,7 +254,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,confluent,rabbit,redis,testing] + run: pip install .[optionals,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and confluent) or confluent" @@ -303,7 +303,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,confluent,rabbit,redis,testing] + run: pip install .[optionals,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and rabbit) or rabbit" @@ -352,7 +352,7 @@ jobs: cache-dependency-path: 
pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,confluent,rabbit,redis,testing] + run: pip install .[optionals,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and nats) or nats" @@ -401,7 +401,7 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,kafka,confluent,rabbit,redis,testing] + run: pip install .[optionals,testing] - run: mkdir coverage - name: Test run: bash scripts/test.sh -m "(slow and redis) or redis" diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 4c71af3c71..61f070b7bf 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -41,6 +41,7 @@ search: - [FastAPI Plugin](getting-started/integrations/fastapi/index.md) - [Django](getting-started/integrations/django/index.md) - [CLI commands](getting-started/cli/index.md) + - [OpenTelemetry](getting-started/opentelemetry/index.md) - [Logging](getting-started/logging.md) - [Config Management](getting-started/config/index.md) - [Task Scheduling](scheduling.md) @@ -439,6 +440,15 @@ search: - [ConsumerProtocol](api/faststream/confluent/message/ConsumerProtocol.md) - [FakeConsumer](api/faststream/confluent/message/FakeConsumer.md) - [KafkaMessage](api/faststream/confluent/message/KafkaMessage.md) + - opentelemetry + - [KafkaTelemetryMiddleware](api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md) + - middleware + - [KafkaTelemetryMiddleware](api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md) + - provider + - [BaseConfluentTelemetrySettingsProvider](api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md) + - [BatchConfluentTelemetrySettingsProvider](api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md) + - [ConfluentTelemetrySettingsProvider](api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md) + - [telemetry_attributes_provider_factory](api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md) - parser - [AsyncConfluentParser](api/faststream/confluent/parser/AsyncConfluentParser.md) - publisher @@ -514,6 +524,15 @@ search: - [ConsumerProtocol](api/faststream/kafka/message/ConsumerProtocol.md) - [FakeConsumer](api/faststream/kafka/message/FakeConsumer.md) - [KafkaMessage](api/faststream/kafka/message/KafkaMessage.md) + - opentelemetry + - [KafkaTelemetryMiddleware](api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md) + - middleware + - [KafkaTelemetryMiddleware](api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md) + - provider + - [BaseKafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md) + - [BatchKafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md) + - [KafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md) + - [telemetry_attributes_provider_factory](api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md) - parser - [AioKafkaParser](api/faststream/kafka/parser/AioKafkaParser.md) - publisher @@ -596,6 +615,15 @@ search: - message - [NatsBatchMessage](api/faststream/nats/message/NatsBatchMessage.md) - [NatsMessage](api/faststream/nats/message/NatsMessage.md) + - opentelemetry + - 
[NatsTelemetryMiddleware](api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md) + - middleware + - [NatsTelemetryMiddleware](api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md) + - provider + - [BaseNatsTelemetrySettingsProvider](api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md) + - [NatsBatchTelemetrySettingsProvider](api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md) + - [NatsTelemetrySettingsProvider](api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md) + - [telemetry_attributes_provider_factory](api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md) - parser - [BatchParser](api/faststream/nats/parser/BatchParser.md) - [JsParser](api/faststream/nats/parser/JsParser.md) @@ -638,6 +666,16 @@ search: - [PatchedMessage](api/faststream/nats/testing/PatchedMessage.md) - [TestNatsBroker](api/faststream/nats/testing/TestNatsBroker.md) - [build_message](api/faststream/nats/testing/build_message.md) + - opentelemetry + - [TelemetryMiddleware](api/faststream/opentelemetry/TelemetryMiddleware.md) + - [TelemetrySettingsProvider](api/faststream/opentelemetry/TelemetrySettingsProvider.md) + - consts + - [MessageAction](api/faststream/opentelemetry/consts/MessageAction.md) + - middleware + - [BaseTelemetryMiddleware](api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md) + - [TelemetryMiddleware](api/faststream/opentelemetry/middleware/TelemetryMiddleware.md) + - provider + - [TelemetrySettingsProvider](api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md) - rabbit - [ExchangeType](api/faststream/rabbit/ExchangeType.md) - [RabbitBroker](api/faststream/rabbit/RabbitBroker.md) @@ -664,6 +702,12 @@ search: - [RabbitRouter](api/faststream/rabbit/fastapi/router/RabbitRouter.md) - message - [RabbitMessage](api/faststream/rabbit/message/RabbitMessage.md) + - opentelemetry + - [RabbitTelemetryMiddleware](api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md) + - middleware + - [RabbitTelemetryMiddleware](api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md) + - provider + - [RabbitTelemetrySettingsProvider](api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md) - parser - [AioPikaParser](api/faststream/rabbit/parser/AioPikaParser.md) - publisher @@ -748,6 +792,12 @@ search: - [StreamMessage](api/faststream/redis/message/StreamMessage.md) - [UnifyRedisDict](api/faststream/redis/message/UnifyRedisDict.md) - [UnifyRedisMessage](api/faststream/redis/message/UnifyRedisMessage.md) + - opentelemetry + - [RedisTelemetryMiddleware](api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md) + - middleware + - [RedisTelemetryMiddleware](api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md) + - provider + - [RedisTelemetrySettingsProvider](api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md) - parser - [RawMessage](api/faststream/redis/parser/RawMessage.md) - [RedisBatchListParser](api/faststream/redis/parser/RedisBatchListParser.md) diff --git a/docs/docs/assets/img/distributed-trace.png b/docs/docs/assets/img/distributed-trace.png new file mode 100644 index 0000000000000000000000000000000000000000..2b9e89de5f88ba73bc85b1b521653ae69d8e61d1 GIT binary patch literal 282097 zcmafb2UJtt+9v9!C?H4?kSaln^eP=xqNp@QdW(wG(0h-HG-)Ezn>3LUkWQ#7QX~}V zLV!r`gdQLvna%xY=Fa_R)|zA15{_BtBxmpazE9gmKGxB=OvOe;K|yi(;RC211;wRF z3JOY{i)Xcc>_qe&$t51WIY 

FAN5`BHjf{lfX$cB^&k#RIIM;G@ADCP)Sl{W zZG|wlL}9NmF|}DEpQm*8bquI0>*Uz)BIK*VxLZxso|NghnV$$sq3u`KiP;v01_ry3_>JS~ewA!r{RZU&ko)Z*FF;myoRo_g zzpfdzp2M%JojV@@?9;6MDqgilSs=fd?=Rg+vQI&ZuBbZB59r^SY0=YUkSj3707>Pv zf9uOygo9l}7vczajf1W(-|>4uy7~5P+`=3OkVP6r_iG_xUYPYXTl=LW!+`bYO(Q2~ zEC*SAXpii%y*ZP_C>gg5e2*}MrQ+OoJO7zVJN(n9=SnR$x585_KG#iPEqXdT3rq>r zGMxDWx46RflG{zF=?C(R@$xBz$dZ*mxyi4GwB8BjSKB(!b=%v0R|UVT_9t{YIAKI+ zF8Cawi+f`^7G1gzHmS-Duv-b&3fHOg`yw5G zwahfx?+cho{6I0V#Z4<7aDOYn$v(GUNg1yt=XE<#d2MUJ$WAp=TBcASYh(79p?O{b{*>Js3-;(c63j{&J8PE z*(9aEGyqDNCpVg|D|;lyWhswq%>88c9f#>$pw;#o55IV6`!V6?aA{En1Hy z%(~OIt*i1X2HB=>ynpaeV)9)22cMlJRS($mKR!~@H83l$r!oaSZg1z(Fl7LTf2m$EM@PtFA~{+OEC!NQ90VSr~s`xv%XX7%e3nC45;OImCve$$|ioO=H7At z#4WFdN*Pa0Prq}Ks*^WpNQ{!E7n!^63xD})f`BqTghivCTva>Ont%697rC&j>J(n1 z#a(L%xpzAQ`s-qJrx)X%)q32!6VcSvBquIhiLHrsPLtPC(&Tx^`)5h+d!2RuZKm5}*OTBJpH?faMfrywXkB^3|h1++XGk@89=}t_%N74omg3G`CaBOVCWEc8>u)d}y^*vcpY>ZHX_>g=?Eb+Ax;BrERZG5-Aq_ccF) z+x$zT;RRQ?Yz3bo|G!_^w~zV!;y*ph_eC}}7ko`^GGbV~^}3s28=R)fMgtAK(R5~z zEiXMIAI(Ic`g>(OJ2eoRgpWL4KXz`S;A>1)YNtJoqO|mPl{xN#PJsLbI^&gMK7W2P zN22&67TdQ^-pAHLUIRj?^?^4dk=~Lq!l|k$y~sse_tez&FjE#b3>d&^ zFS~p;SaMd6A>_*?Nyg32Odq`d_s2hP16K<6gr7p(7Z$*UHBEJO1;gLb^nWa<$TOdB z{qr9EZ`SR;*K}9^sngu||8$va7Z)i{9Q zlc0S6AVdAusVnsVnEhh4I41DsNo|_|)sSyPv~9+tU}oWgoWF0y-alTCvhJ0d8Qp$d z15F~Gr~Ln6^S*EB=FuzBwlt3$ng4O&eZ|VB?n%5m{f|ri@;>!n^DLH_4*eB8{{3wO zhPK!n|6jZPe|gsy$Fw;={xfd;FR%39;pcyQnZ3sti8E@76c#dT9Ic=L(k#z_{TuGi zUb}Nqa^`|*V024y(mu=Nn~-w39kELrP5BlifK~RD=qhs6ZI_3WXsQ z^;E}?^PgV>wPlJb`OZMl-{-(SjBwPjmh-u{nFnDgbKmeH$)o*oM3btr@>3P+9Etr7$z19W&KF}k)QwM7GY~G4g;3_3xx~5mmpUqqh8I#`1v$nl###} zZ&dq6^?xWzDV~7>O4n(NS7MF8A44uCC5@&xo5W8)*MR}V55gn4@C9jEE^1z5QmcgU%1oSd!`%jlDigV@?Z z)`hTd)3z(QTKL9#dU0@+q~R=HYqU~ZIt1g3f4NfoUEucBGssZ?j}_*JU5VjmW@2LK zR%Z{+UGJKjN@VW!;J4h0%gHg!#~pP@T0qHIia&99MqlG4fwfaXB3+P3VsXK7DIeLL znZOOvS81Gqb(@BWAVwN%`V50)6}JO#e(7hdl~Tk$sjWYL$aqV-1BPjX4x~aSq+O;Z zWu}q0*nHH}8QYFJeXQjZ>ohO{Ey)c!`Wc(?Mm7VU6aLHrQqAFl8w7Gme5WY`T3>WT zA0e=;yfx_B?W*su-k$8V;6py)h}#)o40O07Z#^tLGHMzXeYJnbtzRE_#9d7VoIPLzMCLSm~KHeW()N|EK_52-*fPG(H?(LN;wu zU~36KL4~>a!_nOp+jXfC)xg~sMx!a9|so$_GBd%&{X}M_^3=Co! z>cp}~Hwcd&F@OB%%eaRJU!pLM&zik;aVYovrw^H#N;SNrqoYd6)LIV2S?P5^hkm?i z3gg8$3oqJhL&cq{^keh9x$0CX88zQq6Z7ij_Xh#Df*556EX~ar7Q133KjdkZY^|>X z3>xm~roFsi@^tmnnrtpK`H2AbciP#(uW_sQyBG$1*ZUH}4pB`85B|iBUn78a1qaiP z-MFUSp%5{pzZ292-#wWcJ{kX2`>qKo{Lrb%W3@NpmUoe zagGd0$VeXRdRRA}GkH{{UxI^9DI(sZU>fC3W=Wq5q7Idwikjx#t;AxgQu3HZRJ&4p zl6+XKss|XvudK9<1$+a+p^&d$Usk-zEx_$LZV8n*;TUyvbX@kAtO#KjBP>hLBP{fd zszW_JYs<^u0=zNJ(8TgBzh^TwH3Qb&y9du-4<-oCz9 zzkM4k_dye@F$xSpVz$-DTq5YGFP_+)G&y|u2t=odAYRqjCxE~$_U5JJ=P&(WOIN;e z7E)%Y#p;t4DX15Qi!Av|3osb`Q#EZcP>qO)a8qB7(jqtxEcoj5uL%uKJW`hOd7Ky8 zMBeDA*wucRJ|70o$MoFJ+S*7L^_Q=V)r{fBCqm&K5~5T(U=F;UQ>Us zt)Z!!={UUjZESo!u(Or4;HXzYw$&w$ZhQnY~h=}3Nq#Tq3RT3aR>%0`KZ^6-L)%I1VgQLV zxt;D+K#;8}@VMU0)|VnK6VEi9W>;HEK<@rP0*dIDO?^Q}+}-YA!BP}8PRNs#s``ll zQo&;W?q#OFEXNS#>ebiO!EX6(L4|IU8}&h#jm}a|4hX{I3^;J`t!BTbxA$?sscBkT z2FE(r*QKD~f0V7qy@_XU?LC`Z5sCB~S! z)PtsunN>FQEY0Our7h_E?Qy^hoD2*M+>2N{vdkK@lGy%)j1$pyFGN(W*3`>dBM0V_ zNO(a$_OTqb;e6X}hol*I9R44H_@s z3!)sid6};ZZ*B-|Lt@h_zabJdB(KU}IYS)v^;^yzgVyZ|)xJSvzu9X0m>sVPuRkj&`}r z!4a*ur}n(iZWJaf*=+{K4I542f*xe<+q_YprZ`hw=gDE{V-lTx#}^C7@xClxFZ((! 
zgy`B-kwpo2^a3oCCEH^^vq7TK&;=2+5II|AS-SH<7lRrhc?kL%;JCRZTOx{GXYT0XA@!5H4K3jSy)SBSqgf_SRNru`kqlS9u)vfRxHiivB+v-TvVxb@0)Y}GdH5^*Y zku+u~p)&l0|Jr>G(suW3wyFILQ2u(he|>$Hd&g~{vflz5s6g=o?yHuj>PiQ<->e<$ zLoJH4 zoLC8FiYF8KEA@||$Ih=O!Bk&zl*4ja8(lX>sT)JoEoiH7W$@f(tp50P7!7hqzKEtE zi`lv~HsgEkNzF4(fhS*aJFD6fYyB`(n4^Rewd&EA0lE1 zrt;9<)?Qi5@)KPf8Ha->Gw9~cIOqinJ}1Vj>7ruEngqtbb`2Z;h>p`=yGD0dE5Phu zyGHuXz&Qc;N7n+5hv=3EgTmg!RJS@>0L)dQDd?FqWwo_kY-~Py3m0tH<03D!$-Yvr zgh)0&%y5e3n^}5EYXaJyWG%+ea;O^rWg~@K@Va74$%ujxPE#`ScowhAm zM$J@K-}iAFuB@?flqQlT4^w!EtSki*ck2+^AX$c$$|-1hbH{d}u=DBDAQ+b8m0mP; zJ$I$}{dAURYS~wE{_s^>WhKcB&inKrL8tX+I6;R)+vLd_D0;;9+17jdI}W%}GjhK) zH@_Pui`>eT!AuzVxbKnsmhWGQYwhV0J?$#}a~s$v4u;gzoVwKrEvIf>+c<;Q#vR|Y zbrop6Yn|OOm{em<(4-EaS`HFRA3-QV9A36Q<~Y3NSW})p93juLt1PU?x`M_t_FGC)o61YTCSWEzf9n?0$bYt+5TacdEO%YGB?@}z zxjA`+LanU20W-tMZ#b;jB}~EGks8m`UZl}+`kF!O9ns}wFJDo_$T36tivI}3Gb6FP z#I^5XCoLs((3Z2?m#$H>hoUbt`Wx!&>kp$hl*O1DA_ShVz%trGh`wU(BS)<7AEqkY zJlS5=?d+@VQa0nPB`;q4#a>)pz0%3MnU%>m|L4!8Rt*CUm6n%0ZUjQoknke0z`NQn zqBc0sqMx|B9Rs;7_#WG>%H=&P}7eM;&1?U%3~AI6^26A4XUWmtIDVT}|4(f8*?Bf&2{s*QuNVn%9BZ*)^4w+gn?C8?Uy8hKAOclgQBC_4W2H zCAOvXbTa`XysFH7$C*JW0IX4J)Oqq^V@X)*At8tPm(T8v@Tv_9n9rSHDx9k~bHw+y z4zwXN)WwJs*rb?O(VjrE3=q17&CDdiGSe5{BGurX7KbX5@A*aCXA< z@sFQAmB4T0;yQAv-X?=2uLaPf%g$g(LIR8LRVKd;5$mzDxsP1M2f16shI%??ASD>a zQfC@>L8lW?cK%wU)Bf7&(lSlY8uVOOdpU94;Z0L_<7#$c33^tIz1yq=f8@Tx@ckRg z>UPAy7BS$DT|`)n=~-q;33G8vWpUc|)ai5Qnei~hxU7%*&x-@yJ4DaE>SYnTE9~sVa%f5ZoCyli zQwDdc&D#6x48!i!BK4AYE?bpPwUiP!S2+9VqBp*tGNA^(-Zv;~~McMQ#r?WH;G&<+ULwkZy2yH46OD=UFQlGf3|R5g|81_>myRN@dRq)7cc6F0LNn9;0j zI;}}&$M7#4Ea|jcW=OLN;1F^kaLNNH;bO$aVYdweNY$AF9=n3MhQ=t^L%YS-U$V?6{9wZv zq=Ard7r24Dc2SxP^~TF3#;Fza<#Qu#`GX3Oav-<% z&h89bGvlPhuUE)$>4Eujl4!0?C$*j%!Q^@CGN|Xex@eiUxBFY*lllzv5m)9ek@q~- z>s}!7FUi)EHmbU}2;iAu)BxuSF-i22n;w~vr^l1W1HDKn4ELH{NYR*AHA!`Ryu5H% z*PkwgUqmIQYF}sNT%9i~w_UzPn2#PS7^>L6|4l~5dhBs{0;Qnh!GW~rMlE`0kYA`s z$V)m|#d)3KHBQa9=EH>1{7tBg7+uyD0b&BR6R-q?@mNKTr)6EnhQ1eGOzyO< z_Nos**<(wii-;*X2n=c1!xF}DK#PWGAhj`{L8dlS(m$=|M5i!gvia}nlyu9ssVkOG z_6{1X+FMxzmS&B_}TlN(%!$Ruh?=bJ(o+cec_0<1fyy9(aDM8`V zK7`=JR;3CWGj~L%egHzS9?(Lj!@FcOY z^kGV&8*N2YGN}522LLM2lU>zYxsK6|*tM()`faSSma!6|i8WnYu zUS6z=ciqk&sEHsqU=ZL3{`NuaCqEfaKbb2`5l!o5K;lO%+CA9V3%!AkC`NY|=UijH3Jw~%7`}#XwOo@`AfQUY&Q-HRhv!}rQ&a5#IXTH9yoesLU`(c_ z$#Kkf`mLC*CwTc%Ktx8&5F5Oe$V25XxYE=Mh$Se-YaVANs6=CYt_R-4zqA-?)n*zK zfMQyf;q=7|7lML~@CpUmqbghb|7#IhR|=+C?_v`+bdPZX^`6+FcDTk5=r$S8mOz%a z$pu1+a4shynnx__|M#VH1NUJr{6~oH7QdG2Efr?T_{h?U@~^+H_&?e7+aVEtGj9ks zZHZiqS2B^1xTa-vCT=BF)y&+S>eB1%VbM(q$7vi43=kx};ETXrnsdIbVQuq>38Wjw zikn-|L=K&venK1ekH{?@zRm zlG=e&a=E*IajF~HlV1{(;eSEwmUeGkz|+nko}+-ynOF(viDym|O4ER72ezWX?ID7l zNR3=}FA}~7Tn-18cDDB){Ka94e+l++tBn`3yl&jMGXua&(cDKI>|lpx|0XSx)v`N$ z?!ZM(vDQFpjuo1<`bEy@SE87VtCb$Q^i|c>oRR3Eo{TeZx1(=&Mxm7J(JXd*GQmQA z-|_Ma=7t6thNPoG4S)t*7o?;oitl0t7+*08|DPtK;&AzYRsdvIg4pySl1UPw%d1L+Vof1Po@ zDE4=g^Vh?S3~S^Y?0tJ2HbG7A*lAeOpWl0?cIl3>f4F>~wq?lk=gEY^@$z z{-(;Oc(2`zkIhnUS9nw){z%R*z-2-*VeL{MA0?{sQ%X0kFeQG*j17Sx@86gow{83M z`(f34U$p9%5bAsiH|eJ(k3}r39CIiL8`aS%DJjvf=0^2BqDKWdgl(F zG#nshTELU)n3b<2ySN&HXUXdg6cO*Jz47_tIAJc`rTcY6epMBl@#haCM)UWRvo{{8`H8hUFiJD||LnakPX!?U z61h_u$dxzz5AumCc;2}OhiCJODjdPy^N!#wsywg zuoMHDirV|U6?N0?ri(t3$ttH6?d*{E@4M--XlaHk(6cf9$d8|iH+bK_I=3BO-(f%; z|3pV)L3W7C%>0H)+oV?dVfJIl6<>-CZWWSXfO>SHOPC&w9a~9pTpa3ByBv!>Fz6oS z;USEcZzHeDJFJLAiMEs!5gen6P#m{7T%f!0!Q)|kg5fpRgij2@(M{kS{=))0>03j> zm+kixcxK>Li{wLtA|9ND%$+r>Jtw{ZZ(ms|AtPq0F?tV=i9Tajl#-hL?)Dn}=c~|D z`7G?%AjeO9@57efZUPxrf5zK5-keSl+HqnxCGAvB=rs~hKyWR{QjU9a!)wNeU(W)G zoXI)gFqEG9<0by3F{|~;gb8vJWSg4+)dOD8NZZifA_Are=X!Ml!KxjzUj11_UAy2h 
zg{?wCXT{{?`oV`(&L7lRIdlcxCww9@ip$-$ok=N`(=Yh>c-P3q44SainrdtH8=RzU z;bhQNRn^o|3k(h?L8P-+n$!`CO(OjVvY-z_Z6e$5d?fS46iTK=_t?otpT9E>d2_8* z$l&iNzyluwh2gVhJ*A(A13?`g)^kqKWbDfx9_&VSOp$F{etTZeMB`XQg8e|xvhV#~ zz{N@ZZ%F4t&id{(uHv|B1dh0Ya1*$CN=!e}(@DTf@C5HotHw}k2_A>}Lp8=vUZeLR zK3lfl6DJk2UA_7!`H7I*M=pm{(S`i@)O>jO*=qY)Z`|U@7IZm9Gs1W!umL7zGPL(h zx9IRNIb8#y2!J!!PaOqKF{WrLk?guDkB-ta#oW_>5D*hnI{rG4NI&Ac*G-D+TcY-RTOn927K#(4c}Kyu?N_F`nFJv zC+%%@B^dZRoj42%Otfa+98@nL(q<*&hmErSLgiXtPE30+-Xbdd^Vd>%d#lY}!Qbpg z<$v!XnY9>qgHDf;_M)T&&>DwK&M!m{j#UC`b!Py3fg)9qW?7VvNid;i)#WXA_#-97 zK+9|GckJ_-wx!? zZ=+OLx%WA6|GRj~N$n^AH>{!z!=8wSM({Q@_QUp5LrG(rJ}81F`B06VWg#jN%m2DFIj(nGh30hPwV&Fj}){1?n^72AFwz zJ}jV5VX(fwp~oErl!JtB)YesRNJ&DC1hCfw4RDW{1RU`r^lv8!9Q{`25{yWkIFyzn*KG0T0iE6Kd~- z?=|}hECwRpucHG&pvAZT?0KSG)G|PuuEbF_Ja*M98?|p55Mp4oWKEV1I8T2&EOX$% zfl@8fxg8I=Yp0jpMTo?Pi%}LMiobQ9=V&?|M0S9Y!_09P(+o;EdZV#}_cIUiVKuI(??k6T{yzb(+ zL-D6q;2a=*4LX0i`SWcajLwm0skP4Tm{m-<_P@X?%R^qvPDB&aJh2GQ9lHKV;DE$n)>b2l;qt;89_lQm<6SzWF!4klxwfH>@U6}y#eiv zVG^D~YSlI_&Hy?#J@t@`ihJxdM)QQp0(rn{D?yE>6_7LqS?PM=a5Jt5qo&X}RTxlu z?}~ucGe$NSXbENZJ_nj2T6+sL$S`Gi7@%=D19v$cMb~OwL44R9cmBloFh8mObVT*1 z3mc35jMGz>k5T(c%RipYaq+6?=Gl{60COOr-2`NTM>NoR`6biqzUy;ykJtU0_8L6Q zq7BdP013j65xS!I;ZhIu=E=kim@p5X!;%tr?`E`!Y(3C$O4h|(isJPxb%zG*LT}J` zt$v2fW^0k-;FhEBc*orDq!^bn)11O+HGUOnOr@l-^`5HVAn(R5226yTRaZB_ONM(or*rscSsFe1@T>2IYIXY0}skWBGjtOp(aW{m`8~O=*dp z#N-XFbZ>79k(jT;%!xIRbHjvGnBY@c8sl0py*>WEW>08Ya#2@Ht-Z6e$|8F@dP_k= zV-{rUncTvm6yCW1iIhFi$RbyCcWc`pq%kz~b;9|Oq2~^CK%hv}J-*B)pb345XL%>4 z>X$=t{Bby5R;H|8;&}ZeC>0oJ)PA6dPTu}Bg+XFb6Kq@KmK!+ z=hiCi`*+{%g`x~r%ouMsY1|=;FkR$!9uRdLgmKqjPb#)V-c6dWT}_X9Vkx2nbIa|k zCc9550{-@G>_O5Yvp-wGs$0i#pnt%{2S!{S_p|zVmb5(mL8mBBm^3?vlF5U!+UfIm z&b_mmVDWpWnTyZIhlRlU9JISaF^O0p#)4M^%*ir>BuMI%qLdU0 zg%W0bX)R-J-UKE2Y^z7i*`tRZ#NJ^R4NS}HMG8gvq63Hga&x<;^*|bBOp5!=R;j-{ zlbV67RGEU__EO;E+}s?}&ikZj*Es|X@40qba8TOJO*4bkZ7)8Q!WZH5u(ikFEeHpT zH`}rb-Ok)VZ!T*Ay~8{7I&0E3)|j(5baJ+()~RHSs$Y`oHmgDyFP(5^+Fo0w24Ky* zY79(Cj)&+J>QAne_^tqyCn+J3m}Gcb@yWB_sf+Av-c?NV%d=7_sU1+Jp!QT`h)Ekk z6cUDHLz4GO6-^E6o3n(p)`}LrN$c(EY6!92ioI)~t$1;ocbC43P<s!d%W|sQs6`Ee1B1hRU5p(I2qaP7`yKWWsdEh4W>d+2d1}C62 zKs0R;C0BAQlzdmYb&hiK(=01h?ZCjm+QJH_!jFoWORz^Lr%G4I{iA1LU=M&k1Gwp< zO36HeCK4?K9i6D%>?+fo#lbvO^SqP9+0&^fkExpFXwp_(nsBcoq}bir_@X6R?A-Km z*`64wFNH9;blV`MuYSBCypMqz`|x=#2DKO(R|Xg?Q}~v9Km$X=Vm?*$RTq zF{A0w;M8-SB+Qewd!JDLWe(wrh(8Ax=LA&!z-2dkG>Sq1#mZJN4L(dBdnk^}u z=dpD@B9bTcM1Gb`QCXOpidb3UKL#~0x)Nh{)?;2N3=Xsr2UZoPCg|^;KcShXn$!-T zeWxlU+We(mNi#bw_wiFJjYiYFvOv55Vn*j(D`--#&*3=t9ME=QI=SA@cb-rPkao+J z_mF^#z^l^H*#ankXS1CwIQ1Pm?}&}^K$jbyh=II@Qb5}{kEH$(FD2+EL_gKpZU2ZV z-HKfBnM?}#LR-4oW(&Fc!RRPI#O^TlU#+?6JRqfjFqrH%dXw)AD@{g$pw|u7=~}=U zdThkScR}d(a}v~3?DHtQCva2daOB7wORU2f(i>djU)h6 zbEcBcEiQH`8vA+NBX^SvW4en0E4|p{rHBWkUt>KNyOEKSXkokEAz)iOFTQ8BweekI zG^X)aO&Q#*u_V7*vXqnak?-t*|Di_MkQ1BGKNX`;h0s?iFPxT6Dv7Z_dHBeyW|yT_ zVc0Y^ev$O&?DgAFbd2m+8{94X$$C6)4Qdr=E|fv%Ag zhMQyP(_&ZT7HoI4cE-Rm*#{JS(2HmhBfroiCr#>m1D8JL)wVyJPb6Q@kMv);V7vH< zt+zxDAZzT%aR2y+vW?#e?;1>OA7kiC!cC>2fs&C?G&FnU4>|UA8OYFlrlPlzQ&%+0p(ZrlstIiel4V-^J*-(FRc4I#I=Tg2_+J3F{hxBzkuBsZp|uw z5Sy*um@eBhi`_0NF)q-ip{Lj9x64BW3jQKH1stYsRN*ai4Mzmclrsem6I#c zHdv4M53ZuvBjggp`{vvv9r2}qF;!kw6CzsE!iCpiPJOE7cl>^7`Wo2bzg3(goccArI<)=7l zzO=O5nfLbl52ij`YMK(Lx+Cn*c`gpW!Ufe_b^}eA4L$#J;t#6@>gqOIsr~1$o92gqeF}M{&IkXV3iQ_t|1X^7;-mk+y37pF zz59?|qv`+gDB!b1a>(D`w+ydoru~<`;`QKPB<}k_zJHhh4_U>&Q~rAvByvMyA7AE2 zIQ;-RIg|TegSzM)o2%mg^Y-q0?TO@L^RCtW!#F=pQ)3NtA(NjxzScIH{!e!JiGy*i zUuQdB@-QTgr~}D(2^40(N_|s9!;e9uy=xKOEa(9>87Dv-V)Gn4E+m;3c=KomZ zwV$7oX-BxwoG`Dwj*)vpjE>XKx{>8HkiEaaWrg}Z{y`bS)@hScm6(j5|GvOwDkrW^ 
z)zZgt>ev_4f?L{)abHZe>>2EJqI~*FnroBiIhX=!D}PuCvYyKRlBf0QV2lJ~P$fb$ z;e@{6TDj8Gr3AR=|kUy8cF4VxP{ zfFE2ltV{Mj<8{MPXS6lOd*{~m0Lpd_Y1O1dx5OuzygsK5Y_F3Fi?90?rLaEqWEW`F zyFoLnNno&#it^FK7^tnLKvI0v^vNn(Vj{a)gzq%fvv7oH-}7I9P<`V;Ul+C z3Gzi_rROgJ#G9pf9j)74Z>bDD+-ke?E=4+wm0cHa(;3IeG!zTGzdfCHyKhy)p@nOI z>yKmn@lrliw%R6Hy&v;bE^qQ-QXcWwW{qg{AAV5wef6}Yo|w{)F9PX!2`E9EG<5AMOSWKk*sR=?9?~sFcID~cO&?3>o!oZZYz|cw4bHMh?A92K1MCZ zf=?-Mk(6xWhLkyIM8SspDIIaB; ze_{`;T+z@ojHg_F&)H&7MZdNEh@xG`cPGZCKh(4F6l3QjVPx7&53z5ktNtWsj%)4K z_xSe3^JjuDGX;6~UNlcBnY%Zb?&qs6i&bk(%SClnL`R+d_uUqtyK)0}RpV<*Pl~d} zm3A;A;+<8meqrWRq+B;r`MOa?Y3%w#0ZCC;`IE6!Zf7j{C9W}yDADX3jq0kD_dW4( z1E&GgM?Yyseiq=~PV(Th@322<6IkY`_AFIzJClYO^C0=BaoEr7k=5wP(X$*ZOupj* zMm3ePC-hc-#iX;&5$fG@ZRztyT5tNtiK8qMo-`N@@?X6sXtlvAbwru1rO?~6tyv-5 zP9{-V{6WA$IuW7C7PWPi`4szGF(RxEeNhHcs8Hro=}DqT()_E9XQNXqvF4s^5=yOy zTUsbT*@|(V4im68%I1J~16n-?a`l zmL)RX7N2V?U5p|mIHGq(;&Z4iGec9|dtZDWtg_bTwEhx!jHSGQT6V0VV~SUm(e%{O zf}T2>`^zMWYyMuwWP|cP4}R*p3)a{L6B)3WgAc~hqyEFZE2aEgovp!p{rxlv(J|`w z`EEy**R*Fjm_A0Fu9JFkTT({ZI-&}BOooc~=-~tY&lcSSyTyu=3*vn^@o@pNg@XTw zy0;99v)lT-6XH0*f`<;lJ-8;gyK8WF_arzZxNC6NhQ=YdLvV-S!QC5}o%@_Q_c_nZ z)ce$1^JTiaQmG_d>AtSD*Iu&M?;pKnb+wo@dF-qnL2Kt8BL~5HUWeyHFr`vrMr(;z z7*I=j5{2QZR9Pgh3Tv3vuM{#Y^u>G>B*qJ|oovEngL`zN6Em5Az4gO^>(uYGx2s4p zBo3uH#e*9nia4qv?!X~zL76c1qQj97GN&_EP_T0vu0kbX zD1;T6k>+;b$FIw0_S?y2ZvO*XI&2CEJ^s&E3OP8UAHl3N1O|C0FED$MWzm-7Sy8kl zsy%=3dX)E4nLaOb)m)EbKd-CYT=j{0X%~BA0S@iQ^mM#CAR>83ni7Pt3LmHXP+j-! zulvqwE-oNu|Nh>nZA{R!{#m#aVaY{L-xp3asgPvC4??89pF=*Xn?%R)tbdqW%@e73 zk~LX%eaC;9MRK3D`vun+dMO8w=^;h#s~^=CVllTv&@6vNXF;+!udpa1G_u-`!^0dK z^jN@pt?d#l^p$=SohZl_YJ!!Qh0n+aA}r-z^2kkA#^lYr@B|Wb`__)DO|O zp*1Sj9-E&gEYS{c^0URE`uO48ey?P`=CpC*@Cnh2D%rDd?8vZ?;&`~baB^)H)yrjN_=lg)QxY5rzd}Y~H4R%=ji9oYnClJeInIZ; zDTena(>8Czj(+8?OolYEo@;d8TsX*)#FEj{sR2v;v0R{iK%b*QoMs*#b8y34ljA|S zI~jsWdhfS2BG4Ob7e4Xk%d~)(_dAk7Nan^UY*)E^FIg(oe!c@w^lqdu2)TO9G2<4` zH2OP-&t1?>%fM>jcD0732Vd``%d313_;z#HYox>A9wo5c*6<^RI!vs#XRjMccz@GJ zDsk9n@G5x2hmYxO_E#CGA)_v@?5A+J!diGQVbRH|xU#!%5M>a9dX#R2DLppIUycOT z*;!^3iPx<1dYCThSHhHM%Ea?R>gJq>-CduT>itiH2jiVZbORW>Ogx>Dl`obFD?a4^W~4)p)}e61F<=38_7w9M&E96dAe_OzCYt9*O-#CQyu*pUGh6 zx$|U@*jypQWmDBu-{rL=gQrp2`yUt>n}?*G>j$*0+?Z9;NUGwvu>a z74gij0PV}ZMmn2a2Ro(Z=2`3NX|CidOJeEB@RM)FyQmnPlOZp}QB%C`AA3)m2dr}3 zQ?9pl=Mt&A?fzgFEqDg-=L~Jc#a8qdt9c}0>G;^J=h+25w{P}3g%mJI2q$11d4b!r z$T-olMpBhsT)>9w9p(hrp?7?X3Nxn6PT{Tt~cF*@!Fbgmgq}bW$g4%X~E0 zXeFO1gZ2SB`&+O4nvadURI50y6;oMKnmShmct`fB`;gcS z^H$4CJNvS;;YEN@z|#U}Zn+cG6LdoJ@WB`Ab$;K<8Jan)n&bo!Vd}Qk~0bB{LVlvWoJYf_&*+nh<<~?X0MmqU!Ns z#r%^iSd7j^4VUss$&$N7kjP+)rKA~ErlL-B=*;vJDs(P1)&3V8e2R5b3@;K&$+fMl zv&5*s5@Pm^x{2!Y6>iIU34RFePF9n`@gVYRLeA_*L!*xjeFAxyK7r58`6avupQUU2 z%^4eku(AJjK3S8BU>OsRRq017EQ!Z<(R*g!*F7u-+>vy@k}%9d_g5T>caD=*@HS_wt&pW&|n1FiSpqdF6I_Aiy2@ZX(Ejn!XBemS|HTOF_d_S3c0?MZ! 
zQT$R`MpUka80%kF=t^FAr!AiUpCsAmjbPK_fH)xw~npK;2->r!j86=qZar@k^iF4--REP)G$@4VD1kYumnZGKg}m z!0Wa8i+PXtMSn#zFlhS|il?;h_FSQdMmkZINB-KxHf<(i{l1<=g5h6%#)C(ay_6LA zvRHPauwXxDvbFEm*kG8#L6%2j@7!TwK~frHlQrj1qe!KWXGS$G4xbL*i-IV#k(&`O zGA9flv#$*ICS;uI^LEUSY4K&B=0?%ELXy8ccb9gub$HJ#rH4~%t4(-iIB}2|&aZdC z!!)BNc7%OUpX8UQPB9{#D~1u>bF)y?14iWK9iQ|M9f%;)DzXa!YqhHLEBU(t2 z#h8t&waLtWG{5uI={O$x%qH~$j<I z^h*ysrhBK2$RJJ-{j&sY3HzUT-^p7DiwI6HxC;LXO&W)1zkR`Y!k4<9)E zKC5y|!my`v-vPr6Ts64Dp-Gl=?%T_n`P5Zs5lX1odxbOr9@yl6@#DpT%b~;62&5qQ zh<$d2_*Nh<{gO$TcT8nLzM>3f>L5Ik;f{k8`RvA6o0-{2j}VE_ScfOm&_h|%NR)x# zf^FIgd}XIPX;9W^A{q9T)_Q}5l)JYp zM;4bF^4+&@oJLOV8Z6s0z^v>;@Y>{Mq56|byv!9|FQv1jHV3|#SW8<73%yAH!hg`5 z8-81%GuJOFn_5Z_kpNqwKuOYdsbGKj@z@|&oMX*i+S%d9|A=-$mBK^i7p1$F+D0eU zcFcRen$ci`yToVI=aXE6GP4ueb62ey=Ng0A$;`oGud&DmLuT%kt%t3q_^YK0(L=K# z$_YHq_Ckg#lo(Hhis)VSCJS!>PkAfQ5EB=8ofX^yvdqcju&kX>?HM6jOx?p3OX2db zx9auK!kpp-{YsH=6r|zjq+u($LVdnQJH#`V(A3|CAo8t3ic-Mo5XWwMrhCKnrnH84 z51uVvGyM);pXAhiPx)?V5ae!Py}nGG?v~{`Snl;{tV-s$D}4T>W$%k<-ualJaM1kf zKC{^%Ei(BzMh=4{YLGmJyqEV_#gb%j5Lz;x6Ghhs@41E9_KJ{l?Ux{tfzZ&@R~c3# z(2uayG?dh?v_K0w7vdp}xy&gGWIk#t46sJdu86JmnKVEW<=C~b8q_nTTfa7wXf=WV z`IBP4k-(8S3GX4c!K3My@}gFGtG1K7jx{RIemDNLqk*?4-y1HY1%y2K294##A)~P0 zY%Q>ZN*5e^gfNbf)K^0Y(Zp2?zD1!yz#86=T5LD4plCmWE#!lvvBC)Pea2Mz({+8)kSgOGY6DM|H@u|)IDg&O~2wuI=gs*M4X7v#hhOjM2ivZ)%5Zc ze|>3f$ta&?rB7Am6!YFhQoxtEq}>1m0tp`v<7fQ7x#1FA*Of6DvmMxX@)F*C!@%Fd z)U^?n@^m9QL=EpfjfQhoFNoFUBW@Izuk&C;@O?^6k|@Fm#?=nWvew|c_$Dah$qI&- zuwlW$yA}PpxAsQ~G@=#j0jqR0_jflL>sOo>h!~x>H~z(x$)Rt-v0KWc zDC!Pkmv|VrC1DB{<3>oNkRg+LO^ZvA(Swt#Q1`%W&7#iwv%%ZqAt5ql2?Tv*+9V@3 zHGD*k>FZKs?cw$_{EW><_8}jaS}hf-3NnAW++;VVd_oFRxe}&USE{`3368YC3OT~y z$eJ9Vv(uL7Cyscw&`F}PWKFrKu~o}1EvPCT)dz;>rjZ?@ZU zxKe^G)bXO$4NV&3X4okkgByIu8tfh(+hZ<;=Ip|uUNg{0s$~ExwwV@$N z3C-f%-jz9++{2sk_;9`1=(74Xn%)8psf}KoS2&6kPZ^5deK{ej1Xdfe78~KX+DQreMFcWINrI5P1A+P&UH7xx$7;2v3h{f8wVl=L6h@S%BeKO+P`)84G2>WVQJOZcX2q$l z{9epFkYxjRfko^T7#xHAS537bk?uuda3O3vtPOCPP4U9-;t)VZK zMAhfZdeZI=pc42QZCBpq2rop+LRj>?Z`Cq!wf2)KKdjisMMjm?Inmk6H38JLK8H(W zT1`OLBQVY%l*4L1#f7KK4##>*RYA?3vlUG1_UCTIQomJ+j+G|n0fn)f38sO`2}{|F zUDl2rZY<0`_et$*XAi$oec0)IY^P;Q@?-r?^M*y|;nLDDjT@F;c+W66N-?v0$1(T4 z;#t5WgFV&wLE|x(6tRfEcA1DY?e@QKqAmT8!%h<(K2*EkH(4^ix!0ma*LXi&^A8=w zkB?#>24|m_J>&?(4ALshc$R!>=tM^lu2<02k(%(ddTMBG>Zl###3Z}Av7HJ88V?s* z>Px3!p`Mooyqu+*Ub}LN72&*Ui#isvs;uF5z;7z4~m$nGC7iNARcmf!o|dQ++}h{*9m@9`(i9 z1zx$QDs)Jn>*#~XL~DK29)$>*F(xCXyoqm|YzUF(+(P|p-o<~30X%pxWZe_PyLwbX z(*4j9ulvZjiHwYs5%SKDjy=ArPDSqne4G+IT=v6Ym|%l=SqPi*_y)`Q^;lyd`LnJo z4Ed|Hq^9&u^govm>JiyvO{M*c8%Hd)#I5`lN!)qp^Qx#J>sBfS4oZ-`aQF`I`uGB@ zM%CrmNiH0@u;0Rdp4w80v1LHDxTKAk4Ku_a)UMI)Blu4jgq0dg>`&L<%-BmiXBoKL z?XHQzKC-E4iRWU83*+I=lr7ITc1?zar5TOCK>+QvZZw3KxuPe_4$@vz5tb`t-sV{R zzQ-fFAy;Qf?l*inh~ne5lrkU30M`SOkR(CEYU7DS9>mT6O!DSV4$Y>Y5 zXK-z}F$##!NoM#^8*E>JbY#)9wJDWO>lZL}=FAoo1uw&WiQ&O(wm9J$RCT>}az6T+^ zFQzQA3N6}ks@-s{&X^UiX@f!ELX4*94F~eKBBlOr?c}aFI&vfu|4|mlBQt-H*>+2Z z*%k&cEAcG{e7y>n!jp2TR<*c%6Yvf3I9z2WUeWg*eU)flpR+QzmmoS9HKm!#*PnXN z)k*lbqD+hri4txU^>p)``mEFiDsKCf0(!4(Fbc!RFIfHaLX)5SeE}Ur8NAXr8{LET zHVB&5)YcBjf$W4fW<^!qH+d26{BQ6}>gaM^}A7W1xXe zdX|^7Vl{tiAGWYi6lzwsMLTDep2umV2hZ;e4r@W|Y&)9D8TPr%mJYjDA7U_Z?&jtU zO9rvVIkGkZHdoBi_dVj%a+Bo0AJ&h$M~Rh-Z_2Q%_U4TUrF(O+qQP`p-Q-O3$!F_i;iEz|9WuNqMBhmK0<{8a@Lmb~@Nu8hg9 zlkNL`H+7WO5@gA#F57zs`V_*Go9r+Aw1iah>pcp^psfmx6U33`)YjEWB_O4Ly%yKZ z(ZE&{DZQ1%@g^yJloNb85H$5Ya-uz8d#&QMm5dYuQ~IrO7J}|Jy8DXHQ>Ut~O`j`L zt?}Yy{`b+cK1t-XD$7M{FY62nO1CC*stD-WG7d5Du=;VVP^g{w#qZ_%8{69UOpwqTG?)|1(-NGD1Q)46{eVGzK>I_Mrgnbn$Aar<6c)($E; zI)`n$eUGa33n=wOu=o+_`QuBvlY_j3F>MACTn$7N?JPhHDn^YcgBkUm< 
zYLza*dNCj0s?BN;D-gc<(;0ab^4c`ysZK2p+h=wMym%|{S&ng?LH)&8=Js*&*HrSB=fbYW zK?-y>^i~eEx(?YN6qj<4V=?8yO}aN#R)wnIQEvvKlnq(iO1L=xZ!(e{Qpz1|fnYhD z>6?Tc&&!`?a*8hOoRY%}I+{WL9KrRr$IQ#Y zY;Fh*^{hVlJ=yb+3$&067Y)$raE2eqDa!AFrpWieg$cQz_NH!kpet+NjP?Z}0w0JA zeNdW7k5u38*(a6V!-mc{H@ZZSpw<+ryZpX^+f8L-U{L&mFi&74QuNiRI6m&kzvhb5 zv|2hJ`W&j0*u7$?;7SM!Lox?y9*8+V&}p+IjVW3Qp9)@ZLX-G zj^@ms5sGHC@)^syB~G>UrTLuzu#=6eZ7!!4g^$~~8!?=5w^+$F>dL{zqt8HfBAczo zE}!j%46?11J2?c}S_yhW>mDi3^yn8=@ZM4JIO{*gpb}h%|Eym8rQ)()LZ{&0D@rf% z8Ui|3*Q18rx+vPY+9bqV)zR8E4JN9pCRxt?J{Qsr4#i5Oq?teEKV)Qmxu&v#&Ukwn z->l@BlymC)V%}aU*+aleSOC564D*I(xY^zRQBYs-Yi=&xuqlAUIl^|f@mGwW4PF9Y+Pl-=eR8ANZ zDCuW>H(EvFep5MK`2dvTrsZZQ#{1<;OId_ozq}NsHu}uRYhm=8W&Z}^@+a}Rj^1#q zPAGu{ixtL*8Lp&p)6Qw%ZuWZ+26CTUVh+34s@f?+K&eeg$)&2`y8J|bJB)2&ll@u8 zb;XdQl2`WfaC{2Qq-}BMv>751-Wo?-L8-Klshjsq={D$Ef#|Y?3RDCWV96+PpqptY zK+nPSDP1Fh$vN>)#@Fo;SWI3trb5J;>Yq%FMaryPn=;I};i6Vi#%3~T=UuD%r>k!6 zfIlcA`LQ@;`1CDAT5t*!nop=Y!#bK*%Ew=Kw+W3;WYb(WInK>JqZ8Rw_XBm}il1j` z!EA0G*{+L-EUfQkcgZMTrQyfb z^)KCk=eGU+wkzKG#K6L19<1odqu03p}FqWZCg#zz+S_1)-S|D5<> zouv=39Tu+~IUHQyz72bw**L%Z;b5yO3px5ml-ldoYqp40y3;DmcM|nQq!Jc-)-g)N zHDxwHWaL|wjhwQYP{?9HqMrdF9?7?e!5<99$6d^}>!sXJep=i2^&oi0=ph>A4;WW~ z0}|j3D&5V9pm`ZT2vT0Xjtri&U@eKo2W=)*@!{D&4+McU4_WcC5KSku zooaMx77TjdAVP*6K=uTj1OmC);9ING@xe2g$7s7R2i4!ZEu+@x=)2(j{L5NR%?ded zBe%NH_5X6V>XXBG!HYgbE!uz02mRy6iQoUfe|GTy(n-cYM-aha_W$t+qPdfHyY_#A zWI41`zC6e@t(|i%WMyaXH9W359r!QE-3Q3;iUw!r=hybKQXD?0NB1hp%jYT;#vtYh zL5TLZ|9*(zcRv^bv^z=Wo5+|RTXK_Eu>(D&q!5@%s;Mgs{NK5ZP`mbLJ-}TT{s)!0<3G=g4hgd* zPncIH4U&W5nmvP5@KgcQ7)SC6f}g?PjZJX3hqMVP&=J+~^IGTZW*q3*dhHDZ{|1Tw z^*9*ej}ZWw;QU%Xpdm32AM&5X@E_by9(Fj;3k16v{>Ozsxga701t^fU} z|E+8N8x!}hpZx#%k^UQ(5&OMUWLuHnH=MqjUsPmKyTe1NUtw5V^ea}x-#_5VlR&i+ z9N*Sod+mT;c#&xoFP6i%j&0Pwa5@%RRz?ek{g#GXs!L{!p(wq4fB^|u09%e4sXCiE z=17mV z?#=`CXoi+jE7Saot`DML#p-T`S};ez1T6q80Cm2=1_BxXCnv*_cua)=?6bkB55y!gYui20G-Qo zE{u-X*AU_J>o*3c9Y@iI%y^nv6XqBDOVQB|u4Et6TttS^3E&EBy!Il8cP96&+Sx`djeJ51ppw*A7T*!2l2VKTyQ8 z>dO-OVq@cr0je>AaAyD#-*3}==838xB)lmZqUDdEGY`CnpUzLpL3eid@B$S%UlJseVKIZszs{l)NB0;3}Tb z3RaOs?ekcY=q{0RmMYCQ2^PfYQlZ;Q;xK>%;a68~SKY@vi*7)CxRDd-(v}b7>d(t4 zAYV|mOg+OzY+#+kdU4(#pl1o$<9iLECPP5ZoBi%6`67zyrtm67{lrfo!yx_flRqzE z(azEvBWZy6EGQ12vZXcNIw1M!Q%Blz=)Rz5KN|G)>xfX5{lo?FfjZgC!Q=*8!#>LH zSn616I#xO*BA`Bq#+B-v-%#O#Ftpet7|Kg?X@h%B5#8%2@VTr^(qH@C`>dy)L8-8( zK38?F^-mC_cK&s@9_ak3qKbnV@Y}V}NQ_80zHjUI3Fjk*d9UnIpZSZQf99=#y3jCc zhSK$4$n6pR(e=bDY?YlGdJ=)q)I#BO#^rm@(C2tmY&VpTLXSHBtdH4BLdRVK|@0`0-F`ub6Q zR#6clAVin1RQUb-_qUtchl4J#n$`uZXzmNP)u5anrm^Q{Ly z0l}KRu*zhDoRg`o?IA$N`rNJODUO+ShmlyATd&>6^v41PII_&+kV+tj*d3L3ne0m{ zuc4u!BR}iVq;qg~wAW-95ue`ralUaKt(!G5(E#BQLa^z{q_iP-Nc+;qb<;^P(H^7zhKnx^~2fE0#5`Z~;c&Ujxk8gu^y2Zj2VHY9*85 zd~QCq+`m{RrpFI=Y<2>i({FfCiY^$~$iwK0Z`S{e^us(G%YQ|M(j%dT2Yn01e9loc zAtEG@@v+TMT*$C`)*F1S%3}oxbs43Fg^jlRov;v#URyTYpbn>>3crJci7Al|Ug>QG zbfG;4!*QScCo^cdi#s|>i3M>nzAn$H?mRI_F#|X9q$_sd>no!{h&fmv=>t(KF7L_jO4GiTXJ=PYEFkKsK5kI)|}zI+`_M8)fRHAA7%w?O(CYJV0kIV!3j*V}CG1Ndgvg_}R%MscS?nahz%Uq+1z<6NTCa3KJ>fn&I%?VqXc2PS zBztoCY!Rry1(^b`0VLHoF9iPpzQPnQ$$;qLYz*FZgj_**Rb5?O{^cbQL;`~LM^7#X zow)Ymb$yZ&Ik|XQ-%(N3<>YYQPmg%rT$~cRvO#$40kgxd;z&<#IW;vE%oZirz*_}; z^OE!P6Jjv3f=s)^=-ho5?FPZXbysH>z%4$0;tzz3;#FfR)XO+y|kDnD;m={iqbpk=VXo+1xJlQlB|7nUUE6`pK z;^N{0Kv4`JpYvt)3XmOM1_MMt*d^X(9+p3kRS^E8>BQyiAuetQ6r~*qJriQ8$X zwU_tJw;Et5jf!I);6MBj9%A4j2jJ?u$D!pOtCZd`k&}CG$7~6)SZzz;#9$-(J8TY^ zXsVaQb#7|^Km^!jHb9l|Z9N2-V4Qd?j7TAf@msfDS?G-8??dm5_do8hm%9&+jerLN zp#~Vo@3w0OsG)i?Mvl^hkB))@jc#{f|g3Rv55aT_P`T0O&# zuZW1EBh#vl_X@$3m3KN|^&Fj@z0CyAj<6e$2%LE!b8dFxq@fH$349&{zwJb_?9OEQ zDnvxK~0u(gZm%L^c7um?XnAdRA 
z#!%n9$-0bq<)rI$hH^$rovU!A4J0a<6L($pTCa6)`pc>{N&u}?y^O!2x3ENofaH{` z1Te4v5az>WZM703A~@{Y)_~v|I|~~}?50?`;n8UJezCQR>u!SG(*32j;@AVy%^#nB z^wT%YPIU+BM#m)Z07^Ge1B{4|C+1Dp1&^GkR@*ew>iv8b6U9YG`2@TZKr*AEMapC8 z=;&CmhS^oQob&1VYs#OM31kF)4MKO3t)P7wtX1rwn@UJ5L-j@hU{sm`cMpgL5U02l zDE*N_&&$dSljtAfY&1I&Uw!v4OZ~AW@&}W=+Pi)Xu-$iepV9a)eYVUJ^L_X3YKx%6HuVtxYxdH^PV$pan)&A5TXDqYXQ&yxCzLBaq7#+=X%JHn|c@8hry94tZy zI1uBrwX%}FkZ0x$%T{&G1O@%3yBq{ zj|CrdYa8fHWNf{LBuc?7x86$R^mUe~38~~w0T=J&Gy|R47}k76?5(eF%+KE#pbf`y>Q1HoZoM1?rLhLwOu(T57QbQ&5O z7W|tFUa(;S=)L2`F$R2aFw@q@4irso&b= zR^OHGc#rH1JF%QuL+Vy+240dXR^ZKu5bNFOGU+}5>Ym`6HEy>Gi4@!3k}JT z^u!Q>dHvFA3srEpauf>Kfq9wB(3j$Lo& zerqYdFMvwEa~rMNDQgm@#CrT!T(B_Qa`{zc%&uKpRYOAGlA%yeO*dZ9SC9(X=Z-P-2)Ig*n}=GFYi{HpNX1$dy@$~?H>h|B5hv3)2Ho! z{EM|UfE)t3ZA=O&{T-k*S?w__19=G$L0XRLYHF_U-+vM){dpS4U<#dK1G$A9rIa|H zxX>RGA`nL~_&htCl#rkXIktQ-4T5PlRXaVs&wztXGh^Bd#3whU(z#M64Vi;G_0u!L z{7(%o_K5((Tfc&Xjr_c8$EGBo7Cch$tA&Ndj!!!n)ZUwIS}?D+CyY-@;sfz}hMa3_ zlo5%~HUDH4@S)J(W{V?@{OndAY5oY=H^T8DAt51KoZr$GHM=h>$i+e=lJ!{&5aa+w4a}ko2)3{2W2*731C6<7vY3ee5MDappsymU zeacHy?Bke!s6jqQm41FgiBdQ}=ibEyd8@a;%71hG@dJQp1iVE-Ore_tPU)?ilWw<0 zm2E|OItj+!OoPQi^TIAfN=8P#@waE#>c*h@VW%nyv^VXBBEDa6B7=7dc;|AzgA<&z zGKHCE{7V%M4S<`^K~K+i_vM7)<@4Ik(4l*P>`4`wW=oXX0dnKZ5u1G{yUY$122FNX zb*0(auD3Qjmzx=yxb2z#lG8H-y}Xr1JLx!~vEV%Hn=zaLT_GW$NIv}=t2Eu^^ZSU1 z2r(fbl?(99#=2BgSba1MEg~6mfIV#o&-+_nO@`h2iIJ613itXj_LAp?zZhBLFL=WKB)_ z@#%fRZ~;;uAn=r`o!tVtbgOg;z{Z4B?qaR2 zuM7E!=7vcCjNjeeLs~jlpqB#R$pFhZFvQs-`W-;^k~KBHAUv9A@~;It^NI_p?~A8) zocCrbcE5xMQ7RGjeW~>JzVm!?Sprxm6Vuajikf>X$XUL2i1dxKv)e%CLZSo}0JIEL zBz^AR=jn}lgpqqC{odV$|6aYojE>UO)Ko(c3j%%vkZW;Bk(2|^ko$=F>^}(`1_O+g zKuK7K$Ak-W>KlBr&z>?-7bA;Ev@CL8K2i*@BZ=w>JxPYG= zK0CNQpvxMe1d5aM17|2G;C5B3?Oq6*srNSDHc&{XKyX6a+aamBR*Ow0CS@4v$EVCE z#RjRa&R2C}HjMB_?{5ZJ^JiQ@XEwjb6jE4FV2l9{eFI<@_$jb|7qb!EJexW@p~sK} zC{LmB$;rNOf{~d5^#)DJvjGjk7FIS3Wr*0dQg4DDoi&kHq81SFp zBh;ts13=PN!dWe-anPcOkw=5Y?gug=f`WS^40qTkr)KYnuQv@=yJZ!6Vu89ajSBq|koFmNBa}5^ouGILapIy2ot~If z=6=e3<(i)4Zs!5lb9G-Eo6nm8`}xRY(_YerfQ+3{J;7JE>lR86g2^a>46;A6s7hL{ zh>%LhLi)ylu4{m~?C$TvmI==V;H;yQs5EN;QPa_pk(Oq~5YOSu*#5b;786Nz{-Mpw zy@AdCM5gb$T8$ReT7tx=V~;c3DgMCR3~CDtfv^YXT{jv9G5RaU%MCxj{F%?F#IyZh z{2PH!uRIw&S^ClpsbmT2AMg#^>xK3YV^2)P@}5L#7r$HTt#jj)NriXkqWppC{}DJY zb-r!fL2*By4+;v>a&8r_V6r>MNF&$*)Kw2uYbslR?1{1~D2(hjpbJQ-|*g@x2Wg`h%j zwG0R?E&kSF*kT+7e@47$wXo>_|OgT>s&@Ol`jI6C4|f7uCIG8ne$$T zTHs*gBL1PyB!D}c-C-CklquK4P=8oy6YsMe6F1n+MmSkDZP{T5GT38j73!FRf~^-X z#6Ns)rIi|O*@$-O&qZ~L@u9WZO*jIp5z*fhQU||(Q_?W>@;i?&5DdP{+w)gd zRzBL8D-+L&`t?2H@-n-^pbN?WOP#|8J3IS?tDBg~Fc}$H7=9uSG0up??&q{(6-|pG z2R<4f3{W8R@bdI`>i)1fsQq&)4X7anKPAp&Mb2lJXj0pa^H^E-1NFgOQr4P;p1cC;~TMLo0^=& z!XaYdNE_Z90v!oBzlRGEN$1g_ou`bwoeLW2iw|F(y139NcE{*cC$F}mwgOL`&AP4vc!8` zwo>djZjwU{^96gB&RljTo9dYL^cL3kRzzmrJ2`!uNlTgy;6+81$&4i=BwD{L%&eS; zH_3RZGlUbxq&gM$4ipanTok;Ez&ciRg&HdaFr~p81X9iLSqBQ4a&H}vnc3N))TZ|u z+Q&U3LQr9Ie{>D267;q%5d#wwT}_X!Xb?ew8p0a0wm{ibUQS+aPM-B#vs>R9UIuAK zSQzR;TYd5^S=&bz7A|=c##c(zIjN~7Ro&flCZN81etw?YNli^dxJ#7H)FI6=_fAV! 
z_r8CHL^SLbHulWeq{sQw?sf!#=iiK@Egv$mM6%>ShBih(med8xrG%?x|Tu~vY z0>3rU)6-PJ{AcI*C+BmcVg46^tAWVGG8&ViKQhU3=S`N3!qtwB=L5#5TykJlxz~kSdJH6l>nwtTGD= z`zuiHFYoKc#XWiYRMHKuKIOIhULr@}xgJYW;Pu4BgvHdY#~4ZtA}L^Z4hpE_n&3hs z3+U-f^Qe~?)oeS0m=qw%@#!S#wv6oTdBw$2c|{;h*N^UZ&=4ys(QeV(%N-6xyF1Jm#$p7gVkJCxhsdF-D*Bp8~Sp=skXx9b?wBXI=KoYz8nwpu%-1*?QGRE&q@5kiWs0#z3(NOfZkBr%Q$qb@} zN-Xb0es)x>_CC7qMkIZT8pMiXw6MTa@9*Dlq~VbVoZ?)6_uv9A=as39%=xU-{p_?U zkR9rxiq1nK1))HU#Cm6ANLr(%?Hn*#pb_@$@nb~N%F1{5>eX$Q%YwmG)aB^452 z!Lh@5LEd?~g-gIK^t{6D#@;DgR4zJR8m~1*0hn{rJBI*qIYrl92}t=+TZ8ikB2$f zJFInn_?FiuqpCUu4bzt7oIG4OeS5+XO`)tpi3QZ4YFJLob91?9XcVggii&p7mhR4% zZ_-;%;X7{v!sDDLr>0_K;>6}yCniejfBAP0PApi4mz3nX93=eu^%J^Ljp(nx;#aUx z?~g=!e0&mLrZ%bdDlRouU0waH-*c1~DBRc;NTdQ^1hYI&UkWIxs2G{l?6+G%Q&KFc zIQJK5x_Xbk7QmFLV|m{5+-6^u?=~|%x`o5n<7H%Ij?r)thoyRABx23hFtJ{(tr|4f z=C0Fg*8V=%JkW9N#m=P}?_*M%5|*?gWe-g*N}eGFAhKqr+r447Wh)sOSSU0!6e_v< zos^9Ye@yVvHCkmw1%^O&ZYugFwg1X#V7>y?%^AV8z%s~SXJ%5<5V<)xs$9zW1cIVQ z0Vi3-2;AgR2Fn0syp63bBX`rw@{yV)_lxy-ovrotg*$?R;^O?mLL32Pd=4Rv4EVTo zL9SF9xz*dNvs5`$7JKynDIBFcOUAh`uQ)w zY6tDiuw?5-I_Anit=!yPjijO<9~of>fn5I#sER`)D~YnkUII~eexEV++;o;r+w?`R zo6~(wMMZ6Sbv?H1gZ?r;I$mtOB#>J(@ZGz&Eeis7d3%j&>u9O=(|Wy+f1Me~@&flY znt`k9%F9dVw7PKyiUpRHd^?jRc>LCxHSysRIqp~f>?@e4lauB^xOTnl#^-h~Z!*k^ znueyouP;WSea^ZLY9r-irCm}KstIna!sn(NXQyLE^&AB~Sj_kA+}!$isx=ckXTqJG zX~e`B7?Bm7f<@Fcvf0GZai2kxTgO%oID@m6d$PAAX3LN_qF=Wrpfy!qyRMa6s_>|l zm6enf+O0j1YOy*GJSOd^qIw5nI(d2Z<>h@Q(_bH{Oy_HS;&p6BxZ?Lzi%O(rWl_)$ z_J94d9h_j_ovGnwV3;!f>EY^nwzZ&|grinAl`h4e?yp&(G?2hXvH$Z9*qsf@Qo^Fw z-rlYs`imDw)w3F;ej}y>0|UP&OB(X>Hi1%U7?I=%!RhCRAKC-I#gI!`6Qt^tDN#2z zHj>LGMJBw`)zrj9e>Kiv{K;{=O|KGTA*--3`IWN|Gw-0~7pVAJb{s)}iaazyA!Xw$ezvP38IDYqO`@7utu1?+b;e_W8?eva33JN2quj8(4&4 zBb4~z-}mf~v-jHr!sMu9{e_=^E!4~4sb7E}%r-7MdJHuBS1?z=9s_b*gX`011gO`| zd6n%XKyrctsHuqsx6ghXXi*t&8X{&@m<;oI8C+9gA*!m@^o4|7bpQrFkk=!NmEiO^ zZ)^`-efLflgmWUI7g233<5lA_&y6%E%J+QWTrESuH>YywRZ8Tfuo~!S!9Pb+j9Ie6 zfYx5z~t)JH}@OCiMZ+6R145HAS?9T`we&&;l`8H4!1&m}a%IJ>8BXxto+{GR}} zl!oU<+&^MfwdN9ofoD@G@;djYAa?(Cl&`3s)YRm=#gS28Uk~(^z8}F$Uz)v`zB38i zy)u~ysot6DVE~QWeO_)LqDJO-cz%3zeq2ymU`-!GM@8p-6}ze9x$I;@VN$I@Cv8M~ zV{t_SK++2VbMU*oG7_IPyl)}~HV2I0dyffrb_)BH!TV?T=BKk?C??I{cdDizD7^^`e<@H5H}PlTu1dENqz2jCPkYKa_S~{YI_(ar`3zsqJe7!V+>VT| zUsEze`X*K0xsR)j=~Hz_t)dd`RuSL|Sk6}swYIhn59?-)u#~zSMrsxhg2w9xKuL>CP8KB;%yj3)L2ypYC&jy0fTKII+A5*%H`N zj4e*ty65H^dY>`yky_sdRL5y$2pSq@TU0y2V4wo>9lSF$Gf8~zRGu&I%rQny4-PI& zPL{%8x&EQWI!*Q)Upav#ffs0Iu<&M&ZK>Sf$eNVOT4sD{`_kA5;#Yn&v~UnYtJ#Oh z&%QTn@i^Z5>NAN2W)vX)V83IpXl}MXSkzW&a`~q--0L{)+*=3#yT`fb zNN{j*c2(6HP@#f@;sP`CkQzq0O3|&&-nJ-lLs{>H;oD)Ji`tlF@z7y#%nLm7og0!C zHk$JpT#}OOi-XzO0Oa=?=<<0}{fdOieM7zOTd^jC)-8aeJ=Mt%KkdIbCQg-^THDG6 zsb$VLuu^ngueL7zd`BekyXtbMKi6a z)gv`GilwV8F2X?8+2@!nz5K_=cJ z0G(|%=->8Hbw0ih!;Zz4yt_R6(duPnWR!cLrLOJ<@2KdM^fqVS(yHIB`8t3+#x21a z*La-1p4dBQt$-)9JNNs>kgJLksQv>taV5o@x z6aT9g+Ph_N<*ja20H95BhyM@u-UBEKG};o@aa05qOrRhzsE86336ixD5D<_osR5Ck zGjtP10Z|cmL`X$l^~Kc4Ky@4=bT|LGfsH#um9EmSM65qZas@C3urog@!oUK zId?cAn34ySSY`HcqI7t-sBKbCAd6FQDO#^+Ka-=>v$uZfJt3XPjwAJH6Oi?E?~Y$n6FYN=R5l*6 zFh`rKgm4(_s6!H;66@I`42^Z(!;NaAhmbSL4g2E?rx2kvu z{u6?NT8&tlnWsO8+5vOJ+B(x*r~jKvmjR&tgM-f$wb&-)q>?JWe^1U=OaXJ;tFtk7 zC@nEDXVrlqF(qc`PgjVDhtLP}rOCo#p1p~ko0~gHzmc7ha3e`!Xn5Elp+NrDBWuYV zMt3v;8rzfa@kNuI z#(z)55d=v>>u-!06qeY#U=PoOAi*LBPO<%KKtCzBQ?Znk{L3@fim1Df{O2jYTtoZ= zF(3I)-+k)x#IQ2|>*+G99{>B%pH#dsr$hcGXC3*yOE>XfpW{C*oBz0o{`zd0)^(4+*R$zzo&lk07#h`APT3RlQPsg_!m2As$oF}dJ z+ExCN-~HL#tb>Qg#FwHvb1yW4X?ORL+?3fW=d;)4&RM2%+CP(Pb^{f1eOlTWE^1#{ zI22R3C(mfP;vQTGW=;~VNf$7jnJw=i4j_R}%^j={mSay?K*%f@|uWd8JJVJk0Bl 
zhc*6(hy806`ds-B$JPH%EA8;;Jfb-p%sh55@7&$YF__yda^HG5yi7Y@>5-lPG~Q9? z(Iaw4M@L>&-u&KV=nEqv!plAPsA8yS)AK%778=B#Y|>-SXHJ&G31fEv0NPkuvbD0h zh)_~h9UJ3b#dub({+#3^4^I9Td|Va+(3pc{p#=2C0`FszyVeyKZwD(Ay5G|X7_VnI zTV%}}cxh?EjiGt{8tO?b7xvR9m(J-i(|rxN?eLv!C!eKBqtx_xcp?F+f*z8P<5$mq z=%~MT9S!{}4j(-x;tsH2NqJUzdiv>Tji!L|(|=B1GzW?POLhZGLk{!q9zGI==g&{{ zIqk90(IE|mT6=oNprbJq^Lv|dE;z%5rmS-E!Lqye9lAq&vhu%#X<2V^2QkU)Ooc71 z&k_1NFa32tMQ5{rc-qz3cxWzv4pFwTPotm2sHH{wku6xNx{r@+yb-IEYW8fl7=szs z>Dw<7J-NJW3*i&=$qouqw6Zd?^CyaWpLVpdp+q=2IrZC59D{P#p5D}R-9~F^(xi&Y z{`SgB#q>_fi{p4DRY!Y|n0B+dSp(U)_FTib>k{WxvedIOGv}tMACfS5B60=;>kXUB z9>p7V%PT14SsoCJM0ZxmmZhZLc~Q*I(+LyH0YSLRb*HPpUy)>JYRa`)i=16M$8IVP z*IJ+9$^P?t9;|S9>4~WX71ctQ)*-Mq5TpIFHYi2AV3FBaBO5QWxEKTd%FT;p5B5Fg zOGbnlE+XFS(3e3QyNbZvSm)`n!NI2tRj*|Z_I6hsW88NpDGpy35R*JDW#wX*YI;-r zL3%nqA~f~$t>)TVA{JdEhjS6IXz+Blr79^Zx_$9^Q--ulnXAaw?4F<8`-^o)_fE>j z-e==40Zb#ysuy|HJ{kqX9XohkT z@kmS@${_o>CJ^iV>iP86e9;_HS5!hOTT03&Y_IsmO6VDJk8gQO7P1Z&vg`4pC*5|p zq5wZ?j&n>cjq*msheLzPZ2tkKxc2&tSO0N{98#Em&tC{6eNW9No6!1$17P=Z>ycg_ zO(w3yC~-aXbVx--E+DEG56{P>vIoK~J^<+k@|?dgBCBVgAYpj-&ZjF=qra2Q+D*f%cFg7)bg76v%*Mxt z!o@vw+mr1D9L-C?HwCg(R?o{kW-pL#^KJS*0$)^CIVC0RBgkIE8yJ;r?=YcZs0z)b zv9ZCFmWcnnFM7p#_NBiLh}>b#i<*i(S6O3cpJY`oB@(f{A>w}Gq!%C%7)L9kVk>?9 zYK?_5jPVL;YHT(l?6e8}CFoGSm<%3_e)P-rbvJK2B2E> z9Se6?S1t5R{8OUMiWU+nyzDnpqsU{fb{LNu8sc*e7jhDqdV6{#y`jGOA=qVfaY$6F z-)Z7Qr8jy1K}*ch{g*v_YDtXMkSh4Z`1+35iQ%PYe$d@7nn=+uzaTEk(!@*cwml|b zX_*b10OSwplv<9r_8+)vxuVm>IPN_R`}tX{boCO=N^f5Tq{AXl?rvFY)6LWllamq> z@n7tcpTEU=c=-AzE2r4?cB<<@3+Dpk{)Hp6v{yAX`;{o0ub_pQS((k|KV)WV>F6w~ zW$SQV26|V}yLUDWp5ESg;HUO#r%PVADX}rhXijtb^+NfLxP zc`_-ja{nw#Fe&<^H_xt6@gTYfUP@4~%#(sKsT$g=JPdl9ksd{W=Ju}8|ERR2nA`eP z!Vq+Pzjd1p4owtFF7g4#%qG%6GAtn4p7LmJMdqA74>gU3hCqEii*lWcvg8kJ{z!LE zPyY7OWR*zx&!x*gYB?kzq4a`WPR`O8WG69uJ7@ktlTXw0m{h+@f^yLCFsJ@<1Juq? 
zPENN&>gwv2uS;B@I#}1MU+Byp_-d?Z09y>E7D&56SEe)yEHq0UlnExpzD}gmzP-dr z35KeIxu`Jq9T{eqfdjfTXGtjmt4?y;Y$YV)TaC=mVcGw34TysN95Kssq&_DxO^xX&+q&@h7_@sQWnRNCB*;!xkqUHHw|nCY+( zg)2WsZD?G3TY|ir{vx8Nq9jK#ARU_EMnCCrWbHwKuxcRAt(J zd%eS?Vl#DleR#l46Q}E|_7a1^sA{{<=Z6ghVA#nWn~xrQ?zli?r=9aW`F&OEY&E?m zZU~pEXs|Xz7^tpRPHFGAs}E^y#R5!zHdQ{1T?1oYx)m4=r}b4Kljv3>uFGe9+;?;j z_OYRP@39`0P&PxYPhPsxQ~6yyC?q5x+U7<7Hi$E>Bv%83OJB5Lt)bydG2G4}{6tci znI4my8+F&!Vq-qw+rxN(#!h&|=VjcuJ@uo`eZTI<<1ingorZMwrl#dPbr!lqmbk5W ziccja<5Pz8Jgyw{^aGMX$!BIox+^QClN6@yN?uj$_LmlBYvGd1GCJ}`t9$B|OgdMM zZ4>A8-Pw&Tx`w^NSfTM*8AU~H?N)lWh_U7a4UN*-W=0^DL{U5+e_w3cwJ*vs^A*|k zBJ8f~b8Ym9*VnIu-482KYqN7xQ_4tWXta$TSo}ldZVMM~GIYe$89Lb~n?ottKo#pK zRehqP15Ix;KHm~?x$icXhLeqbVv1=J7A_BBYJ@Vxhtkq8_;p$qtkCXKN4)!t%9T|a z@@gn&M+iE9|GJ-}V9Cb9A`pO@-FzF&ZMxrTLKE8nn4nB7SEC)t+TH;=@5>!+lAPIW z#^iJ|Y5K8E>mX>Km*idqWDNlNHA*cS<3)Eg*5nK3uKWzTb@&iH>(qS1tIIhwJXx0z zk9|-lPWpXZ|sYoyqubNW)cO14q_*Twkjm8JG!JOG?J2I&_pblXh5Vu1)WCPwS| z_NsuB70xIfYPes&{$?z2Z)FsBkZFbqFDO`_&@M0C$vR2H%x9K(mCC;2nBF$~(WBqU z`wqgrOH-!gW3q^Z(xHIMcc0sh6d3`_y=_W7;jWNSV_>L!n$kw!)`JD2s|EtiF$7B{-EBkFXzsQSvzu%noA$OSXy9pu8r8XuD zsWfwC`;e)omM<1^g4<<+wSZJY0eR;6Wso%L%T(GRE0DI9%Z|8y);siQAfQ zXVc(`RK9U5U*s=So_JY*tpR2``A*R<(pZr=KN-#w#grz-U66&G?^v@$L& zD`Q=Hyql$!f}1uk>&l?0AbuN)KN)*R=7qkjY?H@{v&PH522J65k@1n#0tT->h2(}5 zxNzSNn4=)$DE?$U-QHqn(%IKTChm8cFe;y$dm4qMcwSm-_4!+lXrt^~U)r0Nmi+~i zn$KostC<}Pw%(o6!dmP!WwtrMnDwqWVA0h-2pWavoYe0dL;4(bpOD{G8CDaw^Tvvh97Wy4a00tR zg5G(2*4CT(BJk;hja6Pqr796oD?&hPWYbRgylAJ4#k$g!Eu3n9UlUv!RU=9+tm`m> z@kt4*?bjXH$BK*QO4qXQ*w()77-3TI=h(58wAFtPLODe}p(*_eeryI`7+V>p)_pch z(@qNld0Cu=DaTA_uH%R{f`mbdd90C_c!jo~V3(4Tf*0N3C9EM3h=r0Bw_N9VY+0Gc zea8FuSGNqcZEb9fi_bkFEWWGHvY1b}h=3KSQ?YqlhR$R;;im zkTN(d^cZtpCa3F#!W)b048PfrAKGqv#M$)q<0~AF_Vz&qGt<*N=1Aauv^0S!&Ftd$ zb8o8pD{YCq`S6&xY#fhtulv|1Q@G_mP>OOZs;2Z88SgD!s6FAaH2BRG@y2z>5gHN#ag*WE z?8kWA?zV?!cN0-KSx)qU!n5$WXd5NX;J_gNCsCY=B&}?$WkPssUgy)pXK9V%6CYYW zete(HY!dRX%*@OftgE?<41t&zLk^y#jEu$$<)9HMhGJS#Xp7{0DA?;`WVBk6jc3Cqh{WJ0g_R0KN zYj1VxZq#s9&V;tpZicFlK%RSBRq89G!5FeDu?^FPQ)QQylUrL~#;&%pj1jp$9{e_S zlrnQlU#4749W@Q~s6_V*HGUDlamZQi*%u0~SM9ZV5*-{I$b}a@cjdOed7ybM`@=(G zde~T9)-$9Nv#18??;Cdg;+&B-7m<<(%&p!$_IoognkR*!du150y0+H2`%8c+K5N^s zbJ>>$UtP5tG+sxEaM{=kJ2|pGbEX8-reNG$bxJW=E{e@uns!U44@pDQ`1PoVgg-UY zo_xvpG!2;#DLMB_H}C%6$4IC1g}RcGVsd(ZOtJ1Oh)YI&orl|Q)&m80wmUzwcI`gF ztZK2{L&;&Dc+>N9cKSe}dm&d!PI|fq6zxGxH_9m$VpCLmTjXH7|Mi&zNkXZEIS%fI zs0MxSsnYnpYVBLf*{NBuY?cm(A}!b17;P|MD)E+{Z-P+tbJvmU>|DwG&U zO)LpH)n*J&>ho=WSf8}Aw*|@ZAe7(OTsV`M1i9y&`r?oIS}Iyl(9`4AJL>GVyK>K3 zurM><;cFEt%(RwA(_Ln$YSJQz>A*N zEW~Sp+-NL}llF~^cF9Xl-VWRflk~2rsL(j8FXPR(U%ra3s&Y#GMcZX7CoIfADhh2d zUR!m2N#Uq*@6hn4Yqx{hnAtgF-aI1#d?h^FvhpM}5Z{2nRLXU$>TPfgFcFvA2yJrW z*RFp;KN4eNWQD=j0#Y0sAmlY7sLqPAATKjyLX zSao5dAttJp1h2Bjd9S+K3mRb}?3F_(hJOTjLcOkhv2Ugg7Y-o zzS<-68YANtoY~o{_LB_qyH6jiKOzL)GEFQ-$%b|dv4%}|jZ7w60zv_x|KO3Jci?tL z@g^k|Ro#HaLSMcX(#WA|@osXw*~YxQtlybjfty!_Z@Bv~Z*3Fg&}-Az$+0183%pN> z`@8kzNqu60p$@|fA@!piRcx(rOv*lcR#et9nC7k;eq4lQf8VyZqensU*XV5%LdQC- zM&#*2D7mpVcio5Cxk`10Cqmhtu+BF3*~!qonpLPIEtNB@h6Rp4^a zJ->YQ>h#16M@SFOJ-c9?b;HPH3R9izJPPq^%vpWW(tMXJV?EgzK_OmViL>fR*SX>@ zp571NQs{S@aoayuhaqPcIoO?J;~ikcUcD-iVSNj|w8~j509vTRr%w`cVnh}c7^*NL zVI7^uQ7kvLJ&`wa#5qjcfN+rbs9?y|Huu8$v+E1=ES@B%N#s+NQ8CE8>@e58IYR{% zW=OZT^`^3YvWiqtJ&^3Ze!8xt-7;kt=eGWG%H?trBV#%~qinm5#*+f#Y>WAM_{ksp zWsDP&R`=wEz<_}A%|iOhO1V9HKZJFcwxFlu%>2|8qdC3B>cM$;)}4I!gNF(y`mU!z z;N{a(G%+qL^)okBu_t)Sz|;K}HmhFOUvNP(a==ei_!PUAMitVsUJVSWq`I>u}wOYudbg^5P<06+2Hi}W|h}!Nd<{% zu&A0GIY(}&QhH|?yJpAd7`Z~^@9rNi)@^TbkG2DzR3mrwo`C)u&b=`0_7n8#%OY0; 
z3H!AYPft;WTj@&75P>x!dMN*%xUTk7@fkUy-^H*11+&R%v!0ce*OYmGa3noaIUH^Qt3jP98WRRjpWJlKn*vPfOWd|L7;0}!7Q4ln zl^27G34TzDPyG(JhyQ7HZZMMJrNK9v^vJ52;^?Lya^B5leP>PCX zyvY6gT-UE-EeDP+do~dGhtm&KOz;}^=-yJU zT&?!~!E&1i|2ri!n}E>AmiG30OesmfHdR)(k5;u6VD$v(!_(3X791zCXkl9*GgVG8 zoTxb~bcu@lv>nh1WbR*OVhxRr?R7GAb;Ww6q$J*$>n;|f^G;J@-uZrdlIGq6l0(Z7 z^v1fr`@VtmvuiS@te!t=jX!aysw&s^r0wcN%MZFt>(5==;B<8K?;EE#;Ij$hjU1hy zZ?!q5{syLVimAO}eR4YL@iRhSJye;eXBu{FkZyk8bUutzhN^nn+Y0r^1E-6Kh}m=5 zOWFz=Q-nmLz|ys_Ec?-x>Q5i4?Melv{<3~VRsTlc;4Ta6fL(lxF`-PgqT4MC_F z*?6U@RpV#havnQ2)nl>%!c?mthq-~FmrtIfHdR~eCszJi(&FvCJaLQXL}NugL> zAro}DPt5o-mFJCl_l{FTgPy??r+q;}Mo#VNiD<*`!9AV1xzp_CN~2OMbPORQ=y&aDzR;Ci5Z;N}q>t(hp~ec=MN zc3M3>#7JFp8i_@Z6Ixg21ilU&tls*%o(ZH7@k+p!)Qe5T{68PMRlJ#(7y5+iCGa*J z9h{xDSqtGU+iPM8)dYH}lDcXeKKre;FECjB6|W0R*Vm~l*O29ht<4B5K2;QL&=5n0 zMj;KRf0@(=sl;+c9w$A$i1DvqD1ZkxRLjA^5h8#$wGp7FpJTc}s=KIU`aN^i};jeLI+45#*3OZ;_w z#+C#i11OY{lG2OS68m%+IeEfo4-W(lPE}I!EyQq#$W~9Y$XByORVSxM=_bU2^x8@b zY7W_+bi=r0V$x$G)S!fH5~n&@2nJw26vqDSh9?OLNhH60GQcR6`{q#si(32=6+`!p z*XtplcHh}!@*uX1OWzKzgM8;ln7LdgfLirUwSl2GPM?|Lfx_UFT7Qo1&gX!U?bUe} zPmooasH1vkBUZ+89p-zPE#nl)8@?4`fl+hzEU*;eNq6_C5djSiK}<9=Ec81oOqo-ucM62M3&M zjpEJbWudszGh&x7Bm5T~PU|t0R5U#il_j;;rS5UIX`{9xEX%hVW^ohRE(+`%*}~}A zXEyz**{$AZXwO|}A6Uh}c#lp`_ioNTIjPj|GBQUxHg&EfHWs-uuGy7VS2$;KCBnGv zb%Mvp;!w&lV@u1p9KD-d>~X<%c?AV3y>udOn8HH;DR}|(4nT7oV^jm>dj)9maLq=9 zfP>lH*lkX&I$uu;AiAF%Ue8}w&|(*6kfj!x8ymZK-@ysZ&TaR@Dm4b6!jTKTrLRU_ z`ySL1RFT&Gp9A!~C@PFLN82}ig34bt6B%G{qWjoa{JMX`y|Esn*^!qX zDH8ldml4|J6(ho-f??fYOJ8%w+Z*`-+tA-{+c3G*c!`Dok`@(roUZ>Bp>LJ_qAm5* z#dlCWYxw@>{IlKF)nkBJ1ZdOA3f?fk`p$ClDb%3?oyQN1WMc*Qu#O_=(fAZfEo)yw z>ZLd2kxzu(>l+bpY$f}^_vI)XUA}2qSGo6oJ}%e=IMd?2bV4R( z{&d(U{(jWlJlh?y8M9Avk6ixq6nvn{5NuA?&XM6b+uQXb;sTSxs3oG*%WG%nBnd-a zuX_8H>z2h+O##=x^WW!3IXgE;a=TBbid}7-CFW|$N^mlRD{RFlvS$0{iGpp$;9AO0<8bv>vh*{?N zoZMXPI|ro3XeiDb+_Nh91|>JR?j;G(X+AV+0X5rdC~=y;KT~4^P(UvBgEihbll=VU zdfVxj=MZL=T$>F#cV_QgvFqjo5NsOg;-yh&ID2-B$pE$WqyUT{LBfAy#XQ`C>-N^0 zDe9bR#_G1$K0Y&vlI`%86PFwiZ(;-rP4d-^OI5SOXR?ZK-@mVP>tcVY1ksHkbTnlg z!NJBNWTj(invRNJ$84>z=jWT2gw6Hl@5L$CgN(%mpEV~#`6ah74w3}pC}dan7NPz{ zJVU=l=@|RgKoxr4?MF|s?(ifym~1i09A4z^+T5n&Wzyr?^Visr=}+I&3C&}?R7*;W zH|F~tp&%lzSNUZY-c{XOC%6ToEX(fQxbcK!Xnts`WzWB~w3t`Zd17junXhxhivrlX zXGuwy)>aBzyGa=A*7*76K)g}dZ?ge_D-`&nqoXDK>D=MssAIC|%gYKP{M3OzunjLS z>bxjby%?V`8dmCPX_++&X}dwQNw|Z}=3=XK7&|3mw_r47fA2(3H$1ndR=)*yEGxHM z&%oe&a^p;muX#ARbH|UIGsn_;6U|knWhL9DR@+*x(a=QIvty?-Qg%02c-`DywhEz0 z8C+ak;!wk4VA7+eHa|aqyE#o=SsClH)yC6T4W+Gw=BqS$<~v)w>1i(--P1vYJU%o8 zP={@!GOz2>n1p%Jk51M8$&nEj&s*&5nkxg-JeuF4TNk&&=!KUySLQ9wT*tx+n7`6r zT8ja0H!YRp_wC?`-eb00w{t#!e(DFQ{acCiQqoerR;laVdHQ?GK3QP}6OCBAt_EpR zpJgo8Mfqms`1)+WK1%BO^IZtE+Sa*M`>o&&0F7E?t)UX}=F_JNa$(fY{(;cV7+}c< zpBhOC$Ljq7r<4J45dXUOj{~C->hJT;3wwoLtuE(BO8DoXzGXH~N}9Cnr?FsfYfrU` zF^I-t8#-DTu6v)o(&KaheA8P8*Wy3^c90-P#BR)cCz&<@^a=HcYDTdFYIHk2Pv*9x zn3{2Fo|cl*=1`60zyYo%Nf99Cdp1?!htyj46E{u(`M^itz#upz1j3dH{-shuPL^HBN72MB9E5&Jxm zLlQt{|CGEtcL&*n${eb`w6=|Y0+YKq_pN}e)K-HN9z$o*`rq~=>+226PF5+(aHE@0DxF5?z>m+cfV zTKSGmObG4l>KU-G@}y$wc0~p8beDCx=DVH0zri-eDJzrYIP{yeb3M*f+1lEUG7zKT z%_fwMy$>+oA)mji!OC;z&NJi?xA6WTlpdd(85~uQk=-EfSmrs4HA6H7C525mm}@T( z)DiJYNY}Boy&dxNs~F1YLhKU-P0d>}w*B4PXG{ysdW=dY00h={M~mZEtTwo0UW}Zb zJD7;WJ&}9s@4qu%+WcfA{3hm*TTr$U*~7eWka@3pztmeoiUt)ke<19_viHKoc8ozMo-L}vjsQ&c-$D{((iikAl{Vtm#4SbO;Dtfn- zM#DAxwPbJD@3H8i9N;dExhTay1y_~Y;{Zynw$>~pC&%1Dw^v>F9zacuB}kC-s$HTRjNAtojUsQ5FR zI{9|@UxBQR+h5NGG6TkXD^r8lb#_O4WZgA(dbW7tV6Qb%%aGM^LPuSppi%EtuLKMdaSLI7`>n3k(<+S{Ra2tAQ3 z)n^Z6?{8S9m#dz%vA11WS_j#KzCNSAKBcQ|ozPQaqnvAgB9@jcrW8@GcIJeYoP 
z6DmlnKfd(fz|EQrC~oz^WdyV9+WI7!=mseckYahukK^Gn?v?I9w<6hf(mpKh?b6JmtJ6P-@siPw~ha885L<*eXv(XK? z>-tGjQwvldmC{IkNKMkxX{fAhH$XPjHFE~>rlO*3dHL*SI$C!F6WBHkwfUYheUH|b zRlQWD+cocF@3<3_+A^wNAsu)o^f=J_#rR4@DQJ}y0+FaOV+K#aS)UmdAs9$WmsiM0 zn|fojEG&?*MDc_>msfjw)a>omt$F6@Vv(^(6t+CAgqee5BSS9O$H&Lp8w>#Sb>&~S z>L9zni;v9An1cU?LTY`TG(g4zx^oi+Yx+OW1jIEqmWY5@3*9ZU7cxEI>ve{fU!iw$ zx+D*wW^5d!on81qH5-I^v!6*HtD4RopPA0fjG@Rc&k|;0Vqs!RPcWgo&@R{XE>O|0 zK!jx)B|gFwhmF(zz>6MdB=uTcS;K)3s;x~fge1)=;A8wuCO?&=EfG6~2HCAhbO-q;Skl)rT; ztO57L4ZL1M9W*npU-o6J7K5+QINcT+_4zb_Vd0DRSq)ivSaGQ{uqw@0CfGsLXW19O zw{_f^k+B*<0#iwEIRHV)*~@o8CpXfOx{ApyI)aKw1nBbaUE=bx`DRL4{v{R;4$C1! zO0Zo7ddHmQUv1A-?17I6)XHYuqLV8A7Zs{2J$l2Qw|h%U&Q*%n3qW!}>|C4vO?-sj zeHX?qaif9!^rZ-W4XdFWteo9za|Af0~P0gp(OaP zR0jV`ewXfzSt~&SKUZ?w$2M~Hjg-G>`l9DC%1?(MhiBuj!{NEX+CmZRcmxT=SxN5} z+O7Q*(t-;L3jSXi>8Y7}CzO>#fFUoQAWr~7e&a7}si0I!OEa5O18(}*AKY~Ff4J$X zvvV=5tkcuuFvMwTCY>~scKlc*(?xLq`W&2=RX&i*N$snp<#q4&MFfeMDt`DE)$H`_ z%uJx^g8hW?T=4|qg2h|gEGjNx>qre!DwBx z&UuHh;|;e;N`AT1IfRX9F4W)MdZdw;u6?`L{_>IEzuWPHQb-F7jLl^-#|-nYY2@b? z+S=RVjkx~WGkM;hluHR(lwXVA6Z>k7td1P9B7zhGP@g0fGI{7^LwNVJtF~4xp|t}_ zV9F|8Gt;HpZhxIGM~-|{gl;ugW1>M96cm&@7H`JV6`bE{Th;Gtz4eJV;_PW5&k*SrHHHWN#5pGz+fT zfI~FJ^MQS#gA8?H%ccq_eOqK%K$1;-m1=VrijOSpr6q>V#H0HU0gMC!@#oK06Itf) z=-2>fOfAdK)>dFDp2%6-+R}46b$|F)Deg+Fs@(jEhEt66Bp#5To_jLdx@Mov>xb4&=>$7~@BU3)nau{k+8Q`40HTwhkQ z0umB+k$h1220{t2vBB2DlLFk@h-f+mW}5S3pcBv7T8K-Kd<$eppu`#5`Ntwiz+VFL zgYi)VY5mzb+F18>Wk?#P#+F8I1Jgp#nKRS!$w6&UGFX9=9$v2B{UW{$FX$is-Ofau zvkW^9z2SGzmD%*;@d8n38gP9m3?2Fhd1<;$DU0mynyd2ugvwwg{%(tRrl

gP}P` zZEmfv%1KJJ?+GcLCf7?OlCMgmmc~--_+u!Z&lJdjS%)Qd|D_VBt5KaecWDAxoB((m zP_VLW1FlR%b#;(t4^?eKHitc!H&mz+u-*k(8!g}Syx=n6I?ng4*UdCv0XfSf0~K+X zif_Ey?%I)B&P}t&|E<4Ox61McsqWGWhcIAJUKIS;c_5QtLVWHt(Y?rzl@$(-h+wch zdE!L<;Ts4j9B|pWbUPps* z%)&lKPY{MLE-tdNvrkVu{A;@W`0=7WP*+aUPimy?oU%K5^d;D&fra_@=H~X+@zpoe zx9OFcWwXO+F1Jv$fax|96bDM1fLIegcZ&bgWLoDmAn-lAlc^625pM=RKK=Kt+Q;iOU>h%3*pAYugBVS|Sb0oi= zg;WW>K6~Ho8Y7DUWS0|}yu$C_&FVTx85_TavKPfQt{V-3g&-zbUtbO=h;y3$Mk!@r zXlQSb{^!8D?-&ynZ^bY5N`lq^Ns2{#qf?44D}#nWTcY>{*aby`#;ayBwd%4^*{s6qG|~=yjk2rTvlIo zBqVCSr_A=3DiYlW%WFX1v8#}&BC`7RLLt!w56uzm!5D`pcmSs%jDqe7QCjAplq)|N zFAPIEtt8V}Zmh0OPp|*;8dmfC9HKqJdJ$0HcV= zLMI+J`czobT?|yJV(+-wfC@@HA)x;9>DOmy_JazbsQ?l^Qq4r~j%0SP9>ar|EjMgP zI1&B)WS4JCA80CQDs=qk*Yyj|OikTiE2JL_Gs*9*?9cUjq=Et39=V7c-p=3n8n`U& z?fS^QFwp;j1}^f_HwjA`-;|RTC}$LKuhAQG+k5`nr6vDz9f)pkY7M|JCNZ{&PoKwGJ8>+@Yn|YM zmA2p+>Q?T?LoK6bJk=)ReovsfriPc5tFO6v;2ROtd0g`?FTp0RmD<5!9nt}KY%CoT z^7lzN;hlv5uu;w_&kYHJU4 z^8Pwl(a{s15f!zw>l`K<#o6B4T08sH1<}{0j{}$b+$FVp$<1n@*hY}V$QBM_uJHCd z?YuX(?U;UDM|E*D>ckzOu)_+D1Svc7MbdU3&`Xpr_vVTx91pRxj}lqHfWWU-8c_#q zsY7=x%8R3g#jok;VuP4u+o$Y6e@}S*^vfP^cWoG|Yfc3gF zuvb{?eUY@f`|gFqmx1PYxw&cS>4j$fi{OY$MiyoMAX5dsW={rG`L5T#}iYsGeCq^_l<$nZfC46XMw=Ymwbu z`{&PJjb);`BJrKZL4kg5yHjX`x0dPCedD3$GTmbd>x22lsGmmx|BV1p8&YjKHo{tD zI6{>ydqyAD`~GY@2ih6M>kRZqT)6Pw{r0N{ z&_rTA_G!kV$XVXmSuMTyOAK%KeyM3hdO9JU19##4)!xHQ)Ek7Z>r?Futu_0qy-$8K zhkgrsdWwqe1sj&fWlRlniXPMT=c=^T*H65ai1U5-&SKf<$rGF^(q*R;9|uO~rDNs< zn?`PGLU_2_LfKj?dI1NOmvNqsVj;Hn{fXkY0w$}2YNe_L5k4S`%(aCg6*AkhI58?J z-MbWxbbF|*teg%q+v@iA>%2^b$pr;tfzVv;x39RYEOrsxSEE}0i+>KK)j8PVffXGP zo@kX877o!cGk7K>i~zj>=qEehR|iVWFRWZX_T_aGnZD>N(E+EqgW0)R8Ao7j_7Dih zTm{RqAlE7^blltw+K7acs~@q=X*4)kL+{#U7aadh=N1PCXv0EZw*WkJ7{IbAao+iD zvJt=EYPk(yrEEC<<^31{$e~7pnqwd|~T!2F>A=l$6ZeXnXs{r3*}MJFeh&HRIOt=LPsXnSZu_c3zI@#`@me&M)R*=h}8$ zF7bTbFOyC&7wecq>JIvMzFakhm9oE0@CV`zv`+xz>}%IinvRV!d)sG%{hGpg(BMJL zRzrSByvWg;3ApZUn$Hd9V1H!l0t-cQe);dD-??orMV+BJfFlKbV<$hH!)rTAkDPie zbNk{u^n&waSqWz5I``cvLjH^@NbKM+g&e;j!d23#kYsAI-;wB!7vk;Y5y?xpKI27U z(s|{Kh2?^dGflzD34)7b6i@a*o->x>8YiZv)Y6sp(&l8)hDs(Zd$pdyHw0|%{=ZDUL*=Bc{zG!;f4Ilj%v1QAV(yXZt_3RZAs$46p z;3Xj^`$Ae;T6lXD3wS02BN2~qprWahl~3B8%^<33P~_Lr3OE}bvx92jU`wIg@j zfPOY5hIXrRI97}Jvp`PVHqD-iY?T4h^1%+@v|+RRO|LY}2W& zo~c42|12Em(2tW7T6rV5-;A$W0M1Tf+G_~c4t69`8D>NQsW~F~=%LbCn(660VBzJI zogp}T&^kFbUg3V8QMNZw(rJlZc^9lB$-TRQa|g!?z2dKcgK41m*sXJfNmEpvK-N=J9ak47NJF}uCC4^G;gQGe~UwDd%@Nc81{ z$qO5dO!Ogsu!DVR?&T=kTgcVhRjl-#!nOs2=guWi&(5rCj6N#JG0jHaV8k%VChl*p z3^$i=!T`*iIpG23cXriDNB8#??j|WX*O(VRertww+t+4MaoYlad|tCa&XIW0=je_3 z9)t`fl2cCVh3(3+yY}`3@4(&{!-eva5`Km%=x8QWsV*ni4A5L{S63JKbM_SM%~-lW zD}SDJu?n2{km$8f>s&u=504y)Ne)8vEa}Nf+y=^ysbFx6&YeU0`Epe(c#X&h$Yg62 z>(6{YG@LAR@AzT8ybdPBAT@ZQ@*8j(D*XqHQv*kCO49%q)mW&_7a44%0@RJ!hzYvP ztgKL^9}U4yxI+_mYeWQ-C2nO~B$RF6c5A_3*v(zN_f$RM@LH62#J}>U!%pmb&uf-H zp8^t!+iGj6FWGLPL%yJXPD+ZZY4!jkdzyr5-sWmaDfj6h=o>2HHXPp58W}mgvA^J( zUB0zCkG6E1Y(=x05l^L7ozsLl;`~YbZ~w=RIK(Y>sJRkrA^a zr@!m+xg$;5Ofnr5Y&wg({nNZL9$^6`S*_tomGEQ`B$_mbwycKe{^ z%%|Gte8oKj2aBENg?(&T@X64$scZdS4V0};rxY-n98KkuZEshE#bX*|nFI5C7>0;& z_hdkxFaf&j$AnX-yuo_2cl6To%F22FD(Rp|un7M3q}PiVv3ZHFF&VQBmgRe8`R3hC zG_zkzP1U5N6h@xNCo81E7_{+c7_eBxMzI(CF=Lb$DS^vQ#$aY1HH{MnrO?JBIEWx(1l<;yKTA^!xMs z*893#OP7Z;u=l?AeP7q_`USNg1&_Vti`QN+Uc3&{&_;F-X7tv*2V9sER)SssLUb57 z5Cf8_Yu8IE>UT>iltIDp91Sg9c_ZOXMp$u!2|!QtqXGY)7mHa=#8C2cAnbF4o9rF-Cw5oU0Ou#tQ8hH=59 zq4I`O_x8C~N`c4`{Q}aPSMH>ei2Bh<5TUFEB*KE5SmXe!O8o6a3}G(Hu+ zMqvgi+!tF*%Md!W>(k9eWTTV%Ph7)rLb9W)U>jGt+{MCH#trA-MY93Cm9P7SQJJ>Nd z@O8RaYpS=pnr`MvyZw2FLF4&cu;~pJp?1Uw%R6FFcDn7X-XBc7%knWSEH*4`c<)C> 
z#<>L#@BvO0(y%}f1CwJQqnAJIUa{!8eWy#eRxRbmN-HXeCyi|jP*MHUQs7H13KJZ8 zL;c(IsXyu)f$Sd}0V7hT#$Y09p4*P&r*w6xb4%En_-2fiZQZ|5+(|$w(Y^2Qhd7ml zt&I(PC%dbnKcj`7@`uO7s6N%CxIyw#4Uz1POQ%A1U2s5v4q&d6lS$9A)mZnB$Pqe5&1Wze1zr^m zjV$`~BI*BZO`Ckjyb@`h#_TorZ%!gLmNETzg|_pj8o}Ew1&)l}#WE-S4LemCbl(LU zFbSs6l}lBY^46^r6}=No%|N8Ras!iWWFNv5#^AZOp02Kbxf7Wk8%Uf4`u?_ItA$&h zuBXaTsQ!%5I)IOk(34{na|OM<+*q$YDM$HWu$$x4TMU=Fd~rSR7%Cc7B#G!rgXGYd z`!bP6cytuHxYSew551ehlQUlULPza7JhJ_>u)|QeR3Z&cVj>OjbPyxU|LvAvUz9Z> zda1(L#@TE%oJ5p7v9-Ga_K9TD(t0-pR)YizY;v5v5w=cMt~)M(!-I?FO=`~Tmmnt! zsDK}Cakni78Bht9i~5D@N$rk04SxWHOfK%`bz*(Tp1QitEEXg+zy6Q=?INl=I(=Cz zylze_oz;=~2b=De_*RpQWKkOox0nT|gM1Y8q^W7OYYV#^i$(7}OHjer1Gfi2yxA`)&xqsc`} z%gQg%z_JZ*GK2%{XVcd9_5vAMmU8C%%T&t9Znk}o=t0e#V<$PdryxS&0xyYY0iLkg z&*l!N{Hc-ai0)f~8!CDZ3ahRz&V{q@h|>yT2x4df%vArLB3Ll;7WV(##iEd)pgUY# zvIezV^{(E+ptP>8ZhM>k=R-G#CybiP%CuH6EL$C_sw#s{TUzH;VM@wgj#^|yBx0RR zz|qnoPam8fyVV$lJl<_n^yPp=v9LO`)>u+b$nt1s={3c2T!kCVW8|*p)cK$HC8Uy+ zmjBt(#@y5%_C&$e4F|lTFS=LC%q2!jzZY?TfSR5u$xRbCx6Uhb&kQR@R|1 zv)!+WV&)bW*oxh}o}#V8!?ztBpPN5V z3~A&M3A(S}y|c3`^x9j9{rAKKgNF@ANy+CI>E-&(AXc>CkWE(aYe$DATlgi5v_I>O z&YFPJIe(%7|H@f`iB%wj3JOs?i3O!(6@FqEniDfE7`-gyMATE_s*Yp(-4q`G7Njp-5=V4z5cC zlI1yR#Km`4acyxTWd&IU>$tvN#JZV8p;QQ%bnw0$dCtM108j&fh^;aAax*ganmSXo z-FDK_QCyi1{<}AvB3t~RxB~+s9b{A=;c|C662zL2>=s6^2Zk3XAYkDk8z*NMkPEq3 z4BO+Y;J>wQoc4#l7X&*C(($tQuU`Tq=yN1DNHFJHyO`nZlP|3pt z+Pc8c9oHlKYXb;J$ zz4o@%bgzkq^F*}t_w-3pm@F{Z$;-WG?Od>zl42SK1F_Ljc6N3UHF0xa##qedVGR5E z-1q#7;G!+bT1p;iYhyIA)go@QTTs>v4-cOR#)X~)Q3{(*uLZ@TShY(kvqk=EKwk76 zUYVY(g}{pP>Qn4wEjzlCIn2z=gbgcEB_L(8EZfh!6Zq~gUmd&N%NyuiO$;(btvpMYcYB|vbFKezBdFmt!A=VYTeMkuK>_Wmq8P}$R$WeHNa~=ItyK`& zpPUL2#_c7y18Ow9!H-He7}^(%HQA^Yh{Dfq1ufX^r(QfEV6Sd+)HJuWSQdP)eevL0 z!rCxaM%R62+bG^^Az49qtjZ{%=s9GFl$NHfGeOd@Yn2W^YF)`iFgy1No$Q@bORdtp zYMr4DV+{@eL0gdi21XsOPv39HkNGAXR@pvNb*y6xp}1e}gym^d{O6XxW>?bHbppYx z@k%wPqPVcHOMYOZDYeXki}Wj4TVkKd)idsoEJ%&0$0 z8sMmGWH|(|d*Wxab;;OElkQt}3^}POe&n!b_yMq zo8^6aFYdL831MoCd0y|>u$76d+#6xx5bm$4*sttPmLMb-1yLGUaSZMKa7D1Q2uPeG zxp~DrnTP0eYW6b~RcHe*&~(YmQL!o*yO--b*J+JGh9Z@1KX8`86a}95-uhLuzC5p^ zRwu}jVxkHN@cZf*`(IJ|DRyTBSBD+g?>@cLKHu;3dTV2XAYQ9_GNc!u_miUuFj*{y zWpH2rF zOE&}$fjJaef~?tz^Z^6pTXM^B6h%9j6ClXw03m1-)9|EFZ>|rVoPj_O5m?LpR^#-W z>|wz?O!V|jAZTiARC9G1W^*D6rYbkWvQrexK@i}H{oH<`*`r5~Ji!g=%NJrAaTS%V z`wCS>;E0>59R2B2c&N)p4)ukQJbeg90m%foABvs>DoA>Q^ZFS6nD07Dgn>aOxf9;v z15h`BY!Na1Xb;)7m~PbR#G1YPRFQeHo8n!MC)cZJc)JyDPB3o`J9> zCaPA@*Gu$iWc<4n9NGx_HQv1W1DGY?4LfvV)q|eg^Qn^~laZE><}vt0Jo(ARZq#)S zKTWRz7Fskx;Sgk!E?CLU%EiUV_%}Oyby9)yIhbGue?)Wi8gun)Rn5m@J;U1(TsH2; zl3$4@VFh%>CrdR$qZS_@A9m-1jI^}8oLQ4DP(awV?7xd6mU;e=^J5reyJoIL<)Zn3;lAV3}RO@1jB7kAlVB3VP-p&sU z%qITc*SFL4PEDtLP1=Q9?r9BvJy0+)d&b@s<4aw&xe;jfA4x0>F_7gO2ibX*qdCtV z9aSGbd|?;ijy>WXbJ-LFp$3hQMs7ThLtoJovEAEcWqe+&nj=HfF+qit!sg*XBNh11 zd;DDGt7lyd5Yl8gz~t;M;G3%26X4ieh4%_+;UFNqE(lzn?msJ>u-7^DowM_p22oxi z;X(iLOrf9@ko}L{rmn|P=7VA1@5wA`8zBaPGx>=y{roQ<5}fO0VDQLU-D*>yazg0qdMi30}F>G2-Vb&fl1B|_rvEwh3v=d*Ow=Gj~i zCu+~b)kQD7W#U73_mO?04D&~K zP_hCEMm8k`NJMYpd(V3waR@}}%D30AA4lm9?TKg(zvtG1D@xzkxIiPW_~P5h;C-vo zRa2%=AQtVpnZ{hdmCWs zZ+{$ND~}?5sVlLc_U)>wz_3{&_zuF@^p@d2Pb`iOcF2CD)pH+qM#dC4w$S*%)lRe3 zWtoHc7)Fv)InoAJSpAnuYP`HO;up(0)t{-V=8f&?LMoEti?7Ma$qO{zwO>6NBX>pI zE1E~>S#x3qjZfZ^VhsyBD-a8zEQasE$gSg}Wm;HZUvKZqaLpVzQnNBP#R}QQ7yW4r zo_GF1v?oI4ZHi+S4wlVLqL1&9%@A1uG%3`W$2#cHpQ*;*^Urw4{qraPjTAAvqy&Yt*saygS4V5P<1ufgm)T`K*%;YS6NGDB-hxL@;k`xwW!*4C=Dk|(*!;Ncw*utD#2xB5!_FLDiPhwlIvot>9;;XGk6%IU69O8?R>^#HCV zyFDhi$IJyWJi@u+6|d>JHiWM&?)IAyqoSnjQivD4Hg>nNLi71^dwZ3O={@~ReL313 za%-BZI-Yk|KSo7?S6LWWQ-x}_9G~MMF2R$}g;Pv8H7BRi3ttVhto5c&Dgh@M0hp^n 
z;zuMCG4Svpk%Be>_-N@0SdFOJasl{!KB80#4I-+mS@HV@xc&JDY_vh$7S#k$N~(Jf z2^AF|TBlf)qhkeKvyce&YL~uC)T7ZfT%IAkPSUF@EEZ+rpBc%e{O-NN!>RVbJ%?Z&3^?)|=c0&fIc zhd>NQx3nH%7ubeT@ypxf9Ct0je^35j_+P6o|KfjX30ONEOvbAG1bH>I6!-NNB7+0( zw1p{k;MF{5Bf)MUFZR1wUO(gYa-sVnb{!D*W8$1lR4R)=i=18UI_&|24JPGaL907@ zj&QmD9U|6f`oDvS^*cX>N>9V}Sio~3MT@z1@1pCECU2RPPTsAAbvu5lfXB#lWbL10 z9*5KN2?9zfQz>8dCc)6ByWHHxtK<2$h(_OcWTR?(^SMC-Sva(f5VafFoMaJ^0eK-+ zmZlSv=`=x48!gZ>sJ)Y@Kz^9aczUtuYOF3V=T$HLrN_ec!)4EB(bZL>7-k7;9u(aG zv-rvgwQ|whF=v~X^n9$wCXkqJs1n5-e9fmmz;uj&n%Ho{xMMlkaxP!+@Ox`&)^9J* zs3@p{&ts=&WuYydUbPQWfCW+{g}{e2R(vTvJq0;A9Q=rXNUIi>`we;wtYc!vkUtjU zwe|XWpE40S+R=M|1{&vLreh$Lphd0AL#2LgoL8N5hj!^+P<`_s9wWWyzt>Zs(m8?d z&~yo-0{@IFYTxDXE9B>{j&CIkQ%Aw+OhGV|OQh-K20BJaHi>akc1OzXJ+}~JQWb=x> z`mwPG=R8yltbil&MEtq32JYJN`YGQja(ciIR!S+!O`C_*2`|s2f!G;+z6n!L-7&2dpcbsPXuCBQ>?U^FC}!}jY{lx|1(Dted?I&| z@(c>E)-a(OHZd3$aRnEul<%cwLc70>9y_nr=|Jn)7XN$2^>6=OdfVWynGY@TA~;c?T=Tebh#PvNvQhc%2F~~k zQsEZ{oMi2~9AT{?tvWPT(6N2aZ4mQSKMx542t|#Fmy8`Ul7cW|b+orhKqUI&R(1owz^Dr{yoqD@{c*-r z5yUgH5+`Od1fDENex-9c!?%K$%i4wYIU`R_0z=#GMztfqPT^Lkaq%^WuW*z;3=jSi zAd(2Q4tDP1K#{WT29vzT>V2CJy3M=>Mn*2v?Re;k?G-6iqwR0cZ1(iK>X=FF4z`D2 z90_vj2IS+spa4=23=E9nT&)oxsQ}F`YlW%wj*#73`))WQX4Ow4?g=MvA`-BO=`HK$mad6xS5zbDPw@=IJD#wMn9>rw*(K~?Wa?kq5e6vhShi|s3wKYyxSKv4UE3W4> zTgq!qFEzKUtSa-~0*j2F2PrK-QIsVJ!~H?vlg~Xk`K#t)VmHfqEG;b!T~3fG&0ziW zMj90=mzO(IJnkOqgH6@K!i})jJ*TRThSeMyG&E(yPTbrC{w_$cW;3P5v2cg0tXmCD zD+H>}!D?#+!)xSKYGQI&<~hk=2AK6jt+l!GMHJs}^Zz6^P?P?kX=0`CUcy0gIy;;# zx5;hRE_2w?9Vfa<(KOEEax}8MOp&RkeT|HO-=BjE)E?%#sz#jUl4*XbELhJD`L?Nd z&)BJE9S5MrC1;62T=H^m5{+7#`RtaRX?PutVf?hHbT$se1M-Z5cz*ESH{E>u`L^kt^(&qL zVl-00ABGYg5`;%A7Zw+*%r-VfS*!cI0Q88C`q|R$s;Icw$o6HZ0gl>whYivMuqhmUgTWH0iKZ&6AcYaM5r4{ z{T|VjpUGBUDb{WV6i?l^*~k5WlJh*#qI{JZ6&DxaT;8m0U|?_~p~;Kw5Ll9N*`0VU z>)Y8bJ3(B>Azu=$m2rY|QC{8Qr)mSWT?k_AxDkENy=+?K~I{T%4><#SwBKum0Lc#kIU!L*s z(OZGN$+IH?58}boL1o!g_ zy`V8DopVKfp&^l}LKV?X@T*r|BXF^mxO{Xbx*|Auf!nnr8V1xqchUca7qrSj#p`qg z_7fmB)_lhn(ue~roRc8yof}-7R>R*q;K(v7sg_v&8N=gdW9h#GfHKi{KrFG|FX3EnM z`6M7Hg_S(w!OM_eTuo}iJ>*9&4-Qd+!0fnAKuYmgmQ^tB`w(@|CA{5Jtd84LE&VVt z)VC5Z3){ZDh3q~90qcY1oB*{VD{~_;S!l96xrvR3r?cy04a6h?%ULUyu=3*#X3%E+ zwO<804%F|5f(uJ)#n!mk&+_#$G9UhodjW)v7~sa?EPp?S23-gr&kme=cFx&a*_<2~ za#O5+A1_I3)6Bn!z{J7OBxx|%P4(%VzHs3Ju0R#6O_Gw5P}M z#m2_Ig6ZyX%;qTl>(oDk?G6TS2F7$4*$pbCz(zW`Sl1B*6W}p|G3aIYH||)o;w_{; zKCM+J2ZM`!t>B^X@hQ^4EUB#QDSE%Wckx?ycSD0IScqQNlOkp1)s@uLAPDTgaD&Qs zUT4xtxNvWNfuVoam?LK2G>{SyUA>Cq{P9sD#H>IU*2YZ*Jz{^9C`DhPKApKyx&q<{mq=9-UCiHf3 zX2xG(0N~};h6=8wr)Lgn>e!;FhiWRV9sPqzoAI*MR%U5W=N|Jxqg03d@Zc~iGEyK| z#HYSiGdwlx*6($R1JFw;&aoT_prLQyf2f!C5B46R8(yD}ja%>UqEKj?ilB2A ziVN$$EBJZ}CTo(C>_9(|s|cnTi5Nj3wBFTv^uZY@L;eF3cCL`v(op}9gS#q_lLY&2 z;SP|2mgnaSc6@z&G$$AU_U&JYcH^0Gb42*@v$$piMDpo{dP+Ya~DMbC6dc5lS z$?RDJW)F!+;JcE)hDpL07@O5|{!BkaX_ub;V*qcXx7jMSVaE^agSR5*$1YG=2k`ej z&o~Fxsn}Q%P>BJ^d%Fg-o)CJ{I%8vF1Ii))l%famVJV&vD@RQN9--cahEmSa@ONxV zZq#2O+k!|t2{wR2M;jq`C{>vNbbar9b4O6CHbJUlHaGV1Vo*r%)nel%8ss+QS`LyH z{1sNmTP(*!WKSOgn?}29#>cIhe*e>MTk`&t_fiuYj1W_yL4uCn$IrpvsR6o8fv`1;|laoHC5JtD~fWqUKx$Pf^-Sxkztop zA=ek=agc7u^~U<+z!Jn-Cw~9X&G$?j0v%t)y<6?oAURW#{Wrvmo#WvZDK&AwpyN9- zC=<8KEQe#Cu5OqXWWLs0WFd+W;n? 
z`x=0=`Y(+bm?~Vh_q;WQUV|DrH8WkMfUQm?8Ib^OT|zp^TeoO)f=rCZ;Ya|5Go>Sj z1{{zf3cinI0H77yxy!{}EM~p`>ja%tfAfrJ(tmMf#c``0?<;}1@bOJ#hi$bSV;DOc z!XFgnX#M>$2-4wj-mhPH^R7qB;R$>y)koW5G*&!z^G#h^6KoU>&)J5HHOitL7dDL* zII0|&PZtZFJTzt1)nS8Ep3?Y|v)c>ojy!SpU&hATU*y6_J2%B_UW+{fMZI^hRQFgw zfImZf#@rqj-ja^rbXhc|2LfIjWscwe`q!VoeIkL* zg(UY^+vG^}Sl^xqkBY@aoH`nuw{|3)Tyd|8JMnfWeq@y5@e z#a>)A{&%9C8Km3s-OzB}olB+qSvuB;Gq5iFJ6G=vg{CA7UH<;~|1A&i|K)Ln)~wk8 z7@jIC<(vdv(=Pogqkq0{HVn`yz_q2~Ri6-dy8IYIMZtDX4OY`M&tE2xzrIiEvd)_3 z+1J-^={P_;L6sJOf7&`~X=zz8i}$|>h$U~n^S#W-nEle&IIdO`1cCobj9WRl?0%Li zK!}ZvEs*eb2-LzUJqytbOjj}O_{VEr>0&b05z(=*w7leW1|kE5T>OLQa|YsvV#xkK z@O*-FzlT)wne#-0zVt!7><_hOA&c#=pN~DOK3bH*fc+R!>^P#@L0IhyVT`sX%YuUA zF!-vSF{e90UAYWiUrX=#TK861v2b?}4jfpUo?v0iIy$D4ctHC(D=V!uItcjm3rDwM zq6<4H3>=J5kz`nF3JWvy@@Ano3j&;%df-LB|411^6kwp?S+_7hpOKbUYIY~((@+m^FDfNtqYX(Y#Pm~NCL_j@#evsWakT?+?fWB&+d zP{^AhBGGAe?}>YbQ{ldj{5oSH$IyAl9lG-j5KiDxA<%H+-y?pWQMf(tg4%yYaw=Zd zj{X`H@5=yjTqhGXY8TrJ-skzJ&>}M>Y+i& zMMO|i03a9u7Jw2dC>c!MJ^BH3?c3n#^t!B4^75p9nZl>uVy+Pmz;o7bbX-8CMS*VAsN6K(x*J~>;uYB2{&~cnliMtqLiTviP%H$W#psO1}vP@;(q%=Ye+zu zq44mLM?Uws{chsz=(0$5MkSV@eLF`!(w2h*yOiV(+JuCJ?v|FtR5C7XITeLC#~EeV zTA7>c%k}kUxV2AjK%j>gv<@pFL+m8$ugoY-Q*KVqkkHU5=1_>JegFPCuICm^H9^@1 zl$pnZkVn%7=pPPjVI(-%VVFRHtG5~*A)P9lyompCx~SuWo=~-u6e7^h-X5?pp<#GgZz=wkl!w8W8>K0crCtc0gpk&mhWf{nPw7?u z8HJbDM?`}rW|-sOxAYL#RugC0@Vy+#@#&d-sw{tnuJy|%fJty8fU|YC&>AT0KYpm2 z&rZR??`!e=Krk(*uKr6_+Y!^r>nLdXQZMky_IhLpKK_)E9=b6ub2t`Z3C?BPD*0c> z!z}Di`X!71V>}W!Yu((|K6)bUrS@u|wUvgK_l?)KgRq>O-?4e`i3@|NB=9X0fezFK zaY1*jlHnVlv4o90o9G>IZB;IWKm+hZDQ5!gwU}HF;20ykHSlbMnt*-h=ESC!DJ^XV zGwF~C@+u)E+AQViua7MMdN7;#Aj2vt${dQzjx}>VF@Bf9c|90sfG)uRmsI zXQ!pv%oioHq*VyudcwpO_CGTgN|L4;T3V1w~j628l7aKWW(gdgic7LEUVY`MF zIDuaTHoHK+0*B7Wz$C_TyljM97Yzm3ux3Gd$O42UF*KdV0ENF9Ut(Ds)C+qX_`#+UIN~Wt&{fyk!>q%;nuc zssprAz7+ANe1S&MFbg)5FL6T2=$HTP|1rZ}L!Ar|QqxpAO? 
zjU9AfT=$ojrX8yg@#~GOg5=t}4{`#}3dTd7AcZkscBU?v*@E9l?S^iXT#wYA^j3gPCa67zt`J-jy%)K0F}R z_FDpXCt1`Z<>MFA2cAR7qSEA@(fzins;a$`L2!pcHVYd~WkSLtY++R9{^zK^m=~;GUiC&ogD8G;bM&-N6Vbi2 zqnsIfEUm8y-241a{tmG24nuA&1oKsGtu{g;jS*IWP`rW$P@33$<4w3l`$CsD_xT|Y z5(@(li}c|0W3EhbF<(nNrk|!7D4>p39jQkoIjkSfC_y5hq~Hd;<(L8T+;6eQJ^HuyB(6>kfD-T}O#R^sbscVYWu?{+x5EPmLMdlw zEknboza(@aE$P4zhFLGz6;Y0wmPdQ~hu%3{(y77isw;$6BR)IxiR2Uq&_)Ut`3m?m z-0Bc`DjEkE-$_954lVF^xho)S9X%p0 z`(T^7?$8x9Qj9OP7(G<JYNC6+q!3DYQcpg$1ZRJ&hJ)qnDT%7$D@?+1er<77`THoq2Z)sm(?r5z7xdS(8F# zEv-bbje^euLNn)PX24tu#J!1=)mtoKhpwm7VGrUzW@Sya7oR*uE=THvL~5j8+z2J} z-(>uek_q~C=G2kg*y6K2c>p1Uy`{I&OuU5M@d+g*C7>~68Atu5Jhf&?@H5_u5Z4BB z2ym9(`G`6?91?ujB4X`$FLEm|aEc*B!O*Z=l(Ew$7C=ybXZ|P7ldm!_>M}5Q01^Uu zvmjfQk|-H>`XHgBJ8TmlhV#(TtLka`dA#G<5?RN|0_gb%#=q zqO%w{?@Mlv!5x7PWepL9ehs4*yAXA8zwgJvfP)2rl=T#W)16Kwn+bmpM2>v$U1C*Di6lCx()z-&5Hp5jy z+t7I7{O3x30YCvlKN^Sw(D3=)i!N!s4%(gYaC-Dm0w%^MbDrhmw1oJDwpI`ucCwrz zM8ex>^$+z;TERcnHz!|&Z1F*M1Q@7`iTCNRl3+jb3~}7r#Dt<*jCPA}&83Y+VWg)= z1rSI7nfwP)zt7pmOPohMDSk)$K}^a8_lI}70uNEtWp;NwjrlFPL|MXem$oln{A?Vb zvBc2HCZ?@Tg09`E`R#&NAPV8Qt1%8#Y*l$_W!0`V&5MP_wl1K$bf}D_904o@?5;h5 zb2nV@5egNcG0+8YWPr`hQE;qr+(chLJuCIF3(wY!C%V*98pa_d&8vrB0{y1WK7)jo?B z6fvzYy{el6L==LIcOudYqP2j%xIMOOBQGx>;Is&Pu?R?y&~rek_J6|#uiaWzanNyJ zdb;6d3=Cbpd60hPdE0@GKuyi9yp~yz1)@9|Lf-lNOO)Y{5_ENG(PgVKYD2o584@-b zI(fCVFFXmsEAl%IaLi;&3+_w``O3`L9CATAF_rYUIhJ1N_SeU+SVZ`$mA*Z;-B@XR z5-Wi$ECiCm4z6wG$t)i(b7f`aA)XV6y6gSp!~ifg=72FR+#32MSvps-z`n;}#HDz{ zJQsT86`Q*Q#ezlm*b$>9Bu_yvHg311cBsmA`;O+4R@6hAD4v_NUs)C3pQMDH@-89} zze{^hP>WqdD`++r^bVJ`a)4R}nO|8N#Vu7Fah9UGHii-Q|7xIuGZiPX~rIX*PLKfq0v@7@jzG;=%jcun?|wdgC z00`2AlT+o|pEJ-k{s|ao2+`BlR@7gZ{MrJ@i!jJm6_ea*b-px&486<7mUEh37hvl6 zmhu$&K*^_mb4X9)_;h*9)`?J|jC);AmZ?=Pj~PszCfFoyUm8ByKFE$wHsOjAFB)gw zt+ZYnt5nG=A2M=yKz8HL%TMPkxz9CJ?jlNURYLp5g&L@|n;%|$`*icr1(nYyXgLc> zTW$m09CP;?Zoy_Xj8#KYJO;{M0K76qv^q`N zlw%xRi5{(xGAMhJjFeSWWwNwXVRQAbj*ANllCc(t6t1W868=yBR7lNGU|4D=$H2le z2Zd+?uloWu^G48jFCfj9CgyX{vjGyS^hxsR{#rF4*T=^{bhE<@&^E#s4Ila>5#e+w zanbQ3^7~P5hp~|=2({(q2JA{&-@G|%{4ilf(&S{s(A4)t@V8cu|Hb7NWuJiJXs?5V zUQG9eUG_e?Lfw$cK`$vgyQUBITjHp)k|*9z7(yWR zs&ven+(J;T<#yZ|=-!Yg^=^}4`edSbb~aQ!KjIUVd#S1UUQHt-t(za0 zomEXO^B0hWERR~njVQgpJbx~nsg)(?w0flDouBXN*#`LwH}lLbEr$v%TLTkQSkx+_dg`ywaJUZwbr+fDb&vB(S`% zG`+_OJ2#pJE!x#nk5xtnQ2sIp@;Ex!z4df2D{uAsoST|jkzkQ@0Fd(WaeuF~M@O^m zPp9R*0(>vSmjHw_Em8BsH86+pDnDW_cS!&h$`zNekP0d{!+|>v5 zHE(do#$5NN(~Z8*G;ajitk`|FXlKw70qH1Mh6UoP|GkDRSKNk6&Ul+k1_@uz1 z{`=FQ7Y)9z1!A`krmCO&_+x71Jjdn6vbicxxep`OsVQ(&fG@+MG(~mwDH1gAq_#l2 z;Wg!dXmM3Jv!8!`6o^Ly1A2yrtwr_|9^&zBY9*tFPJRKq4(<7;N`eoC6T9ZEcm?Cu z=I7ZQ+Iv!1bI75t8_ILfm-*oJviZCH(2)Hy;_IyMrK8C&bq%cCFF7N#tM8p`2E@@N zOn#hl%j@Wn0L%YaM~A;xUj7!uDhfzXSn;-Ih20sGre6w(<0;qPfN(M+qqi6sL4*Xf zPl_>`jrMny5+&5hqJSfkYQP>ncSij-6JFAYc=<6%Jb71 zk5MZHL$^V}8`%6v+k4(bx4&gZR290os%jxC*-|jDH40eHyQ$b&8W9ObE$M82MBVonQdpz6c`g|ZsrhUP_h021Lc#G;Sre>iAW0S zxw*Oaj?kRqV$r9u_@wv%Tdls^%{dV-K%pCXyN=DskgR<$DE@Kj{o$$E9Lx*nh_iC- z?hkp|RfaCfrBYB*L`O3@KLc^~R2|*&*Y9uN+JStVippZTVMWu>0>cHaq*91Zccr&wj(B!#b?u^fQXkhR=5$k}%&Me7c3H zGjpwh(aNkVN`lf5|pF}@IkZLJ6 z3O{ZfS#x1q9xGB`SqE7vcnrk3&ka#BAo<<7}(pi6|~>#KaEL<#VLkZu5j*|wK)!vg68nJ(;EJ2 znG}|!l!-h3Y!@yZM9Ho$Qz!;?_q5iPs3Od5EG;Pq>MdFgl_c0=g^!M%ygsL-%q0d0 zDKLh1&b(&bZv8U+iuD|#bqa5LEV1;l2&X%aZ1?Ow*}9( zD=`9&v{6od;Sb`qCrTtE@AzQ}3=er1n<$y?>nvJ3AU4N_x zZI(USYB^PVkM_8Fjl96R)nOXhE|b#G*nq^Ba;^T9s_%Q(+fU%@QsEWiU>qDu0`AOA zxtuK-Aac05o%6HZ-)jo`@F@bD5@&Z$t->yZvJ(_|q>^_-TLlI()xMa7g~?gWU<;G? 
z3*qDAi=bJFYZZs)3bwm-mFim6Cw+{pr+fQzaU+_e+#Z?-vd-v6ifrZXo9q|4gMEGO zkii-(-;xZ{f%R}LDjtdq=~R=6(pD445Gja;g>6(-oo9lY^+o{#0`WSxM~@-v0gt4Wz$D_mfpOoO26*%n^r# zDO|X)KQc0MI{&y+S7(+!7)@CM#oz3PD4H}s-6>EkeEkZODqCYuU1dp9MrP(%W22*l zGCLn$A*}dqK}2}{N>xUI>FGWRa5-vfet}zcJ7anu+Uy3j$fBYI_?V|qE9EfrWw&^K zM-)p=t`9goKJ`U#@2^Zs5{v)L$^<8%hQ`{;2v?r<$zF;p=iR%OJ41f%SaV)0UwJbD zKf&DE&5_3o3KEm8(b%-(|;@+5mKvotD(E70XUJp9ZXsFA7F%k0*fZSUEL|r@w zta1zccuPwxCJJtt(YR4BmqkU$D5PLxZ%17vrnv$(=#CgMt|v#=xtr>QQHjKH&24Q9 z%gdNpc;0@bdm^jzxiWPwaXcU3c1gs*a$6?#;OmQ#_4ptq@DgGAh`dOk0m;}_=Sova zl$x5H;dQs-QNzXFr_0~^`fd*mF>uySD=4H?Skf`xxS`HObh_@IVqu9j!pECLFb8x( zh$=&*fgPAJ`-eoC(NMx|8QR}O z=lKZjHO4o0U!?GpS4nw>&IUN#$GmX1{adxNo12^0XAnlEmJXHh-{g7OSd__6C7C_2 zb~4}dp*#1~w^|4o@Qu@{vT1pFoA4#0*U=sqRb)}mj=;piXl`|Ea!vbnV%50)`K(_V zQ0t9xPAA=>ouy?@)RI2H+@(YfE(2Idjc$6&NGVm$<+B!y42O`|SbU+| z{r&P+ul}IE`a{S;XUWe=)Nmw(ky~2&gBQK|aPU`s;RLJF(hE9|wwNBT#Y?0FJ&B%6 z!iiW=0jyyCmpVKID zkI#CmcTeaVG@2T4XoG1HK`C!!WMvO>-hdoY4HFN~+AJrmSrwvqURH=EFY?(hR+wXO ztCd)>nL+e1+YM|7$_+YNdP9;(65Xp{SuoGN12Mra(nh-SxC&6aNlHdPLLMI2?Q%oSeOaSB%<4Fm%tBe!5oaq&^9 zo(+ckS+_4adTUkDqx_71HInKlfz@%Q&cbzZC!4k=Q~L=Yx?gYHXa9A-e$|NGO{^j% zDYx=M1gB=+GOa#qajqcR0HCny9rpxHocUIJ}+XiYe8 zZ_E*O2pgjvDt>sr92iSG2N@Zo0fOU(?D~6xCg1$Xi-nG@sy8kGK~AI0Y9&280B0mZ zJ^v5xstePVmy+OJ!~Omien4Hn<4_6P_FtX1tcdlTv{Ee4Mc_(PAjNFJ1i z^|lj-gLZ`jfmGFSp+G`WM?^lc-(hOc0{g0NHH=#DbSf2t@F!h3OO1={>P^p!L45Z_Ne~qy;@6|6zqy>>*-l$_Xkgr(C|=q znko%e;?SdQA1t!&fi08wlk9VJc@AgFoorYIjx0@&+ z!wF~%av=IqnBkM2dy^l(UY4UP!c!^Ru>lf`Plkn1vEkm z4dKWzfb_}9qZ-@rmMP}#j-C}HV%^Ba_i+8Z6XF$yS18x&zqoW2CN!56}&Xv1*`AP(daI%LVi2T0%9kH+=1>zu~$vuA67rB`}vWt_?C;G<) zmnnC*w(J)kcM^2L5_Bbp7GKUCcxC6N7tcsH z#7189`R)(f_L=DUT&*B)TQF*AZg+`&cMi&ANj_%dP=jrA}8mshcC|PSD~J5s&8zlZ|wLUp9axN zPP&Ezig-*Nf!owMZg~3+%rGV>RN^3n6IEo>7h1Xz3f~s2aB#Hv#}kEIw%JZXl)Ody z6jI1mI+8nLpQ2M$Z8qH&F z7oJD71-l1AjR5tRbzw`^s`=O6YCPB=xojuudCyOVd76%v2y~78apqNHS7P+-!^}E1 z-?sMl=WGSGRwUWCOvUK0u^?lb~aMG|I#Sbin?BEbO<9`B6C~tX#`oJ%l#2H$mH+g3Qlus}Bu%BtL zT2VeIG+ZDY`|U(Szh@Ha^E!V}#q0_Vg6QT@)|qe&|L39j^^2X=mEa3@R)F`RE&KbA zomv09L0k6wzxnme6FQgw1D~Xb5C8u^9`bL|ZRWo{UR}HK(@XR3zk2@g|K8Kw44=n! zD!+wfj+-06E8^czRCBfecdGZVzsf&jb23a|Dcr4J&Ps zuH?N6|JUzFjq`(NsNCtR9(9E zNVg+0*J|himFabB8$G=gIk&v1qKvJb9eo3%1GC-O$NZ{zUBaaH;q)9IK6%#{@y4t! 
zFKp44f3^8O(@K2l`~jg9V6dB~>yA{`*Ucm&1bRjuudcFak$eI{WLh5Yw1lp{{{H-2 zVQ#MdKAjLnzGIwoS#_3#raDUzVf9f9UQ9HFHSbs#4V{T<_0GJfM5a6kHX#KCy`^P) zb92vvRX`Qq?>7d&F(Q3oQ9-7Cf-rJ^l1yM7H(X-Y^?e`8 zgPfd_lFwgjZC0n>d(MA(Ohx>hS|2Q# zFIwM6sp;8&CP+(45|WVDH3Ng zLT1iQ4pZ+$wh3B*Qb=}wd;o- z%beky+9e9Lb2z6@L+|WQc)rOs>zv-)p#;<`Z_It$+~tFz4?YWbZRyJR;K|s|aOm8o zrETf#^c(|u=tPCi^w*0z)gy0&&klJRnM*a#IpTrGLHr5#C)%%kJUk_iO-unfbW$+c)s+8gW&z`@>zXAvmRd`2(&u65hN`T4Uyfnf`AUpe~X zDk_fR;ww))cX!iSqpO($dGN})T`VZdVGVEJy?Ei0gm^fAm)m1-7s;tfOB0ciMMs84 zo6lvbo7$~&{dM&yLY;<-!i9~^lQv6|%Simw{azsXyFa*r2V4705wibD!t>D6QXmS8*d9oYRsz3P;Q zUwU1w@X+SbQEhnmXU%|_@1Me0)%fhcbCL&WlPFic(C=C5Wpw9WnVx?-WjJ~CibAi_3ZSlCTXP3^_>i#&Eq+Mfp;+Bzjzcvxcv znwerWzVvKUe)FT=DN}!-BVmtRN|nts*lBV1 zA%d~x_C;rL>y^xHJ}p!<5VRvh3QX!Eb8?;qKIt79iHcKL?^BoV`Ce3k7uyB*cwzKkQ_?31{N_h8yifY%anUvtu(6F zM39*7-8?DjgA&Wr(<|Q*g}L~Ufu$fP4?(r8j zt+=$#F}2z0C2-jA4fBDARZM!*g8Or^f}D)Z()>IL5fvhkM?qmA^lBLHLQz0*xo6v# zFI6ci#kukN!IKDEKag!OGc#Kkf>@$MD(engjRqow1pA=D;}frYp5$Gpy|_lL(b3Tg z=O4JZxG^y?J1ri89;;Q2;`CP?9PF=vY|U+>S{yC~B+ibX2F0F8rhLHFV4ZWtm6d65 z@>*~paW3*YAB_4Y=|i5N=XvFB9Lp`DHcjr1n9`SQt(155R*hgTf=EltK;f;E7hRYx zH)1Tp!Nm+NdytTAhm$bAl;#Yl> zcm+}x;uHlr^cyM>`4JHS0WP=U*Ee-Fb-y0}jD&#VD#g{8bVM3e4#vT#eJ4S`ma#G8 z!A&_v1+x+89BnI#;6@8}slD=Ky=T_o#*Hor@ao~@KKxPCPR^al%^DC8ca>V`=~#*` z%^%b*+bqQ&wZ3nk&x#BRn)Z*8w75T%6v1(FXnVT)9dXyBrM~_a1H*SZA?Jmy%1b9l zDbD;mt>#bDeccw7Mb`7@T#KdWs=N3cf zM{u$U4qB#4^*d*^;J%Uc1EbE9z{OT=6LRkCkk(3HEK$*xs{I2+XhR-qS%`E8r*4dn z52fborhwqHQ*bOUDoPUaI_QlhM5Lt?{N~3uBcQa+$+4RlcWd|~6K>!wGwSe}4xF5P z+8|AqZuVDG@t*H(&L{5Bpejn26v`j7s~I`~Fp;C+5O;T2q>+jbfYrTq0$YK(p^KSNDGKam(tyx0@5YT&^Uy2Hw@o3 zYrXILtK-{yAN!9z$NVM8#B)FQeO>2u2GN~?j0&@fou#^Hj`S;+FIiUupcl?KB2OWr zubEi;6j}|4(c<__fNCTg8L0faS}>WHZ#8;;1Z1%rbjD6jkrtyREh)AZvXjQh{qpkI zx)nChhPW>%4(~=}=M>P;gLP4>N{g|@A{M&X*oOGoUvd*LtvyKxu2@lt$rH}z&> zuczzCU$$(OE=T1X2D^KiUhn@{=#6heZw=>bXen!ddvWu8%26kA`Fn$C(|XK3k%u(A zetwEg?z-1-ZbTU2z{Dh4Ob=op^n3<=5!=_{K!(5RH)-;=#8D5!WS`_8H>`JJ<5* zhVzhAT+Aq5s?M8Q1_p=27Gyy>=0sQ5&kf%{HjWQIxRu9|t<1Ve1&JS^Tg4BZ7&jYH zkdHwa__sAQ=%E)%pEii5@u|O=Q|lEj=V&nZe{to?30Pzb@bUFwmsE*MybaV{Fzzxn zwe47qUfA<`f3eb!2_F^?ZgO&RQ%`yV=SEq2dTUR5S&U|#m)BWvyhaz}$)RigVgHV{ zc0!By!(!u}_()TBWl$?HDzm7SxfR;zAq++bde_#0RkDT4g`pU;N$2wcm5o*unfZK*6y8!))*qnxYOfUy)j z-Lr9+_QMl#id^iL=fDWj-5IwWFFcA~bo}#%yW;BV>adljg{>zu<<#fT?BH|JGBUEO zbNjC~a>1;5JJlRfmXfmD*gQKu6ddqTm%3)4(m-fzK%keQ)XtmOfO#XlYqHHy(f>BLP~yq zZ%a!HuX5p=o#T=7Ejj@-2Mx`k=lP}%-Qv*Ft{cYWvAPP+Ncoh@WRINSF2Yn}{6X=& z$IiH?WW7r@V0XeT2y^Va5+9jGU@-Q2F)(PRHIDF0bxJ(`WZ@nNJt0EmP4Z z^?xpcQH4Y4b=FE_Q}rUqY71&J8d{#G^w@leA1{EM-A*<0AGjUCq9K8xT$@sO`SLkJ zJ=e!CjZP-mKZuG7XxUqh;YmrK^Lp|!ez%qIJ%s)L+qVWwOMqp;1ZdzK8l{Duqxz$@ zUJC>_0Aw4rJNU?cUt0;V={vHEG=({gDa;-3T>UeaCtn|}0SF&Y-t;5ZY-^`T+8znN z;NZEPBmaT)h)5+>~#&I?E$kmwcZHI^)cYW?d z?;NS;?l&HFg6|3pvmQO7;-dw}0%q-^ubG)B=WfNGpq29No$F%){Nv+e{%znH3!BTT z?_5^L@_6}%vXb)2@evJ;%uU?u*0#1|g=Q<`RvzCkH-a3ZtidcNFR#*rxwcFh|JDqT zeha4+Wo2#c*t57FX{u^aB-1scDCtg z#{fZk(K&a%cBRLGrK7%RSYl#t)s1Xgf<@SQ?d-NeRz}fLfeN^SDUG3bsS|lc{Knq? 
zPh={>K(fs4cEf|G|9FQ%K-AEeFpDa`m#<&H$2#~C5fVaL8+1V*QD3$5MLbsRU%vFO zueVZHFUpPuwnmzj*A<$OHKTGur(sqHR3MqYmY0-dP1dh7NILA2%Ya5ouvI9gS^_V)RcUMBt-;tHNS5z$@zJ9i!(?B?Vu z5E9;*pC`T*s<}=?gimqr9u48uFMBtD=EHUF^Fc34(&jM+Xc$wr!XYDL!wK zb-LXQxCXj@g*C$1JNaWL)E2QWJd8V*$|}Z-K0zxLjnc&VHQU?0;(SusLv^R&JD3`>CzKp`)}Qaxx$7{gvTdAWc11M5J-L4s2d}%kojU0VXCY>e+Jm@IVgm<}Ib zk0p-a;SCW z$Eh`KDUr@C$htPzxvGPKk9~d!U?NTEf43 zHz7J&76ICd)snV|g$O*S4ZX18GR|^BqKKO90phjQACDiu)CmkJn%}vT@)$RHWJP59 zo1ZP>pd2`S1iCMG6?g0Fp;i8cQesyH~dz&^_1VAs0V z{kzTWaa0kRP;G4l-gbeu$Gq>W%nz|rz$IeQu3$I6?`D6H)*h2fw=y?ZVbbnKwXAaB zxij<_hfL7+@DO$dzl_RE@EWs1Rq4&FCknQT$#bk5JR;g-tv$U+W$WPZ*n@*GQV%QQoNe|qXjEw|M~%i_-2#nTA`%K-l87P92{;o;%2_sB>{zF#&3?L$!K z##Hy%&e9@ccRo_5H#(BrOu4@x9X5MzjLr0-j%JN@w4ddK1ne(!+eFfpT2C9E9ymDw zMC=uLL=^cw+7GfXL7D9x~seS`6I&Xs8r)z z{%z9kC*FtcHqErD6QAQII2C}XplpY)Htf9vxB&^mF~-il^x(A!>%n>QeAzLJc%?|w{*`EF4JTn#L`I_&~j;# zaoftr@g*e&nSp~%?lbr>cz}B9{;i72N*|xg#V$6$YfD?t_jH*&9KAxx6YqHcX(8?G zd0%B!B?j{~*Wme%4&Oc)Wa^fbCly)+o!-C`(Zsll~ zHrC)v%fz#znkM7@EYqgB{jYG~7Qbl%!e{=-%E7w8SDxSER*zhaK8q!1=41hrDqM;N z0rZ;h<^O>3L{?qh&ri3$y|_F z`Spo*k}}QofxEjtgDCLDHZJt%?-sdWO^Y9j-!iYlqBD>ATxTQX{^P58*Qc+mY27P4 z=J!;?e;8Pdef|FSq?*4H_|t>#rm2ngcROR2)pw4Cg)izAFL9(ruiA#)kXbe?-c|ie zm00Q8Svw(Q@GA4iP`7uyk|g5PhNbT0ci}ZFU)oz&-Q8=21ciVb3h-We2ywYK-GPLv z!(3XKxC7>;*L{EaRK|t(LMnKKX43FwRRae@L{)hjM|GWT?o(bXf6(EuYPP-x$4u zN2^_9-HbxDPl_%q&FWDX**G|$>hyQE^wU{%s*GHycZhMBcrEH;%ncNsK_UiW+{0S+ z5te#h5I)=Dq?SGaZxP67++18_t=~}o+rUI?slVOIRWmtx@u^y!Zb|9)zacv&;{U{| zF0wl2=H?1lQBrVtB|*0tEN)PcWF`J$7==1IKieNc5RwvXW78{ntsBu>9tS$- zg_dikRQw*N1{PYA?upIMYK59o9^ANj^LR(cV}o+WYIZ*A^ntdPmYr2~mg{IT4T1mB zac~X(v+(T()dkR|1MFgF7kHGXSA4~eUMe<>{Z%Xr%PVO)^1DRBRvs}&A3CjYygZ_K z@Saok70n0!`t`aZrnBiJI%pJQi|mlV&o96< z_4X1ZuzaXY5u#RPKbtd0L#2*|9_2hVG^%NtKX~J%)L5FBbdmGRD$@`^=;;@bsh>d` z&8%Z5kw2iJ8aC?S|501%?#SlyWfhy2W3j&uw#|aB_%PCYgl%W43JNDJ;~UF+5uRP& zG8O2mI}H%_9B0aAEF!sfd+{H3 zo8f(gguoIg10d%+u@1;}j6k7-_|a_WA~>~izF4o4uP%uI!-O1#j{?z)qO(g&M-9Fk zy!`5<_DdTZ5Jn_8Ra@_X`tmI&)M)Tcsk)xa!f5acNY@mN6fMO{jvzPDe+_P66Eh&E%any3__XHq`0v0i3XaBA@b7P=^a{ zW~N~#ZKj|gZLV3;eo=odwBC7E>W$=4$&vy!)iab!6<~FL9*z**XD}`MXw1jNu=SLHE{~R$B@pMfy!Xvfr;2sul8jIuG`yo)~m|y8@kTKnONI zuh-1VipOsFF~_Uz~_I_*{F=ne7oD>pi9cU_xc@B%Rw09*mrmXnFb-9&CF zv`gnxi)EaET_4SImG104V}A~;*Edvnz8@|Z&rby2hi@i+^9>{tEg(s(~x^jbs zlY>)vNm*5=0qkNeRuUkVDkkIhqKp87IwWod-@FO9d9P6M3N+Gie!~Q+Zh`qe(+PRX zzzPMpJ72iq%uq-E6cUI%&oPOD!d7d`N}8GxewfXZX`#Z`eE1;7`n6bp7(QQIO_4YE zC7SLZWfO5VDmw1W^a2N~N4Mp~%ovIo1G5yfma~@Men$?NdVSRH!o|H#vd;w0mqSWi ztFE7NL@?9Yi5rvW*7?t0rsLkg0a45R&Ji6xZ^*2&LvFUx2On?HK-8b_1=-Zfm{L8q zc|;c4-ODOyx9lb=dJ)M1lLE2p9oH>n^c^S?*72M;DL$2l#09kh2Am-1K}XM%sKEsc zlzSq4JrBu7>J&nF|VQ+<}Ag|8bFp`)2efssrV;R%n;zYl%ra*tA!p0KUg&$}ms zDCG*+^m_rFQ!6^!K331K`;L-Iw(f?9Ry;cs(+^;sN(%_SF80dKPT+UJ_P7xr#XLRF zyUEHerz+rxgIMKsnV^uwd%#(Tclr4<0jp-kNE;w^19xCNn_d1- zEp>HaLsLOD!P`Pq>>ss!E56k|9x?Y$dE20dv8@Ee$cjTmkd5I!8^e=aWw_hUF~Tec zgm-S{$H$XbH#ArNeP~ep1muZZ;$%5kr(QWe7J`)hm zuNxUnw^wv_$9r!u`?yS~V6_Q{n8V@I`XcT8^R;@+NrR{kejgCs&!HnbT{`#Z6*B+vlL0&?;WZIWK!X&+d<~^^DX}NvEL#2?qXMqasE@ zs;(O0>b4AssN34(M$wkthB3+2Kkr<^pmMUH90kBvMQ8fE`wzC8T6RE$!K%GRM{(P) z-MdG?nHOldD6YP7OGJdy5xp9XQ*LQ!jgQjH@64j^W&2oO9*0u{B%<{6X$WjPHxTuK zqx;U~eGu2lT4v;qa`V!B!j{~w;AU<9PRk_{5y$>?IVWhWp?vJ&#HJC2_aC}YGZ(0iI>y`S$!kA;SFSYHGBfc@n-2ew#@)YpYNQUALpMRCMKSp5^aP= zLN(Q{QRa5-0R8cw#e`|ln-X20U&*hPMJU{+%U7%ZtQVD&6X%Y^v*oJ%ZDiH^JqWpJ1G#=?=H`Hm zL?V$W6iSVtd|`0tQHgr)lly-DK~?T2mX$9_8LcRfU?fH-e>3QGS4_;ixLy^k`?qa( zx_e7uc#k?UX&mU(pT8~bpOI3rds^9v=R_W3b0fc3mvpmg%wJOBZ%V?n#iQ_L&;m}` z^5MHwRNGO$5@UB)f1jPrh_6*2I>jCJDvF4VyxC*9ytw!T)`Uu49I_YLnRbJ0B%e7y 
z1NNP1SJkuLj-K|89vhob2-xxQ@sWwY$4Yn|WyVsmvxv?2=Rbsdys!5vD?NUWce-nccXk>K51N+R<1glwHj z&;8v?(l=QjvOv-h;L9a&L1zPd=N1-56h~Z6+Q1-V^xn*KZ`w;o-sba3jlXIp-v5r0 zC>-5KEjk$*&nqe_3J3^v$47RsbcIDnchB4K+aCTH_>1_yvyfhiY?G4i09?7&eJ(Sz zeRXlF|MT{{J_bsX2+(OC7YARV^h8vG3r}@g+OY2Z$_cyeFOsWYz?nx)Q*iTM@a`@S1__UYS;!%g2J%t=*zwqkRm|!y zJdfjHlNQxKzoF6 zS(tdX+sg7P6FOj!xv{l=mdi))P#ERU*nH&?{7IY+FSU8gz4Lt$CrM;(P6kbq)AaP& zTcA)%$=6Q~57UL;yoT{#`vL06GBySzJ=#*z=8cJ}-#q==>Z;{t$BFV^$;wowTsgIV`S*zzK7%6SU+>NpBQ%?VMf#v5GPhw zzpx`Cw%7oXTGgW>t6G6|zhI^ZgjGTFQ);p!_%{{Kcl)gIUOMUnW$7kb;6g?<;l_+Aaucqobkm(~H739SQ91&Q{>o zlpud#^K>{gVJag-0^5KNF_LX>Z!-(-QR^OULMEV<6)V*%hA*mk*q{^ugAnebqMGI( zfjyN-2+l7};nFlc1Gm#bujT3aX6d`Y@N13nLrEL(*oXSRC9qOXK5pUHuV;u6%L&eC zI`H~d;<|E^f?cFkW+8%Gz}wH%bj5N)hSH8b&zm4kMtkqab$a?%JP`~h+VYOZ=+I=Y z;&8P@s%!1k_G&4=SFrrm4v&%Fzy*a)!raYl_zPDWIP`iPoRv?zpEpn!AqEEqiVX*X z-@ktr2ElXndMmNL^eZX{8xvcr(7EFp+5MD}{s>=e7G@StTlrBqXI8>`0tSP$G6D9C z@cAkpjBPAQEbBC2fED7oJ0Nntm5;}Lzm{@+s;Ost@L!4vm64jgmJ<+bBT`7aIy(DJ zI4M1MQETH)C?3gDb!M!y>sU+`&$RQ#a4s&oqgFIzesFlOc5mV2#-6C~Cx+!O8+Em@ z7N0s9I=Sl(iZ41@P8)g!1LGi1*=BhTWmPBKaWn8pO{TwPVgAnQ+3JNzr?CM$-O{Mj znnq}-_gNytRa9-;B+_-)PEKc<163+eajTyagzRQJ=2sAd`RhDAH5Rp@Rv=njT$=<| zLQ9A&Tp_hLMIzqtwQloDPJu4=&%V?=h{hd^A9tGeSn&eZ{euIBeBBM>E2EnQ+WE$wA~4K{WqZ6R zGWFQ<=y&LC@sPORCL3(RHMym0hlK-BC%2xxc45G_wU!eV&k=BEjS^Xlzs5}L@9wU8 zoQDmh_rQE);_7evsw(wnm>)XUpZjk;Yo->e4ZCc7AyLS#VUukD56?3a<(Z|AA-gxS zSv277|GOh~1@Gu@V(JWXK~9XWy3nk(OqOi8MLr-X;G-&=RS&Djet--p%)t);q=PJ+io~$gRsk_K@eLVFANy?gW13mw6IMW-(2gXj-gJKJTP<={=#Uw z@*75mG)WF-%DpjsqAUji{=s84HtzO{G+3lPo5`bm?-if0o%3mI`kYX0_TV>8PqAmjKt-yfb-y0(aaQ-MMnO!-*Uj7NH zpn%{wLB6RTIZd7EPvOy8YHD0YbEA`5cSQJv*?#JzzA`j2BDzBu`y@y zcJpQ|oGlyFtsnexPi#ure1$4}OaO>trI2hz!W7uo-16LNswly4->rz1HU9}vp^%-I zo;^isAmPQl8CPRK*pQdUvVU_Q`Y7h^+~g2?mCDpgZw*aHzzYAZ8Oa5XW{N4iBv|tk zfVtz>9IM}CF7HN9$asQ*n!t*gC;Iln8GX!*CUR-@?p=he&1L&4fLKB;x1!`X&~LhC zCyxfhK-CKxeoyzky%Ucqh$R-GVR1w#W}CH!9}o&OJ622$~#S6pe_4TI+E~SXPvx7Bh9dn_xWlUV$1UyeHpJopk8P6M#8P*pf^z{ca!_r%k z5G@iOYQsa_6yT`{u00x`U7`l2r>*pEj`!tdt0-wH>ARdgM%yK?ta1jS?&3nGrf{BU zOUejYOL%yjEA@2xvO>m}h{1c_V#zKHoq<_HrodY6`Iv1BaTwy_tx-d}t{1B(dZM@} zttTKm!ZOvkD~`)u{nwX)^xjOdC{|&Zaq=uEa4rm-IFq_N3kgmDT@&lp%joxap7vYs z3Ui@HL!Ux)!O|n!Dp*lbxmz-JjrriUex7S>o!qys&Q1}V<&Rs{dyy)z=ule;4g=Rs>k}r~Cl-T@@jYLwRUOaF+n*y0u)`EeY*@EGTl7PL zzR4ryqT*uUZ!2e?P)Z4&Aid-1#z8+5DN<_`*9_@KV(+7iKxq?Y^2hXJ+@`jP3Eaz~ zNbxJxY@hhPJ^8*#g7!|O(16sNq2}f%m;a&jV*2Oii{VpOgUyvoAl<0f0~H6G!kr5< zg#+W`8j3)BuRYucOceJe)M70^9lZYu@5Es{c2cx}YqxuDH|K*Abqi56Z@)eEr{Af4 zKw+SorL)8A%tcZsJ!rbOjwc-dAEE^K9YF>V59s1HKTpla%sWt-TD$?30Q~#EA8sf2 zfqAI^B710=ZVz$rp9KUn;{iN<|CwYD|LKrL^u@nSJpTS0-vjVQ%n$B3O9# z|L)H;Ae?4ij)oT4{8O8u>mWPHJVKf;YmZgQ{Pp-gctFJ<&@5p2?_UYG;Q9}NiJ(s% z@l(|F@!w>&|pdI%hPd%pKjISMgvdBGRg}--9aSzlt_46!62_qsUB2 zL4@W4U@=wUVV1%#CnpEz4Hlj5r6TZOU0VYuQQXUc%L4rTKpuoa@}GD;T|e3~t_Fz} zJN5n}S6QGi{_mUR@7H%$g#}3hxUuPS93j>5VN)-d>`_{x0lX{zY<%nGakl`djp>kB zs5T&A6K}J)ynMif6a2V@HR$70E zVW$E+JK$k8xUwhp+#Lo_P!54Ymtrjg@5LihG^`@L=c1RYaI z{PV!YKtvK)oH6I`fL=l1tNgRP(hjk>k9 zuC?`7&$rVF^80G+zC+69)%uIdo}M~+2&S?dI5@yl;B(l<*3q)dAe7Vy4I;Lxp4)uyLbHXnOgfIvM9oOx+zOdQp>b2aY5uHAR=uWzu@ z;;~Kd%T(;2-eH9kk4f6TFHW9h`95eVx#n}i+P~~T8o&SgsFZ`2n8URAZ#~nDz+=Af-t?uieV4+L~(9KR~4n(~Ej6Z+gR6E}otW+&&))J3jlDJ^3atddu0iG7=;L?&ULcbA%)$_sdF4 zQ;J}woF7si6l9&0kL0{2yd@HQ*3>*}M{d4qambCp6~%306ZB~{K7^A6BH@poQ8b}3 zO;OZ74}AO{y#7=0ovKMSuiJXB(K%ZKy+=|$7SF1qhJAeo|Jt=1(6rDb)lB>mrUk{c zpG5k=+L68ItS-$Sv;ox#9{ug@52>(u;=#gd+=5gx>ub*uKpEbsKY=)39t~k&1H*Ua zZ=(Kdu==i=<{2z`NA!xQcm<*XD*3U!U2ZT(qo)_w0q*Q_jJSvtpgO>VVPWY9E+7<{ zH}0?m{8Ns%sYOgo#vx#+{I)(ZDCh>Dy>X@XmOooXKU|HU^Dp|d3d;zX4gmRaH)w&Lm=44 
z#Hd3Z0ugflKPs!L%2F!6WK#o-fB8^4g@DQw+@rH{K+;8?dQrG;3M68HmqkQGRH;7~ z7hja%s*JdC*w)?D(3J(h4f`prC^6?CPwdF}FSC;J`p|h-?Kqe{phg zSy^6cs>3dYy6}qyaH<{+g3pA;7FbSBck$;JiEIITfez-Z zy_Z?ILxFE3c>nLM=)$yyBnJVHg{pDfNJn?~5IkF37rV=+$it;vvnKaBSXerMIS!p$U!;x zZ}gtIC$VD3igh@*zLZ9^Wo%lE^W5$OP@_AJPl%1-?Re_IQl4g|$M)e}^sz>{#h1?7 zj?PD_XbY`voh%!}2C*$Z&yD<8}+49fOOE`;Tr!_cM$-WjK@k-9(ct2U8tRKr6F*SeAr%m>gEnlaOFE`@J;+U z)mX)qJ?dxfE{YD%9)X$J>!f75=H{`{QEJ6Y8xCOLHg3 zJI(qUk6ssA^nYA}@iWhW)d2cK>n3GQXee3hvK6d?O>5W^nEz<_K598u6WmL=o&F%P zS6)_jeQl}MZ7D|b?CZ?;uQN~b3i7S)r*R2zMaw=jvke#=G{_Z0?sr{6+X73TT%a7R zQt0*|7*xk?=?HEc)y$gmuWzs{d1fHlsK27P zO9cDe?83sBc?plB^@Skn9}{iGVJ1P%^1;Q$|r8!&A>wz;+U5MEXA-eGp*GRQKOvumv}wt9LX@$(A^kdTvvEerW_<*#Uz49?00oB`(FGuqTy_Sa_5%`VQ7b5I zEd;p1T@FsVm63ikGa^u`aTv+$?1zFcQ@31fsny2QGFyB5xnHz6ei`5-*I3z!H8}Em zK*er8b;?N*!bAP=F?e!TncnwZEBTRWj#yg`Uvb(n;^F2zIy%Z3FHonlk&71M;GwUF z{5ik5DQyraq^9l-`WL4f&bv{FSjcU6fh+y$4_GGWhsnKSq}k_ZlZi)e7%PtxJF-w& z3yVqQh8qIW)j;;&b#ZI4{_o)*4f_!#UT7cSW3{ENw0^CA}k_teA?N;V?2 zeS#pJ{#Iy}yq3;1o0P02MXm&m>sIPwkg>#yeHBbi05iNR|C|>{ixBg@M*8E%=BsAGv5z#I0v z9uP3<#{|QYM}$(8lpCLBbo1CaX?XRP390yLIFDlm}8Oo+LZJ09(I@6*?y>Mdunc$Vu{3pOjkREuLIwDMtL2` zu`jcSaPIUNlF+?bu;2N*nt9t8h#cmc?8ZIj+^?K>+uC;<93dMP$l=4eI?oYUwbc6} zU|9pn8!^~Fu|2ykv>K2Oa28Cm@=tf4!~HHJGdS`q+5D*H5-13N?;ZCZcr@%GGIJne zT&>pVQWG0YP!iorBK=5w^lr}qS&&l5_H(Y9w)PP!;S4I{yo`+H^(8$V)Gc!ScwrY7 zdisz_VcWwGwO+MeUZNfzvkVvEG_~i~s!y91GZ>i8e=Nz{^~TD~^Q0ROT#vhWEG#YC zdl_*k4-Atl>rafkNmb3}vROlb3|ut6njSg#g=U8X zY8q<%tADP#DWH2fk|A}Jj5}8^JuyKDsh9FimyeIHh-_szpFJ^xoadp~eZr`et95oK zmevxwnwlR0|AYAm31$D#kh|{rH$BmB%*?rz9tFO#u9_MO#J6TPW|w**C@~A*?OpB! zMGl{UfcSIq%_BE|#m2nXW=A_5qrNPlJim1nQC1d_KlZPBHr{?`I3CEj3*TG{{QNYo z4zn#&(@UMr!(ro2=K|QBei4Us6B^PKhelX<+3t!4wVIBpT zTms*FXOYFwaQ^z?>89UL*tSQHP=R-PMI3iiwjM5<5<%YULM|5<_uf7(C0|b`Gn=;0 zJ{*URf7mQcnxjpjZ+Y2=59ZUq+b$b2dwQsXMZiS&zhQ~G#XO*|Yqzm=4L%IRIwdiR zcNff4g%iBgvnlNAwrDuz1Dy)M218t2;4h%Y#|Mk71soMEUfM`%n#$WT_HLa2@IX?B@`{w&kqHn% zBu{2ZPGgf6tqLalVm|+nda)Y_sG6Vz3NC6`FUzp2N4QsG# z7=>-w0?A}FC13Mir>c#Onlpj~|H!!$b-hk_3hPj~V!G1h`pWE7EXA#0zF_kcw7$`b zMokd}nTY$hOiIYvE+-WRPJ0}OCs%k)7|4f1oC7xYe#zyy}?-r;VsfcAOoxJ9l79eq}| z0?Bz>-4$K!JPJ)56faSUcQ0Nxl8u3paK;I2g0PS8Y@dx}@NQnGN%~j~AwEVarvKGt zdLdU#?1Q-22UbnC4N9vT!|h5;|9tkh2M>VXfR})*`dT27x@>0?TC@lUYiC958;^)7 z?IEDeWxjZH*GH|)9tzis(}{7sF2|$(3P18Hb{p;ff$>;pk1j60ePW^dm+f?|^VYBK zSRS)*Swx9_dr~vb;t9y!PfnB+71h}Z|Dc%uYCg@uy&lxTS|7WJjMxi8%GhNJx81*G|G2Y^KpfIXa*veAC@0CB~&FH(kQnPJAnEHSNYAHiOOoy((R&>6zD zl$7jdPv-$zx1yPifAXnM^>1ryQ(R0Q;gli0w==8U5_Pfh()NxmPK6(n%BxqJq!joIzm|WkuENOaFG7_$0_3d}qfUOE z?92rIWtQV41Ozm9#`Cleen1&yVoO!qlSHhzaY;WYspyWd$VK|1Raw=)t|+AEq<{VT z-N7v3ei85d}g-77Zt^rbV)M*(ugoxst?#%z|3snm@N*Q z$*BL^XYW2uk{DX1w$Hj@*mweY4qcNS3rIC|0JuRt_L|k6$Oawh=((bj@&WGUaIEw6 zQHWC=X4%BhkZXhyiL3@*TFqg<9aVC(2q>YpTUIvG(x*WUXCMvyhO3+7m1ZlzqEaSZ zj2g|zmA9OuDgxd`DVKwq8sA9X6lZ38P~{8WF6nCv-f+!AGZY_DkqR*gxVtut zMIi9pcI)${@_vq%u2v)^P^yX~eRg(mdgJ(~fByWA>0Vu4X15!*N5?pL>95buCdBGj z-3+KkEf*X-e$fPZTXxpgVBJ6!+D0SE#Yo3(3T+~G9o}?z;bqPL-WXA-HVCS<1!Uy~ z5bH4VoK@wNG^P~ErDrqb2gDMB8XgL1U`T_>-2#)R2h=K~H@H9l%gdPw9EB(`Gk3YE zx3+GlTTb?nz+4TvA%p?{#HQT)*`V7yb?(hSpEqXbDgX3IH?Pju_ayCxnTudTgD$Vl za#!xS6WDXr2nrE*B?=gB(+CJur#o4kLlHAJEkdJb>m*-(lzDP;66DDl0y?HD4jyJK zBGl0EfEN1z$BNRg1-}<^!FzS+wov~?l9sI7HrU$)`6q{AE(w~F!MnQ4lc|<( zu3L;26Z6?|7R6ix;gofIz{cczo125+)nO{WOn`ss$`#Ae3ZFC4^U*#9V1JaJj%7c( zBAfvdq9`}TnXM?W%Ex{dB`vPWV>j049bj$GsY!|; z=To_|Rf3Zre2~RnpYaP%)w1mz9Mr1JLv7k29UL|I*uf#-F&-TAQqP6(+vl}`ICpnL zDwZQQ2=dfaHvAD`d{FtFehFDA{1+7Aq>d3!)7%PZgzAt@UqHZV9UgRtFrAlkuJVtMu` 
zG`B4UQ*qgpaA@e_W5_4CAEpIMfWF-7wb#L7k9Ljg{4v8C@a1UbhvSQOcTQsA3@%}YDc_*q@gAXfxQwj>ACZj||x(#gC<{pMY#1C|?z;MWnrXc7{ zq@*GUZ2n75{DL8r>Ywwix3q-UA$|N|Wh5~^Xs6xwkNzPYybN~J(_F^WFL)ZhXXFAgaUsYJ@%;N#3-Kae1ZF7NsV*XVhK10t~>tT1c|zbFHzown0IBLSJ9#9q!jd zQA}Q+umed!8VQ|KhAxSig}GH#MOCU>o5@MkUS1ddGyX-N6tSKAi}QHh8=6MjO5>+b zAtV4|U2$;m9#F;0InBkQHUC%SNmDa3X+(51YJze?{AQx_arD|B?{onpal%u3Bk6+F zs4fo?5s&rnx?)iaz!qM2jgpQ0KhrtwNs>M~IGKPKMmj9A257|YGbh+zufJURsj zmMN*;jC`xj-CZb6bybkyD6|AM(3va58v*yd`SrM#ycH|sLluSONF}h$B#^) zMl1B1_x5xxEiF0Ngwx5%lQ@#D_@mQ7q0q=CGZsgPCx4Li{`I>TB)4zh<(<$RwYf29 zBE-bbU~zy!Jxq0HC`E1D5o^VDn`!SZ<#Z{cg)JJQTa0y!V812#`}CYa2DhTu#NcPtRR7E)7bYZu%{f!gb&fy!7N9OJ8Xo+^F2Kid1+W z9)RuUY(&6{VPhO=-5Ev(2xw4Ha8MApu+{&X-#Bw@{ofgwDsZ8C;wsq|_JG*Z@&oZ0 zYf49Q@1Cl`?VTvq4$S?+}IG6L2*}Z9m6tikgUth)`BhAr3nRn3Vze4M8hQ z0Jb1r}OSXN7jHGf=pjp(W%{Xd=3j!krzQQ#?0kC*~7}< z#xSxM7U?;+v8VxT5mbWE1Y#=b^DD!a#*gJl-dhD=d{ z{kSucDlCCjs|*a;CrM+(e~%53!O&4&u&`@fyr*_}Y;e$ReYH79X22Km?FD9M&xx2d zdShh~ydFEI9>M@1L&3=T$~rqRpY*gT+z za)x4E6Hg+$Z_`<@GIFHIu+!GomZVPB`h~7k$M@CuaW!r@JXUJ*2;hQ7Myh})y|#A#k8fXVb8AaW zmGbKoQ`ztOfQ%O)M;KEBO%Qs--y68VLxx)vdF2EQCV=+bN9N?{=xA#T)-f=Q2Cp@0 zCG-{vwld}=!@X?iE;`+Y^J6T)0(9W;e}LV(uyChS$7+t3j}J8#z|YTL87|KJpzJod zg9-YK0RE3cak(14doly!7sfKBJs20qFfXNeU*!4>dKtR(F7UANJQ^?~Cr1mr&5$;~ ze%(f6ZtnIHtgT@740cICPwX-y@J9s81!KWk^EEEbRyPkfcXw^)t-DnG1?N8}KQgWC zgXnUmTH@DhaYSiH8kW!EcIC*q{VEW(=o%9-n*xrg}E|M?!;+Iu37%NSCyPflJP zGA(s*Ks#T8n=?DM1%Kb;mcFn^dvTawMs8`ho8v#JR^Ja4@};<3UzCmf4ty^incov1 zmNi1QZZw4)sZ2`R2B)P(2Qw9M@#jTPe+rjY)#UsVJI?LOF%KIC>vnJlXVR+q**nq- zB@;93_7C$jj*nxh<|Av#~f=p$jLpg`-<~Cj^9Dd5+n~!y*&a}v2SdeM<-`RhKv{H%RWcnQ=32b z8XvwUsPr#>VAhV)!2G;t*$NF*efQp=-8n}O0NT`20Zwan^s|z1@hD?;*2_De*F7Q+ zy?z`1bJn-HKEJxCNsbN1fwHnPpfCI3sB*{RI3J>6I9V<6AGeSG7CwD2ecA@%_|+N& z)(2D6OO;^w`MLT)^!%1BA(G+at9Qxo%v+0nUlW^Q4`2H8?H!~wWhzalpZo>I%@F3V ze^}7h?mhw>?i4!il^jC&akgXqQiGzFIE0e06Aqk_@ZpUzBe#e@`A5mOX$%_>z7Yru z0Qz$%tM<3xArLBEKHxqgswuhrQ^;ayWkqTVB?>{_{|)z3lEnBXvA6=6@*=p?-I5R>TtJKVN$Jugcc{(^Z52 z(=-3)g=g^7bw4_^9{rDwkS;E@Y+hluz!UjD>c4Ox27eUZ@{Rfz6rEy}&htM?$0_W5Y;0oi(ryL~qojEZ_%I-pfRJ3GI<7if&bzF^ z9tdJ_pe!LQ7e$k#N%H=cBX$I~{QdD26}AmDn5LR?0(HoEdEbI>w8G);RK$Nq_lFJK zYue-ul-y3>mIa*>5!+$=S;5rW+E1rNY|v|EC6;H@+VjZ6{2&rFh|w*pN=WnUnZw-G z{a4sDkx}0mf;_U6E9~Yr&9WR9GVp}LsOPqB1lq!y8pC1M(A_=VNPF?>@jWnMaGy_S zL!-n`I=0FkPFOg!%C8Wn=Q|ZhWUgsr67?#qaFO^?Tw@jP9)z22S&kaeXrLGz`5Yd= zBtlgP0z6%@SfUD@mNj|UB0r9Y)w}BShzProTid`FdL2SnqdCr(j z3-3<%$?4hL>S7(20|%j|fD>eUrb@*lCG)wtrtaLieV5K;y%OZB^34Kpc9LSSCxUcO z;B3`dP1Nq=(8|+66=G`{{N8Z<28s|EB{~ngiHUJ!!<5SkOG``4pF(pST37Id?S^i{ zS3E@lZEb0NeLH^suC0Ts0l5l4E#hl`!t9VUx#XMTL zhqPCVK6lPCf^@*?uUg!T4ctlprArRAOX}%J#Ej4~GaeQvA>7?`S9pd^)4OuitThi# zVuh#r`crRh!D~Pg<~2T{V3*K^1IwGb*SPUsI9cyq!O{p4hT@btV7Hze9}jJhM2@tT z4MW&B)S5yD#8B3!pF{;P`FPw$6$0cA-v;BDJD=klJ_bNmpJ4J@%OIDSWr6kRI!Dc9 zqXHNP7&L%N0(Br%P07dvwJ5>rE?=d-2LMxx5InaSCu{fo`UnrU5%%pJFIZxLw+p|_ zND{ny%UDd}du^#ZAPEG9vxMW?xOc{_<$o>=2LtpxdV3Sf${GV((CTb$i{WGy8am*K zJk(1%7PdEOu;%tWpbLA|rY77U*tK#tyL0F`(}^zPv-8z?Zzjs46;?YAvU|a2ml6;d z%b^3N$_&$d*%?1iXQ7!M$D zgQk?CABtFaZ{Nl>o&7J#CUm$IZRfZ5l*PEANpl|qkL>>KlIA?~5}-~DP!e$2USQFX zSZd7lNgK-C6SoweLtNK}vIp#`l#~+nEhHE=bRY`k*76^&#}&Dbi3#LE)rpw($@l?x zTXTV){I}wv@Zo3qf1jQW1sj1lM_O7s7RebWgC3OjoP)};ru?PzxpN^g#%0+6jJJfx z$57sCMW~+9VY_!eVj*UKGWKX}!M&aLa;=%7{}`9izjbsP7k5RL6$ehDNtZ;=7WIqD zIY_F2=rYXIdbKXlz4Si|c1m-2^5*-yrUs*j442_)v_yRqYRYsvF%*1E%=*P0NZcV4vTCWQfP75obDEhg^Pd6fBM0O5u*tRS*K@{~ zR**7%&-hB%_PxP-oTd_jn0bhM|1tLHKwlsD$m+5jo42>NtPlJFUVX5;%L1>aug|xL zu|akPL{MvM_pvcrklVZ4H)UNh10d^er$z7wrj3n!tODm6e#QTT6m*>M3>yv+Ff*Bt z1FX6L|u$}=O(DML5fLiU|^#E2w5Qur;z#i<@MQLvs>`y0=f#SLXB&q 
z6s(p?gRKzbTrpqiJOPTYCbA@v1j z!Q&5lR^paUN@)2Y`ntNpop(B*r*rLZ9CDX3*%)2mnpIa{A=gcYwWp=UKswuCC`*B1s^@gy2Zacr^f3eY_4+EJ<%zE9d&@4>jQ{b$|Pbz^k2uG2@IZEGp{aaD2 zSpfD_@=W*){!(EzwQ7iXe^beS$~b+mU?lMF z{Z#zh9mg|A@AS)#^G<*6*qCD$DZ_Ke(fxMB&&438OV?z`Vq3(cM_7>SbUtZ%;gFcc zb=GFha+Ynz3ezA=)~1PXiW!~vm(O4B=vWxeU-6kqpc+=^X4<*ipxO`C6Xd{{MyTEJIC5>*KNr1B7Am5zpZgF%86^$;6Z{E zATx5Kp?%Wvt#WE{QB(b0DD4!SQm#4j1jygs8W>=)u4`VBy%+Yd+3~y5{u%nL*srmr zkTB?KOSMO^N1eDQN$df>GN_>;X`Q_Yw-X7I6Q|{7PV*bXBfYTDV3F8>SBRRLzy9^g za4gimev?E_=Dq4)eFexkgIJKG!)dYas4ZOpJDDKMaMQ4pJXB!B=kIx7$xxRKDhiDuB;C=tT$ToV5 zMCL)ql+L#aGax$Jwy}e{;rk;|P=i`5{)k-5D=1Kgyaj-V4HKR889+GBWB-3(%wMkl z2gdwSso8cglZpP=aZ=q(ckHKjw3)ZR2jr&%+2HKd$p;m>P;dl=Hpz4A;Nxb+t`{07r$2a*Ku%qhhn*$DQ51#|GC!s0V05(q*$0xu^g| z3RE8U%XF-1>y|0d{$*(VZ+Not3E zfH>^9R-Xz*ji}|K1LQcw>3UE=TG?Sfx$IHbpnDa^u84`s`FPu!&E&2RV5`QnJk6pVGgiy#A8g7mc5f0oeNojl@_Q3Ux2My32YQ;?SkxBF}TyK z@JHn;q@0M5 z0&d=!g1vDj@#AXK2559{XxS>Z*N;`{ZL_R%6_q>X0q?j-{On>wa(0LaLk z+*Mt1tO#CL@^<0-o(LFlz%Pp8TcE--2L2tvTN8bKPOBLsefgKMjbc$j92Ns;?Ce`< z{jJ>RON>HDs@Tp!g93o&(@;?X6GV>|OYg6yhM4Ks%6lQJ-DyTE=4_L@-t3lPVEQw+SC-_6=8@jSrc6#BE)gl2Si8$x2N<+~3~c-Cfh&{kM>pFsh<*smK2c$=lAV8N$A_H%oZ@%)dhaC1oE=ljY_BRZ%=g(8@HYZLxa zvoh0OsQ5nh;SCB4BM)meHZ}%zn@omxB&juc4<<+PaJ1rvT4h0bRF&}0; zJZ)oV*A6XGQh6sQC7bNw<#p@Mt%21&2vmp|ehC#o7M9wjl$0)iHCS4pV_uxNd+ZbC z_MHgGoe}qOW;YfW*QpnGY%`75L23atVYe+uHW=Y#FH^`uKW=t|19i#__IU*e#Fvud z(t80C@)ymGv4$WIGtk$krlfXRo+t7^`ONgq4z&l@4f@Bi8)u)ql%Iv{jgNBzGFc6m zjG6i<$ZsFuw-88ec^2sm5@wgG1})DP2xdGyl*<$?Iyn=AgM$ae{pBYW(r|D{J-&B% zXv>{}`mllk499DZ7vp|x>}4J)5Q>m69(%U7NDFy5uAEI5D~EC|yr4Y(x>hUb zRt9iPicGc9Mi&C=8cmg|-XhvX+6@Zqp>1b$lp3@s_*KaBl87rIe$ms){TQn4VJC#^ z;LQJ57u!H-@MlL?JzLk>YB4d!1ev+16_(3s^E|^*MM$mrITqF;Z>bu!-6RIt_U@YR zgG-FeWyk0k%q*-Hu0q(C2?I{DFm`Zs4d znCUlXc*vM&(j8lq@X=tzZ!N9oA1&>o*HzW^af=VsMHAGJG!;-%h9NEwEyDn9?XhMp zoCQlOD~!>Ih7)Q$V;`L<44B+>DGTSHLl<=O%!V8PnV@b~THf(OS0#vhBchY^V~*^~ zp)ICMlJ||a>M-)G@mIqJwGGi=YXiwkcu&{do(}%#T^aIGpGB&ecU~45`sl}K-A}6N z+8Wvohv`$ZO^cnW#}hvRtv1{A;l7-!#^JSA=mVMXvK(mgIAfgH9vmDH@JYjLHEd{U zXvfJ90>S8=ms&8yKZKrj-}RT4JA7``4;9PEr6Z)EtvJm zajV=(1# z9YjHIk<{eGR-gk2ZRjl1-^SqRGd(W07+`IXuvT}kj zh7iUy(1S-Cy%dYL%$njfV)P542DJ`TxBs1$!(&^lOOTDUbcnz_*#CYLeb#2#E8mSd zX|jVkY_&~?9uAk0h}-h$XV|vrT%;g}UU2a8Y6Xe4M^mGsi9}6cHw|sNTK*~m;dXY+ z%H7)BLHX<%NU)fE7Zl=?8Y~_EZ<<}RNt^E@Yeb@;yoAgYCFIisLe+Yk^1;!`$;p0Z zEm@$Hg0&$tGn1cbKQiz2bhW9%4h4x{V}^WwU`0L=PsjN=0A)u3 zFB7C+3|k(kdp7tT+VYiMYGoaBtg`=fsjCG8{}&$E2y+&NUcZNwh=>U8wkYlLEO1H& zM1>In;*kLBEM4_HBdJg(n-%fJ9nRrc-R--hWU+SggISZOju$wWlI+7M)w560GNb+D zXYSmUEzYH}54rUReZeQhM0&U%jdtl!%%!I{Dgb8ag{CSZRFIV0Zk`WQcd;*63zm$e zMq?3}4zAcbPEnc~f|c-b0u^NgG#^9+i|sB|+fX2JGxl$sg=VrMZwIm#T6L|Vv|qiv za&sTcFo_S9R5k3{_bNR%3ePM_)gIDjhpOi3)zdliY`**zQn+6m$ab6~^LwbLWkiMSyfrD!UQ|X;*PDZB}QbvIpWBl43L=O0J~%ib-Ki=N*&Vn85xO$ohieiF3= zmj2mStAQxtzrns-wx4-;KJF}blK-QZWgb2OAZ!I>3x9u71DhMO-2-Tpy-_7%F$ICX z&E&_=Yu385vd8PA`uUO`6eJ`;4wJ_wIs=e33}%Dz8#t4>1rKsRPF(VTgUl8G<^Ps2 zn^12RU)Rn}0zX6bEbyrf;RFlifX5GYzO?5L;(KF8g}WQcR{$qh^Vc_^zcF?qrv`4r z#spTgOzKR?v(Gi_23Mihch6X1p|x~hj`K~IraUk0yC-akdu*BlBh&8IsJu&8F}BJD)rpDu3Nw9>Scxf{j`#GSzH_iDO3391AKg8gd3kQKp+x<%I5%D=iuSJVu6fI8YR^BokpNkKX$C`gg1$5xuRj97Wo?aBG&K1BAHlp?&@ygVHfs{s z6;5|SOvb>yT^$Y$IPAC43`|tua17~Oj6z6qa0tt~XJ$(05d5hVY&*sFfxf}*Dj4=4 zhAJp1s+0PI0c@RMm%b*xkjpEEe&9*Sot7yMny41&uM>7^Ej+BdS9Z-*IdcX>IOu77 zDVw&OoJV?kYdBuw#s;?Bi^pqjm$KU|{f>vo?+ju&95Jm@eNL&5!C{YykIzB*)YIv@ z)gR$3)veok=@XIfiPoaxO`7a~ye8;H>8!K(i6Ta3kxqC@p=~}5Bcy@ykSlMs;N9w! 
zmbR`g`K&o@2@Y$t{h3>T9pOn|@2JPpP50y$=ySy(bc=-ebItpu*19?}8S?}G zTp~+K%JgL3KnM+o$$r=?|23bS%) zB{C_Hq29o2s}yImKAmgWGxVe&V|z}R|Zy z&L=TTQ~-t`H!(w-7EalIGX0PE7#{&Bj)t>ByLc9Y_~sIwow?Q;fMj4Y-ZEAD5m7Jd z$2idcGUt7Q`=CF6I3!uGEU;yH&7+IQLx#KfRBut83o(jL7M~{biPgAw~j{ z_ipApK%uw7ayTAD5xF}T4OXZP{l-7cr=ECz^`%Q~o23Uf>E-BOb-TfJ;o@NPXtDts z!l&L{d0PH$ZAOi`b!NDuiTd~kJ?|n4M`h3V+YwGCe z3}aWicymVZ-@v<1zP@py#oc+>5goy9Ln*m$~(Qju5+(dcZ z2v3B@G$kDc#VgAVjyM*EsJN*sm$~W^>HS9PLPC@n&q91Ln>q1fAnEF4sKjp3!L~I0 z8wlEDxN`CdvvvyJ2?*ewX!@}ex2lPQTI$LBywl61=iM(ww zGCCZQZDnamm&!tQV`S85n<83t@QChW=}(LH5E&r&gEEx*32}i!1&=@_vRQJ+tE%N7Wi-36w=3qKt5@AuCt zuwIEquz8{ujTx)kxpwP;{b%Nj8Ee+K##K@V#o?CZP(|m+wU02nU;Dd3>udYc59kNSca$I3_cVg_v{;{e&itqcN@ChUYZ*>VIv#>$2~|R&oGL0RHFq3;=1Wq^ z7eXpiQhve?us5z&Ttg&o5MEnDB10WElyY>GGqWkOclWopcXz`%bsHcBp;($xM--3UrcGbr>|$K4lym(V>lVIpqZ)+|t$EKYr_Gg4P_G*XHs@dW+r%#0?MDS58T*IzSW z-p^HU+WyNe-hI`P8q6r-%E}ih$kM7A^_U%#X;kw+5A}kY`QFGiTSTfInM$2Y4L_K zD89o~9XopKKEt?T^nLhJL#45`HE7&I!aPlY<_B0|%TGVS>ED!AESH~2sdtj}X)-)h zq*@?#!{2|3-V8Lexf098I{iTx$DL&)LpyJ3IBw=&j;JdbPbdsI@$lqArV$gYb-Q&fzAI3F-qo8kqcKVrN5BZ0iXizf*Y#*7SkKD?T? zwt9MUbeFJ_Hz>Ycarf`v|Ml#V>+9)AD@r(C1E~*gG3*DAI=F5tC`t8?GgPtwWTbRs)8DqWu^F5Rlt4#6@b`~%PFA+lXx4;T z2rxE+A)0L|6f9m=%Ese+hDE$9j?}?{-xgQ~L)63LralDEF%`bc9>l@M-jl1!iKBSV zR-5?R&y+Gm@Mt&RFj3+eq&3`dQ(v&|lx$5+PhXtLUe=F*lPy{umFgBkOUqVVJXc{H zxw3-E`26dcerCTxe!jW;))-3P=$K<0puOrG{_sDqsCnSljK{85XrDsS;f(Am*3D~R zXJw(Qj`c9#+2)=+{SlR~iC#8i61R8WMFU~Pze}0zark?`vj+ zrn-48g7~ZVZ9nf)a!y;I>V~PV;Z8Iw!7x#}eQ3(EbS@Da` zaF!4cLBUptQSNvu5I8*K?fXc2vdcd@sI`5%g|2Nynle6t7y#ahyS<$(`UN9|=(2KB zV-pikf`;oGf2kB(mgMGoDn7V3MRXtc6~fN$=yNNcj)sxr7?u|fh<1%9w)@xXvmPo_ zXJ$g=(eodm+l6@Bbl&WMuZah<_Hw+YKP{1@FstRT2tQYQYizvI9#xq4hJ4SyqFav% z-v=tDL>H!-icygsBOB4h^wiXvCxyyJYb|PYxM(*35VY|E-beQh7lZ8w`9%UmHps(; zu@bM-7AT}#M!?c4Js}oJ=IxAX$cu~9TMc6u5)>TPOzmH97D$cao>Wj#=^lMj+?K9v zEZRO)X)G%#$#{}n<}kSX6K+F@L<9`GY^*M}68-QkzT{qgCYve`!-5g&sG`YAEB8qq zh+!nBKk^b8ZL6|47ziiy8{No=AoI~82(o4OoD45y*~^UbjCDMj&tdEqiK6~~n5NkQN$k1?=>r6{hLBTEHU^A6Bp}DpF zV)L`uj`<1EkMMCAct-xD8NUVbs*l5vusTE^@$f_|Deyd=s9}&Bi z+C`Dk)wXO=UA{#(F&1qdtsO-<8HGyiox+h3HldD4Rfeve^-T2UnPpkPXaHS@2a%wW zFud%J@_nNy#1-wv?Dx-nHX9>hv8qGE-FyDo-Fg}6nL_2!yVEBGhn`6%E1iY=gkqEB z3m@zqq>Pv_q-6)SwC{Fe{evQ*Q%W;5piwFxo8>O7g}@^C=eOwI5a(sr#TaLU{;z{7 zPWr-*4~v2ti4^Ia$KuBE^?Z#Eca=ggW(N}wE#<$Jm%k%8NgSfSEVpmq-E=Y-wj0b-d- z7!UAxE(hrPqtFfu%iO7vCKWQQ#hq+4Cvq5mbX2%MTH0&VxSX94q(vB*;yh-+I+drX z9^Cmpn*aKFQDS6tbnQfq3KgcTjEt4s=A(|9nwp6TQHuM}J!WEMMUPki`>$EZDI&>- zfy>-qP9ks5(6pc zps`?8uy>Z^MQ_?hH-j%JtJo+)V`vf=_KFlURZ8&E4%OFZ>YnRFDckfEQ$X{LCipSB z*dw$_^2MKpn7fwee(&BMeAAYak}Q+`I6hMqDJ;}_&+8YE--&Zma=+(RR#t8tIY~d) z)=f^z%xtg4ZZXn^_8Xv$E5GOFem6sO*u1}g`>509NHbT?aj)gh`OmVe4Bn=c)}~OY zsi%YUTc{(`9r3D)_1W386+^(GW@R6k2ccZOQvK>>nvrM9&v>0gg1@g!@=OrYd=bOF zLI)S*^knih(4%nu@EF1*l#Z&-w{WZrqjzv<)+h2i6LVNkTM7&F7GqR!@bP;z+btGp zk>f7@kMv(5TrOIP=FN|QLIif8U;TJkLI!pYr!o~=f-L80( zI)41?%Bq=_6-j(xJTdAjiPP3uS5Hri@6Dg@BrhL9^Ly=hXuLGC-cOnEMcS(w(RP?w zqul&_cnxsP+r0ZB7VPfCBTG%U09PNprKerXmi_&q&Wvg&*2Se^*$EX1nR5Sb`fD*1c&dBs*uui$_q(D}Ufhdg640a`;nyG?AFbWzd4qhRTs zq(1TV(Tjs-UB8Z4f(Llh)6*TkzWsVkKYnP-0!P96Os7g@h5FzDyBWI6ag0w?AxGg< zfB+VGlNJkrUP&?a3rYh610R(kBLY)Ziv)wXDoV9Yns(_1)oj>hQ46Oy^NrThKTX8x zrkq@)7|tlT6<^q}*5pmZ4HE3Zz4Wb(%g^Joc=Y)yiMD%vZ9v|YNnEvEw!!*gXU$AK^g1>TRj`=lZGH>YYS&G@G+UXC+zl>rYT|2ukl=VTirlS$zjDBTf8hTgwa`pGcWjG63o*~n`uozv35ghv&#$zS zjQ`J-pmU8|^RIXC!Cxl({g+xkGott}E%aZz6*pT#`-|=cAk zydWR%pYMVXe0bIdSl<7BsJ;DHZ}RU)%xb<(vP!+^N5w;{O&9a*XMSL^IedFjW9Vqi6zxE^(I^{_}O!S5bI| zQJONAB`0n7wBfJ})>M1t%nU zw`{%1+}hm!Y#v9qJ)8{Niy_})!g-@E_~&kbNng{>!Ja1?>fU=B2*PGAuyPmshQEx> 
z8K?9u`t|3(Ew)i%r5BNjIQY&_>U|qs6S~VJlF7Y(Qc*^OfeoNq-2~g zFywRPv?%k9^^IZI655C3i6~@<0`pLSaiNdY@vbDL#}cnb&w65Tfbpes>CdB@4z%?= z7!PqbkyYn4bZ8jO#>T2H^FQFvqAf(K4MOZ?@1V=(S}F6X2q|WB;;mVz-GBb&WPI}Y z@W@Y9^{|P<+75Q5d>2j*`?bHK@!R=}4E@P`ePWr7Z(}%}_wTo?MAocptt8&^B8;HC zXZhovsewtZ!8_~qpjJl^^%)v)pO}w2ZR9-3m86((T-SyvX>j#od&5RiVdI96{aPL0 zVq0Vkou>zlN=fC$W_N(W{?#6|VstVwT*~}h=)gCMM$dz3I_!mt_hd`>neZ^Z0?SHS}l@BOH zpYjeo#|HMZT~3v2>u+h~%4NzBaaqsAUO0&*belf_dDBqJbMorRSjD@jw^%%z@_@!D z(aV@595d8dh+aErO*fYK z3%9$}(QQ~mdU8QMs){?cNPvTt*=^t(&6V75fKs+G-L0qz-x$D;K!8{3*RUsQ%l?3S zJko0hiL@t*tWZN55u`G4va>U?hp9TpM*T(dmI_-WiX*@SM&*$h+PwhxD9RI_ZjzF* z!VS){7wnu24mZk-ri}_Ta+iK258l}1G6iN*QF0?=x@=6D<@}U*FK$kd;L*%6bBE~j zta{NWe!e&1X=B6bvbH;nvD}l8+;L7vSKVFX+_p5dT7=1$_z!+z5=A8_`KVnv?zW$V zt*wux;*CbzNf!J*6YtNXMz{*}ISIHYBYi!}d$QS`d*1I}l%Hvoy102X=}p|wsV-YF zJCHFx{cPTZvGqQ7v2NUcNzw4FypxVXw`ZMpSSPxr;x+9JK zmlxL`VPL;T^kR`oOAjhlIk~5|(Y2*_a;`6_i6MMM+V%0|GgnxM#5$Wc-w{W$TPR2h zwe}MfAzi|-vBR|H7hJESv`0q1XQXeg-EI9ds%5mnRllS*In)O}|1neTk};VdQZmxA zJA70edSea39!^Jm`_6xnjQfAbm1@%r|NANSnj7Aj4$10A)2Vx+>+FsuSKhlk<0!vR zsN(zaNoj|uroi-v!nZKhK*UXq{lax9H@(j4(qn?-tNq!y9R(#8c3t_ zGggl@H2lJZY#{#578dzmcTJ6r43Jyb$BN9h5>ei!GDY2g#B>`{Z zlNuf&LPfniyZ36B^*#p8;mS;~3ij3P8Px`i>LMK%s^=HYCJ+R@H9Xx7kYC+O} zqP%yrq25QO(6&2ei2@FlpC{glOIytic{;h@Wo2Q$v%B{C)8DajKk1+$M2roqD3bmC zQp+9zSyXXZ3AOU+&>CK^^0(+~-f`bkx8V50UNsyQrAp^5Ykcopu%IdtEehx^78U|~ zwt=isxq4#nP*U=O^8U74LsCncL6Cqh=h*ne^>vchZ%WTCExc~7KaTI&zU_>ZRE%hA zZ0!~J5zJrTPMzuc_UWFF4;hNYOSaOIQcFwg?M6S$0IBD#y6#vM-KHY1*OG5g-J1w?Hx{R?I3pe!Tk`XqMy5kA(_J1Qyb~# zfU$OKX!sN#$Wmbc%7^}Cuk#5VDaAWdFgS%-n33%?Kqate`|Ea#V6^C@V*W|xHJ)>4 zOi!3&$>Za$xD;}>XJ$eZ)X0@9a(Z@td0|B(*}teTH9gJntAl;X-T0#Ge}DAK4=PG_ zkAB_VH>M;muRZ;(@l>-}i-ilARVG<*qLXGWaQ*d^H-DFAE%iZ2u(hT2@sTac@d+4F zp~hoaQ2JN&?Bgudy#s8<;mojP_p)>^Sc&wETBs@3=tU0VPDH4%9ZEs*^EPC8wiB%` zhiCXkzKu!Mee@7o{RzkX^CA4WmLXEjga)R+XEtv$-tZ3L-mQ7rW>bNS(t;lWIxv*kpCt ztut2LIBy)i(5|vku(h_(?R2znmt0F*YPxAEhVJ|ha^X*p4yxo&hyFe%1lh*lr>8OY z=11#^bA!&?`Jtg@f3@_*z*SgpS+JLYo8ZC@av|iKr3|vyJ{hz6~uo}&9f<3%>HF;$^WXme2 zW4n9PV|TaTXQyP?pVWyVOC)@KJF^|WdO$)Dw3>bqMzO(`f={QD$+V$r;vyyO;Qf7v z`ms;7eYaQoFLz3|du)6T^92@}K)ps5BDq(&*nE9-Hkr!QyE$Ti++BQH zy)#Q*01;K>5*-<1C#I>>9?mr%b9wiD<`YvYx9?n?tQFea&R-omZKf*hl-1{PL>_QCB}mrL#=}*d4F-VcAC8!bft3e%0&=&_q;`Im^|6FG*lez zs6b)JGj~g$bgbYQ9(*MrU()=X-Stz2NLN>%iv}f>gWHNinmv{dmo1#RRIqPo!$Jp} zB&La}c?z;iZp~qRDl=s$rnr9*RX;qG1$8PGFS5-yA zY;U`&^k5%Xk@W)?OVcQY3d4&hK?iZJ72`c43gYtf)5wMSX`1|8_0c~y?^g)HkU$SX zp;lp}_8uh@=@4o8NH$bGa-zT_D4E=ejs3u(xQv^@Q*OtguXqTKpyf!}6=^GkWuZ+qU&zf&!%qr5Gn_`>Ty zZ4k{i`W_ujp4Zaex^?^XaNGvqo2O3a>wZq?r>S~gl?Vj0$a&NtmWzerZ6q|5pBqeTD$JJ?XdBAB#~-i^aQlbld5&GyF#@b4&`t`IQc51b7D*umO7sMw+99NhiTp?>Ph z#V^!_sE3>!Ot#}ets=(;3w>Wo7?ZX>8?TA#0N$9@uyh_)2AN4G3e3IJ>NW3YL5m$s z#sDUC)R-nQ${YBo@~NYvGVlGt5E{E{d?Svhd(fno^(aAFRzjm#)t=`trzyediZuPRB-)@0XK%lt?O`9|ETolO9Pndp)&1e$8mqAX<#WaO`@JZye5|tSJAMl(!MF*Q3!fuNWp;Q0n z7p6OTf`)qcE5QR11Tf1NX;s&Y?obh=_OBJZzB zOmY|!E&aSO2zG!+Z(>Vu=E@^_kBIIw5JW8wWZBP`?2#RX!U-)sjI*X{=IY9N#+W0q z25!UPXlt$L?=9c65@2cR${|_NW}h%Wo1HD1^sW{ zO{XeqI5pED+NyQa&nx|c*8^0tSp~i@7#>T}igo5~n-iN`Sq&d{pD;w+srn?EjkCp~ zj=NBGaXNa8qbedQVrXdSu*$tbBfl%B$A8ZyC~YOOK5_0**-5j?_Wb@MSc(~wBHU~l z^YS{Ci+^e!#8h_v`ZcUSdoiJjqP%DQ39asQZjWe<-BK>Ow^=Li;$%Eq#2Q%9PXhQ4 zZKi?ea*n*<>e2i8hJ&5>xJIaxcGmTaCrktQKD_7Cz~89YTjV~Q3#(ezV4Kpju&<4q(>aI^%(#&nhQW zBs{f-XHmIF^L#u+zrA$XpP3hcRef22)wzT-JICK+NZfCBh7ST@NH66@$F)D&*2WeV zLYJC0hkKKojE(C)eTD@Y4*e+Rgq&rzd&hgDo(t_$jcUiB-c7Z@!IG5dU)hJ<`eJ7! 
z&AprP#I>-Uqp8?g3QxIip^E|Ai3-a-UIm>{AanWN6bh#keIC*g^|Ipo_bsSxJjU;} zA==lIlB%z@%O4(WUPcXrsS|vE!di#Hp0m)ahCg|cMp@lmLBaVJ#BEo>hlreylB9^A zJ)^u>+MylFv^V^WAa-^hmt6Bo%qgB5$*ixP!Q?j0Hq)H>bb9u1ZCypK>bUMs6kD4c zwL;G*R%1iHX~a%g_Jp8dYFgI*&X#%M^>4thQ?Dk&^$lLMp>GJ6=zUu7WUe!2K^!5O@;HhJ| zXG&q#c~f)mOS|h}tv6P1;XhxJdH?pylM*S(!8gu(1BEP^GL+a}>+8rHh*rw< z$Vf%_8yo5`S3e=yY}TTmhbPBW%0KXos#>E@mqappupVKNTIP%PQsMh}2`VUPc zpFbL&(E)i5 z(2)V)C^$Ea>Vly{($dr-v9nqw#3Ie8<=WzoI_Qar?-!#H6BSuqT01pA>3GBEtNg+&ysCfI^qjaNOmV_- zkyiPo%W?~m^G@pnA#4j*^3HE(TeQx-8-r1T>9)BfVae0*;w5|zqP-;OF@|pM{;~0nS|x9jWFvt z^UlEOsL#92qsyPu-B+p1u56Y0{ZU%<>l&SHe-7`qnaz zYVMw)!D?VnkIU(bkO@r(9EJj>zZV5bfVvH!ZZ}Xc#jQlv;nxpG+Ff^5MGjRMEYFIwy0>6vsDXcOYe*|R+q!C(XXdBR%h3j<)tKAvd6lHcBkD$+ROW9u5$9V zW_!(>(i7j_aORy`N@WyCU0&NhuWP(-UK0H}CbE04n`zfMf8CjlFOwyjqhD)|MwS0v zwS}RK)BZfmPUGE$&d81%F|;ZbCISzm2LrP<->1vlhL_2~{+c1G%nu9Cry;xYeBr1d z9cqRRjeab6a*pIr_h8;4v?x#~EbNVdy_KftY7Xau~q^d zZeQPr+<5r-wFBy(^Pq$V%a=rIun@(s+>lXBagp|M`Hy4cik)=j{x^iAL8J_d9UHh#)WgE8r?%K>sl?An$50v)=w5DOgU@Mcs!@ZMB5Y08iW&lY^5zX=e4}4jZM87d zmhUIDXkkkI{AR}h%U-2vSyH1;LZ+jQM9(3{ z@W+_h4_U1rCNz|C7`a&7h}_0X4Lcj7D$6I%EbN!gl2*^Fm;1u+I!LFFQl_Q#udkO} zca17qKOJ?MI8m!h!qV-?j}_%0}4nrXPcw=UWVJ25x)XsMYITZI8T%y`r>kvYBO zhpR`u20iz!kD|$|W}&LY&(l*#SEjcNVE;h*EaHTbnlU{g;j;%cil1*ZZ+%TO*w4M? zj?68&y1pKYT*_OkbQJu(1FU}Yk$?;X8-Ke_R`qy?$mQZ>^4z>XsC9%_oK_uc!d}-?d)yu!8We5xtUoW*bx27$*b-qqRFi-9Z%vflQ6pOWi4bx z^bWHmC;o^c(rB3zP{?EKV-X_cczD$9dC5+8WT>z?OB|nZ8;@f7 z(Q1+8fgABzsy?KknixuAbq9YD5ElJ2-Y|{ zwgrs|B#(KkRYm;Vp9wKE!pBPJK1{BvfH79lKP$5aZ(s6LVKM&qpgrU#UAX0N{i;~# zxakfktFkN`)mmCw85!~qdFI8t#ohg)=_Wm&${NR&bC%AC9l5?;h?Z1PQD7lt-1&y|k9Ny}$e3!c6@6Bjs;WN*uzQSA-9{Q@eluoM24y@k-(^ zTP%P%S1T$qdhpFecg9$`2SlK3?{BviI%`N|(I372MV1cf?CtE$3sG{jn1{9wu*pkM zZ&cg~2@XjIos%{LQErk!*Oe7N7Wbx!_Axjbwv#jYvr9DU*LA;SuoLHV`KHf6__s7T2IlR?zX|jWX zH;{WsU-)MKA%_v+{CX7c=!3sEjFnR5sK|&bR65U4gyxqHdwLCr(VOAcuMG?wU?~VI zi*hd`sG~z&Y&XlvpR_qlX?8YPSvgX7sF>(V?S>F2slkD*x=r4MT*-tGe$x6A+Fu{+ z%YLv=&|>a$2wmF0ApH&9jAytx2)RIKVFuhHeGLWs2HjW}b?4FdFf`YSKjkmdK-suV z&e5V_K!z7Vote47*a`{vz@rDHcq;*p1Be?S@+G2eAtWrOv#AYKUhh(ac|JlCT6eb@ z`_ZLBW z?W#GL|4n zNH<6$waA6kLPbghM5Ie;q`N^yKw7%HySwhhy?=Gi9p~JC&;15tZ-(UJd%yRc^O;W+ zy!8TFuacS)?@Zh^S*@eN)z9X_bQuZ)0wtxzYfmpbjJ|#2^avNP3iZ}qtJMBe|Dpa+ z-@%(V*uu+K@phJx$4kr8PE=}S1e7($xmiV_$j!$(vUib; zXm&Q2N}o;h9n#WV{iA01#j{aEFzr;nyTG> zB^(`#a$DvKQaYi$fI(eGDrS+xY(CA-(8gGO9=n!Yd%t6fO??;n1gu#|g|`?_I%_HsN3;92x`9sl z1zXN>DHUH@2b+RoPO%CyY`6PHU^l9iVW=psI|?N4o`Y^-0t#VF8LU761ILSPNR3(| zzx&MyyJoY{C>R+{1L3XL$rgxHX3x56MC*@!LQsGK&56@X&(5rel&*KtuZ802AG zXMmW%Nh|7>fOyIe%dQ|Lnb#vAhh6{3K*uFvS(?3NQ{fF!z&%Hd)s~&xM%N++dq_6GMYY| zmt?oG_U}di)NOyrj{RNxHgn3&qRIE&JC*$jE(=yeF)=Yab4^W`N9qxd@b?WaI6Cd6 zV<{tj(dni#^8%Jk9&Po?Nady;cC_(I3*F46h&iR6hAiZkm)p{Zn#9CoY?;o&Ob;MV z(19<=P81YC`{#A>io8~5Eiy*L{0UvRcxsTb(!M{*g_ zm+}g83*KY=klJXt2ZlRlI$An9{^Jt;9VVCuDYo~tSqLxnlH}~B^?bbVstp0iru|t4 zEUqz3vtgcoQ5tJKaj5GggrAJQv^Avp1w~0#bA@Gi#H+%HhM?Omv$+ysIEJNnzrKKhG5e6<++!2a-d1=(Fv=HNawD0DBkbq%yNDhhOcgYHHH=cy8lX zbS`*X9A)g4ni~jSzzzYNcvTh267?(>4*5^!dfJr^yL(arPbTIUdDz)`MR-LSuL}sK z5YoR#x*X|NI%Ne&EG}NBZ~rO46trNPmR6QCEQ~kvXU(mWu&`zJlpkLsNV!Gv$n9(r zgQT?;hB_9o_+rdFFR8~ZiONo%dpVG}w?&^yOPKvbDFZ>)nDm*%y9C>2!|z!xG*~RI zqs-m5edYLohGuW5 z@dH_Jhfj!HrysOv8JU|(!P~qMjPG9toCG8S)5)Vo)M8Ij`oV+Iu0_EZ7e@+$^XD&E zKj;meN$!!oi?Ur;(iJuAy6@kap(M{CQ*mO9H8OWCAR=P^D?26O*6 zKVp9h))iVXSnC-9i14j1^mU8f&TmOd;96t%OOejG_EZFH8Y{mlk9YlD%Qx+Lshwu} zq0u-{F4^ar@bmiEzI2HaOj@5(Mj?@oGDeUI z7}OA))d0BqrKu~V+?i&IlR-gdBMe+Z9r{;g1>Ju*TQ`m6aGf~!!wy8BYG#lFO;NzF zSe)AcPI)Egr<<>DWwJ1*7NLhu9|`LRf+5Ns>Yz4R$K?-WADs4^2!D)5=VzDYyraOm 
zL2-k<%>E-*m-8bKERLME<1y*qT}_w&G-o}MS9!d&Q9nssPo4WC}Hy>y`iX1LY` zhPH2CP%o`~c)mp){OKy!>C%BNUM|*86dK9xvun6Mm?hyXm{efWlT@chhfr@lLO=aP z*}2evQc}QM=D#{qf;Yof!}-22N$mdiIwMSUp~5?wb~*;w?KZVHYAjyv%F{~Xkl>um z52tdSmra#f_nRWn2U|@DeZ$>LZ%V0#>sV%PzBSqHsHD#IYkR5ax|5lMyc2NSyGs9%j#uW7qrKB8RAk1{hootq5L zU~*F1xp;HWR~-U(#s~+7pImCtEOE9QPh_JT95u>TxpkAU%(}Ww`*=)BCf7*`e(H0& z#%~P9uEwJEa`qa*UPY@psJ!EEU$hUW?6(;%F|+XV9D3K1Si3@Tbu@`zdFbl8yT1oa z27lkm!l}XAv7&Kz>u67WD%8EjyDZG*B<5n5Ds|NhYeRl(eNxhAu^FgA!6A@`w<&6i<^rgGX>%D&WUHWreN zQ6LwHa>Cybyp<(2$Q~q-FA#PA_J~&J7UrF*JW(S{NU-uboC^?33&8R6;nqcUKEsXV zyV-Y1q7y5eetz*RRmW(FxCXhEyP#CO(j_tD7~QAyuiU5|%PZ1i+hDTLP~|ezctcD& zdP=D&nztHGN3X9YCcFM#TdkpY+GcHu6+4HbDYYnOWvnTNQf^k9i8 zzH+m{fl?|Ph3U>t+`9ETZtn;kfkR1YQw&d(=G29PtKEMu=BJ^LnAI4;N?@SQU4Kahxo^B3* zA(sx8R)M||Mace+xrKqUEQkY|s5a`VT-hjTT!Badd5#Dmm1&&OJ&VTQU}`r314Qt9WFo3f zy+zTUo1@;}ryCbHCi<8NXj(-W7I0mB<0SD;P$%k9|r3z6TezZHcj4I~?qe3I)lw@y=%#)K7)%o2ocVGy^UE9?8JZ-%D2NcN6GG3UGefVW% zR*@xRP$DfkZOzi&#~W{Z&5|J&f+Du~-E?LP<2&wct8zoK*d@Ft66FzLP|O_$^m5;4W@21bwr#P4s)fcpAOOERd=$`&w z2u(5XDGJjDspF3+GnScV{GS(>hRA2xN`sy;F3_whT41}^$NU>3)+_xHE6TnUL`HI+ zAqGvX{g&HV9-~rtQzIk4J37E`^O^Rr<@nU18xDTuZaYFjBEjFkP=R@ILD-HeQ&Y5I z`8FJMulUEm#RcN4J_{}x&pChI9k12HH)6>Kg2)7T!j-WHQ(ICoyZf8r&3XChi@nO& zUqg)3&Q2SANHcM$Y@n$Z5ajRd>I|?mzSq}{-CL)KKe4>QFulD1nOiy5 z)>3^hydT62hOLtx?GzRsZS-qz${Ni#YsQ!#wMl4pM8L`b@`ljSjF5*8Oc7&W&Z+=+ zrnn=V)$RLAX9ijeYZwLXy2LD>)86&Kd>sE<=ojXT+n>A9bbfx`K)F48czDL?-O(Cd zl0m$I;bV7C0w7ROk+Mm*)K8s3x}<*1&>2@G^nPn@Bdzs7$LbP6aS30pozQ>=PWNk= zn*cdc6A4)$*1<1d3Ts zytH}Ra2wQYgW}>8koCbSY5DvKbQYP zM=|c`od=hT2daB#vv+NFc=`neidqY++R&t#S}_5f;>h_DO`L=fj z9lnZpO~T$(1{2&@nYM_fMR632PzGF03zqu!t4L$)Fmk<55@0-2xoyh@`^QfXe(Y4F z0&|$ai!)Fczs-$%!7tUs7E!QdjHW6m_JSUfSB@!y4vyZB6vKbf)9<4c6+6My5&#k) z8S)o9n0-;9)n!njqt3{J9FT5=Q`7dy*qD%Bp8^~%Y^9Eij*9jex-j3SnI1?1y8;Gp z3qm~LQw{(Hv>c;4Ut|LafZc1>1cnq=dbQVN<>hU#*gNW>VL=omH^DW@P%^?XGK5W% z@p^NpNRTFFhE9&&_;|ER*{`7?i}m143;StdlNaWY&!oT+SS&!H-N8IX*u~0#BACT zVNvUiPmzqQycAm-yTO@9TbakkjqD#QGi5k@iv>A5;_uoW`repC_d^EE0eqeFX1D`S zTx)3Pr?QC@yOhCURzaTd{FX}Ry@B)V^4v^!Rfo5G!|sI+x1ctfD$f2HS~%4Qq;{Gh zcl4`}Lr3Ts2GdqW^0zRt#yTD7N2p1m7CtojH8-CX^NXpl^j`dc(W;P2SK8iH7R(r~ z0RN}rJh2Em0PYOp5sp~lkWt5c1bmY+cic3fh7dUZ{SBzyy zI~a7E5ky6wiIP_&!ZU<%nq!{kzwoi zot%yX^!D+o#u+y0V)s01U{r|BJ~YKeGN*zX;lPah_ywWc_Ya@KD;)Z}1vK%Q#+(|L z1b^G)t(5$}KcovP4JvE_Kb>&kaY{m~vGv;Jqy1KoTUuQyU$)gD3S)8};mycSU;q6m z+Pe_us4j44AwI^!*(6CcFf6bXCN|up?CRE!9Rq^{sw>h>=I3lm(WkG;0@({Hu@dJ) z`pR`EGERQ2%}vU{cHpAJZ>vY;KcWVcRoKSRgW}lj@higgX}qova@yMqQlyMxc#aRU z%L?OYe)Qbw1CGND39&zI$2WV)u_#&~%bY;Iu3NKq<25xhQlSTX_}Z5)U8Rl<5AQoz zsuhM;{$B*8gHcp7|Dh-CW>f(-*_eXzOS{9q&Rj!De}5kr+oJ)}Xs2+Bb;q%e7fV+~ zP1L#{mH4wcZ}cX0`%)q3F$$7DCd84Bhl;$tTI%ZRuMO}ew^Jb-WN66G3xK$foDQ)` zNn*AN6Y?y|03QP6tA%y)eB9CLy0^1S5HKM61a-c8 zvTA7Wl(r1ZqH1gw`JS{RZDGEz-@Es|esat~6cmduW>pbHMC1|Sdqee2ojlkLMx!8k zS#fb|6Sc}s&%*q;*tms>0t4cpZuB5KEA!Ib3`wBxG74}}TWl~EOKQogIxx^Be9rpy z@mlrg7C#*_4jF%Oq8gmteez!_3JTWTceAsScd*iA15cGN0(mJ+GEoWa$>$yC@A{#r zF>=fhH3&m1MXALR!j-;yeHhr{RdsYCEZxrUkHjlrOa#>XJ4hBvr)_NIQUnBfq?(!r zF4gPv;@v&05!tWieBbsRpxUQD$AGiES5pJy?%EnW(u})jUPF15Q_II_`Z?s=_n~G{ z_(2v+3k%1yWGM<`X>v>xYfAYg85tE-Z}J`nU%Plt0tW{QY z7p>Ckmw1XT1_R-M$tmgN8@SK!pVUQPUYAwt^YSt$#64t38=HKcJ!}%3!Ro>H923cJf!=+0YzmkSrb(U! 
z{68Ef!HGo%*{`6cRCB~{S3g1A5PYrmc)wlyCiyb&j(z2>d#cYkL^Zf<9W zcSciD8?6j62Kt8C?%+yzi~J2wYj1C{ReNwFt52e+H95CVBY9=kHZIwQB8<^@L`>P4 zi#CUlzI5&=1-XvniJ&x34wu!l2~z15nDME6as*DEqN3eqz&Ha=Ufm%&T{@{WFw&ZV z-{7;@jg4NI>H}Xzb5Prv>XtxMMULYs1jW6Z=GEontT+#V==`~{-0sSwa7*Q+cIB|B zaZ3Yso}0b^Y{1*27mb$LWp}}11`b%M4%da?&fXNJ)Z|-Rxd0&mjP}U+?zPse1^xTe ze4k0ZC(+qkepjFNasI@bnwyqlxUB%8CaS5S5VorkdJmfS-q-0>UW?g7U2~gzUtiyX zV=H{3zc3o>)IZq7!^-F7B^^`gb)rp$YXyGqvpALM;M6Wa_r0OrjUpRn=HO`P1k`>k zO^&nLqA>nWbfx8suEyC5ZUGeU8z?Lql=~%~J~e6ye__g^hYR#CAdW_gI1NqgcK3UW z!)}X988$sXZf?}gBN7i@hLxXT_rSi`pU6% z}4yBLk6otGg0%zW%CPMYzXcVVz=*jpKW*4Ie$?If69$0RQl9ult>)K zr(riAo|>9)R=ZT1j0Olo71NTU^*`?S_SGMhl?w_80<`W~QMk;H2{n5p-;Nw|(9RTl zM)I8VFFh`N#W#rytEXN#JcWDYNs9DI5QC$^Y#W;oTKYkN

(%1ltZ->_o8$qOd?$4`9mG4%_k46f# zrT_EA$q)N{PE$YF`TGy>s*{I!>R+;Lm~RPei+~@*Bei43-)}j(OydE*~5ky4%+D?|+J&Xn5U}&)A!#zdU%&zk43a z$^&a}ln}c`US?s)!#m2U{V?xPeWt)lcnK`20flS&)iJw)uNEQp)>lEmpzS4fNkHg5 z4FXj`_lHT{R}9c{-Y8a*{DKrVq^_8er&4Mz)^n3i=SR_u7iYRsp}}+p!HM<3Kd*0( zEG1aei>LSt*WV$uom6QF!Aq&~V1F>UOQ~Ss`_R#@oeZFT zRy@FhzWnYTRTQ_W;}2Tul9!Wv6;)zX$-Mrb>WFIU&CDSECR^U9_@Rdmb~mp3+#9X+ zVAq{q@R=nEdn z=d0-uM1j_8YffoMW@c&&h4(T3h2>E{KzydCKtdc6Eo48mX1uh*oSCL}GSR!oZm|(W zUMTtx0q39Bxv~NRVkgil-X`!_oLCDm7kw3j){x8-P36bzMJlCE)+_#p9`#5nj4#^f zS3zDu?-RO|8>*u=0DzA z#l=!12eMKMDk_F+eu|1^Xsq#(|DN2FNU(kHt1@74xNkUN}TCSt~p&PawNN=VM?aP8VQY;5mof$0TzkWf}sNm@&XKv_#e4Fh^YE}?tef=Qo~ zLs{@~FX@ts9O{~4EGU(u|4l1OnaAtzbxCiG*qWb!E-`+s0sg3%FL`CLwYM5>`UW2- zMWiXgu&bae zG4Tiizc1ijhn z>S6r4ufMMjDdb*T@BC!Kyl1N7lwo~I3n(dgn^F)oXl7 zlS(7XqoOhkm1V_e50~2j;3@IAz>r`KRBaHJiqdla1!jX^UbGW z41NHkFixmJ9c&E2vo9MsGimZUaPn>^){mztV7Fq*fQkl?F?XcO*sdnHEYnoNCS->{u!x4sfl3L#fG zQR$LXATV2+i|w8#zZqeTdwG{1z3W_FKA;gveA(fVp1gsgoq?UNL)3@Q_XEIVU+${~ z+TPX@_LJP~Y#_;Vuw~Aee~W#_c=7T@OC_cRIdMBFO#r~ejFKi7mH)^j^HMrNpGO_= zySu-?wjnn=CFOH+d~rfTVLHqWPq}Ib#nGH_+DNVku;I}8XOycuDWuW-vD`0^bUZ-GWyuifho6OCI1K5rny zd$84|Wqc)Vs|eu^-%lT*6o!BQ8a9V+zSk#2nM3;_YV^+cgU_tne#Xl)E0#7_Hn!yp z3#TWDEyW{1p$YO#*mgzZm@?CVzqXtm6c8h@IV|3Vw-0$p*o3ewWd z+EmN<$J;4&3aaC^zAh${*4TA8hGg%yLbO|q#ZFmrk$${j%ct#6VSnnFS9eyU1v-v{ z!4mN?b$~I*o&Z!_#|88rhOHk0f~q)d?X0wNbyI-wRLLoba9u7GGcj2f=_4Vg+*$|^ zLgg9MX7F<-TIHsmh(Hf@_u1J6egSx{0`dsAa$)6I3-oY~OUR_r>c$*HxzK0XKhI{H1@Z4m@@PaLRs98l(7@ISy$;WQo?^zJ7ddAqFTC$44W2*cP*)BRdnvDPi9*9=*L! zPB-Ja=kY0ptybsku=yaw5}R&bTVI_bg_8-!7JU3{de_XCw)d126w&?pNwHrYy~@p- zyBh+f_{=C@O^DX>@@`Unf_te)=HVsR#w8%IbZJ7nG>Ffj?Hq#C2%$_0hn2dFjA@R@ z&_;q=R8&d%;KY~H>C%AG-|z@nn+i7|+j7-E8o2KyCT5>d>ONoxpw@DnjiHs17z(Ij z1+KXqyaLuR9^CPK0&>EmCXu5fHrE)#$w{f3NDr?!XAcPeQ48)gk59c8z>X;ds#5C) zew&W>ChGA@6OH4y+cVRV&Gszar&*td9A{W71)sB4)`Co3Zkg_Noa3eL2WNzXiP@YS z2twI^Ay4G&0z*R~C06ad-6=K=1MrIkrp<|TM96+r7lX-AVZDjKNZKmIM?G9!;GRF) zadjo1ZRVyM89Xuic}aHn%;>f4SD({}%l8Hx18-9i0A!F9EWEmfLxtcev<>M2u^i7f zoj=jdTRY>*+6UYFR90>`AIE0QH%YD61$b4r2<49jHfa;DRa`A8=0hHKTs~pPNvVe| zKy~Ep@SwgT+v+>quFp%?C>Z{d%^}d0(+?`c)7=Thh5fQl`bvZR;$?QAyZdZee(QOoq_MWixKc( zfQ+Ds5O+8U_0umB_MWNDziZEt(pvEZT~g^7;moDO09$IXtz4gKrUpsY%I*|LJ2#g% zBMoARbj+Pt2OA^pQfzJ^${ux{3ITS2{enY#-GYFio9W0|*UMlx!jnA3RE)h~ zy#DdS!I!*rCZHIVx7x7i;O-8-KXZ-v=@#hY zadkAV`y1rJnp6r(PEP5lbz1wH?C@wEME?K-_=GFw3fj2$X}6%FAi5!9F!u7xGd?S` zgW{*kCMNSJ>d_LY<-U}TwdFkRwt%Ay%m*ZIBIxP4G#IZ--$H=k`)YC%t2gT4pd_hh zUL820i%PmWR^Bm%-5@Lnmw>BV7>+dULr|eioqYtL+t4q4F3gwRp*-FWVYw4o_nKH{ z7$l)BBlQWL^9$v(aCwBAP(OrOzDaMG@&~ourKfftJ>Xx60dRgQQ>vxj`^0=`3PzN@ zHY=9EwgT-6r_}*aQXv<|eDUrgHvzZ@Pzqq=GQ0hKeWHZ0tOV{b3T{nEMKo)anI(Pj zzaeZ=|8o^}&*7j4CUaRxJyz!!%%+@}k)C`mv=88*z=@X_^mlW6A`S~e#kU&oK?a%Z ztI8OO6x!H`LW^7iRV4`%H4H}D*w{-V1#e}$p{Zfn%I3>6#_N~W)zmELhDJ|9|9X4F zf`dnAN5M_=Wl?EJkUiaFT)c(w$V~zC9s|(1fblgjqe6Z8>gBOFIiO2<>ELLnZx|F5 zOp}v;xks3;w&Tiu1-Ac(AOkCUW$Oi^Aax7|UV^^5}~K_3GBf$Q;VU zoiF7RNnClTVMT-PosfPnQ4y^448*T>n%1F(CW2hE5R3;FLR|cv6 zOL5hW&T)r9r$7{TYl8`O52Suy^dP?CE*%e6;k|Uy{)SIx&uA8)+H?s9P&ZKr{HpB_ z?4N$sCwWp2Ej)(3;P~Wo_5k4%e=&>1ZqmW_h>z$7t#rRQliSSVP!C31Bflg{8&*ch zNVop8Okaf($6Z2Sr3R{X=6Omza}gue&EBojAT)ibcsj;Ot zq1p8Bshn2Zf->!wv&*=BF9_58EH{ReIP{9cXObLI6Mp589A>yGwExHy#W z?}zz^DBJI99oc>Xk%|$AhHl89Mx+!bIy$ULR9_X0PeAmBniyN4ouw3B?4bE@l*LyCZ`8uvm?R1o9(@p+o<6(X1_+~OrL|US#NDB3 zkwWa$4FXnM2Y%?S#h%>bwtR9_eEY0_l)(PB{Sa`(sMd+DgBw8y;vQi@pd1qk{u)c1 zzq0au`{|*x2NyGxQu|k%L@>RD`#hYVhnCzj1c{`RrlQ~fqz=d`N-O&Up;8H(6TP@V zgc@Bwc5v-qDqol}d3`?%J87*vqMdqsru zr{3TGRi{=gWxswx(O$n4qgH&NT)8tle7u%);=S6uaAGt~&jXc|*OScd>4}MOFhu`4 
literal 0
HcmV?d00001

diff --git a/docs/docs/assets/img/simple-trace.png b/docs/docs/assets/img/simple-trace.png
new file mode 100644
index 0000000000000000000000000000000000000000..4aeb1868d64d32b14552e89625ec761813185fd8
GIT binary patch
literal 188840
j2 z!RxYS{^ad%z&yJ)57@h79IqbKUSGMDxb%CVeoLcJ?(vrFA1Zn_L4POYWZr$}&jrM&c0HWz(d=<9WXW9wE>qW=DwedhZ=HLh{ zBdTS*W?|&4Pc)@Az=DNd;(pAPtabT`(|)m&b8nFw{?#B!_2OdxwcSRI$$Zdu!~PHF zAB=I?C%bBn=U|n3Yg50=IkIdd$xhy>d!$3o#$f-VK?Tmy{GL9j&3T6U@KltCpicYh zG9_+R%#pfsrjG{TDBo45-m`S!Ry~1~UR?hWF;+icc~AdgjMl6qyz6vtQAeNa{idCE zp!~_>o{($ptAg>CVE?W!Qa--9mtPScCHEcIGENYC<^0b1@u}}Xv*P2c0?D+ao&<3N zQ0j|$q_(L=pcHiXi8Fil$~0=-5U%!f-mATA`ijUdPFCzMx*WVs=m+CwwHS^*}$C{_teea1$1l zsp8D>slzd#&EehcY-gh^{HzQLqvnj9J%fYz0c-0&p%BUquO&pevS3c`%HwQz11>uf z&5B|hRKr#H<(8J{6;18FwX0Ak-WdegsQ}KAc#%5CrP+%!eaQmYpe9~XhJE+htIOr zt|c&nP-rre?|g*$bIDh?L$>}>d2+!WnyspPR@1rucycAL$#{)Cj}5*QYzobIfo>(1o*PrNrGe7;5)U-H}kRL&C zSW!))Fln#M#(uuLzx0#H(CS*o%4_#a+!yz;3wyN7cnW^E_!uplb|foSBY-+sTg)u| zqzzttvhitPD4>uK^m84)WiC42jGhlW-GBM?nAD$#K;j^KI(dnlzr?QINW)dQrq*)I z{Yx>Esa%r8Ex+D8ZO*v)5AWQf&g!nth~^WjZw~DN>+Go3gaLYovb}@#kp~x76E?%u z4SRNy^`%RkyZ!RCnUs_5O`ugx7uUP6c8@KOqYf!;6&&z}%E&!iAM7lkFLtU)J>*oa ze_8XgS18w0O~FSgJIC&~*{4X7Z}6M9NA+$^UvFptf^n_ZgwmI7HS5FK_X2Pj0Mr9Q z>4b?!P>b&e8#BR8w3CTo>Jtp|lRPeb701lu;)RWS9?`hy1FTN-uX_AD_-r zg<++e4>7kEe6e}tFlp0p#RlA>bo}SV{6FjG8DDg~6S+BfL?Gb;T(JFj0CDl{Ji@&@|OTU!=|4fJDAx)!uovipVaa%5IHOR`BQ=Ch&_bAhNA!eu1e-S znZV~Dpnh)DSESQxZO&u47flKA8*O61Uz5~WRzg;1Jj9%Emh<;8kd zkDtdd?gGSYa>n7<%6vf3zR`Xu|Fl!oH=%vT{@jT>w9H~ew}$tA!t@bNX3sFLpDt3~ zPok~{`~85p^64HyDE7kj)dyCbS3|kU9h~COQA~8V8V$-!c2^fAl3WBk*$Af=lwK>1>F&ssp)s~=U<@@(+++xC$_EsiV9E@np9UBbgM2z9G(We ze0O!}#>P!LY8S!L;0tPi_pC_RV;ieujQsoU_^&r9`1!A3)xtW9cB$GGNjh1hUXlCyh{|%qz zBB9tKk{EXJTrrhXVyKw*mk%u0jh>zCJr@C?ZA%dnC2JVHs?_6UvAq-QgXQV;a>~65 zIWwf%awrO3u(<>X38%4u0{y>isqj!8W*c+oA^Ba9<_T%9_mem2WSj`N#uA&EI*uj= z$x_#~$m7o35H|r_UPX-g(zXD#70b)c|()n3Xsv0fL85^A__hdr3g#fVo z=}4z=FMeex&+ua{jLoQ=bJAgAK;w~iRN*2)fCpDJ@he+uzi+{Fb+&UJtL5(1B-9H; z`%BzTwPxLH-{C3Z$$-Kt`^bf{OVx{Eyu~n?_W_4$3KoQZqFzj3Qkujm@tJf{Pklh{X6sdf4;xFZVsyqHn*Dqf~HQm#_$6 zY~)z8O108D9*p^Q)BEOX4jO-}ofYx-h;k@bu+4grHLMigpeHkVmbKU@QAp}7zg=XQ zCBuWXdiy~rn9p>p1ZzPapO)dk($xV;x!gF@>%Lw8c`a}UJ!|2R?zxl8=J=u3DURsw zwEK1k54=Wm3hs-Xg)GY$Fst5S7acp!`@M7A|H)L(G1VAD#=P-!v&8Ca^yW~}7l|a? zc7C1bi{ulnzL*XG(TE#}*J*R1&u@XGnnsg2bt%0O6{!)4IpFa;#N*p}MfK75Uezxu z%4`b(&7SN7yIPRf8o=rFTPAFZh}@4iw2!f5=s~O+k`)K|{?nTw*V2ZEgoS0J2}yimizAPzkp4b4%QK+E;e(av_6av?z|KK?E1l6Ss{~_hse2#c-(q zfR_Zii-Zu- zqT{*a2ditfr^>x~syYUqWxCaG0;=>l325)>b|;G}S6dX(PuV+v!}`RF6taw9c@=2ARuF=^l^gH0Brx^>lAJg`puu^Y zM$3|Axfxx}Z=C|^M|G*1DAA{-)!?`;7)?hyeq#9`%Or7_vlZ-LK{#OnA+;MFL_m=1@2f+5y&*FR(Z$YzvfQqd z<(iGe7KHqTL#wp>M?TkDMjReId^y+-2%Io$A)~}snQlk7)3$Jf@5zNzY(!!1ZdwfH zGahZ9Yv0N)I!q{_QJM0%QCeWsM_n@ee%i$(2Wix#LLja$J?U)dTK4*H+UUuZS2y~S zMwVLc)0u{g`E@c{Y(blTInaWjgUh_slkk4YruELs)w96kcyZCTOru7cWlqNdPIH*+ zygzU3xqEaXDix)%OcO%ciqjH!tI>?T%8r$;4O+H7sB_xncy^$;CRU9W?D$c@^|AgH zZDL7LSfrg_#L5)9_hV-qNaWGh6lbPtG6j#MQ#CF`#fFsPq&J#Xc41MoSiBax7r!a! zY`x-(_6>6jlOdQJ-pn(g!M7nZ99sI8TjE+1Z2nYIrOYYhSzpD%(#EOa>E;||mEhF( zaC1Qt6|hM`6jhBb!U7~luDdmW21!t1JM0Wp8wJ;FzpB$dG!9AUM( zFpARs_9AD13+3*!G@5aT z_g7&?wUb2l86v9*XBGMzjVjaib?zazuN}R_cV<7&?+{-5y?75A;O%XjQVnl?FgDr% z8}iJ*6z&MG293YZdN%4Fz0_!+Xw7?aN>~;~*-*r-w^~Ze9^XGxU?DF^PgNSqn}*Gc zX7+SKgT?Q_^ZZscf0w60$>*Nr7fHVWvsfP1ArA)ivf_5tpyPdieBENBLJGc7=VB69 z>@JGzMOu&1<-HA8_v0;YHm$<7sF$ZyMWHF;HD{K172@i_QE;QRK}F-q(f&_TKJ1te ziH`(KzH2i6YQ*2K+9w-x4vj-tew0Id&!mAvHHj7XI2vHkQlMu?t4vcX*NZMo)>G%X zVDM)S-l=Fmyr;6pL}gZdrlW``;l&B4fLitZ*{$w{V5`J z9XMGjcA+3^S9AbD3b?4#qNj(8PkA`JW^8RdY6F=Hs|ks+mv;KtI+Y#RqWC>v_n|4? 
z)T(|}n7XPPHVbDRbjAN1mxfZ}`hwq)#-9XPftf_S>i2HFtuv;}@#qa6WUIa=DO;Dd zjjR!`ab9-+!R^5!`Zn_M&n+NI!>k3&zwwArP1{IKJ)xoXHQUb`1@PafAS}9ye{t-p zoz^=9@T_;fTsQo5a@ZE#4jJ_PL}KWDIHbnG2cGDH1Q{LifA-O%2N(r;j!BSAo+SiD z44T(0+EKtCdM_0S0G&}@o^C(XN|-gjYTH)M0Q+-&r`2HQTBVKsN}n^1C1vTJ!LSbZ0)Eptm=44h0M=Pj4+r(MTt;K8lgHENxy-V^XU32S7|&S-O7(5%G# z^kz=$_y6ih&uY|-$3b~MzUVRq`oU_uDdx!*Kp5P~@ZzdTt=eiI?yU?P-QvyiYoQZu!C*=tsBGW~M;c?4{OQl@COyRv)p zK3K|0O~kDyd!1@pYld7$s)P~4V6)f_-$eWG3zfbi~TY_#qTb#pj67KbxsgssG z;GM|n-R0m=;BFPm(fIP-yT(iL15QQtD(mAd9 zCQL-aW(6_lS+H$mmr* z^8GYR1-K*ltShvbV$;>cm&OMTr&11WKh7d45)UfX$d7AuYror0x=0EQs!tDl?Tdy; zx_$2rZ0LMLV@yMqq)~`Ikl>3*S+jFZ(F|M+2WRBWrp?+}UfM<)jq0armMlF*xB zRE|jp7}So8(|xmkKwE8PT1vSb;*-!DQ*^56a(R!vgi!b_j&Lu`O>J*?m$2s7I?e+Z zi4fxUcDb$N$6GfbXUDWM_03vEc|Okb+)wXpm^LOnq z`0{zw&N$SYT%Z5^+PGAu5*;13b04A`c2r3x9fbP$49L?iS@jMqpe*m9JVCAt#c3KK zBf&)!EFg_`+#Ab+oO1MjQd+%nHSV~cv+V4-v8Agm`FYwGX+71D9?LnAooMT3#fmcv z0Tq#S8+Q+Gd`sB^r&K-d?8+>i`Ay1!L_w=pWhQZ5JiS;qsWjwEG(JX{+LDa!=oMLW z&O0m!S5Cz8|E~V!VjMu(xH!_;voIB`}cOQ;vS-y6J6(awKH??u1O;|l4B z=8ddh8AM#k{Z_g3o;3+k63B=FvpBEAiB4&QnAD&#ZI(dr6bxEa6){KpV zM79$)8h>lQi7NsHU;8e4w;`9W%sud+2nM8ZH+>LhG6WrF`(7&J2Bb0~_7mEdnLO>H zFWct_4zB4+Hqwo>-j<&;etck^dOu@%Cs}x3$WG$aKKDu4?L%0gm!r|hdg@^Qr*J-l zeFw9i4_bGk`V=-i3d{!@505tW*bH)mZQozY#X5{A*qE#6Gblv|gROd4B1CX*bXsYI z@oZN75HM9b;i7Y%us74!IbJ*_wP1zA)v0AA-`!RVC9A5(dls>4JG{KYfTK=;m5SMG zy@yA}arfr8ko!iUr8~pPu1FRZDLpuo;~+G~vAuO=vXA9Reg~0vv%9IO+@SAe2MoTp z&9**QU3F8(ufaQw1ZsOl&Yd!lsk*+}x@#iCvGYqaV7z0a5T2&Bel%bCeq(R@bdywBE;=0c6v{OsOVnWKAm8@sd_ zH46m40kc5(0i6^!m<1#v(Uyt`ph3pK@@b^e9OC8iF+|WI@EsrMT?1I-WjP6EzHZe= zx8u$2*g2I)(o$d}&exZLk_C}o*PaB=J0@9vpRyotkg_E#K6b}Zcz>5sEBAs|EBBzp z_Wxq8T0|rS1O%iTX#oip5EMij1f*L8K|mTrT0)WT4kp}&(^UU$0F9(^M@lGP*HsOF%A z?3USX`F*FFj}P~VE1YKn-jo`Du<9>iq!u|oePcJ zY=_77&~?)F{zrnJrBESymLxJYkAjy|!pI$nZ`^FpNb7-5!=+Oen41gS!fyD`E0e&==t7TQ!(h{|b$r#t8*=Y!32 zsW*M$a7r9PxhGB@#{-1)LortJ>NsdmFBp9D#hFty>15ZoVV|H>ThQOx!0 z@bFNrDS)>9)2hG2#6-Q@<^v0TJiL+{SaTHkQ~msW zpFX&HAnj^wjmz#L>zNFHLnvJxX>?m;mYVNbg4trk1-q4@OX}5a(j#RX*TDg_I~me= zZ17f4+#)7X&=@mb*k#DT#uy0cQ1omh>1%j%?5T2OfK%r|!4lu5RronPb4QPp!$?KV zkm%}sf7al;^L-9)O_bGe;9w)qA`{=Ese$|ju)yWRH{<$h4t8DS-|^^tzV+hUb=F** z@!k0weX-N{=q?X;Mn6l8{IO1?9-=*z72>qu^XGNZK@!~odqq;{4hEVyr^)sITDjf^uoVrHVqJ#iM8vHWQhM@QCpDY6VR8e2xn3!jY)Jj&EuQ8AgQ zqt~=t|3QORv$8>b%VJ1^+4RtIJm(TMQS)K`^fm0#kr_oebxTl>RfFxu3uZq`lR@iB zHy3{mK?udAULbCcZ7bmOtSwnCT3oAfqyJtWZmsf;+kf^v$ITfn_NU+WgRUtZYm8M4 zMXCLawbA|BGLB1mD|xM^@P$z)4cdS@iio6(oEN15?r#i zBfubGxfdalH7r)6-qLt~C-6q+r>;m={96`3)q>G0pBR@e%U{=tX@9l$U^vem7y1n- z$vt0qSk3T$w7XEy**^0@@wLbsTH-P%2XogBUD|2?A;5_@i-%{2X51EpXCEa$eK*Mr z9Ej|sA<&Mflf7bnH;_&7!hqwGAOHU!LU(i=Xrl%ro@!yB9*3e%eZI&1N$j z-`!_#J~@(nHWOf!E--lgQK_Eff~q%z)kxVjDfG!LtnsXtz_lMwzV6vf?Gv~t-F3lL z#X%$4Kn}}hZJFqueO9)5`R;q-YR1OD;Z@(A6p|wupXH z@j8{%mY$s@zRzt>yApyb{29*kXIA5AKUI9kbHS)HLe-I@O~M!ZS2UVX=NJ3uqgWsm z$HY+rd&X`m&ohD=y#<={ry>D5C{ba=Wa`pR&VcXy#1N7p&>y|E_jnsrOMBr$|4-UY@b>_F`1Mst3qBG2^bPaiwJ_p?C$xQUz_*mdnugZIbOoFV*H|#mdvJN3)(=(Xfs(nnhySw61S(Fv;Fjs0e9_z>_>c^d-(i2;lF@I2K9P#kD z8aY?Fc`JA;Q3f}SNu{ByLxVEL$7clx4GoP;Ev|%GX(y52{hsqp$Lm#!A5@~n18`kk zU5zM5=9DP=znYPLcilTdaq6fX*;^fDIEPLZs!{fFE|uhay&A#98yw1fn5+p`o~Y}- zXE63nOc!*2W^EEy%`iAe{7Rg@!`Ty=kPjIu^1N79 zDJh7Z(jmmSst~0UT>RV>gRN6 zOKdWcPe-H?+~@5VyESFNcK3d>$@QcjAuS=IDtnX&a8#!4hfWtR_ zw7>DB@cDA+#IRL`YVTD!L)WgggZX6Z5YeX;HV7HoAa0(Gq&FVo<{@G;FzGEWh*Ji{ z&1*Ff&-iWWqlkN=6dScCelKj%dfNI)BUet+FPM`;xr1nBB=a2V`!t6{R4BpQ4>lE) zR_qC(5l-Q#d1m!AX|55>{JW*A<1w4}_3j;%360izsH_$w-ZD!VQL>6{`FS$cg!Als zD{V)Zjzu8OZSI^n4K~aD0q?;=GJoBA)F?ULH8S=X@DPO>BGOwN|IjCAH#lc^zp5E2 
znp^0;7jSw{iY=KtBfp%xX5K5VKn`j)hno}ic*j8s&>@m?$6wZO3UJ67fjJ@ zt@GT(QReWW|MTVXl^`Dx@M;GbW>r^;y zRO|b0X?|4mfttzdfgN#z;gDW8%EfN<`R(OP*&=Sfwqd^t)emS=!nJV8eCpTu{AOfU zx~jg{RR3tr+XbrIuD`nMmUgLQb4W43pN?o&c`xCxI(92^<_SuL-6CBG>%^U6x+Lp* zv~8342NBm*CEf`;h)@f-e!myYq5Yxm@wwBp*)g~Cz3q9Ng?5Mrcf5wmCgU<&mZSw_ z*?B?#^m!-NZJ-LFXp~vbE^5l{i$E^7y`C|Fzj2_DuTUON5o@GC*0(-6X_@}xLYjQ^ zrHxb_-h!-0)Y{}R{T@+sSm9?~ zOfCDG%Z*i*z*bytUM=I}*t-}4MFS$jo!t?o6|)Y=u_fKb4-przNk zjmG0zPi-Q(jN~UfUEXr?M3tBkxC0{PWL;9U-mp^Ib-iL`Ff%6THj1NS+?oE{Bo}f{fnhX6?R@v6%!xJmy%hv zB~|E)i!JodG%8z&YOoQm>CL600j$lGGK}t&(g|H@;EKe$@w{BCSsXtoaGP%maNWv#Sf#u3YAyRdobGM{_c!dVNv zPUrgape;BpnIz{vD22bn{?)~CnoYQR?sXps%5SmUs(T!Qwnc2EmS1F;rSx-AoSx66 z5H$t16>ZINbnnZl&)gD0s5=pbG*#H+O}8YN3Bwqi9)q*q+VF9W4jf*=<_pzeTjnXz z4P8Jpe*$HZtA#$#XHuvJZCk0cA#Rm&(hyXfWm!Eq5W}Lu!t$W#1LvEWP(e$T1b%j% zY^&7n?!8JEsy%N*>H{1S5=-ImzFP()f;Pu7N`$DC=WTzr$~6T>f8a6c!=Bc@Zb61Q zrp~0&N-ftpG-hSD}-~5kt{G=r7>mJ;6P}ZYxI7%08Qs& z2>XKh#Lkw2#OsseU*20Q4@$LT56??i^&|+XoVdrx3hgP4Z%O~yDtfh6kZRJE=U2r;!>DNsN*KYEkcX{qYR8ZY$v+WlG=*P_97f*pY? zh1u%|dZJoN*(6%6Y}b>!X(EIiSo+_yMXY4b;ruF9I62&*?B&coJyR&An}8L(rh>94 z-|#ZagOv06UGQM^Hqh$&OVDVEIiLL0v58PG!}_U7(+HZ+kra_}KOK|!fHrf8>yBpx zSbrhjgAn39SSfZhz}EL)5F?oQGY)|d#W zjJumxug5NP#?`P`7U*wDyYokaJCb>&M^4@C5KU@@&zElI6oBeh8(n$4 zN6UX_%bRR4`_r9C?|X$Zx9B&O9=F*Zb?QzsE(lo+6vaXhD1(4G9GKl)&vd49F-ycP zg{KP?{=O*2zVseTyYVn%P+z3GZdZm_RjXpQuO>M%JA-AtUS%6kdP7&a^h_|J?z6#f zxz2{ndAU333Cvj;3*rsQql9?z`1k_n!8~}0f`O`xLQjFLewDA-tBa_Q`}Kuv zw(D>wou*Ab*a>n$V09#3z#+1$(sYJvsFdTXS^{;D_iu&ei0{?t&TM3CHG&;fq}&oh zo2ZtqD89Egn!%*9a&}hofPhoK`>yl$eE#-OINN=-Nns|P?-Om3d7N~y>s4FI#~k3s zZkxS7zsBLe z&SPAWB)AJF?ActDd&}=ViF-qj=TS5RIhD|ru)nAZ1yzLrqb#5&imvWQK`Ts~;1)QO z0;a*N3D_7Z=0ks}Wd8eI|&+}W<)U;fXW|M|Dd*OTu8+UD$6C5ZW? zd7Pclp73uPW8>g#k$#(Jf_q~?U)1G=@RFtWJsd@D6E*h+bEf$!X@AJh};721& zFsnb~zd8P|8`cFPyw~L8mjjGMK$^Nigcv5Qurp%<&*%9d2SQv&^=c0ZddDH(>`3*i z`NML5LJ-Ikl~BAm=(xtk-}XpCncW37U44h8GXUT7)8h^5Vr2G z@xONS_nulIZ&(^acZiSH69+mXb#H%qK^z{df4Vu_BRMXP0*gLL4c}dVibjCpxHX@= z221qsmnE?$U7dyDbr_uxM;1lg$14YUMQK=+hh=Yp+v>g`i;{;b?tm=H6?o*I@&8zq zVPvW+4F0D+5aIJ|nx7Xwb#z7^8&g~5wB79hul>!y5sA`O#lmF9gq-IE|H>HswJe^) z7<5?HloZ(Tw<7@}54!b}Sm9kQ2|)&uvKwN;G&m6%XfD=G4fiX>f=B)t|3l$_{#}28 zO!WvW<&2J?8sQ!15YYr019q ze{2f5A_xW_uhiS{EnT492U_nl5tl{`*y!TRpmgN^OrEzD9X@0d{2qpvc?pLG%6MM? 
zPZGvN*jj($86HeQ3n7zBOe_eQK!ltF-5`}2?mvo<2{kh$bC6+p83W1xF42FN=wDQg zg7*Ic6;%+eZ*4*P*LA+<%lYlu4ocl68KZx_B!LEK0RjyfqApalo^V8X&6Q1m6sKbW z)8o=58Nx0Uv>pf!J{LQzb=1y_<_HC>T6?EuX$OJH9c0}HX7;Rsrsh@EDW#bP`u{BX42lKauB+OC3Ok( zz%wd2hjXZ>r(O{Z#sq1~@vlzZj-n*ie0#yshBLUeBe;#(Q^;@5Z+|4-*8D&(g@^Od~q>gjZ%I0c(XXJS*YG4+i+Wjl6j?q0SgL`E^6ee28tZcGZ4oysnGPb5=t7n zLaf2?IAK)K1iDM73zg+5dEPED84f)@4(rZOm3;;o%o6UQ*Bx)^Jt5$JOCQ@>646yv?SC_%2bPkSsYD)WQkPOqUUf()8C7iL&B7y zDTMYB73=!D9NkKpX!iIF>fzD{uhHAIGNs`BXxnlXxN|y0b}HUKT6^Q=Mez}eVGD*d zvZDF`L$_^-wgZmTAG<;d74}5ROT3Z6Q;Y|(Or8ntJK7PfKSwMRSCk$%0{>Y=S!|J^ zgy5s;DT4qGLZAOc=Q&!ow~y&(17DnZ_g7ylhGlKds(~2_mDHP;}oLVyWcFR z^Xz`xnbP?5lkQPZEw`~UoFH$k!$JP_wwtT#v${Hi0ADfc@sns-G@w%f$?oeG5M4F0vU;a2gyxtFUi>@K(L}ffCEZ67Nw`r)4sXF2XBb zeCa~BOn;`UrM10Vjn~yFvkrdRaolmg1WU@XZQ)0QkB6p*7{UGc^fchvLT13A z$9aEaoo-_1H^02h!AA3L%vpZg@$l=TkJq=YxHCK&LXx0fX^5MN7iNlu1te|K2B zFR{2QMzU%}Zm`MRc00T}TH_WNc2!Ou4rZq({cO!|Gev#_5L0?vvWMFgnJ?GPC$E2y z3y)H}z=1`zKbpYpQjxk$m@vCWD+Wackt=KuEBx_jMF=J>QvPOQh?HE(OBv^u52HKw z&v8)^N%?d_Z03%#s~0I?&*BUvpTbU?QnqfHzfFy0#2^>u@(1H=-42LE$n9mfC@iNe zP|58SA0GIqg4o^7gwHhy?iOiZ_@thZU~cg>?3~?pD}B(7B`qp%l$42yJsPxa21-43 zDIB!tzpzqB6H7ybAlTnD+jC*Gc2VxQrSmb$E?V6fT|`~;v1azLdWzKR1?XsxB+$7z zP~}Bj#-U0yMG+2i{KXjPvjFmMx8O4AnMKH01dj<9Hg2`Tbn~gy+E_LBXA2(S`#%=R z!~w=nd}oOb*M@2=VVpqmnVPMm3FWzt@VK@pW@#_f)5aEln=*$rk$6E51dE{n9rn1g zD#cH_G7W9|gAJWGx0z4)r;M`%zOgjog4iemUvd5!-!Dgf0)H;B-=t%c8+~9H&=zbD z@C0ctW29UJ^M?m7jays{j@Y_x^H>z3Oc*n*6jvV(*PK{YJU;H zT>j(c6o>`VzaaH(u@PFEn)Pztn7R#zoGYzjbirxx2M3IVbG47sw&uE?^Q!w(^3kLz zMJulkJw;zn^}}zX*dY7-riz05X;ffTVyU?qi=ad2MW>VP1>y>)En4-eRGjwHrXb4b z-q-On0G>~RiORB=$Jyr+A&jfa@3ogr_oAC8 zjJpzgIv$<@YMOXV)7mi8;V07=YLt)@cKvN(Mv}2hP(GKLM3#v3- z2x7Qh-ccB5efxyv?l)At#`eUVVjO_t-p4ijN9WrF2EKUSn6A!REBY+unz#Q-nx6{6 z>s|AF8@m55{qz%~B-qh#_t>|B>1L(FPP)`a%J(t@w}qK@%~-R3tL|SP`{p4yn*OU9 zcS-dtQIpbMC;K5T6

-CmlUCMhXS!3fV%`mT7M>k}IAEtk}5MXwG4z&?6$ix_hIQ zj=&c*OOg#|`~|edA6AMWqDzL`&?Xa3K3Ml9fGRT`ODX&V{ne5iq?H!hnrscI9@V(sg#7=5*D3UpvV@c+7QX6C zA%TFoF=xEr^zq1Hy-wutv!&!zO(uFE8Yz}!QCj^-k1gv=9Iu&N`SY33%Hi|dXUeZh zEmz$TDZ6><`7HVzu~EAwXBsOR8T)%O%k{Y!>Yh52l8ZZ3YoApRDCTtYjhA=m>5cht zGF0L}fC_D^VY#sQB8ubT)r&tEnI$nj81K7p-QLU6I#WK{^Nne$eBPI%YupyYT=-4k zd82;TxhJH-oRT!*@q#72$&x?Ww;@V<2lp~>r2Xf8w+lB_w9o=kwMps>^4IaK6=$gH zC}h)4yF|>yd;?#ug)NQeMhmrT$=UA3Yqp)6A(FM@F=MoHSQ%1Y>d)2;TgUyIfl<&t z_|efl(MNF`JyDPguVySFOJ?}ABkVrS4Sg%} z$Bq8tJ4@~?;rB;|Gsf|^)$G5=R6j?An7;@j<=vVut6-z41_44QlQ+eYhQMDB(R#TX z&3)ct_mKEaGok%8tjv2Ku{B7}25|!D#4LF5@Oa5ETp35IGYDwVAom-%uN+^AEotkS zZO*j{I-BRT|IshY7s2!W9{wZo7FnKx-Y97v=HF+}DV}y|I_|HdDkW@Z4bbm=i&&m7 zk5tUTLW`){tr-kNa}k{oVX4;}-kmC^s+_a>Srw^Ee{RkIpCFDmXIcROWXv`tHBQDt zv`opopN89vDOxJwrBGZV$xo;0+6|*uML+H^M_yNbFdHvmRJAQM`B^{x+x<+7VP!5# z9FgXrqd{|r+QV`mpN?@8`%&1VWH>YT>vaWj&&stTwcOlXj-3@g?clzhB`uS-UmNm7 zHQ?a=&FYG4XLFI*1$e6hwUO*)5Slz!5uwRH?`TNie+3bmp|pT=xk}!IEi1N z&d+kBOqn}fCCX%kX*G_uzQ7}zL-#|qi$goZc{FtHHtLOqCPFl-{dp8i9|u7j>+KOP zpM7gcc)<<|JpV-?>~tJc@U%l2@@J8ScWyEG z;mhaZQXHda)~sXJS8fdEi5hL@hGiHDx}HH6W+=}+i2rnwr70xQ^OMGz)NW+o{v9m# zC1r9R$Co1~9!o!!-f_AY2}&D{kua;N^;?d&IZXb@;=Fv*m9yIYWXMFATEH$D$HEWK&r5KonKYoqO;5PGDFDl#NNK2S9M1t_M&SUm^puhrN2AI_Bz43 zeJOi`x?0ly%84f?F16*ke8q*Z?@43ET&L0b(@kp=W|~fH4Ey7F)h=$V!dUBTGxVat zB(>DE?+PMss0Z|>DJhU~=%%|JWmk*XG{3)HpnmG=G?Q-#qLwFe1}8`K(bx#d#G1-N zZ(3SR!@~g|zmWNX4Pv{gU(CmPT&cd41nWBBj_;1-i+2IJQ6b^|7<@G(o)iZGf-V}` zZE)G^ZUSvkZP~nAR3u2cS2q{CRX$6Vt`cWXgmHZVXu{OzXtrbNbv2syD`*nk)1vXI zeO|hO-({qMA5K`{4&dm_5`D4#Hd4@$6wb{vF>g&gO{A+UaF^X%Jk2LJfXbci#UHHd zguKp$_4jS^bH}_^fjzNxE@8YEaSNO6ic2F2A76e~E7`&^qimyhk6I`*S8iTE*TQ9^ z%Jc9`(q!*G&1L84=3ok=pPGl;r}t{bpMaJpvKT##CNZFpZ{ zj0(J_qv<~1T3WP^O(&^6iNAMIQ!Ur{q1;&GDIL#O*V-hLz{Qiq+#{>p(rYtw{VRxw zL{w>zvuy*yb93<_LHOwM_ZuIh*fqQ_|IB4g^`H?B&e5$kh;3S|xm%C+y+C^ZrgG)p za+p?2PLg$Oj`ofSvw1cFi$$ZF~M{YCN6AyUI6w7VT#s2s>vOH4k z7sYm@Tiyf#U(fSMi0eE)UIl_bMnWupj&HHx<0Ob!r+t)U7qHFnF3exY!Xgk<_MbXx zj=lGexJy{66e3v^z}k;0T|TG35a!EHDYip`@$LO> z4d-ij57S)dI>STW@tDwC^?%kAr>C#{Rm>{$L<)ETrykGpip53p09rx8@A1gGIjYtc;UMuxh>ISblz=Mvz7l zuQuQb_3B6sV{W`#<7;Bsie_Vjx&?I!f}7(y1e2LUI8^U$nMvRFdH&VBR5~vFTJS9= zE9-lj7UrdlVv_}nb9i{}UD18t>l1$ORsZ{u{sI69u$HvNs-{NUD+JL2WbN`LSy?s5>mZq!Dyxi|3Eph2+i%ib7jSstmlSh&p2 zN2!g+1^66~UiJ6}k~P(m>8alBq>?ZsP-#|)XAc(PKRBI7r+AGI%pZc}IzlLk?2ufC zG#K{_4cfEuxz&-ve;pS!(}#GQS@AYtc!L%e z4uVH2KbanZ71?cbro<%9x` z$xCd%3pljR2}nt=EvZb+$wK`R;{l_xqH$+Js+@ZI{o+6@*oCeuNXF?h{x&A;f<3Ye z#=h@i7ZPM)7v_R4I3r+g0c019jkq&Hq4!a}uparN72oGGENxHFNmM<bM!(j zm%HKxQ>Ef+c8D<6@2{YrceVP|zbP@O>#;S9YxB8IJ9assNBd?&Vfz{CNiDa=5@6@h zG4aCp*qkR)$<2L#eO2%7AAcD=T*5`dd z3+rgiPL7&>TW!Xjn;UxW1r`$_ZBw1WWP_8P1eS{VIjw}a_Q5TJIuqJMW!w9+Uv+Nh zX=8Y!yhfw_%Y%bxyO}TTkLa*vFfBwSa4>NXJT_a6YOVBnKk*cgrWsmdcR7~+fysI;n{kU`%`;XJ~lP-{VySnfxHYclh2&^X?Zoj*= z#g}%w&}z)&)TrOea3n`}*yN<*2j1|DpG+_M9wmtx0Dc}NjqH^TZ*vx6&%B&4c=J`= zyYR5=GpV>Im-}+8moN`Mj&xvTl7M}K!{-fE95^624L}I|tk?Pqjl1N)1!T=#!&*aO zyPH2l?a7^)=pw#OHR--v#%6rblVqUYmROtgW)NPJ$F_$;ahZ8N1u@wNXo3(5yu+_oj>rBn-sFE(XFTapsrv!j~<=b zz#GF=j!8Kog<8;|uY8Nl+_@|)b{!xk6ZuFYEnm_n7@|=8^pH%rKz9`~4~{(GRnZ6w z%~OHSAcn9MKk6^G!9EPs#nWqYVG#v}9WrSOk*U8P+k~O7sa+QMP&?R=`1o#(tK4!y zqNPi8b%kF0Qf>`VWvp6ySPzsorDv#QhcMlVV!Q3OWl^^0`Bu2{8654zRXg;o%J3_i zfODgBeMGyBldQKVWW0MaLnUuiURxWq9tYf$Q}w+fn_soO-!0CcuXQl7;?UF7-ymew zWMjs@uAS%4W6C<^*%8;@SCakE&{Au$?+VuZz(>}Dqw}pL+HdO{F1pY=tX6$5lf!xJ zg-N-obzF0gYBH}&??u)=M}-|(O87-C_et+^yQ#*TSwiwJ56gcDMcB<81+qVqqsJK^ z21DjxfB1*PyR8!QAHfAi?e)<-hSnaF#gl1O%~0g=PxKD@VB1VdZG+hcK@YVHj$fKP ziVXT_l?@6VJl(5ZRH5GO*V<_P7a9L2?`a0vGNcLaeloN%{Z6Mt^66e@LJIq%a(TZH 
zs#G68yf~+Fwap2rN2=Sx0(|yXGlnoyDFQALM?XJ38SIHMQ61H6?6kKY>|2=(L<0Ny z2Rmk%y-|4A)0J3t+ybcgA@0b##F3G&Ds0kg2-F#)N^;q`KSGueBhz6uj!3r*upJQO zl|R3`R?MPNVt($^gY5J-XP_7iUXF3(!U-&DM|irwc6Kd1kdlpWZ2+uFsBWmn<0rzf zrXS1hY+=#HoM}Z3re)G;89!NLb1JSE;2*dpE>^X_@l{mP(>lZ1SFgskUgMoay|?d2 zy_4q6)BSt~I9|)u$s(6pUEC%a)*w~p28%|;`_GoE?_x0rJFHO|(=PAJ1mGIZedvJR!$RTbNG~@Jv5| zr`jD7LJIZ`^sW5dW{wd&@t>>5&vVQhqzcVtzqj7d#_my*+i zDBG$aT$0SF@Fk?QKtmgfpqARB`S3M9>ZnqkMT~uNCZ$)57YVl@anh@>Ed+lI;` zdA$JIAYhwkbdc9W7b1aBHeE1si23zPTj zeU8#cfX}+(=SH`wzP~!+w?P89$S}me=n;-JyI*xiDVkbR&dEpYMR`JLb}a{+zd`gl|Bvu{~vL4UM343@HcRtf?Zs zR*q2P6gr+O8#--QUA>+l*G4V=4N!PH;~y603ev%7?XD=!`mWJ#_M&pUMAsO~`a<_S ztPXrmk8Q&>L|h2X-y5%?nDo8kpy41GLsb@7$E(}POpIGA7q-;8AhZjrKg08^IePks zH)1Vwv_TwOir0Q+NaG#&%Drs==IVL^#ygL$RACM;g850(RhP~0ChtUSgJWgcU0Me# zNnFl@Y%wWw@Jt59&oB(u7ao*uvY&axiL^g0UQVNX(kxa%d0Af4gUX0orT2r}b+F+g zZkds?+@q@{xn$eC1Q95JsSmRG+(B#rdc+2xz}dzC9ux)}U}V~N1U5TC8bO@|op(k+ zz%*b3j7%Fv4y$9Jnr}ay=JRvelbB(58*7Reu)mB=^vbj;O=!*h%wiFvaPHX^omMc1 zRho6H9KG1}gk(!C$3EE3b;isTI;4_fSkt^1_tuy$;$@Y0N(d9hI1P zd$+FK@B?`M-8-%9P93zSKK9mmw&Ex*+WIOu=2Uam5VQi!4O=yFKUBOXyB^@YySNG| zfwVxf&UV+`q0rZ7ZcyH~zrU}>L65a@NoM^neE>OY691esyJa>eKUOgX%I#C)LsAl21m%s>q4l-DKbp&^&6Kz1z3dIF z?oDOmuVFzCN*CzjMvHAJvme#U=uM;l9o9g>i1E||Jl0qV_%_eyu*38@bgHDnGnDy= zGX<{br8ugZ_GWs0E3l1T$IJUxo5Fw_^DTnfVe%`>1Pz;EC(7%|> z!9#+4OFyXeszbz*DgM^{pg@tR)#6OYIPw5%b0a3*yK>UMs7 zzV9a=#quTk#g~uaEhb>3T~oo?9a7awf@kd9J1x7ck$LyN=0RVk!S;NQr1}oTKM7?i zc(_IdEAyyao(|2l=e`yew$>&P@>32lwj=!x#ZpbS0c0FC8ib@wd9vr5Y`q>3B+bO< zsHI^>vOZER;j>y*6tJC*&=~grR~WYjW19eD-Ri?rrORqPS0+<*#&kv9d+$M(#r6Ul z<;v0CiSp&C1Bcb5zaXc@w z);P!MHe^`gdXRFz-E?}P$RvEeZAKy8u4%%3q4(Uon{ENjW1HT%nHpI?liok(D;?rI zJ=`%XXLW%JnrjZnQAUF+uDAxvv$~R00|cVSE#gsw=3}fU)A7?#SC=9a6i2)#eZhR& z2z?Vk!APBke9@o-WM@7C7TI@Fv|32z6oQTt5!S&k%+KNiNLj_D14uG5R1yQG@y7OE z2el$*{*L)bUKEXptC+n=w((QPS>y3qD}0NQvQEP!t;;1XBYIV8!RJuZ6rzbgA4gn2 z=vdiOMHPHuek;&Hlh#-h2MvN6(#gpV9gEg4h&l9xAUl~z^5o_6Ame>jqatiHI7wD% zv|{h=kNoqPB4OSk3D&UX9TE{JXDDsqe6>`G4r`}OLia~fEgqhwP~G-{nrVpw0Z=jw zBHQt&C9+nJyRepT8jO@B>MrIM&WQ{BZR?1Ar_Xbme-c2rkHi9o!($m^CW738zpbClqWTbq{Zuj`y z;q_)&NI1Xf%Rl!%ofN~c^Zn}zkHOefY1+g-=pwQJfPJ+$Z=n+NvC28B&cF)o@1CI6 zCk7D%c5vEf3=E+GfYr2%P<{ZL%uxgk*aT6Tc?8Sj1_tba@w8e%mN!Z?OCNK1u9c3# zMJ0Iv@V*snI>18Jx>z>i3Q4W6vSqd&S6<9iF0$Oafkl3{#IeadKprRL^xXM?OOeK4 zeRBpUIw80#(pc47&`ne8n*^4~HF|@Fmuh0xXQ#rCRXxYNM!y^$FeW#;yN9g+c(~Cr zFWQ-JTGQ=^+4nGsbGvn%htZRotNNp#6#c$Ixr_C>3;snmJ^fJstjf7E*43&$3a)+i zjLy+A@*BDpZ-c2suIFz2jr|4%g#8zk_pELB50rNm2hE)*=im~DtbFxuP3hWzbi4Z_ z6ZxlWDCmc7Dsqiha;Gc2jssX3NI>EWmVOADj-S!Q`0+9AnI>S_`4gwk)fK#X-c@^I z{Oz3rc*5!_NPvGL8Cln4bRSE^F<^vsWxiqvLSS7VfwehWci#dnnE_^vbwH~PrV0rL z|7!l}M<^0FEcLz|Pk1MWlW=E#P?n}TPu-DufUfGA)YCi!A^|H!1K&P+LDpK*{L_p*EqbRj>ti5mcP|$I>m3t!sz;tbKOr?=@ zbYSmp%LSP>P>S6QpzPPeCgfl3h%0S~8?b*+WOsX%)Sm7S?Es|jlswrF7Q6(!&w`F~ z`S1~H(rjZK-Vi%OqX+#sz#)~Cmr>z}qYz@Cve&W`Cft;Kg)$i?N^`Q+h3m8|`I2uw z$*UD)<(9+Zh{mqzk2a$st4IDmG*>SCt?1|~i0&*$tBD)}R!+}X>;{&0nd8T0m#N0r zFY_kjw}WTezT{y6s`uJ8*9&()$K=oN#tRbMtw6(bg33+AGw{Jg*9G|qsLNT7Qfqo{ zAa*}O@C*#ly;or-0F4CnruBVeIKtQ)WvLYHrXn3cj{)P2FnptODPIvJ$ji5IQ~!;` zqA*gz=;}5Ht?>b!wu8VD7ej6)5MZRR0OV8E-f%;QjzAc2mVq?WZ!$2vGUegcK@2$J zbuQv{ed zJ%CpwG0{eWxcBKmP`DD5?D^7RId-8Tl{^2sCU7yTGzhxqDrrbW-Gv0K=Q*>r%bO|h zriQ?G2*tn&AVj&OfdE+~0G~h~(}VQ~224dNuCICnrb3T60sa)QSSYGge^bEkRL;d~ z6MzczxtWZB>F|-z2R=2q9nv<08J1j`kRBa=4v|}`*?J$JoZk!y%1&J>m>XcFe`4~W z0sp${`Vg?G34`}3LI4QpaJ5@P4{odiNz?Xl8FV*DpO{$SH0g=nl^7ly+az;5ZeOu8gB@_SmFks(42$vn$2Hi^fPgtv`rz(b} z$iDwin%(FQaRPl3`yz1ll9&*jnjEn`@E*Awuy<%t4=J$D7-UTrlt^~|3%++^NXy9w92=w+H{}2N04F|RqHc4Us 
zJq#Qf%Ku;JEv&`=7M%=i-nMdyz4tl#tpbaqh03wkV0yO(r5@;Ti2EPU%xKKsFNM6N9c?VnAEd{DgY2P2@HWoqC zKV}LdXzTPrRghn#>VU1EM2>F`YRNWNMNG5i(|w_C?5;2q9y0z#gGV*}wz; zb#cakA%Tf5YYX41AcEvv4!g#%t zh;0rSZ`rKM_aMoqPX{Ulvq1*wF$DN_s&qlMdyw#hVR)H_SMXM1|ASh6pguzXA5H|i z4ngnZqsVHYlmSWAslcHZzQj!w%N0ze=kiC@&a;8;KZH>lcLTK=xy6COcKlv zuU2rYSqo%QR**0z*hGpr!VyRq(;Vph85QdP2~HT1i!Apt6(dD+u8rsBKaolOt~&X5 zFMuA&Nk*|Qr~;I@zXykl?tK24UGdnz7Ae>20TA6hw8pp3VlxhAsA6gipbpd%@>qQ_ z*ve9e7LwlapluePdPqe!geu^3PLl3lOtg*-ujtssnT*tNy+EvDJaQ={ham)%Pa`Td zPX`o>4sHuooj4Shc((jDDGAE|6FID&1=-T{P=;5n`@L!HU-yM89eRg~S(TiuQPG?U z{E3t7pO-gqKw(wA=kqVnxEr3cJhtr5(Doars0 z2MIV&4xxPW+0wX2Z(oMWO4JO7!*HPnMIZ`n5O~q5EnxWgkKws3)8|`AfKX`Gc$zlOmQmfmI=tL(Ee-GJ9ASsjILoe}6{OJcLT*NTDT+ zgGA`H*C419bUfOjH77z}@0{Vx4?LXD|55~fM?PsBakJ=EMJ#uuDUdyD-e$h+GV8p- zqG9$ncYYk&Tfnj8+&N@Y%DGqSyopIaSjTX>^Nd#5_kB8p47_T*z)ln1(aZEA=v>kh zFdiMV`h!edyc`;vbSbSB3k2mR3cJYHD#hQ~TN^J_4BJ~RG0q|)CE+n0QG`=uhvlPe zv~u;5Zx?3tWoa2`I)yQ*h$HO;8(*J%?&Y5XJ}R=V*AZ~pE$XmSd3_NJ>jNHjZN%v0 z#*~fZKjJn|sV;#o$$XXXUh29T_NIrzLfA>y3j>ZfZW+a0?^vy~(FuFsgv-ZYt^8?2 zS+2PAj@{hwIYV2TsZJ{icRFQ|OwZ?=Df6~Y-=9c}x2mp(3N1!%Gkr8Y=m0gC;CFf3s?L?U?K^Fpk&q*-PU z`g*Es54cPl&VIZ9u`^?*@ippAXo@~#H+g(JNZ7X8aZLoBS={@j-9fMPI}x{nrE{Tu zW?1W?B*M z!)G-o(b#0nG8@w^bI+JD$@zAf3aOW9dlNo?YnB~;5$|73Y(0_68%!-IvDc(ZB^P!z zO{aWI-U{`7Gv-OT?bG!%W@}MsHV{2;hDOoSWxaZQpBZgpbeDn)qt%ggT`~5Ub+qar z1y&}_5J!}ST&w5#j<24V}mba6yt9qFJaP1nSz5>Brwj;{L`)iCwiZiF7l4A;J z!vEM!-SQK=UqYS@OgR<_5=o)vH#w^y`7?%*N{7TTsRqIiuZfvbFdpm9fJgk)Othrz z!R0=p*SO48%uSI6C5w`v1rbxN?l~TJfkhE$V-&OpzeQzImbYh0le~X?|Bj)VLKNG2 zCU*@euYDp%@eQim-kD8v7KSRN7NZi-PLglQzFl+W`oj_`uHc|nfMULFwhM+Z#pC$=J?*APZ|p(MRa_|F`jrYor1c0esohoO+#kq28MNQ4 zr^;B^kcd;n))=+ZZucj~{VL9I|HX0F|1!_BE0v{&Hk{PL&I9U|0q&rtVcMQzABZ6G zO+GYrTZ0U$2mf*m5vxTPZ3(|I`E;v+Pp0gl=GR2whnl7%c{CgOuc%}~s8#x{2F7CE z0@D1`i5za*r3m$$N7knfYhyiM)iX?fytB8P>(mV{pn&!=^L;~!ck@$xjqbB^yUkL? 
zIju-vlbd2%%#@Dh!o&^nMcI3;936EV@XL;`B0&9 zw)Pewy>jQE5{q%U7M1r!{cKZ+9;8!4G~mbm9x%*3Wf1LyF|SAfFLik#pz2u<5!9-@ zHMcn0tzCeK?&cu?aMHPdavOh@{~M~Uo6V{_9p5Q*7q}r~X-aV$RLg~Fix(UCFRyNz zZ*4u6EfmPZ*(n0t0gJ&!BZhfU#IHl>CunCE-`;%k^nl?Q`bjGFwBap6ytb^0a;U(= z)!n6Ho8r3tC!TG2@<&enw<^u&JYoj0+ibUPuPpbDj*E&VJUW9jD|&*dA7&qRbddo+ zGHxiaWId5wkIaf*@Z~QZyMH&@i1*v!?n>M>$&%DfS6<-|bjs-R4nYUYKHZJP!ih!4 z%%w?VW3642zO;;&4K81x!OB7ur(I=lLIurCg?!}YdpWMsPJ$j|YyxgW$Fy46!*(Co zrcS1fAJmwKlKW|G@9>R(J%%7$RceCoRdb%Xo0PYnXU4YRuxFCiTUXZxAKm}iLJN|W z<(Z9YnYAi{Tc)TqIJrDjZW||!mvZ@*nfgaHBO{^Oant^@ME3`Y0VFrr`4Ol31&Fk9Z>KT$Q&Qd%ZqOGSo89VorlIBb{kVpedD~*lQ)%^n!~H{s@%8U zS&mjQkJT?v4JJ>d!I5^Qn9Ebr+Ik0Jv8TtS?*tPb(8pd);DkS_{P+$=Vph>(d;+umwN9z zN^Re2`pBVIb20T1lQO9p-cMO+r0LWDBtwJ{W!pq^^Og zyb>-l1dz?{-{CKe|HPcz@E&|6ZniCr8wowJ8RfJ zMu3D;GyzcDvyR0}n+ZYDa9T#1M8h3ZiUd&?88D4_xnJQ?2(+yZu4O3GWK8Jyf3IIW zr6!q+D)fpvO2IRKrkiJ-5GnrfWF$+IJr(-v@Ex6R$}>RkxOf6JDrvn&ZF1<@749!C z)w=ZK4HcQh3k)q+gk68wEOVa}(|jQB)$a0`Nz|iqIp`jE%lx}yuoqE=>!UJj<;_}X zDDV)|p+%CJHs&VrEz1#!3Pts)h_${9<=zHg+xQB7^-b3Cl_C4|H?dsp!=VhAI9~O{ zjB@V%nW_rpAIH1LACn1f>EHB*lvd@G)zG%>{oRTBlkG~!QZ{tT!b7l;rEIJEHr_^=(9`qN-Q?xNKaBY=86 z^mOG|ef6TvzsgITkN5KlL@ufZygTVA=nrAh_@odT%x&2n;aOkEV6XTSL2-btrnx2VUdH)jGZL7czd z-SUDoW;jpLQsaPegQ|cd20X%~X0>l{iKHc+W2{>Cmha@DwG@q5TaT|S%tM)fq{`1rf_4ILK_Bv#x-w7>f zZAhNQGT%QLNpC%4A80+B&ZAXq?kzB4z5B!T){Vm0QL6(p?i0q-!iz?25gE|rDgO;` z<#?^=OU)eZr=%@{lY$;SN(r)7QArgBWZO_6#8!=&neugXW1`^pOS3bm;EFxn_>iMg z{uG}$j(3Pm5HZDzTr)I)$-;-xgOgx-$4FswHtS zzsxH!G*RP1@R%+mzDFW7hJzkx^2Z@h9~$hfQX?jC;nf>!;`SQ}(WGmF=VhNTa8LZ5 zvrb_~edz7!Gfg9-e%aCdACk$&7<-M4?cnf;pnE!Q^;aT&wRu9&(b-6q^h~=it?o1E zHl~NMuZ@Z~Cv^%sHtx-76;1#2lI3Fy&Z-(f`t|{mk&A=&Mt8p3zG~R=rRO{EcDCg@ zOrPs^Q;UlwjFe|Wd(wi=+ER;*ORW8^W;3bJLM6UenLKf-e7^HwXmgL3QOF$zLX;eZ=T4|8(5|9$$)ELtfVH+&kFu3SHigKuJwjAR8 z+Hf(~a2uX6{k70NTywPZ-iAT54G*R0@qG1NsKti!@>llS2A>&{7s-+wQH@`nHnIW6 z`WR2&wSO}>`dT63G3b-0o_IU_C$6HLCh#Qu?kqJ^%D#fj}o6EyoQ554? 
z@0T7jh1(_S4h>0NE3u!J5&xizPGCzMy}9UL7eFTza=GZD&3?NR=6Meo4vh>Mv0$s|Lt`eA68n`?J; z3u}Nd5?xC{+=3Y1mFf+5S#3Ice52kx@~Ki==0o8t+Vy}h&u=x0;JxAh#Ez|gC}1x> zjUfgY&9>e*AZiC(HnyKPMv=p}HPq0&bZ`$PVNjbi4e1?L^PN!356|Upwr9}S*HwW~B#)5U_lv};h z^QqP?phehA6YvcmUz-1jal+jLECN4>EE&Vk>jF75ru!~o*d!y(*XSxQzjwyS`a-su z4t^b{RBG;g=s4md<$R((&uLWtKG10$(p_kgUsb3zhU>_Wv?1lORi&(!CWwo=*25`i zX_hmYBUNj+ki9GBreU=ykuO>aHS2w^VMzO0DNB-)Q7th(6N{@D+$j=8s-5z~vX{>w zaCM`RXfPEx#G7#RVc*hI13}>xRJ0~Wj&j~NlZbYOcmV2>_^i7Yc zB0fPDW+@&!hA}-O`QfKqW(@UDVhy0yuM^qYP^N498D-vlz^)(W?Tn}ODUw+*(@91>Ev<1tQ>imt6OgX=RW&GhqN><&5)CZ9H9k z^2&v<6*QX=@drI4b&k#{RT}OBleCTGlURe(l~22#QIJnt|NN}d-0V2e$UX~#QIfYS zBFV+YGIMUe%!;?Xnp7Mg;J) z-R6-CDzn~H&{(ubvt7OBBhB>h1zH5YKsV)5YqHPa`Q(Ujh)ez*^8S2CCQvluoBQFF zHqU-FuH0m$3FQZ5-swwZHP?o;Zi~_TQP2rebrNpB)wb#{z4K*jC9K5gbslhI4XaMA z{ZZ^D^_3uCq72@tw0G^1iy4YffYK9)C;9#yH1!7fm`AtNV&Nkd&wD|X;+O;ySnf6d zT8`nE>AYCbDo%ZaYk{AHneXVgRdr@v!5(j=4&_Vn+t`zMQg4P?Tu~fSu>6sGwGth9 z9w8sE-(>5U%tUqw)rEhpFD*1xz*eqVa$Ql}=_7Y}jmXvpkIBlgPbKefz3FDKl7}^$ z#xSD?6iCzPVImPSt14g0whe_F)%wz(Y@Rf|{8Z1IymQZcbCJ5>#!rfu?O6#^{Kr#+ zNb>)LJgzyP7O{^*-l*gKfjqJ(OFq#UulE;@@&$}?t%=nN69%~ba@MKzL=6evc|Y+g zw@5_v?aRYiNmSBrMx{s5fyt(I#e(GzzaYlDd zU{@6pOtM}HMFIus`$(+;L(v<2s~oZcNFn*LnJYEOn z*#>j2kmR85|6t$p?N9{Pe*rtP6!|ik5|9}JK@1>UD>(%=a8qBn)K_0}9Xc(}oiz3V zQub%&7v7p>_eS?w_VzskF{{4Bf#nSMmnmjPk~B2$;AX8Uj%d&6$St9EWR4y3)wj`g zi>B=7!q_hApVTgG=?uO)O~rp+lTgYKjUrlfFb~D2{s0L7l&wzGzhg>T7(U!2TEz2& z%cTL%@ploOWd?2V2z`m)6F5{(B#Y^Df$v?JOp{xwz4 z_Vr%bkCp19JluM{=Gjs9txZ;1zg8TD#oX5C$nJ_{AWa=GU4UAUQPK5jh`E+J?}m`Nh6|J5_0>qfM2tY|TYq3j7~4gSviTNXelz@3();BebluXlUKINCOPkMyYxmrz+MD_e@yiUh znze+06|MY7^4<)H=#pE;+hRdN)U1$<1LGK!ofK)zo#k6VdxOKgfa4PxV`0RI_nbNQ zu31Bw=)z@g0pJZ4;`JpX`lWB577ISG&3ydvX$NTy)*63$Vvq!sJ|2 z{?lZw;*mc<|4`bW=;WvPCb)jHc* z262qz^Vs>fM@{rt3Rp;cvzXbyBzwvh4;pQ8cJ>zW$-|zKR*>kB*|E48&BuOJ1$)jV z(>haY|8mivdSFe@h@~MjC@83-9O=%QX|;v`=&dP0Vf34FA`T{Zj}x6veDU;Q5{DPw ztGSDe_vdpXSX+(Qr2TM3f&0Dm*Lw?oEFKt1ucF_8Y|vOmKH~c72pPV8_N$vmbzYXa zRW2A{_QoyugaZD@W+`=EG~>%90M#G6y#$H$To7%}JTf65O$lf&7e@Q=K^V_J=%N56 z;z0*)!@m7wSz%<2%a5S0J7oGk081kn_iG-k(9c0WcIVX!;3W@r3o!--qZ3`)Q`grUhiXeYX7BXD@ zSkWXqP!t6XZR7_TA1nWHh^U_74DZJS(RHCO&k4_B+M^8`ew{tFUaLOLCPBGseifx; zdWg_B)YQ=xMjLd|>%v8H=a^hLI8zG$X@zF^a%X}35!0dXdkCSv&qlSs1Wy@4P>^UQ z6xM?%X|gnZLLO_!zH|)d1@>fGe^RVdNhorGoTNUW=D^}MY!i+&xv2fgRYm0EwvbI8 zRnvuV6}gxoCQ{u28Q+CwIAK1k_MUh=yROp|^-1@1b<5Z{p}G2#4?kXxVhtgbTJ?V;n{qn&-t_-;9vjL$dm7ad=bQ;ueS$Wm2i zuR}eHF;xbzAh4kH{wR@UJM?0#%caV4NHwCdX0~~Tebc~f!8d52p@R~5z71JRy>@7k zj|GF|nf#Fk3h>;fHInj!(aRaIKwn$ZUs*dQrsT>H_<#h)GDJ75U)reF&gZ_t$8>h4 z9G*hG6Vr(3rq(5m};|AS}wqE$Un_`wDK z^MG(5-CjL@awwK%wf5BVZI3Km5z=iQNz|9FF-e8TG0LlL*$(R&GQ zjTSaVrV5&pWVASF7$$LPzq!|RM0W`QyXhu3YD!7(cq$3s&x6s{@RK|Doic&Y6zXN5 zvWS3UB7)d?O3V^1_m-vO;9=}D;g$+&@9XQe!D9xh1GAj%+Q41r=m2KjD?YhGLu(EH z(L4d`iLn@;3lX5lBVMio`EVT6G6hMMP*lBdR6>|fc+0vDAWianf>kZ@w?wAp&twpi zq<|-UcDm2^BoZ0hJaM3U{^fb?BgvTlg74wpKGy}Z-6g!&Stf2*r?$`6s!tSQSP6v| zDsiFcov002+y%AJ{V&SjsR(QArpz)1_L`PSvW6H<=|zhpt5FoYWepU)9dGVMAiX8w zkO7KB0ujh`PA#Q_rTRc_KIH0}ulG5LZJ9fL4kRVOu<6Ke2&6h*j(Mhay1;704pL4j zx+19*)G{Y;G>CP51hGkDcP^p~E{`TB+0u?&Z?lE+m0~qX_^gcWJ$|M)dYpZGkWkw4 zQq9+wNx}OGGIZMSWGi1ZrPIX>TJlWBzrLcN5q--oQ~o~r&r%K2kl)w)cYaL9^w;A7UO>-*q5X>F=^^6KdTc@>qE$Ac9nWrA%nrlgxkR5##I z^~3Hqf&VSdBJk|tzn++SRCP7TSy%255TplbCFk+oIcS>9GtuLEkJ-^3gpQwYwYa!M z{~DMC{;sG<(;7>JvL2lO#`Y~~7Ha|#s3@#8T8qZ53*8oOG*J3ldn!WE@XG{>bVW(wod##@X{{Fd#Sp!FLGO*3IPY_59!R2)p*N*Zi#Y)i@KDg_rf~|M=XVimGf=en zaCUl{O(mg{iRO+jK`py~UcOc$>N>ITwePHAM=^$KRLxW|xL7ef?6V|e#);cZO$LkB zI{Rq8RBb>Lvktm1`o$?Zo!jg!eKf)g8GpUmZ%|@twpJ=8VF$97!E^b#Brgkt_)Z@% 
zbO2wOc;+8M4_rRtY|RmIuwVSUs>KfKMaI3zZvR?TrZMh1nn|OT-yEB&jfwV^%z3@+ zgcjQh)?ai}Yh>tU^;qV1MSfa*2TA*N41yQzGa^NM-xiu#3Y@hZ+tP|>B2nY#8jqm>M}EvYN@Ek#apIM4vtAU zkRxr0^xKL?MT72`Ivm7W5|n+G3fiv&6JJ*S2h$}c+SE6DSh~U3Ah(=S>O)g=W%dK< zMdzchF^%n+sL0;@NYG_%axvgkqvq{;QJlo}@_au)!ve%Y`+kdlnY@_*889x%!fkJS zoP}FS@llyd(QPl;H&V{x$y2Of!w1ioSX+(er>B1nz3~!UbY86ZipL1dlvJqlThD-n zSN6#^yC3&>fkxW>XGL^z#S)O!!Hgt5?ER)1%)au4#Q6`!_F>2 z^6NDKah*lB%2#q3p0OLThtNDi@;%%BaUJ<6;XYe#U?9@V=^qcjoA%brv@^9`KHwqi zzJF}g=}7W(8O<4OTpq+GuTc^87J{Z-B=bQkx+ifkf1-%=C&c?Z{&{4~F;Rfp=Hk{tK4es+EgJ^0lR?}6eYh3C|3I4{Dzqv^X#~t2543NhK zN-lZW8$;pSeymB{vqyV&f;Sa3fl(j4uL1}sty_g`MUzo6)1hsh&O{eiUw7QHAIO_& z&I?Go_H!S>Ec6X4EjU=!=DiO^piWY(I43)}G@0kwtqSWlQx$!dJAqkl1xI8j6ghlU zJKq8%M9SO{@jx5T`)i^W)=vDdJ5Q8$t}IAl6zlydGTNp3i4)P@43Ac2Bton@WLc`A z*FH&1subieYI<;v(?#O$&IsCyrOtSrY?-r$DP_*4VPSprRz?XW<$4g}<{wKyr3KOO z*`lYf6JPI#3Zqw(A~kQ!T{EjuDS3s)5`<#{cKp2ZM&e)hyP^9WHA8!>y+s-LeIj zp?~)<2T~+ZdR1hSB)lzkBt?SDK>st~{4-H*2o%xcBbVrr>zd|*%cZP;vRC}N!*u&# zqIBWi#P`sc7$MxP1;vta{RUTqy?X6TT|Y%d$Z)Bf+E>A|#U$O{8CaR7?>pMAYJVheE~_}$&rGYPtX3A()GpG=lq|K^vm_JqB}7F-)6FZ=$)*rEU$Ere zTmK>rPlC@vS_!-vOd15gu)39`ABs@3wkl#zViRB@@mhXo5O=YFkejNG@s(#QU@lbI zJp}6fNwvs;UN}HKbJZS`YD~0UuYjudZIDF8OM@s&IC{ZQC82lq0)c zF}g6K%!Beypg*+U^`r8Q?`uU0YEjo&hKQAi*H_+!-Dd{o&*2>x#SQq7uU3}(Q?!18Ry7KaAQOic*IlfE zOaX&%yt4s?@p}TV!YOxQ95@TWK!ZSv#%^@$DI2>!tQgxk>99KZMN$mjVyhxnBn6L7 zM#oOZSZ$mH^y|ok3t@{?tjknQ&-<;N*FR>euyS>hN5|Nn#0dj|5W; zcgu4lKik-b_JpdR3pH}&aqWM!*^2MBO%%r*?R8^;VOCuI?-iOams@La0eC%ifJVt_)N@k;AWlau(4 z`Ff4tnGYAteWGNy3KoNV`W_3P2v&H}Ex_uMT1E15G6H!68xyl8tk&g@hL6qaIz7F6 zzeoBfl??Xy87}s>4}mSnQ&Om}!V+$4A6p7+3NCfh;1}V#JImBEfIn{l46Vv;($?f7y zbSJbI9+>-aFD7t(iReo zt}-?f4LTk3Y1=>j~3PQd}@|aIyA6=~${*FgBQ0 zwQzyA8C(%j93)OSy%g2{tAIw@R6yhV-qO-TfIvfTzp37L3#jyRxlya9r`4@kjqhm> z%EYR@Ej4O!Wxy^(Mbu)7iSwA;^|*gPy#e;Mhi7{~(ey*HYU0A&U|tbNt6%+gqWp)v zEKR@C`#jl*BIPRKe^zPS7Tf9q3-k5BKo)>{-#)LK*Y-`8ivdHZyX+pyeEMENB(q5iXfp;cW6NK(hvHNC`cV`rkqJ?4 z_jHN8)Z~K6CNCICHn*k7K3ND#H1eU388Tk#=)g41*^zHc(aqlddm<)=FE{y4AsR75 zMw*FLw{rO`d4IKYAr8(+Zd9H5OONl2H6LN}$Ay8)Eb;!{^&sWU^vQ7Vw!W0wb%Nb& z)oilyoUm7*e^CP4h-KDK9%ksA{Ri&&G(n4fQTZm(>Vj7ZkJdM8I0udDl$s)W0S z>-C?*emHJ`ITJCZuyq?cd=f~RPLwt;CF@%{aYrYfB z*c()6m7{wqFGd}ec=2esD!;$OjH5siA1(jxdjJE$>N5#X?RR!-g>TpUl=`Ch1D*!p z&2~x0)>x10#CGk}%p4<{+!EZXxL>MKTx`oBO}q2c?oPK*cCF$VIH`=zxn<5vceOXI_ zta9=Y2+3)p4&;N)GV8M{jB1+^<@yuom|xkXzuDK9>z8&0!Rl>UgBdL}xaW_=U}8a5 zD=`wyNg6911lU6M^Sn|P8VJ}F6=uEcEAGg+Ui3a!=i$e`t=C~Trf>Vh;F40c@Ei0T zhffp}iSsdhI>`cHaq1Yfpt4nT(;!N>M+7~!(~CKpYj0gY`M&G85g(UN|Vw$a@1pR%E#Ef`)=c3L@B^kk}V6W&`{WPzb|K*;56rY1t%wcQ7)>ENLcirQ_X*wsCr2@S7zfJ}I(*0nZ(^$ZAUW4}dWI*L@Ca*<|f?yqd|Mk*j(}?%* zpEH4Ccd}*VJ$R}JUL(FY^#CHhx)K)%TE&^?-hV@F>LSIO0YxE&Q8!S40d4(|T+rg*$zn(Z^zk9>fUxM~HYS2In7MG~Ba9T>JcL-dCB(lUIB=fj zTdM{o_)34652F8IKAwV$@rU^!`X9`P*}pn%f5$U_n2#s_&{w~ij}VZo{lk2~p~}C^ z2P(KF|H*v(*H`-&7Bdch`7ajp0(=Uzf3TP*|AEB>{C@i1Sd8oMnCfr0)n6>e1^yNO zU@?H@`HRIoh5!9OSPaeYr~ie;{O=|DhaCF<+a=mr7MXv=j`-i>5dkHVZvPN>Bu?m; zxr+?#S!5B(34Dh8+rbGb@<&8K%MDm!0742`@y}4B_{gUEdlJF31pG)w)7ynzgzx@) z-buHzAWiIbHYS)s++o&&;Q_za60f0&dBl~IAc9n0{-BPEdRzCGdiV$*q1K87)LS6b2KDv z_-XX{{{XY+KOgU$o~ zu_IymA9f@?|BrSgaCwoxb|hHfTXO#agW(9r+&^|CSpUP0lAHCxy0BX$d+S=N1iipSm72kBg*@})X4Vg4BHz?qu1Bwr6K_DS*Lk~#6 znkbBEu%tnVLClo%0Q)68)FFp2h|w5-X5U@lw$&czdAjvDoX}%o2+teXuP^)W%iIsVKat5clpXvHpmIRX$=^y^mIS=D z4qPaq+=68o0{+1Qq7`C!VZu_v&_q9~zpmTli3-m}8%}(Fngqlo z8A{KfGqT$grO}#Se}<3csVD+5l00}{WIfgPYrVZYOv*|C@yhQAk5iYkRb`vjY<|Fa zIIyW$IF?788Y-+Q>~OU1BI>#mMsXQ79Sf500K3G1!7S;xO3jX;Ew`FVJ+}2OM@+U0 zH5LF{g^ z5I_*k0S}_ZUR%M59AM;$Qwmq+;JY)d0$JREa=GC$AA~V`=(iBtuq5G|C*P`{czWDe 
zvKYCQ+6*rem~^{8sCT%b;Lzb_{<)$Q&q9EES6IL12rg~rIsTTV`m1!ngCBV>u~V37 z7KKFJP9qyjo|j4{a2lu@8a23vSPZ3dmgZi*D9zkVv}B{kzN^#Iy0pQ%5)vzUHQTX! zzOWG$-icg;M1GdU@mvl-T4g86MbEC2C5xW5uk%HF17YxMK{qA|cZUVLd-T)*N{gK84z+mL z`DO3|buQffcf=^ti$MS~Km8VpfD1;{FtB?>z}**S`;Y5^&=krBzCZ9x$tN+Y zNgSB}YMM5Xdq=W(!T(V_3&+^bR8x?k{k+vY&yZB+_77O(qa+>=`N^+Z@wA!6$Vo#d z4rQ+2vOpQ*)p!v{zFL-yWEPJP7AR_J8TxI;>`5Je1bXbFOMhJO%_$>R`p!UMnGm{47YJY?}#N;0omKTG_#Uz=HeH`(1WR0Iv)_~3l}&cC!& z>fmwPn*q24DiBxPv_BY%H6Bc32!Ea6d+GW?olh#9pRHEFIgx#JCF8-zHBb96=UJoR zH`4=YDs@sZ)LhN{HO=0zz{aK~*2jrEt4qIRWC8++C@2^%ZEFz@{GU$fR#@c5vua^Z z9E5K+bH^JtxF#63_{xtE1F^rncUOju7ztv15)u-uxusd^Zl69)*c*;DwLnKfN7>Fj z(*)mxXwa9G6=LoC28V2I@9g&G64cu3t$gjad;C{>(OYYQj$+oQML(8(ju=bx439xi92UXJq^G^jIPfs#<++(MGqf{(HyKMko<2Wy2QWB6m!r0c$4;;-!&MP;-pdFpz-QQK}zFXjV;^t$%e&R>{WCmGxPFU*Md}~ zvLDccOM^asa@!?!ee8T+wPWqO#8k<3wA=o$NqL}~iIrDwp9AN?5*;aPCGT*^AeFVtjC zF|a;Je*ut{qiuprPv||e^e`|c5ni#`s}FPNHCi=8NX2VXYEy+n zo4i9F#nS|=>F$BOGvCL{En;qlylt&7j+xqLBqt%#h~YaB`%Cputa&fYzJtqwm2CNf zUpw0@iQU$V9|$_S{N`1p9NX?X-y?}VrsjUqOrCxQ_LKpTwA2537fpqf( z6Nzr6F#ek&;r<8;#l_k7%Twdlvyp4^p)@Wj0s9+bQW>way__iWpZ^y2_ZI`nrB+G> zG9MJg33yyJLHYc+-vbk5{tKQ}^Ntrke;()#J7lzlIj0FHa_fA4&XD_-_K4Hlc2ha>|Lb>x+r!tHS1+A0CPcqJ}J=7#__yg^d5Wfx) ziL6j^iwOq%TaYE@4hE>XEO#y;2xIQT4Nw)z$_*gH&9bB~j0enWlWg`gc|Fo+JHIeI zde6Y>vU$#Nu;=Qv6g4>~`Eprp)Y@MZ*Nbp-n0C1_+vL;jtO#ElnbDN7!OESn7%G>P zt!-OB+w7JODHnC}gemv_H_x93xTOrGNXWVq)V(-JM#b6f^w^@Jq9Qr?j=NzRA{bhL zg;ejdEu$}48!{=_qF5N$DcCqBN+h_n7fSSWVfbJ#L*xmFYUV`CS?Qs zvGOGBPlE4ZJK~@}Ul}qX65Men*JU^6h=(=Gm;$G#;C?BeX`&_8<&TZ)_%64|NH#G% zpR6YfScy6HBe~i!k#dfN2G{m4=W7=rB8Ia}cI1CW=j-4`z_aFgBWe%_6+mOS4miq} zrl$d!Qvb}k225x)S?9+PZGO)kcaxPSLieL?H;@c|bNp-VX#N^a@AFzb@O4NPc*ziV z+w94)JNfl_@BN9~=8FmuVpe^nt|*G@xcznaiL2QH?0zGKSOp2NTk!z7IUQwn`4sT8 z0$ihu{?{ZsQ1ZWMl+9=lLO<(-{Yu*`nj{!(NS%uEoT?yR<#Qz-L zkSqjfzeUoCH=bwbqo)I6byF^|#s!OB@2hVLN2Q(7Hs+y1SqZ+zw^SxV+aNB?*Uaw= zs~oyA^Az0SG`MFx_`J;c#jjX@-1WvZzgtirQxs`v8;WCY5pU1%J9p6eYQjn><~o_K zZTnuL*9rVDm8q^;@a!NycaU=_tQ5tEF*9H_az@b3WF*u;md!7Kw77YQ#Fm#|KH_#Yl zI9KYAFwyR*^^iD35yc+1Gy_T5BA*>K5c=?<-?h8$Ck{&o^X>`dy8*ttmA zM3g{V+BXgW3}`y)KLGZF{bH9u)PkvrsLVkcli=XV=zgt)jP&c9uJN^-;oKOan=zOvQaLfj6Y&{(6=t*Ue!C41V^{(>^CfIE2!MC8{!VSyp!zzi?3YtQcn7WxKf z&w8(+seZS!@}24MV(jR--R`rj?ZcCkq;y!ubomNbr6s0|2T=V^FeC(aZGPz}*iY(x zCdO>4sKg6mLM}4;YDk5-fKI1=lc%m{Xj#%T${}b@OiYZoQM~(lrSpd1#pAU-Babc< zL`k|_e@OKt_LZqe=V50D61j`|nG{Xy>0M_}Pzc<-igNaNWW_9QdXD`KEi_y9BJGwc zVdJ0nMR5AlW(1o>aWwk{D(HO%N54Iu!OeVUf?-+6qEbwnz)K~d_aARf?{k4qsBn9> zZ8t7;6Sno^LXHG<)9BP+o-L5bC*IdhPZhFdU-34YAV93?$Kg%hbD zN$H%-7kzIU?vD%^R)f3?AG(f~eJH-%TT!~;S*rf@encDjL5 zA&(cLPP2_;9$L$mz$*>LB~<@*_{k`IQpJw)hr;W5+o6U*ksk#b&)p#fbfsSma{H7H zzA0>iKx_ChEzu(1nh1k(vU+;Er1HIw^t`i&B0JtxVxcc&(R=t7V0yMsBwjm6ebKFa zpUd$ef~#}SWob0i&9ZXo5(LYK9=oDhsS=F)Fep1#8D4$|=Z$C9jhccr0ZKnXH$zz{ z+V;B^X{5fW)w>q<;1T&8VUd`S#3p3&I}3q_DDFbjUzs;;Y0 z{w}ad#Gy>Pw#j|4-L%JUGv&a(Wgw3&Y2mhPb)zP*Ui?(9TrW)KGjdD&6{mADdvtA` z7juVb_<)yh=_I2hHuAZ_^SWV8$P2!>uqTaJ1zDBJrYmo%JNy)X1`uDF2iM|SyyOBs zNb;VN#zT;;4|{&n{<}Qu$ESdOgpqF?#t-X<`AuRYg=Td{(@54M{e|ZLFGtB%3>oR@ zovDHWp)WnM?!%4VYxh-P=JlxD^GgfGp}kt0zxXVF)p}sF?#wY!+e24>6v{q_%kt}x zd&>hUZ(gs0)dy7=NU4H$(&mqn-rFwa;+5z(-QRLo@ic58Z~2->jEM88$blD&3d|a`du79d|3cgmWnz#JyB)1OJ_7{ zY(!RfK>xL;%d9GwZR$hV#6s040Q&VAtZPj5W{5m~_G;{7F8d0; zWAl}r<6z?#^^eLPMOtTkurLWy7BtIhjf5Di$x2Jhx*eD8@ua;O*f)$96qXZv6q7SD zOpg4mAlnnH{vj)}@#9SgP?` z?kKNN-i{IXI2-|pMz2-1C7PGZOvDCKd<~0BS=}-?PrbgL7I0jZ`_PH6!WdQGsFL<` z4A|esE3QK;*6yqCPG^0;BpdT1p(3(N&XRxqO;M+80ts&?1rG8NFbYW(Uud{^#3QLi z14FRNeIn!Hs8`>RB53#Ko9LT>b}x?KZT~1e8tKi}+T#md_b_$xx$$d!j53L?^odLw 
z8d1L${Y*lg&9*>NysPN_hQPmthdp=-!0oR2F#(Ex68y~VW;4RK!3Y5zO5nWMYjD=+ zys$8obhtJ^AA@KPFQyMK(JQtiTC<@b!;aPx4$k)GKDBAsr)LdBRg5fh+45#`bxca~0LLCgc9v=I&R9sJOJtO*6Jv;T7yTG&&Y5-D;j_}Z9HYzkDJ83r z3$Et9;A8*Y4TMH|4PC2k1BP?Jl2BRFaH;s2Z9Y1N3#B*Zy?c+CJ4c9ioU#%&AB7c& zGe+rQ5CVs%4Qk%kRafQSK%*LNTBn)UzHrrkdZ+xz=s>+S{VPRhL6&aV!)Xi+OmwQ1 zwPhWDz6Y;!gXO}-^VI1Y+}%XK;8F5ix%^OilC&}glGAU^=G33xewgiqBMRe|nwBvmoH7}+ zQ3$_IPGmPA*c0yB!~%@!X`V)VXipLFK9jk_d~2fd#l9;tp5_Hoy~k1F=Q~G9sJ}(~ zg~>qLSXI#2iW8(FQ4uLo5mU^*0oo+cK5|aj$;@BVqbsucNsiph?+Ie~jE@ub!v+0H z4KBqf#mm&@&w!!gj+H81DUAKRr>)tiA$;!Gd2=jji2U)Rn!$H51?hmid3@u^lynt= zw2nz?^2wHID1}d``f7_Dro4;UN&pB~8UNAsmd=vBhjG3Yx?Ip>i-{wO95#%w9%RG_ z0W`dOGVJ2Uv0ag5QhoIlzmk>@CFp?D)^jPh!BQ}h@#kO6gD!bqPw2eCD2L?2Wbq78 zd*^KUwL#Aa47MLMFS}#7ek#S=@yzO^d;2N4=le>d9zf_k0wlN{e0H*F>z9tuR0^y{ zmU$bd%b~^X< z>k9hET8xGqq-Jvug-Yzw>h&qeJ}?FSX=K%gTLu@$a9^M!m8gJ7phTj1GsBrM3=?#$ zpXfr2n|ebh{GVMlr=ISK86&b~b2qx}j)q@(^XYXwU)Y)PdzZ*wDPF+?rY5J?`EbB$ z0)OG>_5La6Dz72Q`q!|xDr&LGEuR&U&)~be9DH|=BALbl(W44jDXi%<8e?{;kUlr8 zfF6p9jh=^VeNS_q6l(Q#gU-V27qXXqw*`LVMYwA{v5)VuPdW5%42j*F)EQFn8f;fY z+mC*!7+6|L?Xo%fl<8V)hi0MKws^qC`)8Ars#t@crO-ab4+T&InX#ML1)bS9hcT>n ze16Z-Ph!!)jC4rJRlv&zuDW`pYJU)>Ms$kFmZYCwo->XWTvppm(TRF2>(n)-#~cAE zAos&no+vU2c74NW<8~jAn^~(|%Za`N-BO>RgzRS)Ho5Eik1JlCALO%q0LoRP$zQ^d zbGGJWB4|DsTynPAwmkgYq4=PTyIr)*G6=Xwk4BJYTVw~i+!O4oJVR~_@J>Arx}PpJ_uH_;jO1mxEupoywrOz~8x zM78*8z-e}v_B!P3P108Pzx*YW;E`S*9)~Czk{vwKv*YRC%!?xc_t!tLb*w#z*Gx$b zeq^|xz?O=Nbmxn+e7Q-CAs9)L|>sPgWFzCL>PVb=dWn*?qrN*dW{H zGGpJ%+i$Y#@Ugn~J(0xR&yF*K%bmXCgk0#?dAQc=<4MQLTBUv?l4J)^U0O5)2~v?M z|Lm_|D3E}2PgyI$Ng6oh8lyVC!D$mnn)L8p>AIxGn0@Tq)wK%py5CjR(zzA(c1oXp zL3>(wQ68y9&S~D!fUh>x5zp!4iPh*b*0xzX*Z?esM7`GIMbjDwjXt~+KI=C!gicjP zWJ>zbxy2sgc?jM;azq~cq=ILeY?)+fJA;`}X~e`Td{;D&y8K;0&~(8)$o{-LNC$zw zC-o*zj-n@DiSO2Ir)Kv!0{!70?y>FY2}~(1Uu{Z)+M#Soqd?ZT!lx95!FRWpb7Y67$znYk&os@pEnq4*bzyWbO}sk*JUZCzpuswp$1pB`qg;8(;8cE4zRye zA?3={ew(l`RS|B=(k%FOLCkTPg~O<3a`~$hpppfOP7jz5Plvwv4WI7%ecv7;=O@WT zR+g&qOB0LZG-@s|D?hgz`$%727K}_S(yDmZLH%qYlNtS{eEt*E_}6L7Q32}j(2qBM z_&Rjn3D5Xo@IydTcnqP;?}8CHIJm((%)}yjFEEfw9>bRlv6{`4Bf_wUGXANL3g34Y zdW>}eH|g*=ez(k*J|RNpk(2_;C10+6pj1(5-146Xbl%<>iswZhq6B6y@ur50YDcm^r|G3T)s*XeKPX2@IiE zAd6b?Qf@_55F2SyFAM>tMs7E_bVf^nBCJ6^!qDh8S1%c}2By zmsp=JQR~9ihaFdoYUraEtro~!=q~fx%ZlAxEy{U-?N6@s^zhKAg?h$@stX>e+LmbX zY`KBV8SEl87)i|aO8$AR4e!PZHVJFlqCYZu%~b0b6gcNg^D59ESnFt3S{QxL0kz3K zCy);KNTgyDe|wRJXJHCl;}9CY+$7b58Xz>ZP5BIBcR40em{p8>VAy7N+&ov|C{66;c8iDR!!DBVzmF&8r4dtEb78%*NSV{JsD2Kv@p&f!YDlM-AAbpf~ zacj*oS8KHrm#prL8GyYLa(l@wDu3H)yBtzev| zdwfcVmbvF3PlV}mO&q}w$ky*OuHXjrSQVE&g_6I+0T}mvHBBo+qPAD3=mszPGEW6Ovbcz z=wYA(tPU7IWe7}I%tM=Z<#a2|m`Esa$a%UfQi)i-S)Tu7Z@aW|z5gt)Y&Ne06B#+k zYUlhRa0tnJ1Gj#%r1ifI66sUQ&Hy8-2BiLtBOVbgn^e;HM>W|TueFN3_iz&>9~;Ym ztmY9C@XRrPP^{k1$IVXSw#@41XGWk4skZI~$`2VtXoja~5VgBs;3lb473N7plS%^( z8N|q<&}SAaY~+3-Aes+W%@E}@xMWt_Uyx3B|HTiM|IbsKKvS#R8kD7h#)s$hWiJnqr^pU{Eoy54@P0e)@W` z)exR+18bP2+MON8z=eO<9@C!7flBiM)sW!6Yq{m_`=KJv7Q^FgNo&Cp|+II zy3RV0e4l{f3`p^o{D_nAQhhaE|D|!R!IBwR?x=bM+y7Gnhd6kERiy3;k{d*`I0`pG z8Euak6k$aK3Iqq_;oAIKH9Yz z553sVnz5Qg>x&XR3^-RuLG+%r1gVVyau*_*Rv_+K=++Xj?t;%~gjCKXtgsf)3W!td zCyh)wc!nlT#UPuaiA7CKHYbY#(fp*uR2uYPuP|=;9=!209r)MbuM@QgD}YAKNm{V> zQgTNtbX&+s15Tz8xQgf9O}9M>cRQ@~x-HgLLDN%hS1=KL&(<(xyS74&2a5hU8%SA4 zzVx%XmlqdP-xg~(2q@#YTRs_F|u(?ct$&(s)7Q0D|2IX|;Aq88I>wk)jl*5Fv z=rlR*gI@mFNJKJVcW9l*8w#@SMBY`GJg z?nLebr!;Wi>wBSnyK|V;KFQPm3uq}H>cNyi*+SseY&xjnXUk1pHG7_59h4K68tT^1 znsSjFRar6OoXra!vBm;h!+c!wpUIbl2d^@@41@K;)Z)FbYP&KiiDW|bAdtBhN6zOy z`dxuA=S?7L_$c9Dsjwt?0NjVqBZ`5<2@ildB^o!=;|M`2?45?qqs{RZv4{Rq*DcCv 
ztl~D!H8h%EUYQqZ*VaVNlsWR*&c6~JeJ3;$Ng=4?j(q$99iM7AFY8nI3I1bbpG5YQ zUo)z^Yq5x++>yO_=oq(w-Wt%^+u(e?@T&(?tyCGZqEY)Ci0@#aTk=&<4 zMcPr#z+JXU$HBuRY`D}O*_{d4Vqf3Rd)TyjXOPWxqS8qhGdnwbbhFNP?tUebuE$63 z%79~fM}7q8TkWk?%bi!(r7bmKb0^*LdF%Go8ZL05Ao>8Mbzu$g$Rt)cbO{WvF7{wl0n1dLTSz3CPkvG`DVHH6O9-f5x`J{pP6CUD zp6AQE0#0S` zH~S2@>Uydu{U7$;GN{V6?;8dL0Z}?sIwYh+=}rM@q(eeN8l+*-5(3iQAPCakND4@c zba!{xbDZwIueq=Le&?Nc-Y?Jd%v>|f{;*tot#h5{ar}>e{37XP<3*!!Mn?+ocZZE( z@k#1gK@w&y@41VoX}_pp&9c#Ks?U$+p5EE4HODP=g+X~^tFh`?nm>2KHM^p>lP=vj zZzu4WHOu7^p>#@ZP^qzDP0(XRnGamQ4RyN9sK)(XvgQYB&j-1;7-Q-X)*bruLIZ=M z7XSM=I`HtERABbVN#x+b{Tm0Y-IkQ?BSQ@7xZlfkhqpJ#6&_to8<~PGhepwYdIHb2 ze=NsDarS*8;T3jHf5=*XvDwR<2ebqaQyX2Z2m(CtkxQaPVS;-3O0}6TjZ01o-uhid z{HqOsV=Adz=a9hkU`S}1%a%?)RT~n53yT$I{1>aJQxHZXP5AOgc+7e_S{MGo@_;00 z=S2_T!XvCQzO~(<$mh>MDlnx9|;~Fey>rh zB+$D9S<&S#^N@3QZB2g1U+p0}P*(hc(-r<-2Fmr&Kv_rP=?xetLl1R~NbR_%7M1An zSzxp{@R;GGHMgv18CU_Ww9e)L`|WMn z7hg9hGok=iNeL2YmSE0Y0wKj-M9|Lif+R9{M#L@!1r(m^DjdN<2=LH|MKoMsxVM93 zAxA%IKM|AA2TZC%nW zBG8Lc<2wGs_@@9M$z;=#K^1V}HX!7pe55kR0wS!TarHR?WMrqGYSL^#n_|y;4m}f~ zXF7rdJW#-J*K{r;Wcv;pSq)~~EA>Ho`j11F1VpfsnuIKDgsedu?ROGw&pq#PnU)bz zXL8w9ach6un}sY%MRQuh6%W6&PA=P}z$F68Q~f-AL4g-PbL{l<1HZMtdZ#u^p_c0vlvmjR^cPqN(M6WyqyOwC4Yhr7c?4AHqIWD18YPJWK5PJ4;DeD zQh^Dye+HFO8yn)Awg4v&%PJn=LZm+QZStMlXl;GPO{DLyUH$0>Pn*rE?i~QH54Gw_^IszTM z0x7^-1RjI8b7X#md`<`kc9)_=QpZj=M+?-KzRY$+@T#5x(N8g5-EN~KQUQ0&{+Lwd z5BEd=VcXF_01BK(_=%u9g$a?K+Q@|w!O!?hl{^rFvV#uF9kPAMTL~KB7Gp?Y-WDW; zz%AzAX$b$g#r$_#jE_*7t{|XI25tN(U8*oYse5ebb3xxZn5hqL*qtOrtQOa;C!tx!hEe3x$KRUUDkfi7B2aLxA?D1q{BX9oL6!F>O#m5Y{$$JcJ6p3U&+8 zCB`!pb%?zaOZo3yks4C(_fCsL%vx-{VIcT;E`^i$deK8(JM^C@E&)sw;_kSG@wz@j zC=vnv>4HyGm+;W5fV#7En}G-~+JrJXZuj5lJ7<(YEQ_g7aF>}EUD)d?RM7PyLJ?#O zSi}+x+;(a}hNHL3y}SMbrRD)hc8b^{*O8%DAxu3)*Lf-<^zXkO4xSQ~>P(KZz!w|_ zn|}@iX6P^oq3QvlnKb)QaaCZJmtN!}$ z-)4UQdkHE3%!Hv!`VaHVK{EJ%lH>Hn$%K6nmv^FkTEVyr;J`KbJAirUU)3F8B|xnS z#{gOYyqIspg50|d!xKSFtiWoy&Iu*817lcsbr1*z7+}9Zo{%02qQT@jFsS`Qm{Z3( z2iku5kjCFy8q9P#P#8=aWD0O={z-U1EU^Un>tH8FE%;CY2EYn{so&-S@*~+Ll$&2;`IPi8ww!{*C<%K=L?wFgh~s z@`Of?=Cc}s8#w;%zhM7!_8;&DPPp&&W3&-yN*2E7WUT?$az#S%PG574Fu{xTN@YD^>f zs=Zn9gx!X`-qJDoN+(Y%v)_7b$G<(^o?s8525b@Up4b9?ER(I(?F3~=^G+-L#|S&? zXpwR_5dMFjmL~8a5-<_;*$W$`@ZqBw)gtVcf4pjHy3Lf8cfC4gL_(U)+~O=6DoUAJ z59K-}tNQp`uR0e99*uxGo!PWJWs3A5=F~){$6s5p6tWC1b#otts7`?Z+Q(xc;0$G! 
zZs2DHHyZlJ0-k^v;F72>fBFna8f2%S3=moxhbWKwxaAm6`Fyacq@@0aVT-YHMs~eJ zK=m60qDD>8fDgd~+ht(X=Hk@0-?}Tt!eiED({A{HxaX`_Yw#7wMRzBK3*1zAj}Pq` zdpa)vAlE#(VmtCMB4D63ozAYsytiDIbPRL}IrgeGjxSH>>&*<>d}R~Zl!5xEv2r|o zxzPXXz-J4aHj4L?IX+BO^+AB*wey`~>pAP(*d0Xy#W3OmnhjTos>sUA{(6d-tNj=8 z2FpiM*c0v+cu$y98=rm(QoDRFYsvG=K;QS#H)&)RR;~6a*ei?SAW>MVlu%L_RrU=5 zcDsc?E&n9YKr1oL5(T_}Z*>S+66y;!goKykPjq1+TyJj0L(pGRPT@)G#%IFb)C*;!vp6u3Q+lp=vuTdgLf9A3^$XBiwzU2)o`AGnB|fvXkAsCI_`P^4zXbf$SH8-dPx$L;hJvxc?6JQRe;XEJj9ZYPui zZ1bS0$^pA2ssk{w=}#PHBSW06j|APg&!PA*J_w&lTHH5axFr6!x>A{0Z|wcS?x-$9 zL-rwR5G|lTKmbOB2lkh5!7>Lft_Bs~TXZVwetlI|1jUWmWW9e#?C_9b6^v%JC&4$7 z<1!hrQ;fNnXyh_l(x)O~zfu7c=Pv`BqdDI+*l+;S(a+z1s87qg!f6h^{-noG?wGYg zoOTrZZzHP~ECfRTIIml@NUlg+vxn4RSo7o|(cgs?SPxuT&r$T9mY%S^%dTj(`EkDu z@N0%D->WTSqy{Fou0j zoz=cr{*N{u_-R%%b+15o=(FI1a-(iJl2npK_YP z=dQF^>YmROO9BP?`s#!^FxtnZ?RQyYpJTtgRIIDm*r;R~pS`+bDop|`n({-&)1ur> z&r(8X*q!X*!%N^WI1D(It)Xefv{M|qx;|s}ms-&SCp_4{rZ0$VJ$M6w4soo>FUiw=364C<3^P;|8TU&EH+%W3E#h0Z^Wh1VBrd#fS^)G{#9{yJBP@DSU>Nz*n*uSoIAZEsz`+_S=NW!NS_)Jk*n zmsy_T(yGClL#GX6qumO2p;mqHE}GLym5BLUIBUQ<>T3`dt~Nc$yb#geAAj>EAK(^Z ziWP!DAU8co*2&(Rs!xKB7_0$KFl5VO)~tv;2oal5%~t{DknHN&XgCsm;_yf7!#N=k zrJ~*!qUB^flrIwN-UDs!AE%xZWtA2q{zq%3-+j38%_!c5rW4{90jE zM8-#A&}<+}7S@_6eo>&NP#OMS^I9KB-ll6%VGTLYV09h>LWRaQt=ibY7%A#5o`sFc z+mQ`s1F-yuAGK z8u`fwK(*9tH1`KoExA~RCnuM-UJU|{+a)a9%#Y$ab;SxbKllstf2y*cdOp=560nW} zPWI1q%J>?O&|at$84yFPUiUL@&mgYL`HIc;dnfhndm<;BFZ@kAyE1_TXV_5iO2}8h z7G31Glwt^0L|(1e=Brk9a_GwGt)9tY@g~q7FMvnG`xkL520X#}o~)22Z?7#VFl~jG zV(~Ko-x5I0Qg~Boyrp-%yrG;ldT4c{wU~xIPA^nW9ok;%vJk2o_5k6$`Xn&pUZ3P{ z3b$KW7!g0UXGjyim=A2-6L4W%EG`oNhyWv{?P z6rl3vv|dt3*j-o~$C#l0Wk~@7)xvAA#BJXFsZRNU#@Y_ALqxs%j%wHaJ-Vo_Zk^xK ziByZ*6~MVN#w3c~7D%0ixevFeWskv~u%A*5W{9@Ch<3=`0T)IEDX9`3dev|0{^ssV zQxyG|@22#$)|n6FiO0Pr0GIYjht-5jZdqZ=?%e9nRB}&+*%IfU2mz-%^`Ad}{3zaF z*LLAd!~I{`IZlayowER06k2W?J;1BKj7i^(1BPJbLI&;o!oys>od=GYH`}2ty7M~6 z069~&k&z+d-vi8ucuvcDoV7ab824T|%=S*#9w{9$H)rye38u~fW+3uTUd+$B##iUA{1G#t7z=&399PdIi<#8xJQQs zaw*e_)cL|y(emCJeUxsKyT29kM}-w>yyeC$A1{Q9BgLfR=17;Sy=n1+H^ot7J(Atn z9qS?QUN7%#R&JS1ou>3q=3WZ`xpVc8c0FIRhQN1|dbPkZ`xkW>IWaxF-la3li;4hS_{)vwZ()OB!Jo3Bpp+rG*R}2RE~ftP-kyZ)(*YF zKFLY8yub#%q4x?T*25lrgoN~O@}H+(^qN@kC7(?(fHAy9^z|iS|9jW|-7AdkIZ?p> z9Dd>Qb=cUC&`vPn_E+~(g-SAEk)IC|noKyN16&>Ha^L;6HhiMYoY$`?00J&6|7gqfS5h95s{k^vnTi7EK;NP_Gdus`38tR^f{OtTqVMh zI<+5B49$UyY2G)R@BKxiF_=I$GxDyE%V!>BO2g{CD{%~}vEzM$$O1h2r{Bz0iFbeB zEFNt+S==cfF6&Hg8^NbL|bIX2q9jm&3zn zJ2Ukmazuj9!^QT2Hy;P)EU|MJ?;eNA!o6nuFvLm}$-;?T<*eI&3lT z6ow2szD7G^j4pVcUrjZ-@jkg)bvN_{&o1_xhp5fsdAnDD0ihgeVz?GWMHNbBgs|$adfY8~(O+jI; z+Q~y51z}MHQy&9k5uCQ)Py&rrpU*z>fum3kipa@*2^ zan>|Y1$28k)zJULBBHME5~LDC*-U>)WH$j+0@wuK5c0avTmyzNgC~oN%d@P#`m;!; zK@A2Lh(bC`As(#k8$f-+>BgXM$hOy{=2KnG`C)f>W?N1UU>E>-=$C}ytnq-=2p+*X z`xkjEl8073o1-_txG=(W3fH#3eg#AL{i>k+6K0qS{f>#RZLZu*b{s;Oq0<44KdlI= z`R@C%TRCKmZDo>fYfsoY|01Mbd1*M}>QcDEAJp&G4I}iuA0F+4x5c`A|Kjqp*LK+{ zHnitwZ-fHyjwQGIN)c72zPQ@AwNR#tO%V^+F#aLcVfv+A4KC$=>Cb*%MqKDJW58Qv z7wgog?wc=|EwBRRaNff}w0GI)n@-)w)!d5LJB@;lLN_GpcOJIf+X>{>v)5%mYz0H9 z@N;bjd6*{bm$Tm#PWF^pxR8Fv*#Bpg#UuvRLQiEKkPxRoLo3;4&@jDsrp^I(_o6)T zHJX!I`+6Etk+;*?96CCFv*hdMGx|5Q1jfQbMllzGFVer?drjfL!|R9a{bG+Fr35Zx zVeP~i%k6Y07i2Ad^j3wK^oVGpo**u>Sr;i_`};TMJSl&NHsG!nO?#n}rw?yF`;FD5 zVG1$J^IY132N(n(mU6cp6Kdc5jM(yBA_b*tShm#^FW{TtS#VS=KVcSZI!~)Ace=xP8XJsMzMs2 zAns8t%TqgjX1Lxs#+QWig}utV6Z_rw70?YOFe*0;bS=f!W=M!F9ibnfv5XNE2n52HTI7^6DY%|Y?Oz4#nPORJRa zgvk&%ESyNDl%p1bK3wlV%R4ONB_y(!fwYk16sB{^iu?{X{kUd6lW+tXHpK260`z6&GVG|t=QppL?1$I>XBrt&SPslj8k(t7Qh)F1K!py9vjmB#1}FBDg1_7GqiJRUk}Mu^RERH1{Cj(N)h&& zu1FyF0a`T6T$j*UQVRz0k$EPMiWB 
zu~ViPg02rLZXVjmq}|^o?M;1O&Ytp*j4*F|_#&|qp#zV38GPP%xrG=-AZmdrq7YdX zqy<=YO0)?=bPFQOlhCsk6oiKPezZMK>ltiax&dV&FTosKnDr6SIK*liZ|>T-{O~@a zzR`}pK`%_;tD2fb?Iw9kD)Gvby=CjkW&7uv*(L5Aj@M#5PPabTjQrTNK5(8tlHi$i z>lpapzr^k+;xsj(JgB2xZAw)tO6K;L>^nc)2ug~c2hKtx8Qm-k?6^$*vf^f&1~H}J;*JwqTg4xx3?J{!=Ydir)? zI^#mDO%!jgXDIpxzAz%8N1IysF_z-+4EG=NoJ;1PMHnA=?$kXx9_5@F6~x@ef-DRE zaH(cHdcAcHNO>K_n6mE&*$?j262Av_Z+xQ3Bi0kwoG@_|v*B#=*@S*?opI~Q3SHf5 zU^(%Q64HOUApx8BnmtU|`S`n|kwh2{ z4Xa`nKH6^Fz7>Q2s(T9ZoF|nb=y`8hzdCRgdV6!07b$-8CceCYziJ?4B_sMx_U=`D5sSvVsil+&MmMB5v@D1&xg`3jp83fz+FvuZEHsGV;{HI)Afpyu3Ie00E&Jo+W~JjVN5y zkZo@9%%~FnNV$yX?bU?hXWot6;FM#NlsgYA;AYT>zMtt1xaxG_Hg{=qM6X)B33fj| zyqYlAmDwT(9U_``AMekD3K!MReNRc1MxW#F1t-I90$x`*Hj0}Lbm=*64#pz*?|X~R z!TYwqL+e#U(qu*kZeuxucdqQDZ$ybK6$;JqxSLsArx6Xjg;m;`6c>NMb>(H5qi=xy z)sWmG!&<0~EqUMBh*dqKzh7*EN5^~NGuisQC41Uw=Z`|y4& za{YG^4hdo%OrE)2W2WDS-UqIKfH!Djr@l-*Z+zK*sW*;E`fL+Oxhs(&`!{vpd-SzQ?2WC8kLwh>p?qP!&j|t?2zh^3rnueGwyXfbX@AJ%-0vL zALn-3W#F;B)_WFmsZ;P?uIo8v z;yQ~{g*SQ@d9;72QTV?tPnS0o&zHJKN&`YzntH3LHe! zMkCoI9z|KPWHs5X%{Z!8kxF|4_i`BV=}XoB<$>G)>95XC*P9ZeI19qdO1QyD>+Ai$ zUSH?I=2FdBIjyF%(FmB8GW~+u z$4W}%?oTb7D*g5?esOeKZAUorw`Nf1+P zqCoKCoQl%Tq@m!+wsy3qTFr8?;Xx{k^{xev=XHFFiWsDB1jJ8zxe95#e&F+R`%(&f zDXkGMZlACkZl~$QahNGB5}!hRG3V3mujAk6v?fb#^h+h0Z$z^Js&`3W~bYYI+bN>lX zs-c)I#o9v=mZqS6LVwV}dpmZJwe4!4Fl^df$vZdpU=N#j+c|FVi<3j20gh)E*S-iJ z^N%UCU7)_K;pzF+Fa8HxX=@ZxTL8L30g%Ygmc(rF7pvA&%9RtRI^thf0q z!<+8_HSGRkzxap{89HW*kpt1X5hrKFUBEyjH%^Z93Ju>je~cs|Z7RA;|F;W*P}R7b zF8So_Zey)VVa3V22xn^hrzo zq7Eux8|0u9*Y{+@f{ADEt{c0ps_a@T-99WVruhE0bTJA^NuG@$6EfR_wNl<2OK7(A z{axt^JWDL>#|OPdMsHev^O+HdJMB#h12do~Gi|f6f3mbjm$l}`+eJZ(t5ZTMd`p~H zap&7l(?s8trq4FD#T~jHtu~tiW3a=?7z}^Hv;>0Dv-Z@RAG0!UE{h>YH|n?w4bHEC z21?Cy^g-9~k~sORPj`$Jn7YVZz!N*>cag@|(Acqt@jj8bIOe;ot20Kc8w(t>xxS8& z)uDwpWXq@@SAd^69)g7Sp$4`$=fual-&ZXd7!>oXjKPz%2F3-_str8EA0oOiu^hdM zJ+hFH-`{jIUi{1BX`cr)zMOU9GKI-7b5Zo!c;r4fed35kx;N+k>?H@v5FBaunMbiF zdiXNC2xd#@GIvzPqUq0{nX-;I10k10{mI-iU}z9AcAbHTXgYICj|!mlK~cG;!_gZ( zh8HQI?Q3uVk6cLRwMN*qcl#pemEi@iE?`VmThB#gdh=hMjyxlm3$y}xT;1|#48%oh zcxROTY0oZemVq6@Uk^c&XNO?>2D8P~LL9y~wm6&A^`x{uehVcB94@z6)KOe|$xf+y z4UdhCgO{95ipre1Tz}crmN!esV0=|tx$CZoq6y6f;7!eyB|^ue6yVwT0Do=hJ;6<- z%UTfoUicLEQB%52!7ektEjP$zRPze=di}L$$p>KP2e$X3sKY`_SO$JVK92aM_o!%Q zSNcWnqen10+C7!@fpa7nuv+5gI+WXgTbX!#dlh5!l;6h-G;T+3J>0U1caW!6*|@Gt z9ms)@fV^(~a8VDbN)ARljEIQU`MN+1i2xnI&?Nt$|203WCR=vM+Hd=`j6 zmrYc+9zdT1Cg_bV7q}}x!^5D7v6MmPs?=rc<)xuUdmq{)M_3U#ijCO)0g#JqV>w?) z6FnJxBzb?_5NBjypOE2;8{7&QtFMz|<|4sA3?rzSsxT*iB9|h-E^PqUzZ^`Ke&y7- zoUquy^~35{FcK&jNLuqJVA)#G{ebn`7w;_h^9A?_d=)b;N~@h)b&;|MbPBPgLO10?DiDtI6^SgboUeg5ShOFk7%h-~{ej zU)Sh;i{GEv1Ab8i_Q1OxW<`puHkrdOv2d`&@|5>ob_@Z+?8NL!uQiohl^MP3`uNu* zJ|&+e$DK*J0yV}pnHTQo8JMCfC4uA>vxF5M#G_|aJbNp=d(oM$q@178&OoCs(;sau z|HEWktk(sQZMXCZ3^caB_G zcWyc0K7Uy){c4Je^~q!abHpxHR<`BejWl`(4R{#b7n>-ciQqSlPsCe$km2M^<}$4jb6> z(>uJ>CqBN+XasH=f&6Ue!AS4Wfly`k+?slb{P>c{)=Od)VT9_vUmG~Mr|S_vwLk`w z5T`R5{#ZbYUr>I)4Ljv5l3$}l63vSFBghzgoW!o>@YZftvMCI5x$8&QSPVOj0081! 
[GIT binary patch payload omitted — compressed image data for the new documentation assets]

diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md new file mode 100644 index 0000000000..743c494591 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/opentelemetry/KafkaTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.opentelemetry.KafkaTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md new file mode 100644 index 0000000000..b34265dfbb --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/opentelemetry/middleware/KafkaTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.opentelemetry.middleware.KafkaTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md new file mode 100644 index 0000000000..730662fae5 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BaseConfluentTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.opentelemetry.provider.BaseConfluentTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md new file mode 100644 index 0000000000..a6db133484 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/BatchConfluentTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.opentelemetry.provider.BatchConfluentTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md new file mode 100644 index 0000000000..2c5242e6e5 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/ConfluentTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.opentelemetry.provider.ConfluentTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md new file mode 100644 index 0000000000..7dd0e1d0fd --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/opentelemetry/provider/telemetry_attributes_provider_factory.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: +
boost: 0.5 +--- + +::: faststream.confluent.opentelemetry.provider.telemetry_attributes_provider_factory diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md new file mode 100644 index 0000000000..02fb4805ac --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.opentelemetry.KafkaTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md b/docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md new file mode 100644 index 0000000000..aba78378f2 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/opentelemetry/middleware/KafkaTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.opentelemetry.middleware.KafkaTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md new file mode 100644 index 0000000000..5cb13be947 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BaseKafkaTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.opentelemetry.provider.BaseKafkaTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md new file mode 100644 index 0000000000..d3d7080509 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/BatchKafkaTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.opentelemetry.provider.BatchKafkaTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md new file mode 100644 index 0000000000..0859c0df3d --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.opentelemetry.provider.KafkaTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md new file mode 100644 index 0000000000..3b2a1ad394 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.opentelemetry.provider.telemetry_attributes_provider_factory diff --git 
a/docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md b/docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md new file mode 100644 index 0000000000..e72f2de8ab --- /dev/null +++ b/docs/docs/en/api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.opentelemetry.NatsTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md b/docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md new file mode 100644 index 0000000000..b2bb226585 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/opentelemetry/middleware/NatsTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.opentelemetry.middleware.NatsTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md new file mode 100644 index 0000000000..d6626c537d --- /dev/null +++ b/docs/docs/en/api/faststream/nats/opentelemetry/provider/BaseNatsTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.opentelemetry.provider.BaseNatsTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md new file mode 100644 index 0000000000..045996125a --- /dev/null +++ b/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsBatchTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.opentelemetry.provider.NatsBatchTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md new file mode 100644 index 0000000000..b58590c4fa --- /dev/null +++ b/docs/docs/en/api/faststream/nats/opentelemetry/provider/NatsTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.opentelemetry.provider.NatsTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md b/docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md new file mode 100644 index 0000000000..200d333e0b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/opentelemetry/provider/telemetry_attributes_provider_factory.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.opentelemetry.provider.telemetry_attributes_provider_factory diff --git a/docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md new file mode 100644 index 0000000000..914f134e60 --- 
/dev/null +++ b/docs/docs/en/api/faststream/opentelemetry/TelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.opentelemetry.TelemetryMiddleware diff --git a/docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md b/docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md new file mode 100644 index 0000000000..7ca8b2cb6d --- /dev/null +++ b/docs/docs/en/api/faststream/opentelemetry/TelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.opentelemetry.TelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md b/docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md new file mode 100644 index 0000000000..cd58706774 --- /dev/null +++ b/docs/docs/en/api/faststream/opentelemetry/consts/MessageAction.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.opentelemetry.consts.MessageAction diff --git a/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md new file mode 100644 index 0000000000..64a7b4a501 --- /dev/null +++ b/docs/docs/en/api/faststream/opentelemetry/middleware/BaseTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.opentelemetry.middleware.BaseTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md b/docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md new file mode 100644 index 0000000000..f019b3ad61 --- /dev/null +++ b/docs/docs/en/api/faststream/opentelemetry/middleware/TelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.opentelemetry.middleware.TelemetryMiddleware diff --git a/docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md b/docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md new file mode 100644 index 0000000000..0fefe1c0ef --- /dev/null +++ b/docs/docs/en/api/faststream/opentelemetry/provider/TelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.opentelemetry.provider.TelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md b/docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md new file mode 100644 index 0000000000..7d5ef3de27 --- /dev/null +++ b/docs/docs/en/api/faststream/rabbit/opentelemetry/RabbitTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.opentelemetry.RabbitTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md b/docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md new file mode 100644 index 0000000000..e86771a8ba --- 
/dev/null +++ b/docs/docs/en/api/faststream/rabbit/opentelemetry/middleware/RabbitTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.opentelemetry.middleware.RabbitTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md new file mode 100644 index 0000000000..ba6742ac90 --- /dev/null +++ b/docs/docs/en/api/faststream/rabbit/opentelemetry/provider/RabbitTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.opentelemetry.provider.RabbitTelemetrySettingsProvider diff --git a/docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md b/docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md new file mode 100644 index 0000000000..537a2dc7b9 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/opentelemetry/RedisTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.opentelemetry.RedisTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md b/docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md new file mode 100644 index 0000000000..4c0febf261 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/opentelemetry/middleware/RedisTelemetryMiddleware.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.opentelemetry.middleware.RedisTelemetryMiddleware diff --git a/docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md b/docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md new file mode 100644 index 0000000000..26e7859c34 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/opentelemetry/provider/RedisTelemetrySettingsProvider.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.opentelemetry.provider.RedisTelemetrySettingsProvider diff --git a/docs/docs/en/getting-started/opentelemetry/index.md b/docs/docs/en/getting-started/opentelemetry/index.md new file mode 100644 index 0000000000..44e7fe9013 --- /dev/null +++ b/docs/docs/en/getting-started/opentelemetry/index.md @@ -0,0 +1,114 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 10 +--- + +# OpenTelemetry + +**OpenTelemetry** is an open-source observability framework designed to provide a unified standard for collecting and exporting telemetry data such as traces, metrics, and logs. It aims to make observability a built-in feature of software development, simplifying the integration and standardization of telemetry data across various services. For more details, you can read the official [OpenTelemetry documentation](https://opentelemetry.io/){.external-link target="_blank"}. + +## Tracing + +Tracing is a form of observability that tracks the flow of requests as they move through various services in a distributed system. 
It provides insights into the interactions between services, highlighting performance bottlenecks and errors. The result of implementing tracing is a detailed map of the service interactions, often visualized as a trace diagram. This helps developers understand the behavior and performance of their applications. For an in-depth explanation, refer to the [OpenTelemetry tracing specification](https://opentelemetry.io/docs/concepts/signals/traces/){.external-link target="_blank"}. + +![HTML-page](../../../assets/img/simple-trace.png){ loading=lazy } +`Visualized via Grafana and Tempo` + +This trace is derived from this relationship between handlers: + +```python linenums="1" +@broker.subscriber("first") +@broker.publisher("second") +async def first_handler(msg: str): + await asyncio.sleep(0.1) + return msg + + +@broker.subscriber("second") +@broker.publisher("third") +async def second_handler(msg: str): + await asyncio.sleep(0.05) + return msg + + +@broker.subscriber("third") +async def third_handler(msg: str): + await asyncio.sleep(0.075) +``` + +## FastStream Tracing + +**OpenTelemetry** tracing support in **FastStream** adheres to the [semantic conventions for messaging systems](https://opentelemetry.io/docs/specs/semconv/messaging/){.external-link target="_blank"}. + +To add a trace to your broker, you need to: + +1. Install `FastStream` with `opentelemetry-sdk` + + ```shell + pip install faststream[otel] + ``` + +2. Configure `TracerProvider` + + ```python linenums="1" hl_lines="5-7" + from opentelemetry import trace + from opentelemetry.sdk.resources import Resource + from opentelemetry.sdk.trace import TracerProvider + + resource = Resource.create(attributes={"service.name": "faststream"}) + tracer_provider = TracerProvider(resource=resource) + trace.set_tracer_provider(tracer_provider) + ``` + +3. Add `TelemetryMiddleware` to your broker + + {!> includes/getting_started/opentelemetry/1.md !} + +### Exporting + +To export traces, you must select and configure an exporter yourself: + +* [opentelemetry-exporter-jaeger](https://pypi.org/project/opentelemetry-exporter-jaeger/){.external-link target="_blank"} to export to **Jaeger** +* [opentelemetry-exporter-otlp](https://pypi.org/project/opentelemetry-exporter-otlp/){.external-link target="_blank"} for export via **gRPC** or **HTTP** +* ``InMemorySpanExporter`` from ``opentelemetry.sdk.trace.export.in_memory_span_exporter`` for local tests + +There are other exporters. + +Configuring the export of traces via `opentelemetry-exporter-otlp`: + +```python linenums="1" hl_lines="4-6" +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.trace.export import BatchSpanProcessor + +exporter = OTLPSpanExporter(endpoint="http://127.0.0.1:4317") +processor = BatchSpanProcessor(exporter) +tracer_provider.add_span_processor(processor) +``` + +### Visualization + +To visualize traces, you can send them to a backend system that supports distributed tracing, such as **Jaeger**, **Zipkin**, or **Grafana Tempo**. These systems provide a user interface to visualize and analyze traces. + +* **Jaeger**: You can run **Jaeger** using Docker and configure your **OpenTelemetry** middleware to send traces to **Jaeger**. For more details, see the [Jaeger documentation](https://www.jaegertracing.io/){.external-link target="_blank"}. +* **Zipkin**: Similar to **Jaeger**, you can run **Zipkin** using **Docker** and configure the **OpenTelemetry** middleware accordingly. 
For more details, see the [Zipkin documentation](https://zipkin.io/){.external-link target="_blank"}. +* **Grafana Tempo**: **Grafana Tempo** is a high-scale distributed tracing backend. You can configure **OpenTelemetry** to export traces to **Tempo**, which can then be visualized using **Grafana**. For more details, see the [Grafana Tempo documentation](https://grafana.com/docs/tempo/latest/){.external-link target="_blank"}. + +## Example + +To see how to set up, visualize, and configure tracing for **FastStream** services, go to [example](https://github.com/draincoder/faststream-monitoring){.external-link target="_blank"}. + +An example includes: + +* Three `FastStream` services +* Exporting traces to `Grafana Tempo` via `gRPC` +* Visualization of traces via `Grafana` +* Examples with custom spans +* Configured `docker-compose` with the entire infrastructure + +![HTML-page](../../../assets/img/distributed-trace.png){ loading=lazy } +`Visualized via Grafana and Tempo` diff --git a/docs/docs/navigation_template.txt b/docs/docs/navigation_template.txt index 87df76aa6c..fa23f9c3c5 100644 --- a/docs/docs/navigation_template.txt +++ b/docs/docs/navigation_template.txt @@ -41,6 +41,7 @@ search: - [FastAPI Plugin](getting-started/integrations/fastapi/index.md) - [Django](getting-started/integrations/django/index.md) - [CLI commands](getting-started/cli/index.md) + - [OpenTelemetry](getting-started/opentelemetry/index.md) - [Logging](getting-started/logging.md) - [Config Management](getting-started/config/index.md) - [Task Scheduling](scheduling.md) diff --git a/docs/docs_src/getting_started/opentelemetry/__init__.py b/docs/docs_src/getting_started/opentelemetry/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/getting_started/opentelemetry/confluent_telemetry.py b/docs/docs_src/getting_started/opentelemetry/confluent_telemetry.py new file mode 100644 index 0000000000..e9e3175d6d --- /dev/null +++ b/docs/docs_src/getting_started/opentelemetry/confluent_telemetry.py @@ -0,0 +1,10 @@ +from faststream import FastStream +from faststream.confluent import KafkaBroker +from faststream.confluent.opentelemetry import KafkaTelemetryMiddleware + +broker = KafkaBroker( + middlewares=( + KafkaTelemetryMiddleware(tracer_provider=tracer_provider) + ) +) +app = FastStream(broker) diff --git a/docs/docs_src/getting_started/opentelemetry/kafka_telemetry.py b/docs/docs_src/getting_started/opentelemetry/kafka_telemetry.py new file mode 100644 index 0000000000..4bbfd9d9d8 --- /dev/null +++ b/docs/docs_src/getting_started/opentelemetry/kafka_telemetry.py @@ -0,0 +1,10 @@ +from faststream import FastStream +from faststream.kafka import KafkaBroker +from faststream.kafka.opentelemetry import KafkaTelemetryMiddleware + +broker = KafkaBroker( + middlewares=( + KafkaTelemetryMiddleware(tracer_provider=tracer_provider), + ) +) +app = FastStream(broker) diff --git a/docs/docs_src/getting_started/opentelemetry/nats_telemetry.py b/docs/docs_src/getting_started/opentelemetry/nats_telemetry.py new file mode 100644 index 0000000000..f503e22050 --- /dev/null +++ b/docs/docs_src/getting_started/opentelemetry/nats_telemetry.py @@ -0,0 +1,10 @@ +from faststream import FastStream +from faststream.nats import NatsBroker +from faststream.nats.opentelemetry import NatsTelemetryMiddleware + +broker = NatsBroker( + middlewares=( + NatsTelemetryMiddleware(tracer_provider=tracer_provider), + ) +) +app = FastStream(broker) diff --git a/docs/docs_src/getting_started/opentelemetry/rabbit_telemetry.py 
b/docs/docs_src/getting_started/opentelemetry/rabbit_telemetry.py new file mode 100644 index 0000000000..4dea2f919f --- /dev/null +++ b/docs/docs_src/getting_started/opentelemetry/rabbit_telemetry.py @@ -0,0 +1,10 @@ +from faststream import FastStream +from faststream.rabbit import RabbitBroker +from faststream.rabbit.opentelemetry import RabbitTelemetryMiddleware + +broker = RabbitBroker( + middlewares=( + RabbitTelemetryMiddleware(tracer_provider=tracer_provider), + ) +) +app = FastStream(broker) diff --git a/docs/docs_src/getting_started/opentelemetry/redis_telemetry.py b/docs/docs_src/getting_started/opentelemetry/redis_telemetry.py new file mode 100644 index 0000000000..2de8174264 --- /dev/null +++ b/docs/docs_src/getting_started/opentelemetry/redis_telemetry.py @@ -0,0 +1,10 @@ +from faststream import FastStream +from faststream.redis import RedisBroker +from faststream.redis.opentelemetry import RedisTelemetryMiddleware + +broker = RedisBroker( + middlewares=( + RedisTelemetryMiddleware(tracer_provider=tracer_provider), + ) +) +app = FastStream(broker) diff --git a/docs/includes/getting_started/opentelemetry/1.md b/docs/includes/getting_started/opentelemetry/1.md new file mode 100644 index 0000000000..5ddf58d192 --- /dev/null +++ b/docs/includes/getting_started/opentelemetry/1.md @@ -0,0 +1,24 @@ +=== "AIOKafka" + ```python linenums="1" hl_lines="7" + {!> docs_src/getting_started/opentelemetry/kafka_telemetry.py!} + ``` + +=== "Confluent" + ```python linenums="1" hl_lines="7" + {!> docs_src/getting_started/opentelemetry/confluent_telemetry.py!} + ``` + +=== "RabbitMQ" + ```python linenums="1" hl_lines="7" + {!> docs_src/getting_started/opentelemetry/rabbit_telemetry.py!} + ``` + +=== "NATS" + ```python linenums="1" hl_lines="7" + {!> docs_src/getting_started/opentelemetry/nats_telemetry.py!} + ``` + +=== "Redis" + ```python linenums="1" hl_lines="7" + {!> docs_src/getting_started/opentelemetry/redis_telemetry.py!} + ``` diff --git a/faststream/__about__.py b/faststream/__about__.py index 7aaf590027..6a9efa082f 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.6" +__version__ = "0.5.7" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/broker/core/abc.py b/faststream/broker/core/abc.py index e89f2d5144..eb1a49bb7b 100644 --- a/faststream/broker/core/abc.py +++ b/faststream/broker/core/abc.py @@ -107,10 +107,10 @@ def include_router( *middlewares, *h._broker_middlewares, ) - h._broker_dependecies = ( + h._broker_dependencies = ( *self._dependencies, *dependencies, - *h._broker_dependecies, + *h._broker_dependencies, ) self._subscribers = {**self._subscribers, key: h} diff --git a/faststream/broker/core/usecase.py b/faststream/broker/core/usecase.py index ea988a1811..439230ad8a 100644 --- a/faststream/broker/core/usecase.py +++ b/faststream/broker/core/usecase.py @@ -42,7 +42,7 @@ from faststream.asyncapi.schema import Tag, TagDict from faststream.broker.publisher.proto import ProducerProto, PublisherProto from faststream.security import BaseSecurity - from faststream.types import AnyDict, AsyncFunc, Decorator, LoggerProto + from faststream.types import AnyDict, Decorator, LoggerProto class BrokerUsecase( @@ -242,22 +242,9 @@ def setup_subscriber( **kwargs: Any, ) -> None: """Setup the Subscriber to prepare it to starting.""" - subscriber.setup( - logger=self.logger, - producer=self._producer, - graceful_timeout=self.graceful_timeout, - 
extra_context={}, - # broker options - broker_parser=self._parser, - broker_decoder=self._decoder, - # dependant args - apply_types=self._is_apply_types, - is_validate=self._is_validate, - _get_dependant=self._get_dependant, - _call_decorators=self._call_decorators, - **self._subscriber_setup_extra, - **kwargs, - ) + data = self._subscriber_setup_extra.copy() + data.update(kwargs) + subscriber.setup(**data) def setup_publisher( self, @@ -265,19 +252,32 @@ def setup_publisher( **kwargs: Any, ) -> None: """Setup the Publisher to prepare it to starting.""" - publisher.setup( - producer=self._producer, - **self._publisher_setup_extra, - **kwargs, - ) + data = self._publisher_setup_extra.copy() + data.update(kwargs) + publisher.setup(**data) @property def _subscriber_setup_extra(self) -> "AnyDict": - return {} + return { + "logger": self.logger, + "producer": self._producer, + "graceful_timeout": self.graceful_timeout, + "extra_context": {}, + # broker options + "broker_parser": self._parser, + "broker_decoder": self._decoder, + # dependant args + "apply_types": self._is_apply_types, + "is_validate": self._is_validate, + "_get_dependant": self._get_dependant, + "_call_decorators": self._call_decorators, + } @property def _publisher_setup_extra(self) -> "AnyDict": - return {} + return { + "producer": self._producer, + } def publisher(self, *args: Any, **kwargs: Any) -> "PublisherProto[MsgType]": pub = super().publisher(*args, **kwargs) @@ -335,7 +335,8 @@ async def publish( """Publish message directly.""" assert producer, NOT_CONNECTED_YET # nosec B101 - publish: "AsyncFunc" = producer.publish + publish = producer.publish + for m in self._middlewares: publish = partial(m(None).publish_scope, publish) diff --git a/faststream/broker/subscriber/proto.py b/faststream/broker/subscriber/proto.py index fa19428fde..545c5fc169 100644 --- a/faststream/broker/subscriber/proto.py +++ b/faststream/broker/subscriber/proto.py @@ -31,7 +31,7 @@ class SubscriberProto( calls: List["HandlerItem[MsgType]"] running: bool - _broker_dependecies: Iterable["Depends"] + _broker_dependencies: Iterable["Depends"] _broker_middlewares: Iterable["BrokerMiddleware[MsgType]"] _producer: Optional["ProducerProto"] diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py index 5d0dd886dd..a2e9d1aa58 100644 --- a/faststream/broker/subscriber/usecase.py +++ b/faststream/broker/subscriber/usecase.py @@ -86,7 +86,7 @@ class SubscriberUsecase( extra_context: "AnyDict" graceful_timeout: Optional[float] - _broker_dependecies: Iterable["Depends"] + _broker_dependencies: Iterable["Depends"] _call_options: Optional["_CallOptions"] def __init__( @@ -117,7 +117,7 @@ def __init__( self.lock = sync_fake_context() # Setup in include - self._broker_dependecies = broker_dependencies + self._broker_dependencies = broker_dependencies self._broker_middlewares = broker_middlewares # register in setup later @@ -141,7 +141,7 @@ def setup( # type: ignore[override] logger: Optional["LoggerProto"], producer: Optional[ProducerProto], graceful_timeout: Optional[float], - extra_context: Optional["AnyDict"], + extra_context: "AnyDict", # broker options broker_parser: Optional["CustomCallable"], broker_decoder: Optional["CustomCallable"], @@ -155,7 +155,7 @@ def setup( # type: ignore[override] self._producer = producer self.graceful_timeout = graceful_timeout - self.extra_context = extra_context or {} + self.extra_context = extra_context self.watcher = get_watcher_context(logger, self._no_ack, self._retry) @@ -181,7 +181,7 
@@ def setup( # type: ignore[override] is_validate=is_validate, _get_dependant=_get_dependant, _call_decorators=_call_decorators, - broker_dependencies=self._broker_dependecies, + broker_dependencies=self._broker_dependencies, ) call.handler.refresh(with_mock=False) diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index 9f31fbbb5e..30c97ae298 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -472,6 +472,7 @@ async def start(self) -> None: @property def _subscriber_setup_extra(self) -> "AnyDict": return { + **super()._subscriber_setup_extra, "client_id": self.client_id, "builder": self._connection, } diff --git a/faststream/confluent/opentelemetry/__init__.py b/faststream/confluent/opentelemetry/__init__.py new file mode 100644 index 0000000000..eb3bbafc74 --- /dev/null +++ b/faststream/confluent/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +from faststream.confluent.opentelemetry.middleware import KafkaTelemetryMiddleware + +__all__ = ("KafkaTelemetryMiddleware",) diff --git a/faststream/confluent/opentelemetry/middleware.py b/faststream/confluent/opentelemetry/middleware.py new file mode 100644 index 0000000000..d8e5906dd3 --- /dev/null +++ b/faststream/confluent/opentelemetry/middleware.py @@ -0,0 +1,26 @@ +from typing import Optional + +from opentelemetry.metrics import Meter, MeterProvider +from opentelemetry.trace import TracerProvider + +from faststream.confluent.opentelemetry.provider import ( + telemetry_attributes_provider_factory, +) +from faststream.opentelemetry.middleware import TelemetryMiddleware + + +class KafkaTelemetryMiddleware(TelemetryMiddleware): + def __init__( + self, + *, + tracer_provider: Optional[TracerProvider] = None, + meter_provider: Optional[MeterProvider] = None, + meter: Optional[Meter] = None, + ) -> None: + super().__init__( + settings_provider_factory=telemetry_attributes_provider_factory, + tracer_provider=tracer_provider, + meter_provider=meter_provider, + meter=meter, + include_messages_counters=True, + ) diff --git a/faststream/confluent/opentelemetry/provider.py b/faststream/confluent/opentelemetry/provider.py new file mode 100644 index 0000000000..6add7330ca --- /dev/null +++ b/faststream/confluent/opentelemetry/provider.py @@ -0,0 +1,114 @@ +from typing import TYPE_CHECKING, Sequence, Tuple, Union, cast + +from opentelemetry.semconv.trace import SpanAttributes + +from faststream.broker.types import MsgType +from faststream.opentelemetry import TelemetrySettingsProvider +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME + +if TYPE_CHECKING: + from confluent_kafka import Message + + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict + + +class BaseConfluentTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]): + __slots__ = ("messaging_system",) + + def __init__(self) -> None: + self.messaging_system = "kafka" + + def get_publish_attrs_from_kwargs( + self, + kwargs: "AnyDict", + ) -> "AnyDict": + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["topic"], + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], + } + + if (partition := kwargs.get("partition")) is not None: + attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = partition + + if (key := kwargs.get("key")) is not None: + attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key + + return attrs + + @staticmethod + def 
get_publish_destination_name( + kwargs: "AnyDict", + ) -> str: + return cast(str, kwargs["topic"]) + + +class ConfluentTelemetrySettingsProvider( + BaseConfluentTelemetrySettingsProvider["Message"] +): + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[Message]", + ) -> "AnyDict": + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.body), + SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION: msg.raw_message.partition(), + SpanAttributes.MESSAGING_KAFKA_MESSAGE_OFFSET: msg.raw_message.offset(), + MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message.topic(), + } + + if (key := msg.raw_message.key()) is not None: + attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key + + return attrs + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[Message]", + ) -> str: + return cast(str, msg.raw_message.topic()) + + +class BatchConfluentTelemetrySettingsProvider( + BaseConfluentTelemetrySettingsProvider[Tuple["Message", ...]] +): + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[Tuple[Message, ...]]", + ) -> "AnyDict": + raw_message = msg.raw_message[0] + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT: len(msg.raw_message), + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len( + bytearray().join(cast(Sequence[bytes], msg.body)) + ), + SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION: raw_message.partition(), + MESSAGING_DESTINATION_PUBLISH_NAME: raw_message.topic(), + } + + return attrs + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[Tuple[Message, ...]]", + ) -> str: + return cast(str, msg.raw_message[0].topic()) + + +def telemetry_attributes_provider_factory( + msg: Union["Message", Sequence["Message"], None], +) -> Union[ + ConfluentTelemetrySettingsProvider, + BatchConfluentTelemetrySettingsProvider, +]: + if isinstance(msg, Sequence): + return BatchConfluentTelemetrySettingsProvider() + else: + return ConfluentTelemetrySettingsProvider() diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py index 8541ceb4f0..e743c96e6b 100644 --- a/faststream/confluent/parser.py +++ b/faststream/confluent/parser.py @@ -52,7 +52,7 @@ async def parse_message_batch( last = message[-1] for m in message: - body.append(m.value) + body.append(m.value()) batch_headers.append(_parse_msg_headers(m.headers())) headers = next(iter(batch_headers), {}) diff --git a/faststream/confluent/router.py b/faststream/confluent/router.py index 1dc8f9a218..33480a12ea 100644 --- a/faststream/confluent/router.py +++ b/faststream/confluent/router.py @@ -1,6 +1,7 @@ from typing import ( TYPE_CHECKING, Any, + Awaitable, Callable, Dict, Iterable, @@ -129,7 +130,10 @@ class KafkaRoute(SubscriberRoute): def __init__( self, call: Annotated[ - Callable[..., "SendableMessage"], + Union[ + Callable[..., "SendableMessage"], + Callable[..., Awaitable["SendableMessage"]], + ], Doc("Message handler function."), ], *topics: Annotated[ diff --git a/faststream/confluent/subscriber/usecase.py b/faststream/confluent/subscriber/usecase.py index e5e23ed710..28f7ece4e7 100644 --- a/faststream/confluent/subscriber/usecase.py +++ 
b/faststream/confluent/subscriber/usecase.py @@ -100,7 +100,7 @@ def setup( # type: ignore[override] logger: Optional["LoggerProto"], producer: Optional["ProducerProto"], graceful_timeout: Optional[float], - extra_context: Optional["AnyDict"], + extra_context: "AnyDict", # broker options broker_parser: Optional["CustomCallable"], broker_decoder: Optional["CustomCallable"], diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py index 2a29796860..de0b6980f1 100644 --- a/faststream/kafka/broker/broker.py +++ b/faststream/kafka/broker/broker.py @@ -534,6 +534,7 @@ def __init__( apply_types=apply_types, validate=validate, ) + self.client_id = client_id self._producer = None @@ -612,6 +613,7 @@ async def start(self) -> None: @property def _subscriber_setup_extra(self) -> "AnyDict": return { + **super()._subscriber_setup_extra, "client_id": self.client_id, "builder": self._connection, } diff --git a/faststream/kafka/opentelemetry/__init__.py b/faststream/kafka/opentelemetry/__init__.py new file mode 100644 index 0000000000..6bd75f272c --- /dev/null +++ b/faststream/kafka/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +from faststream.kafka.opentelemetry.middleware import KafkaTelemetryMiddleware + +__all__ = ("KafkaTelemetryMiddleware",) diff --git a/faststream/kafka/opentelemetry/middleware.py b/faststream/kafka/opentelemetry/middleware.py new file mode 100644 index 0000000000..2f06486c33 --- /dev/null +++ b/faststream/kafka/opentelemetry/middleware.py @@ -0,0 +1,26 @@ +from typing import Optional + +from opentelemetry.metrics import Meter, MeterProvider +from opentelemetry.trace import TracerProvider + +from faststream.kafka.opentelemetry.provider import ( + telemetry_attributes_provider_factory, +) +from faststream.opentelemetry.middleware import TelemetryMiddleware + + +class KafkaTelemetryMiddleware(TelemetryMiddleware): + def __init__( + self, + *, + tracer_provider: Optional[TracerProvider] = None, + meter_provider: Optional[MeterProvider] = None, + meter: Optional[Meter] = None, + ) -> None: + super().__init__( + settings_provider_factory=telemetry_attributes_provider_factory, + tracer_provider=tracer_provider, + meter_provider=meter_provider, + meter=meter, + include_messages_counters=True, + ) diff --git a/faststream/kafka/opentelemetry/provider.py b/faststream/kafka/opentelemetry/provider.py new file mode 100644 index 0000000000..b1702b6022 --- /dev/null +++ b/faststream/kafka/opentelemetry/provider.py @@ -0,0 +1,115 @@ +from typing import TYPE_CHECKING, Sequence, Tuple, Union, cast + +from opentelemetry.semconv.trace import SpanAttributes + +from faststream.broker.types import MsgType +from faststream.opentelemetry import TelemetrySettingsProvider +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME + +if TYPE_CHECKING: + from aiokafka import ConsumerRecord + + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict + + +class BaseKafkaTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]): + __slots__ = ("messaging_system",) + + def __init__(self) -> None: + self.messaging_system = "kafka" + + def get_publish_attrs_from_kwargs( + self, + kwargs: "AnyDict", + ) -> "AnyDict": + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["topic"], + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], + } + + if (partition := kwargs.get("partition")) is not None: + 
attrs[SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION] = partition + + if (key := kwargs.get("key")) is not None: + attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = key + + return attrs + + @staticmethod + def get_publish_destination_name( + kwargs: "AnyDict", + ) -> str: + return cast(str, kwargs["topic"]) + + +class KafkaTelemetrySettingsProvider( + BaseKafkaTelemetrySettingsProvider["ConsumerRecord"] +): + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[ConsumerRecord]", + ) -> "AnyDict": + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.body), + SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION: msg.raw_message.partition, + SpanAttributes.MESSAGING_KAFKA_MESSAGE_OFFSET: msg.raw_message.offset, + MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message.topic, + } + + if msg.raw_message.key is not None: + attrs[SpanAttributes.MESSAGING_KAFKA_MESSAGE_KEY] = msg.raw_message.key + + return attrs + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[ConsumerRecord]", + ) -> str: + return cast(str, msg.raw_message.topic) + + +class BatchKafkaTelemetrySettingsProvider( + BaseKafkaTelemetrySettingsProvider[Tuple["ConsumerRecord", ...]] +): + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[Tuple[ConsumerRecord, ...]]", + ) -> "AnyDict": + raw_message = msg.raw_message[0] + + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len( + bytearray().join(cast(Sequence[bytes], msg.body)) + ), + SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT: len(msg.raw_message), + SpanAttributes.MESSAGING_KAFKA_DESTINATION_PARTITION: raw_message.partition, + MESSAGING_DESTINATION_PUBLISH_NAME: raw_message.topic, + } + + return attrs + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[Tuple[ConsumerRecord, ...]]", + ) -> str: + return cast(str, msg.raw_message[0].topic) + + +def telemetry_attributes_provider_factory( + msg: Union["ConsumerRecord", Sequence["ConsumerRecord"], None], +) -> Union[ + KafkaTelemetrySettingsProvider, + BatchKafkaTelemetrySettingsProvider, +]: + if isinstance(msg, Sequence): + return BatchKafkaTelemetrySettingsProvider() + else: + return KafkaTelemetrySettingsProvider() diff --git a/faststream/kafka/router.py b/faststream/kafka/router.py index 98383512a5..44540ee4d5 100644 --- a/faststream/kafka/router.py +++ b/faststream/kafka/router.py @@ -1,6 +1,7 @@ from typing import ( TYPE_CHECKING, Any, + Awaitable, Callable, Dict, Iterable, @@ -132,7 +133,10 @@ class KafkaRoute(SubscriberRoute): def __init__( self, call: Annotated[ - Callable[..., "SendableMessage"], + Union[ + Callable[..., "SendableMessage"], + Callable[..., Awaitable["SendableMessage"]], + ], Doc( "Message handler function " "to wrap the same with `@broker.subscriber(...)` way." 
diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py index 818922c48e..650bae75d1 100644 --- a/faststream/kafka/subscriber/usecase.py +++ b/faststream/kafka/subscriber/usecase.py @@ -112,7 +112,7 @@ def setup( # type: ignore[override] logger: Optional["LoggerProto"], producer: Optional["ProducerProto"], graceful_timeout: Optional[float], - extra_context: Optional["AnyDict"], + extra_context: "AnyDict", # broker options broker_parser: Optional["CustomCallable"], broker_decoder: Optional["CustomCallable"], diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py index 2ccbe47bad..a2e196a535 100644 --- a/faststream/nats/broker/broker.py +++ b/faststream/nats/broker/broker.py @@ -717,7 +717,7 @@ async def publish( # type: ignore[override] Please, use `@broker.publisher(...)` or `broker.publisher(...).publish(...)` instead in a regular way. """ - publihs_kwargs = { + publish_kwargs = { "subject": subject, "headers": headers, "reply_to": reply_to, @@ -732,7 +732,7 @@ async def publish( # type: ignore[override] producer = self._producer else: producer = self._js_producer - publihs_kwargs.update( + publish_kwargs.update( { "stream": stream, "timeout": timeout, @@ -742,7 +742,7 @@ async def publish( # type: ignore[override] return await super().publish( message, producer=producer, - **publihs_kwargs, + **publish_kwargs, ) @override @@ -770,10 +770,7 @@ def setup_publisher( # type: ignore[override] elif self._producer is not None: producer = self._producer - publisher.setup( - producer=producer, - **self._publisher_setup_extra, - ) + super().setup_publisher(publisher, producer=producer) def _log_connection_broken( self, diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py index ff8c332d94..c0670e12c1 100644 --- a/faststream/nats/broker/registrator.py +++ b/faststream/nats/broker/registrator.py @@ -190,7 +190,7 @@ def subscriber( # type: ignore[override] subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( + AsyncAPISubscriber.create( # type: ignore[arg-type] subject=subject, queue=queue, stream=stream, diff --git a/faststream/nats/opentelemetry/__init__.py b/faststream/nats/opentelemetry/__init__.py new file mode 100644 index 0000000000..d97f2b5d38 --- /dev/null +++ b/faststream/nats/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +from faststream.nats.opentelemetry.middleware import NatsTelemetryMiddleware + +__all__ = ("NatsTelemetryMiddleware",) diff --git a/faststream/nats/opentelemetry/middleware.py b/faststream/nats/opentelemetry/middleware.py new file mode 100644 index 0000000000..cafd8787d8 --- /dev/null +++ b/faststream/nats/opentelemetry/middleware.py @@ -0,0 +1,24 @@ +from typing import Optional + +from opentelemetry.metrics import Meter, MeterProvider +from opentelemetry.trace import TracerProvider + +from faststream.nats.opentelemetry.provider import telemetry_attributes_provider_factory +from faststream.opentelemetry.middleware import TelemetryMiddleware + + +class NatsTelemetryMiddleware(TelemetryMiddleware): + def __init__( + self, + *, + tracer_provider: Optional[TracerProvider] = None, + meter_provider: Optional[MeterProvider] = None, + meter: Optional[Meter] = None, + ) -> None: + super().__init__( + settings_provider_factory=telemetry_attributes_provider_factory, + tracer_provider=tracer_provider, + meter_provider=meter_provider, + meter=meter, + include_messages_counters=True, + ) diff --git a/faststream/nats/opentelemetry/provider.py 
b/faststream/nats/opentelemetry/provider.py new file mode 100644 index 0000000000..7aefafed2c --- /dev/null +++ b/faststream/nats/opentelemetry/provider.py @@ -0,0 +1,114 @@ +from typing import TYPE_CHECKING, List, Optional, Sequence, Union, overload + +from opentelemetry.semconv.trace import SpanAttributes + +from faststream.__about__ import SERVICE_NAME +from faststream.broker.types import MsgType +from faststream.opentelemetry import TelemetrySettingsProvider +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME + +if TYPE_CHECKING: + from nats.aio.msg import Msg + + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict + + +class BaseNatsTelemetrySettingsProvider(TelemetrySettingsProvider[MsgType]): + __slots__ = ("messaging_system",) + + def __init__(self) -> None: + self.messaging_system = "nats" + + def get_publish_attrs_from_kwargs( + self, + kwargs: "AnyDict", + ) -> "AnyDict": + return { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs["subject"], + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], + } + + @staticmethod + def get_publish_destination_name( + kwargs: "AnyDict", + ) -> str: + subject: str = kwargs.get("subject", SERVICE_NAME) + return subject + + +class NatsTelemetrySettingsProvider(BaseNatsTelemetrySettingsProvider["Msg"]): + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[Msg]", + ) -> "AnyDict": + return { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.body), + MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message.subject, + } + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[Msg]", + ) -> str: + return msg.raw_message.subject + + +class NatsBatchTelemetrySettingsProvider( + BaseNatsTelemetrySettingsProvider[List["Msg"]] +): + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[List[Msg]]", + ) -> "AnyDict": + return { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.body), + SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT: len(msg.raw_message), + MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message[0].subject, + } + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[List[Msg]]", + ) -> str: + return msg.raw_message[0].subject + + +@overload +def telemetry_attributes_provider_factory( + msg: Optional["Msg"], +) -> NatsTelemetrySettingsProvider: ... + + +@overload +def telemetry_attributes_provider_factory( + msg: Sequence["Msg"], +) -> NatsBatchTelemetrySettingsProvider: ... + + +@overload +def telemetry_attributes_provider_factory( + msg: Union["Msg", Sequence["Msg"], None], +) -> Union[ + NatsTelemetrySettingsProvider, + NatsBatchTelemetrySettingsProvider, +]: ... 
+ + +def telemetry_attributes_provider_factory( + msg: Union["Msg", Sequence["Msg"], None], +) -> Union[ + NatsTelemetrySettingsProvider, + NatsBatchTelemetrySettingsProvider, +]: + if isinstance(msg, Sequence): + return NatsBatchTelemetrySettingsProvider() + else: + return NatsTelemetrySettingsProvider() diff --git a/faststream/nats/router.py b/faststream/nats/router.py index 5c1743460e..65f07947a9 100644 --- a/faststream/nats/router.py +++ b/faststream/nats/router.py @@ -1,4 +1,13 @@ -from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Optional, Union +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Iterable, + Optional, + Union, +) from nats.js import api from typing_extensions import Annotated, Doc, deprecated @@ -106,7 +115,10 @@ class NatsRoute(SubscriberRoute): def __init__( self, call: Annotated[ - Callable[..., "SendableMessage"], + Union[ + Callable[..., "SendableMessage"], + Callable[..., Awaitable["SendableMessage"]], + ], Doc( "Message handler function " "to wrap the same with `@broker.subscriber(...)` way." diff --git a/faststream/nats/subscriber/asyncapi.py b/faststream/nats/subscriber/asyncapi.py index e743d4bec5..7e5a6d4312 100644 --- a/faststream/nats/subscriber/asyncapi.py +++ b/faststream/nats/subscriber/asyncapi.py @@ -188,9 +188,9 @@ def create( # type: ignore[override] ) -class AsyncAPIDefaultSubscriber(AsyncAPISubscriber, DefaultHandler): +class AsyncAPIDefaultSubscriber(DefaultHandler, AsyncAPISubscriber): """One-message consumer with AsyncAPI methods.""" -class AsyncAPIBatchSubscriber(AsyncAPISubscriber, BatchHandler): +class AsyncAPIBatchSubscriber(BatchHandler, AsyncAPISubscriber): """Batch-message consumer with AsyncAPI methods.""" diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index f775530862..55a8340a7d 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -115,7 +115,7 @@ def setup( # type: ignore[override] logger: Optional["LoggerProto"], producer: Optional["ProducerProto"], graceful_timeout: Optional[float], - extra_context: Optional["AnyDict"], + extra_context: "AnyDict", # broker options broker_parser: Optional["CustomCallable"], broker_decoder: Optional["CustomCallable"], diff --git a/faststream/opentelemetry/__init__.py b/faststream/opentelemetry/__init__.py new file mode 100644 index 0000000000..401c1be077 --- /dev/null +++ b/faststream/opentelemetry/__init__.py @@ -0,0 +1,7 @@ +from faststream.opentelemetry.middleware import TelemetryMiddleware +from faststream.opentelemetry.provider import TelemetrySettingsProvider + +__all__ = ( + "TelemetryMiddleware", + "TelemetrySettingsProvider", +) diff --git a/faststream/opentelemetry/consts.py b/faststream/opentelemetry/consts.py new file mode 100644 index 0000000000..2436d568ee --- /dev/null +++ b/faststream/opentelemetry/consts.py @@ -0,0 +1,9 @@ +class MessageAction: + CREATE = "create" + PUBLISH = "publish" + PROCESS = "process" + RECEIVE = "receive" + + +ERROR_TYPE = "error.type" +MESSAGING_DESTINATION_PUBLISH_NAME = "messaging.destination_publish.name" diff --git a/faststream/opentelemetry/middleware.py b/faststream/opentelemetry/middleware.py new file mode 100644 index 0000000000..9a4ad34c10 --- /dev/null +++ b/faststream/opentelemetry/middleware.py @@ -0,0 +1,299 @@ +import time +from copy import copy +from typing import TYPE_CHECKING, Any, Callable, Optional, Type + +from opentelemetry import context, metrics, propagate, trace +from opentelemetry.semconv.trace import 
SpanAttributes + +from faststream import BaseMiddleware +from faststream.opentelemetry.consts import ( + ERROR_TYPE, + MESSAGING_DESTINATION_PUBLISH_NAME, + MessageAction, +) +from faststream.opentelemetry.provider import TelemetrySettingsProvider + +if TYPE_CHECKING: + from types import TracebackType + + from opentelemetry.context import Context + from opentelemetry.metrics import Meter, MeterProvider + from opentelemetry.trace import Span, Tracer, TracerProvider + + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict, AsyncFunc, AsyncFuncAny + + +_OTEL_SCHEMA = "https://opentelemetry.io/schemas/1.11.0" + + +def _create_span_name(destination: str, action: str) -> str: + return f"{destination} {action}" + + +class _MetricsContainer: + __slots__ = ( + "include_messages_counters", + "publish_duration", + "publish_counter", + "process_duration", + "process_counter", + ) + + def __init__(self, meter: "Meter", include_messages_counters: bool) -> None: + self.include_messages_counters = include_messages_counters + + self.publish_duration = meter.create_histogram( + name="messaging.publish.duration", + unit="s", + description="Measures the duration of publish operation.", + ) + self.process_duration = meter.create_histogram( + name="messaging.process.duration", + unit="s", + description="Measures the duration of process operation.", + ) + + if include_messages_counters: + self.process_counter = meter.create_counter( + name="messaging.process.messages", + unit="message", + description="Measures the number of processed messages.", + ) + self.publish_counter = meter.create_counter( + name="messaging.publish.messages", + unit="message", + description="Measures the number of published messages.", + ) + + def observe_publish( + self, attrs: "AnyDict", duration: float, msg_count: int + ) -> None: + self.publish_duration.record( + amount=duration, + attributes=attrs, + ) + if self.include_messages_counters: + counter_attrs = copy(attrs) + counter_attrs.pop(ERROR_TYPE, None) + self.publish_counter.add( + amount=msg_count, + attributes=counter_attrs, + ) + + def observe_consume( + self, attrs: "AnyDict", duration: float, msg_count: int + ) -> None: + self.process_duration.record( + amount=duration, + attributes=attrs, + ) + if self.include_messages_counters: + counter_attrs = copy(attrs) + counter_attrs.pop(ERROR_TYPE, None) + self.process_counter.add( + amount=msg_count, + attributes=counter_attrs, + ) + + +class BaseTelemetryMiddleware(BaseMiddleware): + def __init__( + self, + *, + tracer: "Tracer", + settings_provider_factory: Callable[[Any], TelemetrySettingsProvider[Any]], + metrics_container: _MetricsContainer, + msg: Optional[Any] = None, + ) -> None: + self.msg = msg + + self._tracer = tracer + self._metrics = metrics_container + self._current_span: Optional[Span] = None + self._origin_context: Optional[Context] = None + self.__settings_provider = settings_provider_factory(msg) + + async def publish_scope( + self, + call_next: "AsyncFunc", + msg: Any, + *args: Any, + **kwargs: Any, + ) -> Any: + provider = self.__settings_provider + + headers = kwargs.pop("headers", {}) or {} + current_context = context.get_current() + destination_name = provider.get_publish_destination_name(kwargs) + + trace_attributes = provider.get_publish_attrs_from_kwargs(kwargs) + metrics_attributes = { + SpanAttributes.MESSAGING_SYSTEM: provider.messaging_system, + SpanAttributes.MESSAGING_DESTINATION_NAME: destination_name, + } + + # NOTE: if batch with single message? 
+ if (msg_count := len((msg, *args))) > 1: + trace_attributes[SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT] = msg_count + + if self._current_span and self._current_span.is_recording(): + current_context = trace.set_span_in_context( + self._current_span, current_context + ) + propagate.inject(headers, context=self._origin_context) + + else: + create_span = self._tracer.start_span( + name=_create_span_name(destination_name, MessageAction.CREATE), + kind=trace.SpanKind.PRODUCER, + attributes=trace_attributes, + ) + current_context = trace.set_span_in_context(create_span) + propagate.inject(headers, context=current_context) + create_span.end() + + start_time = time.perf_counter() + + try: + with self._tracer.start_as_current_span( + name=_create_span_name(destination_name, MessageAction.PUBLISH), + kind=trace.SpanKind.PRODUCER, + attributes=trace_attributes, + context=current_context, + ) as span: + span.set_attribute( + SpanAttributes.MESSAGING_OPERATION, MessageAction.PUBLISH + ) + result = await call_next(msg, *args, headers=headers, **kwargs) + + except Exception as e: + metrics_attributes[ERROR_TYPE] = type(e).__name__ + raise + + finally: + duration = time.perf_counter() - start_time + self._metrics.observe_publish(metrics_attributes, duration, msg_count) + + return result + + async def consume_scope( + self, + call_next: "AsyncFuncAny", + msg: "StreamMessage[Any]", + ) -> Any: + provider = self.__settings_provider + + current_context = propagate.extract(msg.headers) + destination_name = provider.get_consume_destination_name(msg) + + trace_attributes = provider.get_consume_attrs_from_message(msg) + metrics_attributes = { + SpanAttributes.MESSAGING_SYSTEM: provider.messaging_system, + MESSAGING_DESTINATION_PUBLISH_NAME: destination_name, + } + + if not len(current_context): + create_span = self._tracer.start_span( + name=_create_span_name(destination_name, MessageAction.CREATE), + kind=trace.SpanKind.CONSUMER, + attributes=trace_attributes, + ) + current_context = trace.set_span_in_context(create_span) + create_span.end() + + self._origin_context = current_context + start_time = time.perf_counter() + + try: + with self._tracer.start_as_current_span( + name=_create_span_name(destination_name, MessageAction.PROCESS), + kind=trace.SpanKind.CONSUMER, + context=current_context, + attributes=trace_attributes, + end_on_exit=False, + ) as span: + span.set_attribute( + SpanAttributes.MESSAGING_OPERATION, MessageAction.PROCESS + ) + self._current_span = span + new_context = trace.set_span_in_context(span, current_context) + token = context.attach(new_context) + result = await call_next(msg) + context.detach(token) + + except Exception as e: + metrics_attributes[ERROR_TYPE] = type(e).__name__ + raise + + finally: + duration = time.perf_counter() - start_time + msg_count = trace_attributes.get( + SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT, 1 + ) + self._metrics.observe_consume(metrics_attributes, duration, msg_count) + + return result + + async def after_processed( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_val: Optional[BaseException] = None, + exc_tb: Optional["TracebackType"] = None, + ) -> Optional[bool]: + if self._current_span and self._current_span.is_recording(): + self._current_span.end() + return False + + +class TelemetryMiddleware: + # NOTE: should it be class or function? 
+ __slots__ = ( + "_tracer", + "_meter", + "_metrics", + "_settings_provider_factory", + ) + + def __init__( + self, + *, + settings_provider_factory: Callable[[Any], TelemetrySettingsProvider[Any]], + tracer_provider: Optional["TracerProvider"] = None, + meter_provider: Optional["MeterProvider"] = None, + meter: Optional["Meter"] = None, + include_messages_counters: bool = False, + ) -> None: + self._tracer = _get_tracer(tracer_provider) + self._meter = _get_meter(meter_provider, meter) + self._metrics = _MetricsContainer(self._meter, include_messages_counters) + self._settings_provider_factory = settings_provider_factory + + def __call__(self, msg: Optional[Any]) -> BaseMiddleware: + return BaseTelemetryMiddleware( + tracer=self._tracer, + metrics_container=self._metrics, + settings_provider_factory=self._settings_provider_factory, + msg=msg, + ) + + +def _get_meter( + meter_provider: Optional["MeterProvider"] = None, + meter: Optional["Meter"] = None, +) -> "Meter": + if meter is None: + return metrics.get_meter( + __name__, + meter_provider=meter_provider, + schema_url=_OTEL_SCHEMA, + ) + return meter + + +def _get_tracer(tracer_provider: Optional["TracerProvider"] = None) -> "Tracer": + return trace.get_tracer( + __name__, + tracer_provider=tracer_provider, + schema_url=_OTEL_SCHEMA, + ) diff --git a/faststream/opentelemetry/provider.py b/faststream/opentelemetry/provider.py new file mode 100644 index 0000000000..90232d45ab --- /dev/null +++ b/faststream/opentelemetry/provider.py @@ -0,0 +1,31 @@ +from typing import TYPE_CHECKING, Protocol + +from faststream.broker.types import MsgType + +if TYPE_CHECKING: + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict + + +class TelemetrySettingsProvider(Protocol[MsgType]): + messaging_system: str + + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[MsgType]", + ) -> "AnyDict": ... + + def get_consume_destination_name( + self, + msg: "StreamMessage[MsgType]", + ) -> str: ... + + def get_publish_attrs_from_kwargs( + self, + kwargs: "AnyDict", + ) -> "AnyDict": ... + + def get_publish_destination_name( + self, + kwargs: "AnyDict", + ) -> str: ... 
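(Illustrative sketch, not part of the patch: the telemetry middlewares introduced above are plain middleware factories, so — assuming the brokers' existing `middlewares=` argument, which accepts callables returning a `BaseMiddleware` — wiring one in might look roughly like this; the subject name is made up.)

from faststream import FastStream
from faststream.nats import NatsBroker
from faststream.nats.opentelemetry import NatsTelemetryMiddleware

# NatsTelemetryMiddleware() acts as a factory: the broker calls it per message
# and gets a BaseTelemetryMiddleware that opens spans and records the
# messaging.publish.duration / messaging.process.duration metrics defined above.
broker = NatsBroker(middlewares=(NatsTelemetryMiddleware(),))
app = FastStream(broker)

@broker.subscriber("demo-subject")
async def handler(msg: str) -> None:
    # the "<subject> process" span is opened around this call by consume_scope
    ...

With no explicit tracer_provider or meter_provider passed in, _get_tracer and _get_meter above fall back to the globally configured OpenTelemetry providers.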
diff --git a/faststream/rabbit/broker/broker.py b/faststream/rabbit/broker/broker.py index b0bc98c42f..f7ec134f86 100644 --- a/faststream/rabbit/broker/broker.py +++ b/faststream/rabbit/broker/broker.py @@ -278,6 +278,7 @@ def __init__( @property def _subscriber_setup_extra(self) -> "AnyDict": return { + **super()._subscriber_setup_extra, "app_id": self.app_id, "virtual_host": self.virtual_host, "declarer": self.declarer, @@ -286,6 +287,7 @@ def _subscriber_setup_extra(self) -> "AnyDict": @property def _publisher_setup_extra(self) -> "AnyDict": return { + **super()._publisher_setup_extra, "app_id": self.app_id, "virtual_host": self.virtual_host, } diff --git a/faststream/rabbit/opentelemetry/__init__.py b/faststream/rabbit/opentelemetry/__init__.py new file mode 100644 index 0000000000..f850b09125 --- /dev/null +++ b/faststream/rabbit/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +from faststream.rabbit.opentelemetry.middleware import RabbitTelemetryMiddleware + +__all__ = ("RabbitTelemetryMiddleware",) diff --git a/faststream/rabbit/opentelemetry/middleware.py b/faststream/rabbit/opentelemetry/middleware.py new file mode 100644 index 0000000000..29a553a7f0 --- /dev/null +++ b/faststream/rabbit/opentelemetry/middleware.py @@ -0,0 +1,24 @@ +from typing import Optional + +from opentelemetry.metrics import Meter, MeterProvider +from opentelemetry.trace import TracerProvider + +from faststream.opentelemetry.middleware import TelemetryMiddleware +from faststream.rabbit.opentelemetry.provider import RabbitTelemetrySettingsProvider + + +class RabbitTelemetryMiddleware(TelemetryMiddleware): + def __init__( + self, + *, + tracer_provider: Optional[TracerProvider] = None, + meter_provider: Optional[MeterProvider] = None, + meter: Optional[Meter] = None, + ) -> None: + super().__init__( + settings_provider_factory=lambda _: RabbitTelemetrySettingsProvider(), + tracer_provider=tracer_provider, + meter_provider=meter_provider, + meter=meter, + include_messages_counters=False, + ) diff --git a/faststream/rabbit/opentelemetry/provider.py b/faststream/rabbit/opentelemetry/provider.py new file mode 100644 index 0000000000..da62338e70 --- /dev/null +++ b/faststream/rabbit/opentelemetry/provider.py @@ -0,0 +1,62 @@ +from typing import TYPE_CHECKING + +from opentelemetry.semconv.trace import SpanAttributes + +from faststream.opentelemetry import TelemetrySettingsProvider +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME + +if TYPE_CHECKING: + from aio_pika import IncomingMessage + + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict + + +class RabbitTelemetrySettingsProvider(TelemetrySettingsProvider["IncomingMessage"]): + __slots__ = ("messaging_system",) + + def __init__(self) -> None: + self.messaging_system = "rabbitmq" + + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[IncomingMessage]", + ) -> "AnyDict": + return { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.body), + SpanAttributes.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY: msg.raw_message.routing_key, + "messaging.rabbitmq.message.delivery_tag": msg.raw_message.delivery_tag, + MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message.exchange, + } + + @staticmethod + def get_consume_destination_name( + msg: "StreamMessage[IncomingMessage]", + ) -> str: + exchange = 
msg.raw_message.exchange or "default" + routing_key = msg.raw_message.routing_key + return f"{exchange}.{routing_key}" + + def get_publish_attrs_from_kwargs( + self, + kwargs: "AnyDict", + ) -> "AnyDict": + return { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_DESTINATION_NAME: kwargs.get("exchange") or "", + SpanAttributes.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY: kwargs[ + "routing_key" + ], + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], + } + + @staticmethod + def get_publish_destination_name( + kwargs: "AnyDict", + ) -> str: + exchange: str = kwargs.get("exchange") or "default" + routing_key: str = kwargs["routing_key"] + return f"{exchange}.{routing_key}" diff --git a/faststream/rabbit/publisher/usecase.py b/faststream/rabbit/publisher/usecase.py index 505bcf2268..7ac5dc6389 100644 --- a/faststream/rabbit/publisher/usecase.py +++ b/faststream/rabbit/publisher/usecase.py @@ -97,9 +97,10 @@ class LogicPublisher( ): """A class to represent a RabbitMQ publisher.""" - _producer: Optional["AioPikaFastProducer"] app_id: Optional[str] + _producer: Optional["AioPikaFastProducer"] + def __init__( self, *, diff --git a/faststream/rabbit/router.py b/faststream/rabbit/router.py index 6892852694..0890433347 100644 --- a/faststream/rabbit/router.py +++ b/faststream/rabbit/router.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Iterable, Optional, Union from typing_extensions import Annotated, Doc, deprecated @@ -177,7 +177,10 @@ class RabbitRoute(SubscriberRoute): def __init__( self, call: Annotated[ - Callable[..., "AioPikaSendableMessage"], + Union[ + Callable[..., "AioPikaSendableMessage"], + Callable[..., Awaitable["AioPikaSendableMessage"]], + ], Doc( "Message handler function " "to wrap the same with `@broker.subscriber(...)` way." 
diff --git a/faststream/rabbit/subscriber/usecase.py b/faststream/rabbit/subscriber/usecase.py index d2ca4480a2..c0700dcc82 100644 --- a/faststream/rabbit/subscriber/usecase.py +++ b/faststream/rabbit/subscriber/usecase.py @@ -104,7 +104,7 @@ def setup( # type: ignore[override] logger: Optional["LoggerProto"], producer: Optional["AioPikaFastProducer"], graceful_timeout: Optional[float], - extra_context: Optional["AnyDict"], + extra_context: "AnyDict", # broker options broker_parser: Optional["CustomCallable"], broker_decoder: Optional["CustomCallable"], diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py index 7c0cefe09a..3164c7a01b 100644 --- a/faststream/redis/broker/broker.py +++ b/faststream/redis/broker/broker.py @@ -263,7 +263,7 @@ async def connect( # type: ignore[override] **kwargs, } else: - connect_kwargs = {**kwargs} + connect_kwargs = dict(kwargs).copy() return await super().connect(**connect_kwargs) @@ -359,6 +359,7 @@ async def start(self) -> None: @property def _subscriber_setup_extra(self) -> "AnyDict": return { + **super()._subscriber_setup_extra, "connection": self._connection, } diff --git a/faststream/redis/opentelemetry/__init__.py b/faststream/redis/opentelemetry/__init__.py new file mode 100644 index 0000000000..aea6429256 --- /dev/null +++ b/faststream/redis/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +from faststream.redis.opentelemetry.middleware import RedisTelemetryMiddleware + +__all__ = ("RedisTelemetryMiddleware",) diff --git a/faststream/redis/opentelemetry/middleware.py b/faststream/redis/opentelemetry/middleware.py new file mode 100644 index 0000000000..54c0024143 --- /dev/null +++ b/faststream/redis/opentelemetry/middleware.py @@ -0,0 +1,24 @@ +from typing import Optional + +from opentelemetry.metrics import Meter, MeterProvider +from opentelemetry.trace import TracerProvider + +from faststream.opentelemetry.middleware import TelemetryMiddleware +from faststream.redis.opentelemetry.provider import RedisTelemetrySettingsProvider + + +class RedisTelemetryMiddleware(TelemetryMiddleware): + def __init__( + self, + *, + tracer_provider: Optional[TracerProvider] = None, + meter_provider: Optional[MeterProvider] = None, + meter: Optional[Meter] = None, + ) -> None: + super().__init__( + settings_provider_factory=lambda _: RedisTelemetrySettingsProvider(), + tracer_provider=tracer_provider, + meter_provider=meter_provider, + meter=meter, + include_messages_counters=True, + ) diff --git a/faststream/redis/opentelemetry/provider.py b/faststream/redis/opentelemetry/provider.py new file mode 100644 index 0000000000..1fcfd4e9c3 --- /dev/null +++ b/faststream/redis/opentelemetry/provider.py @@ -0,0 +1,62 @@ +from typing import TYPE_CHECKING, Sized, cast + +from opentelemetry.semconv.trace import SpanAttributes + +from faststream.opentelemetry import TelemetrySettingsProvider +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME + +if TYPE_CHECKING: + from faststream.broker.message import StreamMessage + from faststream.types import AnyDict + + +class RedisTelemetrySettingsProvider(TelemetrySettingsProvider["AnyDict"]): + __slots__ = ("messaging_system",) + + def __init__(self) -> None: + self.messaging_system = "redis" + + def get_consume_attrs_from_message( + self, + msg: "StreamMessage[AnyDict]", + ) -> "AnyDict": + attrs = { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_MESSAGE_ID: msg.message_id, + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: msg.correlation_id, + 
SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.body), + MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message["channel"], + } + + if cast(str, msg.raw_message.get("type", "")).startswith("b"): + attrs[SpanAttributes.MESSAGING_BATCH_MESSAGE_COUNT] = len( + cast(Sized, msg.decoded_body) + ) + + return attrs + + def get_consume_destination_name( + self, + msg: "StreamMessage[AnyDict]", + ) -> str: + return self._get_destination(msg.raw_message) + + def get_publish_attrs_from_kwargs( + self, + kwargs: "AnyDict", + ) -> "AnyDict": + return { + SpanAttributes.MESSAGING_SYSTEM: self.messaging_system, + SpanAttributes.MESSAGING_DESTINATION_NAME: self._get_destination(kwargs), + SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], + } + + def get_publish_destination_name( + self, + kwargs: "AnyDict", + ) -> str: + return self._get_destination(kwargs) + + @staticmethod + def _get_destination(kwargs: "AnyDict") -> str: + return kwargs.get("channel") or kwargs.get("list") or kwargs.get("stream") or "" diff --git a/faststream/redis/publisher/producer.py b/faststream/redis/publisher/producer.py index d5f6f23f9b..ce807aeab8 100644 --- a/faststream/redis/publisher/producer.py +++ b/faststream/redis/publisher/producer.py @@ -126,13 +126,14 @@ async def publish_batch( *msgs: "SendableMessage", list: str, correlation_id: str, + headers: Optional["AnyDict"] = None, ) -> None: batch = ( RawMessage.encode( message=msg, correlation_id=correlation_id, reply_to=None, - headers=None, + headers=headers, ) for msg in msgs ) diff --git a/faststream/redis/router.py b/faststream/redis/router.py index 416669800a..635f86083e 100644 --- a/faststream/redis/router.py +++ b/faststream/redis/router.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Iterable, Optional, Union from typing_extensions import Annotated, Doc, deprecated @@ -99,7 +99,10 @@ class RedisRoute(SubscriberRoute): def __init__( self, call: Annotated[ - Callable[..., "SendableMessage"], + Union[ + Callable[..., "SendableMessage"], + Callable[..., Awaitable["SendableMessage"]], + ], Doc( "Message handler function " "to wrap the same with `@broker.subscriber(...)` way." 
diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py index e9bdde2ca0..58b2745b14 100644 --- a/faststream/redis/subscriber/usecase.py +++ b/faststream/redis/subscriber/usecase.py @@ -103,7 +103,7 @@ def setup( # type: ignore[override] logger: Optional["LoggerProto"], producer: Optional["ProducerProto"], graceful_timeout: Optional[float], - extra_context: Optional["AnyDict"], + extra_context: "AnyDict", # broker options broker_parser: Optional["CustomCallable"], broker_decoder: Optional["CustomCallable"], diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py index 7d4a60da4e..1beedc4dc0 100644 --- a/faststream/redis/testing.py +++ b/faststream/redis/testing.py @@ -183,6 +183,7 @@ async def publish_batch( self, *msgs: "SendableMessage", list: str, + headers: Optional["AnyDict"] = None, correlation_id: Optional[str] = None, ) -> None: correlation_id = correlation_id or gen_cor_id() @@ -200,6 +201,7 @@ async def publish_batch( build_message( m, correlation_id=correlation_id, + headers=headers, ) for m in msgs ], diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py index c7d186d34c..f8925210a4 100644 --- a/faststream/testing/broker.py +++ b/faststream/testing/broker.py @@ -214,7 +214,9 @@ async def call_handler( if rpc: message_body, content_type = encode_message(result) - msg_to_publish = StreamMessage(raw_message=None, body=message_body, content_type=content_type) + msg_to_publish = StreamMessage( + raw_message=None, body=message_body, content_type=content_type + ) consumed_data = decode_message(msg_to_publish) return consumed_data diff --git a/pyproject.toml b/pyproject.toml index 505e0de0dc..0ef08d8502 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,7 +73,11 @@ nats = ["nats-py>=2.3.1,<=3.0.0"] redis = ["redis>=5.0.0,<6.0.0"] +otel = ["opentelemetry-sdk>=1.24.0,<2.0.0"] + # dev dependencies +optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel]"] + devdocs = [ "mkdocs-material==9.5.21", "mkdocs-static-i18n==1.2.3", @@ -92,7 +96,7 @@ devdocs = [ ] types = [ - "faststream[rabbit,confluent,kafka,nats,redis]", + "faststream[optionals]", "mypy==1.10.0", # mypy extensions "types-PyYAML", @@ -130,7 +134,7 @@ testing = [ ] dev = [ - "faststream[rabbit,kafka,confluent,nats,redis,lint,testing,devdocs]", + "faststream[optionals,lint,testing,devdocs]", "pre-commit==3.5.0; python_version < '3.9'", "pre-commit==3.7.0; python_version >= '3.9'", "detect-secrets==1.5.0", diff --git a/tests/asyncapi/confluent/__init__.py b/tests/asyncapi/confluent/__init__.py index e69de29bb2..c4a1803708 100644 --- a/tests/asyncapi/confluent/__init__.py +++ b/tests/asyncapi/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/asyncapi/kafka/__init__.py b/tests/asyncapi/kafka/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/asyncapi/kafka/__init__.py +++ b/tests/asyncapi/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/asyncapi/nats/__init__.py b/tests/asyncapi/nats/__init__.py index e69de29bb2..87ead90ee6 100644 --- a/tests/asyncapi/nats/__init__.py +++ b/tests/asyncapi/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/asyncapi/rabbit/__init__.py b/tests/asyncapi/rabbit/__init__.py index e69de29bb2..ebec43fcd5 100644 --- a/tests/asyncapi/rabbit/__init__.py +++ b/tests/asyncapi/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") 
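(Another illustrative sketch, not from the patch: the `headers` parameter threaded through the Redis `publish_batch` producer and test client above could be exercised in-memory roughly as below. It assumes the broker-level `RedisBroker.publish_batch` exposes the same `headers` argument, which is not shown in this hunk; the list name is made up.)

import asyncio

from faststream.redis import ListSub, RedisBroker, TestRedisBroker

broker = RedisBroker()

@broker.subscriber(list=ListSub("demo-list", batch=True))
async def handler(msgs) -> None:
    print(msgs)

async def main() -> None:
    async with TestRedisBroker(broker) as br:
        # assumption: broker.publish_batch forwards headers to the producer,
        # so every message of the batch is encoded with {"custom": "1"}
        await br.publish_batch("a", "b", list="demo-list", headers={"custom": "1"})

asyncio.run(main())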
diff --git a/tests/asyncapi/redis/__init__.py b/tests/asyncapi/redis/__init__.py index e69de29bb2..4752ef19b1 100644 --- a/tests/asyncapi/redis/__init__.py +++ b/tests/asyncapi/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/brokers/base/consume.py b/tests/brokers/base/consume.py index fc3ad0956d..7b7b5bdd6b 100644 --- a/tests/brokers/base/consume.py +++ b/tests/brokers/base/consume.py @@ -1,4 +1,5 @@ import asyncio +from abc import abstractmethod from typing import Any, ClassVar, Dict from unittest.mock import MagicMock @@ -15,25 +16,29 @@ class BrokerConsumeTestcase: timeout: int = 3 subscriber_kwargs: ClassVar[Dict[str, Any]] = {} - @pytest.fixture() - def consume_broker(self, broker: BrokerUsecase): + @abstractmethod + def get_broker(self, broker: BrokerUsecase) -> BrokerUsecase[Any, Any]: + raise NotImplementedError + + def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]: return broker async def test_consume( self, queue: str, - consume_broker: BrokerUsecase, event: asyncio.Event, ): + consume_broker = self.get_broker() + @consume_broker.subscriber(queue, **self.subscriber_kwargs) def subscriber(m): event.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -44,9 +49,10 @@ def subscriber(m): async def test_consume_from_multi( self, queue: str, - consume_broker: BrokerUsecase, mock: MagicMock, ): + consume_broker = self.get_broker() + consume = asyncio.Event() consume2 = asyncio.Event() @@ -59,12 +65,12 @@ def subscriber(m): else: consume2.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume_broker.publish("hello", queue + "1")), + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue + "1")), asyncio.create_task(consume.wait()), asyncio.create_task(consume2.wait()), ), @@ -78,9 +84,10 @@ def subscriber(m): async def test_consume_double( self, queue: str, - consume_broker: BrokerUsecase, mock: MagicMock, ): + consume_broker = self.get_broker() + consume = asyncio.Event() consume2 = asyncio.Event() @@ -92,12 +99,12 @@ async def handler(m): else: consume2.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(consume.wait()), asyncio.create_task(consume2.wait()), ), @@ -111,9 +118,10 @@ async def handler(m): async def test_different_consume( self, queue: str, - consume_broker: BrokerUsecase, mock: MagicMock, ): + consume_broker = self.get_broker() + consume = asyncio.Event() consume2 = asyncio.Event() @@ -129,12 +137,12 @@ def handler2(m): mock.handler2() consume2.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - 
asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(consume_broker.publish("hello", another_topic)), + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(br.publish("hello", another_topic)), asyncio.create_task(consume.wait()), asyncio.create_task(consume2.wait()), ), @@ -149,9 +157,10 @@ def handler2(m): async def test_consume_with_filter( self, queue: str, - consume_broker: BrokerUsecase, mock: MagicMock, ): + consume_broker = self.get_broker() + consume = asyncio.Event() consume2 = asyncio.Event() @@ -169,14 +178,12 @@ async def handler2(m): mock.handler2(m) consume2.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task( - consume_broker.publish({"msg": "hello"}, queue) - ), - asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(br.publish({"msg": "hello"}, queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(consume.wait()), asyncio.create_task(consume2.wait()), ), @@ -191,10 +198,11 @@ async def handler2(m): async def test_consume_validate_false( self, queue: str, - consume_broker: BrokerUsecase, event: asyncio.Event, mock: MagicMock, ): + consume_broker = self.get_broker() + consume_broker._is_apply_types = True consume_broker._is_validate = False @@ -209,38 +217,41 @@ async def handler(m: Foo, dep: int = Depends(dependency), broker=Context()): mock(m, dep, broker) event.set() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish({"x": 1}, queue)), - asyncio.create_task(event.wait()), - ), - timeout=self.timeout, - ) + async with self.patch_broker(consume_broker) as br: + await br.start() - assert event.is_set() - mock.assert_called_once_with({"x": 1}, "100", consume_broker) + await asyncio.wait( + ( + asyncio.create_task(br.publish({"x": 1}, queue)), + asyncio.create_task(event.wait()), + ), + timeout=self.timeout, + ) + + assert event.is_set() + mock.assert_called_once_with({"x": 1}, "100", consume_broker) async def test_dynamic_sub( self, queue: str, - consume_broker: BrokerUsecase, event: asyncio.Event, ): + consume_broker = self.get_broker() + def subscriber(m): event.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - sub = consume_broker.subscriber(queue, **self.subscriber_kwargs) + sub = br.subscriber(queue, **self.subscriber_kwargs) sub(subscriber) - consume_broker.setup_subscriber(sub) + br.setup_subscriber(sub) await sub.start() await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -257,27 +268,28 @@ class BrokerRealConsumeTestcase(BrokerConsumeTestcase): async def test_stop_consume_exc( self, queue: str, - consume_broker: BrokerUsecase, event: asyncio.Event, mock: MagicMock, ): + consume_broker = self.get_broker() + @consume_broker.subscriber(queue, **self.subscriber_kwargs) def subscriber(m): mock() event.set() raise StopConsume() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, ) await 
asyncio.sleep(0.5) - await consume_broker.publish("hello", queue) + await br.publish("hello", queue) await asyncio.sleep(0.5) assert event.is_set() diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py index 4deb2a50ae..327f31627b 100644 --- a/tests/brokers/base/publish.py +++ b/tests/brokers/base/publish.py @@ -1,4 +1,5 @@ import asyncio +from abc import abstractmethod from dataclasses import asdict, dataclass from datetime import datetime from typing import Any, ClassVar, Dict, List, Tuple @@ -29,9 +30,12 @@ class BrokerPublishTestcase: timeout: int = 3 subscriber_kwargs: ClassVar[Dict[str, Any]] = {} - @pytest.fixture() - def pub_broker(self, full_broker): - return full_broker + @abstractmethod + def get_broker(self, apply_types: bool = False) -> BrokerUsecase[Any, Any]: + raise NotImplementedError + + def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]: + return broker @pytest.mark.asyncio() @pytest.mark.parametrize( @@ -143,7 +147,6 @@ def pub_broker(self, full_broker): ) async def test_serialize( self, - pub_broker: BrokerUsecase, mock: Mock, queue: str, message, @@ -151,17 +154,19 @@ async def test_serialize( expected_message, event, ): + pub_broker = self.get_broker(apply_types=True) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def handler(m: message_type, logger: Logger): event.set() mock(m) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish(message, queue)), + asyncio.create_task(br.publish(message, queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -172,18 +177,23 @@ async def handler(m: message_type, logger: Logger): @pytest.mark.asyncio() async def test_unwrap_dict( - self, mock: Mock, queue: str, pub_broker: BrokerUsecase, event + self, + mock: Mock, + queue: str, + event, ): + pub_broker = self.get_broker(apply_types=True) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def m(a: int, b: int, logger: Logger): event.set() mock({"a": a, "b": b}) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish({"a": 1, "b": 1.0}, queue)), + asyncio.create_task(br.publish({"a": 1, "b": 1.0}, queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -199,18 +209,23 @@ async def m(a: int, b: int, logger: Logger): @pytest.mark.asyncio() async def test_unwrap_list( - self, mock: Mock, queue: str, pub_broker: BrokerUsecase, event: asyncio.Event + self, + mock: Mock, + queue: str, + event: asyncio.Event, ): + pub_broker = self.get_broker(apply_types=True) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def m(a: int, b: int, *args: Tuple[int, ...], logger: Logger): event.set() mock({"a": a, "b": b, "args": args}) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish([1, 1.0, 2.0, 3.0], queue)), + asyncio.create_task(br.publish([1, 1.0, 2.0, 3.0], queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -223,10 +238,11 @@ async def m(a: int, b: int, *args: Tuple[int, ...], logger: Logger): async def test_base_publisher( self, queue: str, - pub_broker: BrokerUsecase, event, mock, ): + pub_broker = self.get_broker(apply_types=True) + @pub_broker.subscriber(queue, 
**self.subscriber_kwargs) @pub_broker.publisher(queue + "resp") async def m(): @@ -237,11 +253,11 @@ async def resp(msg): event.set() mock(msg) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", queue)), + asyncio.create_task(br.publish("", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -254,10 +270,11 @@ async def resp(msg): async def test_publisher_object( self, queue: str, - pub_broker: BrokerUsecase, event, mock, ): + pub_broker = self.get_broker(apply_types=True) + publisher = pub_broker.publisher(queue + "resp") @publisher @@ -270,11 +287,11 @@ async def resp(msg): event.set() mock(msg) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", queue)), + asyncio.create_task(br.publish("", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -287,10 +304,11 @@ async def resp(msg): async def test_publish_manual( self, queue: str, - pub_broker: BrokerUsecase, event, mock, ): + pub_broker = self.get_broker(apply_types=True) + publisher = pub_broker.publisher(queue + "resp") @pub_broker.subscriber(queue, **self.subscriber_kwargs) @@ -302,11 +320,11 @@ async def resp(msg): event.set() mock(msg) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", queue)), + asyncio.create_task(br.publish("", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -317,8 +335,12 @@ async def resp(msg): @pytest.mark.asyncio() async def test_multiple_publishers( - self, queue: str, pub_broker: BrokerUsecase, mock + self, + queue: str, + mock, ): + pub_broker = self.get_broker(apply_types=True) + event = anyio.Event() event2 = anyio.Event() @@ -338,11 +360,11 @@ async def resp2(msg): event2.set() mock.resp2(msg) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", queue)), + asyncio.create_task(br.publish("", queue)), asyncio.create_task(event.wait()), asyncio.create_task(event2.wait()), ), @@ -356,8 +378,12 @@ async def resp2(msg): @pytest.mark.asyncio() async def test_reusable_publishers( - self, queue: str, pub_broker: BrokerUsecase, mock + self, + queue: str, + mock, ): + pub_broker = self.get_broker(apply_types=True) + consume = anyio.Event() consume2 = anyio.Event() @@ -381,12 +407,12 @@ async def resp(): consume2.set() mock() - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", queue)), - asyncio.create_task(pub_broker.publish("", queue + "2")), + asyncio.create_task(br.publish("", queue)), + asyncio.create_task(br.publish("", queue + "2")), asyncio.create_task(consume.wait()), asyncio.create_task(consume2.wait()), ), @@ -400,11 +426,12 @@ async def resp(): @pytest.mark.asyncio() async def test_reply_to( self, - pub_broker: BrokerUsecase, queue: str, event, mock, ): + pub_broker = self.get_broker(apply_types=True) + @pub_broker.subscriber(queue + "reply", **self.subscriber_kwargs) async def reply_handler(m): event.set() @@ -414,13 +441,13 @@ async def reply_handler(m): async def 
handler(m): return m - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await asyncio.wait( ( asyncio.create_task( - pub_broker.publish("Hello!", queue, reply_to=queue + "reply") + br.publish("Hello!", queue, reply_to=queue + "reply") ), asyncio.create_task(event.wait()), ), @@ -433,20 +460,21 @@ async def handler(m): @pytest.mark.asyncio() async def test_publisher_after_start( self, - pub_broker: BrokerUsecase, queue: str, event, mock, ): + pub_broker = self.get_broker(apply_types=True) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def handler(m): event.set() mock(m) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() - pub = pub_broker.publisher(queue) + pub = br.publisher(queue) await asyncio.wait( ( diff --git a/tests/brokers/base/router.py b/tests/brokers/base/router.py index d22f5e919d..1361f4c9b5 100644 --- a/tests/brokers/base/router.py +++ b/tests/brokers/base/router.py @@ -381,7 +381,7 @@ def subscriber(): ... pub_broker.include_routers(router) sub = next(iter(pub_broker._subscribers.values())) - assert len((*sub._broker_dependecies, *sub.calls[0].dependencies)) == 3 + assert len((*sub._broker_dependencies, *sub.calls[0].dependencies)) == 3 async def test_router_include_with_dependencies( self, @@ -402,7 +402,7 @@ def subscriber(): ... pub_broker.include_router(router, dependencies=(Depends(lambda: 1),)) sub = next(iter(pub_broker._subscribers.values())) - dependencies = (*sub._broker_dependecies, *sub.calls[0].dependencies) + dependencies = (*sub._broker_dependencies, *sub.calls[0].dependencies) assert len(dependencies) == 3, dependencies async def test_router_middlewares( diff --git a/tests/brokers/base/rpc.py b/tests/brokers/base/rpc.py index d4741b3db9..e544360bc5 100644 --- a/tests/brokers/base/rpc.py +++ b/tests/brokers/base/rpc.py @@ -1,4 +1,6 @@ import asyncio +from abc import abstractstaticmethod +from typing import Any from unittest.mock import MagicMock import anyio @@ -9,33 +11,40 @@ class BrokerRPCTestcase: - @pytest.fixture() - def rpc_broker(self, broker): + @abstractstaticmethod + def get_broker(self, apply_types: bool = False) -> BrokerUsecase[Any, Any]: + raise NotImplementedError + + def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]: return broker @pytest.mark.asyncio() - async def test_rpc(self, queue: str, rpc_broker: BrokerUsecase): + async def test_rpc(self, queue: str): + rpc_broker = self.get_broker() + @rpc_broker.subscriber(queue) async def m(m): # pragma: no cover return "1" - async with rpc_broker: - await rpc_broker.start() - r = await rpc_broker.publish("hello", queue, rpc_timeout=3, rpc=True) + async with self.patch_broker(rpc_broker) as br: + await br.start() + r = await br.publish("hello", queue, rpc_timeout=3, rpc=True) assert r == "1" @pytest.mark.asyncio() - async def test_rpc_timeout_raises(self, queue: str, rpc_broker: BrokerUsecase): + async def test_rpc_timeout_raises(self, queue: str): + rpc_broker = self.get_broker() + @rpc_broker.subscriber(queue) async def m(m): # pragma: no cover await anyio.sleep(1) - async with rpc_broker: - await rpc_broker.start() + async with self.patch_broker(rpc_broker) as br: + await br.start() with pytest.raises(TimeoutError): # pragma: no branch - await rpc_broker.publish( + await br.publish( "hello", queue, rpc=True, @@ -44,15 +53,17 @@ async def m(m): # pragma: no cover ) @pytest.mark.asyncio() - async def 
test_rpc_timeout_none(self, queue: str, rpc_broker: BrokerUsecase): + async def test_rpc_timeout_none(self, queue: str): + rpc_broker = self.get_broker() + @rpc_broker.subscriber(queue) async def m(m): # pragma: no cover await anyio.sleep(1) - async with rpc_broker: - await rpc_broker.start() + async with self.patch_broker(rpc_broker) as br: + await br.start() - r = await rpc_broker.publish( + r = await br.publish( "hello", queue, rpc=True, @@ -65,10 +76,11 @@ async def m(m): # pragma: no cover async def test_rpc_with_reply( self, queue: str, - rpc_broker: BrokerUsecase, mock: MagicMock, event: asyncio.Event, ): + rpc_broker = self.get_broker() + reply_queue = queue + "1" @rpc_broker.subscriber(reply_queue) @@ -80,10 +92,10 @@ async def response_hanler(m: str): async def m(m): # pragma: no cover return "1" - async with rpc_broker: - await rpc_broker.start() + async with self.patch_broker(rpc_broker) as br: + await br.start() - await rpc_broker.publish("hello", queue, reply_to=reply_queue) + await br.publish("hello", queue, reply_to=reply_queue) with timeout_scope(3, True): await event.wait() @@ -93,12 +105,15 @@ async def m(m): # pragma: no cover class ReplyAndConsumeForbidden: @pytest.mark.asyncio() - async def test_rpc_with_reply_and_callback(self, full_broker: BrokerUsecase): - with pytest.raises(ValueError): # noqa: PT011 - await full_broker.publish( - "hello", - "some", - reply_to="some", - rpc=True, - rpc_timeout=0, - ) + async def test_rpc_with_reply_and_callback(self): + rpc_broker = self.get_broker() + + async with rpc_broker: + with pytest.raises(ValueError): # noqa: PT011 + await rpc_broker.publish( + "hello", + "some", + reply_to="some", + rpc=True, + rpc_timeout=0, + ) diff --git a/tests/brokers/base/testclient.py b/tests/brokers/base/testclient.py index 2112519c89..8381c95dc1 100644 --- a/tests/brokers/base/testclient.py +++ b/tests/brokers/base/testclient.py @@ -1,6 +1,6 @@ import pytest -from faststream.broker.core.usecase import BrokerUsecase +from faststream.testing.broker import TestBroker from faststream.types import AnyCallable from tests.brokers.base.consume import BrokerConsumeTestcase from tests.brokers.base.publish import BrokerPublishTestcase @@ -13,61 +13,62 @@ class BrokerTestclientTestcase( BrokerRPCTestcase, ): build_message: AnyCallable - - @pytest.fixture() - def pub_broker(self, test_broker): - return test_broker - - @pytest.fixture() - def consume_broker(self, test_broker): - return test_broker - - @pytest.fixture() - def rpc_broker(self, test_broker): - return test_broker + test_class: TestBroker @pytest.mark.asyncio() - async def test_subscriber_mock(self, queue: str, test_broker: BrokerUsecase): + async def test_subscriber_mock(self, queue: str): + test_broker = self.get_broker() + @test_broker.subscriber(queue) - async def m(): + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with("hello") + async with self.test_class(test_broker): + await test_broker.start() + await test_broker.publish("hello", queue) + m.mock.assert_called_once_with("hello") @pytest.mark.asyncio() - async def test_publisher_mock(self, queue: str, test_broker: BrokerUsecase): + async def test_publisher_mock(self, queue: str): + test_broker = self.get_broker() + publisher = test_broker.publisher(queue + "resp") @publisher @test_broker.subscriber(queue) - async def m(): + async def m(msg): return "response" - await test_broker.start() - await test_broker.publish("hello", queue) - 
publisher.mock.assert_called_with("response") + async with self.test_class(test_broker): + await test_broker.start() + await test_broker.publish("hello", queue) + publisher.mock.assert_called_with("response") @pytest.mark.asyncio() - async def test_manual_publisher_mock(self, queue: str, test_broker: BrokerUsecase): + async def test_manual_publisher_mock(self, queue: str): + test_broker = self.get_broker() + publisher = test_broker.publisher(queue + "resp") @test_broker.subscriber(queue) - async def m(): + async def m(msg): await publisher.publish("response") - await test_broker.start() - await test_broker.publish("hello", queue) - publisher.mock.assert_called_with("response") + async with self.test_class(test_broker): + await test_broker.start() + await test_broker.publish("hello", queue) + publisher.mock.assert_called_with("response") @pytest.mark.asyncio() - async def test_exception_raises(self, queue: str, test_broker: BrokerUsecase): + async def test_exception_raises(self, queue: str): + test_broker = self.get_broker() + @test_broker.subscriber(queue) - async def m(): # pragma: no cover + async def m(msg): # pragma: no cover raise ValueError() - await test_broker.start() + async with self.test_class(test_broker): + await test_broker.start() - with pytest.raises(ValueError): # noqa: PT011 - await test_broker.publish("hello", queue) + with pytest.raises(ValueError): # noqa: PT011 + await test_broker.publish("hello", queue) diff --git a/tests/brokers/confluent/__init__.py b/tests/brokers/confluent/__init__.py index e69de29bb2..c4a1803708 100644 --- a/tests/brokers/confluent/__init__.py +++ b/tests/brokers/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/brokers/confluent/conftest.py b/tests/brokers/confluent/conftest.py index d128af04c1..aaac741a25 100644 --- a/tests/brokers/confluent/conftest.py +++ b/tests/brokers/confluent/conftest.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from uuid import uuid4 import pytest import pytest_asyncio @@ -31,12 +30,6 @@ async def broker(settings): yield broker -@pytest_asyncio.fixture(scope="session") -async def confluent_kafka_topic(settings): - topic = str(uuid4()) - return topic - - @pytest_asyncio.fixture async def full_broker(settings): broker = KafkaBroker(settings.url) diff --git a/tests/brokers/confluent/test_consume.py b/tests/brokers/confluent/test_consume.py index 2c471c6e73..805b3a97f2 100644 --- a/tests/brokers/confluent/test_consume.py +++ b/tests/brokers/confluent/test_consume.py @@ -19,18 +19,23 @@ class TestConsume(BrokerRealConsumeTestcase): timeout: int = 10 subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + @pytest.mark.asyncio() - async def test_consume_batch(self, confluent_kafka_topic: str, broker: KafkaBroker): + async def test_consume_batch(self, queue: str): + consume_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(confluent_kafka_topic, batch=True, **self.subscriber_kwargs) + @consume_broker.subscriber(queue, batch=True, **self.subscriber_kwargs) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - await broker.publish_batch(1, "hi", topic=confluent_kafka_topic) + await br.publish_batch(1, "hi", topic=queue) result, _ = await asyncio.wait( 
(asyncio.create_task(msgs_queue.get()),), @@ -41,9 +46,14 @@ async def handler(msg): @pytest.mark.asyncio() async def test_consume_batch_headers( - self, mock, event: asyncio.Event, queue: str, full_broker: KafkaBroker + self, + mock, + event: asyncio.Event, + queue: str, ): - @full_broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, batch=True, **self.subscriber_kwargs) def subscriber(m, msg: KafkaMessage): check = all( ( @@ -55,14 +65,12 @@ def subscriber(m, msg: KafkaMessage): mock(check) event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task( - full_broker.publish("", queue, headers={"custom": "1"}) - ), + asyncio.create_task(br.publish("", queue, headers={"custom": "1"})), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -76,17 +84,18 @@ def subscriber(m, msg: KafkaMessage): async def test_consume_ack( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, group_id="test", auto_commit=False, **self.subscriber_kwargs ) async def handler(msg: KafkaMessage): event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AsyncConfluentConsumer, @@ -96,7 +105,7 @@ async def handler(msg: KafkaMessage): await asyncio.wait( ( asyncio.create_task( - full_broker.publish( + br.publish( "hello", queue, ) @@ -114,18 +123,19 @@ async def handler(msg: KafkaMessage): async def test_consume_ack_manual( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, group_id="test", auto_commit=False, **self.subscriber_kwargs ) async def handler(msg: KafkaMessage): await msg.ack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AsyncConfluentConsumer, @@ -134,12 +144,7 @@ async def handler(msg: KafkaMessage): ) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -153,18 +158,19 @@ async def handler(msg: KafkaMessage): async def test_consume_ack_raise( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, group_id="test", auto_commit=False, **self.subscriber_kwargs ) async def handler(msg: KafkaMessage): event.set() raise AckMessage() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AsyncConfluentConsumer, @@ -173,12 +179,7 @@ async def handler(msg: KafkaMessage): ) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -192,18 +193,19 @@ async def handler(msg: KafkaMessage): async def test_nack( self, queue: str, - full_broker: KafkaBroker, event: 
asyncio.Event, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, group_id="test", auto_commit=False, **self.subscriber_kwargs ) async def handler(msg: KafkaMessage): await msg.nack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AsyncConfluentConsumer, @@ -212,12 +214,7 @@ async def handler(msg: KafkaMessage): ) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, @@ -231,34 +228,37 @@ async def handler(msg: KafkaMessage): async def test_consume_no_ack( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, group_id="test", no_ack=True, **self.subscriber_kwargs ) async def handler(msg: KafkaMessage): event.set() - await full_broker.start() - with patch.object( - AsyncConfluentConsumer, - "commit", - spy_decorator(AsyncConfluentConsumer.commit), - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with self.patch_broker(consume_broker) as br: + await br.start() + + with patch.object( + AsyncConfluentConsumer, + "commit", + spy_decorator(AsyncConfluentConsumer.commit), + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + br.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=self.timeout, - ) - m.mock.assert_not_called() + timeout=self.timeout, + ) + m.mock.assert_not_called() assert event.is_set() @@ -267,17 +267,18 @@ async def handler(msg: KafkaMessage): async def test_consume_with_no_auto_commit( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, auto_commit=False, group_id="test", **self.subscriber_kwargs ) async def subscriber_no_auto_commit(msg: KafkaMessage): await msg.nack() event.set() - broker2 = KafkaBroker() + broker2 = self.get_broker() event2 = asyncio.Event() @broker2.subscriber( @@ -286,18 +287,20 @@ async def subscriber_no_auto_commit(msg: KafkaMessage): async def subscriber_with_auto_commit(m): event2.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(full_broker.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=self.timeout, ) - async with broker2: - await broker2.start() + async with self.patch_broker(broker2) as br2: + await br2.start() + await asyncio.wait( (asyncio.create_task(event2.wait()),), timeout=self.timeout, diff --git a/tests/brokers/confluent/test_publish.py b/tests/brokers/confluent/test_publish.py index 156b150356..0fed589efb 100644 --- a/tests/brokers/confluent/test_publish.py +++ b/tests/brokers/confluent/test_publish.py @@ -12,18 +12,23 @@ class TestPublish(BrokerPublishTestcase): timeout: int = 10 subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + @pytest.mark.asyncio() - async 
def test_publish_batch(self, queue: str, broker: KafkaBroker): + async def test_publish_batch(self, queue: str): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() - await broker.publish_batch(1, "hi", topic=queue) + await br.publish_batch(1, "hi", topic=queue) result, _ = await asyncio.wait( ( @@ -36,17 +41,19 @@ async def handler(msg): assert {1, "hi"} == {r.result() for r in result} @pytest.mark.asyncio() - async def test_batch_publisher_manual(self, queue: str, broker: KafkaBroker): + async def test_batch_publisher_manual(self, queue: str): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def handler(msg): await msgs_queue.put(msg) - publisher = broker.publisher(queue, batch=True) + publisher = pub_broker.publisher(queue, batch=True) - async with broker: - await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await publisher.publish(1, "hi") @@ -61,22 +68,24 @@ async def handler(msg): assert {1, "hi"} == {r.result() for r in result} @pytest.mark.asyncio() - async def test_batch_publisher_decorator(self, queue: str, broker: KafkaBroker): + async def test_batch_publisher_decorator(self, queue: str): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.subscriber(queue, **self.subscriber_kwargs) async def handler(msg): await msgs_queue.put(msg) - @broker.publisher(queue, batch=True) - @broker.subscriber(queue + "1", **self.subscriber_kwargs) + @pub_broker.publisher(queue, batch=True) + @pub_broker.subscriber(queue + "1", **self.subscriber_kwargs) async def pub(m): return 1, "hi" - async with broker: - await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() - await broker.publish("", queue + "1") + await br.publish("", queue + "1") result, _ = await asyncio.wait( ( diff --git a/tests/brokers/confluent/test_test_client.py b/tests/brokers/confluent/test_test_client.py index d70d2fda6d..b8e232802f 100644 --- a/tests/brokers/confluent/test_test_client.py +++ b/tests/brokers/confluent/test_test_client.py @@ -11,13 +11,22 @@ class TestTestclient(BrokerTestclientTestcase): """A class to represent a test Kafka broker.""" + test_class = TestKafkaBroker + + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + + def patch_broker(self, broker: KafkaBroker) -> TestKafkaBroker: + return TestKafkaBroker(broker) + @pytest.mark.confluent() async def test_with_real_testclient( self, - broker: KafkaBroker, queue: str, event: asyncio.Event, ): + broker = self.get_broker() + @broker.subscriber(queue, auto_offset_reset="earliest") def subscriber(m): event.set() @@ -35,46 +44,49 @@ def subscriber(m): async def test_batch_pub_by_default_pub( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(queue, batch=True, auto_offset_reset="earliest") - async def m(): + broker = self.get_broker() + + @broker.subscriber(queue, batch=True, auto_offset_reset="earliest") + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue) - 
m.mock.assert_called_once_with(["hello"]) + async with self.patch_broker(broker) as br: + await br.publish("hello", queue) + m.mock.assert_called_once_with(["hello"]) async def test_batch_pub_by_pub_batch( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(queue, batch=True, auto_offset_reset="earliest") - async def m(): + broker = self.get_broker() + + @broker.subscriber(queue, batch=True, auto_offset_reset="earliest") + async def m(msg): pass - await test_broker.start() - await test_broker.publish_batch("hello", topic=queue) - m.mock.assert_called_once_with(["hello"]) + async with self.patch_broker(broker) as br: + await br.publish_batch("hello", topic=queue) + m.mock.assert_called_once_with(["hello"]) async def test_batch_publisher_mock( self, - test_broker: KafkaBroker, queue: str, ): - publisher = test_broker.publisher(queue + "1", batch=True) + broker = self.get_broker() + + publisher = broker.publisher(queue + "1", batch=True) @publisher - @test_broker.subscriber(queue, auto_offset_reset="earliest") - async def m(): + @broker.subscriber(queue, auto_offset_reset="earliest") + async def m(msg): return 1, 2, 3 - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with("hello") - publisher.mock.assert_called_once_with([1, 2, 3]) + async with self.patch_broker(broker) as br: + await br.publish("hello", queue) + m.mock.assert_called_once_with("hello") + publisher.mock.assert_called_once_with([1, 2, 3]) async def test_respect_middleware(self, queue): routes = [] diff --git a/tests/brokers/conftest.py b/tests/brokers/conftest.py deleted file mode 100644 index 5aac495a23..0000000000 --- a/tests/brokers/conftest.py +++ /dev/null @@ -1,8 +0,0 @@ -from uuid import uuid4 - -import pytest - - -@pytest.fixture() -def queue(): - return str(uuid4()) diff --git a/tests/brokers/kafka/__init__.py b/tests/brokers/kafka/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/brokers/kafka/__init__.py +++ b/tests/brokers/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/brokers/kafka/test_consume.py b/tests/brokers/kafka/test_consume.py index 82c3a7d0b8..2a7f57b888 100644 --- a/tests/brokers/kafka/test_consume.py +++ b/tests/brokers/kafka/test_consume.py @@ -13,18 +13,23 @@ @pytest.mark.kafka() class TestConsume(BrokerRealConsumeTestcase): + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + @pytest.mark.asyncio() - async def test_consume_batch(self, queue: str, broker: KafkaBroker): + async def test_consume_batch(self, queue: str): + consume_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(queue, batch=True) + @consume_broker.subscriber(queue, batch=True) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - await broker.publish_batch(1, "hi", topic=queue) + await br.publish_batch(1, "hi", topic=queue) result, _ = await asyncio.wait( (asyncio.create_task(msgs_queue.get()),), @@ -35,9 +40,14 @@ async def handler(msg): @pytest.mark.asyncio() async def test_consume_batch_headers( - self, mock, event: asyncio.Event, queue: str, full_broker: KafkaBroker + self, + mock, + event: asyncio.Event, + queue: str, ): - @full_broker.subscriber(queue, batch=True) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, batch=True) def subscriber(m, msg: 
KafkaMessage): check = all( ( @@ -49,14 +59,12 @@ def subscriber(m, msg: KafkaMessage): mock(check) event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task( - full_broker.publish("", queue, headers={"custom": "1"}) - ), + asyncio.create_task(br.publish("", queue, headers={"custom": "1"})), asyncio.create_task(event.wait()), ), timeout=3, @@ -70,15 +78,16 @@ def subscriber(m, msg: KafkaMessage): async def test_consume_ack( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue, group_id="test", auto_commit=False) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, group_id="test", auto_commit=False) async def handler(msg: KafkaMessage): event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) @@ -86,7 +95,7 @@ async def handler(msg: KafkaMessage): await asyncio.wait( ( asyncio.create_task( - full_broker.publish( + consume_broker.publish( "hello", queue, ) @@ -101,21 +110,24 @@ async def handler(msg: KafkaMessage): @pytest.mark.asyncio() async def test_manual_partition_consume( - self, queue: str, full_broker: KafkaBroker, event: asyncio.Event + self, + queue: str, + event: asyncio.Event, ): + consume_broker = self.get_broker() + tp1 = TopicPartition(queue, partition=0) - @full_broker.subscriber(partitions=[tp1]) - async def handler_tp1(msg: KafkaMessage): + @consume_broker.subscriber(partitions=[tp1]) + async def handler_tp1(msg): event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task( - full_broker.publish("hello", queue, partition=0) - ), + asyncio.create_task(br.publish("hello", queue, partition=0)), asyncio.create_task(event.wait()), ), timeout=10, @@ -128,16 +140,17 @@ async def handler_tp1(msg: KafkaMessage): async def test_consume_ack_manual( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue, group_id="test", auto_commit=False) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, group_id="test", auto_commit=False) async def handler(msg: KafkaMessage): await msg.ack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) @@ -145,7 +158,7 @@ async def handler(msg: KafkaMessage): await asyncio.wait( ( asyncio.create_task( - full_broker.publish( + br.publish( "hello", queue, ) @@ -163,16 +176,17 @@ async def handler(msg: KafkaMessage): async def test_consume_ack_raise( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue, group_id="test", auto_commit=False) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, group_id="test", auto_commit=False) async def handler(msg: KafkaMessage): event.set() raise AckMessage() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) @@ -180,7 +194,7 @@ 
async def handler(msg: KafkaMessage): await asyncio.wait( ( asyncio.create_task( - full_broker.publish( + br.publish( "hello", queue, ) @@ -198,16 +212,17 @@ async def handler(msg: KafkaMessage): async def test_nack( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue, group_id="test", auto_commit=False) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, group_id="test", auto_commit=False) async def handler(msg: KafkaMessage): await msg.nack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object( AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) @@ -215,7 +230,7 @@ async def handler(msg: KafkaMessage): await asyncio.wait( ( asyncio.create_task( - full_broker.publish( + br.publish( "hello", queue, ) @@ -233,29 +248,32 @@ async def handler(msg: KafkaMessage): async def test_consume_no_ack( self, queue: str, - full_broker: KafkaBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue, group_id="test", no_ack=True) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, group_id="test", no_ack=True) async def handler(msg: KafkaMessage): event.set() - await full_broker.start() - with patch.object( - AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with self.patch_broker(consume_broker) as br: + await br.start() + + with patch.object( + AIOKafkaConsumer, "commit", spy_decorator(AIOKafkaConsumer.commit) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + br.publish( + "hello", + queue, + ) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=10, - ) - m.mock.assert_not_called() + timeout=10, + ) + m.mock.assert_not_called() - assert event.is_set() + assert event.is_set() diff --git a/tests/brokers/kafka/test_publish.py b/tests/brokers/kafka/test_publish.py index 2aee2ad0ca..e913e3c638 100644 --- a/tests/brokers/kafka/test_publish.py +++ b/tests/brokers/kafka/test_publish.py @@ -8,18 +8,23 @@ @pytest.mark.kafka() class TestPublish(BrokerPublishTestcase): + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + @pytest.mark.asyncio() - async def test_publish_batch(self, queue: str, broker: KafkaBroker): + async def test_publish_batch(self, queue: str): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(queue) + @pub_broker.subscriber(queue) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() - await broker.publish_batch(1, "hi", topic=queue) + await br.publish_batch(1, "hi", topic=queue) result, _ = await asyncio.wait( ( @@ -32,17 +37,19 @@ async def handler(msg): assert {1, "hi"} == {r.result() for r in result} @pytest.mark.asyncio() - async def test_batch_publisher_manual(self, queue: str, broker: KafkaBroker): + async def test_batch_publisher_manual(self, queue: str): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(queue) + @pub_broker.subscriber(queue) async def handler(msg): await msgs_queue.put(msg) - publisher = broker.publisher(queue, batch=True) + publisher = pub_broker.publisher(queue, batch=True) - async with broker: - 
await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() await publisher.publish(1, "hi") @@ -57,22 +64,24 @@ async def handler(msg): assert {1, "hi"} == {r.result() for r in result} @pytest.mark.asyncio() - async def test_batch_publisher_decorator(self, queue: str, broker: KafkaBroker): + async def test_batch_publisher_decorator(self, queue: str): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(queue) + @pub_broker.subscriber(queue) async def handler(msg): await msgs_queue.put(msg) - @broker.publisher(queue, batch=True) - @broker.subscriber(queue + "1") + @pub_broker.publisher(queue, batch=True) + @pub_broker.subscriber(queue + "1") async def pub(m): return 1, "hi" - async with broker: - await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() - await broker.publish("", queue + "1") + await br.publish("", queue + "1") result, _ = await asyncio.wait( ( diff --git a/tests/brokers/kafka/test_test_client.py b/tests/brokers/kafka/test_test_client.py index cc128921ed..a89ecff707 100644 --- a/tests/brokers/kafka/test_test_client.py +++ b/tests/brokers/kafka/test_test_client.py @@ -9,57 +9,72 @@ @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + test_class = TestKafkaBroker + + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + + def patch_broker(self, broker: KafkaBroker) -> TestKafkaBroker: + return TestKafkaBroker(broker) + async def test_partition_match( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(partitions=[TopicPartition(queue, 1)]) - async def m(): + broker = self.get_broker() + + @broker.subscriber(partitions=[TopicPartition(queue, 1)]) + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with("hello") + async with self.patch_broker(broker) as br: + await br.publish("hello", queue) + + m.mock.assert_called_once_with("hello") async def test_partition_match_exect( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(partitions=[TopicPartition(queue, 1)]) - async def m(): + broker = self.get_broker() + + @broker.subscriber(partitions=[TopicPartition(queue, 1)]) + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue, partition=1) - m.mock.assert_called_once_with("hello") + async with self.patch_broker(broker) as br: + await br.publish("hello", queue, partition=1) + + m.mock.assert_called_once_with("hello") async def test_partition_missmatch( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(partitions=[TopicPartition(queue, 1)]) - async def m(): + broker = self.get_broker() + + @broker.subscriber(partitions=[TopicPartition(queue, 1)]) + async def m(msg): pass - @test_broker.subscriber(queue) - async def m2(): + @broker.subscriber(queue) + async def m2(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue, partition=2) - assert not m.mock.called - m2.mock.assert_called_once_with("hello") + async with self.patch_broker(broker) as br: + await br.publish("hello", queue, partition=2) + + assert not m.mock.called + m2.mock.assert_called_once_with("hello") @pytest.mark.kafka() async def test_with_real_testclient( self, - broker: KafkaBroker, queue: str, event: asyncio.Event, ): + broker = self.get_broker() + @broker.subscriber(queue) def subscriber(m): event.set() @@ -77,46 +92,49 @@ def subscriber(m): 
async def test_batch_pub_by_default_pub( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(queue, batch=True) - async def m(): + broker = self.get_broker() + + @broker.subscriber(queue, batch=True) + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with(["hello"]) + async with TestKafkaBroker(broker) as br: + await br.publish("hello", queue) + m.mock.assert_called_once_with(["hello"]) async def test_batch_pub_by_pub_batch( self, - test_broker: KafkaBroker, queue: str, ): - @test_broker.subscriber(queue, batch=True) - async def m(): + broker = self.get_broker() + + @broker.subscriber(queue, batch=True) + async def m(msg): pass - await test_broker.start() - await test_broker.publish_batch("hello", topic=queue) - m.mock.assert_called_once_with(["hello"]) + async with TestKafkaBroker(broker) as br: + await br.publish_batch("hello", topic=queue) + m.mock.assert_called_once_with(["hello"]) async def test_batch_publisher_mock( self, - test_broker: KafkaBroker, queue: str, ): - publisher = test_broker.publisher(queue + "1", batch=True) + broker = self.get_broker() + + publisher = broker.publisher(queue + "1", batch=True) @publisher - @test_broker.subscriber(queue) - async def m(): + @broker.subscriber(queue) + async def m(msg): return 1, 2, 3 - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with("hello") - publisher.mock.assert_called_once_with([1, 2, 3]) + async with TestKafkaBroker(broker) as br: + await br.publish("hello", queue) + m.mock.assert_called_once_with("hello") + publisher.mock.assert_called_once_with([1, 2, 3]) async def test_respect_middleware(self, queue): routes = [] diff --git a/tests/brokers/nats/__init__.py b/tests/brokers/nats/__init__.py new file mode 100644 index 0000000000..87ead90ee6 --- /dev/null +++ b/tests/brokers/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py index 1e7997526e..5318fb2a69 100644 --- a/tests/brokers/nats/test_consume.py +++ b/tests/brokers/nats/test_consume.py @@ -13,24 +13,26 @@ @pytest.mark.nats() class TestConsume(BrokerRealConsumeTestcase): + def get_broker(self, apply_types: bool = False) -> NatsBroker: + return NatsBroker(apply_types=apply_types) + async def test_consume_js( self, queue: str, - consume_broker: NatsBroker, stream: JStream, event: asyncio.Event, ): + consume_broker = self.get_broker() + @consume_broker.subscriber(queue, stream=stream) def subscriber(m): event.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task( - consume_broker.publish("hello", queue, stream=stream.name) - ), + asyncio.create_task(br.publish("hello", queue, stream=stream.name)), asyncio.create_task(event.wait()), ), timeout=3, @@ -41,11 +43,12 @@ def subscriber(m): async def test_consume_pull( self, queue: str, - consume_broker: NatsBroker, stream: JStream, event: asyncio.Event, mock, ): + consume_broker = self.get_broker() + @consume_broker.subscriber( queue, stream=stream, @@ -55,26 +58,29 @@ def subscriber(m): mock(m) event.set() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish("hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + async with self.patch_broker(consume_broker) as br: + await 
br.start() - assert event.is_set() - mock.assert_called_once_with("hello") + await asyncio.wait( + ( + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with("hello") async def test_consume_batch( self, queue: str, - consume_broker: NatsBroker, stream: JStream, event: asyncio.Event, mock, ): + consume_broker = self.get_broker() + @consume_broker.subscriber( queue, stream=stream, @@ -84,40 +90,39 @@ def subscriber(m): mock(m) event.set() - await consume_broker.start() - await asyncio.wait( - ( - asyncio.create_task(consume_broker.publish(b"hello", queue)), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + async with self.patch_broker(consume_broker) as br: + await br.start() - assert event.is_set() - mock.assert_called_once_with([b"hello"]) + await asyncio.wait( + ( + asyncio.create_task(br.publish(b"hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with([b"hello"]) async def test_consume_ack( self, queue: str, - full_broker: NatsBroker, event: asyncio.Event, stream: JStream, ): - @full_broker.subscriber(queue, stream=stream) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, stream=stream) async def handler(msg: NatsMessage): event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -129,26 +134,23 @@ async def handler(msg: NatsMessage): async def test_consume_ack_manual( self, queue: str, - full_broker: NatsBroker, event: asyncio.Event, stream: JStream, ): - @full_broker.subscriber(queue, stream=stream) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, stream=stream) async def handler(msg: NatsMessage): await msg.ack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -160,26 +162,23 @@ async def handler(msg: NatsMessage): async def test_consume_ack_raise( self, queue: str, - full_broker: NatsBroker, event: asyncio.Event, stream: JStream, ): - @full_broker.subscriber(queue, stream=stream) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, stream=stream) async def handler(msg: NatsMessage): event.set() raise AckMessage() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -191,26 +190,23 @@ async def handler(msg: NatsMessage): async def test_nack( self, queue: str, - full_broker: NatsBroker, event: asyncio.Event, stream: JStream, ): - @full_broker.subscriber(queue, 
stream=stream) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, stream=stream) async def handler(msg: NatsMessage): await msg.nack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object(Msg, "nak", spy_decorator(Msg.nak)) as m: await asyncio.wait( ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) - ), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -220,39 +216,41 @@ async def handler(msg: NatsMessage): assert event.is_set() async def test_consume_no_ack( - self, queue: str, full_broker: NatsBroker, event: asyncio.Event + self, + queue: str, + event: asyncio.Event, ): - @full_broker.subscriber(queue, no_ack=True) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, no_ack=True) async def handler(msg: NatsMessage): event.set() - await full_broker.start() - with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish( - "hello", - queue, - ) + async with self.patch_broker(consume_broker) as br: + await br.start() + + with patch.object(Msg, "ack", spy_decorator(Msg.ack)) as m: + await asyncio.wait( + ( + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_not_called() + timeout=3, + ) + m.mock.assert_not_called() - assert event.is_set() + assert event.is_set() async def test_consume_batch_headers( self, queue: str, - full_broker: NatsBroker, stream: JStream, event: asyncio.Event, mock, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( queue, stream=stream, pull_sub=PullSub(1, batch=True), @@ -268,16 +266,15 @@ def subscriber(m, msg: NatsMessage): mock(check) event.set() - await full_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("", queue, headers={"custom": "1"}) + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( + ( + asyncio.create_task(br.publish("", queue, headers={"custom": "1"})), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) - assert event.is_set() - mock.assert_called_once_with(True) + assert event.is_set() + mock.assert_called_once_with(True) diff --git a/tests/brokers/nats/test_publish.py b/tests/brokers/nats/test_publish.py index cfe7c74000..0f4aa8581d 100644 --- a/tests/brokers/nats/test_publish.py +++ b/tests/brokers/nats/test_publish.py @@ -1,5 +1,6 @@ import pytest +from faststream.nats import NatsBroker from tests.brokers.base.publish import BrokerPublishTestcase @@ -7,28 +8,5 @@ class TestPublish(BrokerPublishTestcase): """Test publish method of NATS broker.""" - @pytest.mark.asyncio() - async def test_stream_publish( - self, - queue: str, - test_broker, - ): - @test_broker.subscriber(queue, stream="test") - async def m(): ... - - await test_broker.start() - await test_broker.publish("Hi!", queue, stream="test") - m.mock.assert_called_once_with("Hi!") - - @pytest.mark.asyncio() - async def test_wrong_stream_publish( - self, - queue: str, - test_broker, - ): - @test_broker.subscriber(queue) - async def m(): ... 
- - await test_broker.start() - await test_broker.publish("Hi!", queue, stream="test") - assert not m.mock.called + def get_broker(self, apply_types: bool = False) -> NatsBroker: + return NatsBroker(apply_types=apply_types) diff --git a/tests/brokers/nats/test_rpc.py b/tests/brokers/nats/test_rpc.py index 7c0bd18f06..9675883c2b 100644 --- a/tests/brokers/nats/test_rpc.py +++ b/tests/brokers/nats/test_rpc.py @@ -6,8 +6,13 @@ @pytest.mark.nats() class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden): + def get_broker(self, apply_types: bool = False) -> NatsBroker: + return NatsBroker(apply_types=apply_types) + @pytest.mark.asyncio() - async def test_rpc_js(self, queue: str, rpc_broker: NatsBroker, stream: JStream): + async def test_rpc_js(self, queue: str, stream: JStream): + rpc_broker = self.get_broker() + @rpc_broker.subscriber(queue, stream=stream) async def m(m): # pragma: no cover return "1" diff --git a/tests/brokers/nats/test_test_client.py b/tests/brokers/nats/test_test_client.py index c4bdaa7b41..ebbd1c7887 100644 --- a/tests/brokers/nats/test_test_client.py +++ b/tests/brokers/nats/test_test_client.py @@ -10,6 +10,43 @@ @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + test_class = TestNatsBroker + + def get_broker(self, apply_types: bool = False) -> NatsBroker: + return NatsBroker(apply_types=apply_types) + + def patch_broker(self, broker: NatsBroker) -> TestNatsBroker: + return TestNatsBroker(broker) + + @pytest.mark.asyncio() + async def test_stream_publish( + self, + queue: str, + ): + pub_broker = NatsBroker(apply_types=False) + + @pub_broker.subscriber(queue, stream="test") + async def m(msg): ... + + async with TestNatsBroker(pub_broker) as br: + await br.publish("Hi!", queue, stream="test") + m.mock.assert_called_once_with("Hi!") + + @pytest.mark.asyncio() + async def test_wrong_stream_publish( + self, + queue: str, + ): + pub_broker = NatsBroker(apply_types=False) + + @pub_broker.subscriber(queue) + async def m(msg): ... + + async with TestNatsBroker(pub_broker) as br: + await br.publish("Hi!", queue, stream="test") + assert not m.mock.called + + @pytest.mark.asyncio() async def test_rpc_conflicts_reply(self, queue): async with TestNatsBroker(NatsBroker()) as br: with pytest.raises(SetupError): @@ -23,10 +60,11 @@ async def test_rpc_conflicts_reply(self, queue): @pytest.mark.nats() async def test_with_real_testclient( self, - broker: NatsBroker, queue: str, event: asyncio.Event, ): + broker = self.get_broker() + @broker.subscriber(queue) def subscriber(m): event.set() @@ -90,76 +128,92 @@ async def h2(): ... 
assert len(routes) == 2 async def test_js_subscriber_mock( - self, queue: str, test_broker: NatsBroker, stream: JStream + self, + queue: str, + stream: JStream, ): - @test_broker.subscriber(queue, stream=stream) - async def m(): + broker = self.get_broker() + + @broker.subscriber(queue, stream=stream) + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", queue, stream=stream.name) - m.mock.assert_called_once_with("hello") + async with TestNatsBroker(broker) as br: + await br.publish("hello", queue, stream=stream.name) + m.mock.assert_called_once_with("hello") async def test_js_publisher_mock( - self, queue: str, test_broker: NatsBroker, stream: JStream + self, + queue: str, + stream: JStream, ): - publisher = test_broker.publisher(queue + "resp") + broker = self.get_broker() + + publisher = broker.publisher(queue + "resp") @publisher - @test_broker.subscriber(queue, stream=stream) - async def m(): + @broker.subscriber(queue, stream=stream) + async def m(msg): return "response" - await test_broker.start() - await test_broker.publish("hello", queue, stream=stream.name) - publisher.mock.assert_called_with("response") + async with TestNatsBroker(broker) as br: + await br.publish("hello", queue, stream=stream.name) + publisher.mock.assert_called_with("response") + + async def test_any_subject_routing(self): + broker = self.get_broker() - async def test_any_subject_routing(self, test_broker: NatsBroker): - @test_broker.subscriber("test.*.subj.*") - def subscriber(): ... + @broker.subscriber("test.*.subj.*") + def subscriber(msg): ... - await test_broker.start() - await test_broker.publish("hello", "test.a.subj.b") - subscriber.mock.assert_called_once_with("hello") + async with TestNatsBroker(broker) as br: + await br.publish("hello", "test.a.subj.b") + subscriber.mock.assert_called_once_with("hello") + + async def test_ending_subject_routing(self): + broker = self.get_broker() - async def test_ending_subject_routing(self, test_broker: NatsBroker): - @test_broker.subscriber("test.>") - def subscriber(): ... + @broker.subscriber("test.>") + def subscriber(msg): ... - await test_broker.start() - await test_broker.publish("hello", "test.a.subj.b") - subscriber.mock.assert_called_once_with("hello") + async with TestNatsBroker(broker) as br: + await br.publish("hello", "test.a.subj.b") + subscriber.mock.assert_called_once_with("hello") - async def test_mixed_subject_routing(self, test_broker: NatsBroker): - @test_broker.subscriber("*.*.subj.>") - def subscriber(): ... + async def test_mixed_subject_routing(self): + broker = self.get_broker() - await test_broker.start() - await test_broker.publish("hello", "test.a.subj.b.c") - subscriber.mock.assert_called_once_with("hello") + @broker.subscriber("*.*.subj.>") + def subscriber(msg): ... + + async with TestNatsBroker(broker) as br: + await br.publish("hello", "test.a.subj.b.c") + subscriber.mock.assert_called_once_with("hello") async def test_consume_pull( self, queue: str, - test_broker: NatsBroker, stream: JStream, ): - @test_broker.subscriber(queue, stream=stream, pull_sub=PullSub(1)) + broker = self.get_broker() + + @broker.subscriber(queue, stream=stream, pull_sub=PullSub(1)) def subscriber(m): ... 
- await test_broker.start() - await test_broker.publish("hello", queue) - subscriber.mock.assert_called_once_with("hello") + async with TestNatsBroker(broker) as br: + await br.publish("hello", queue) + subscriber.mock.assert_called_once_with("hello") async def test_consume_batch( self, queue: str, - test_broker: NatsBroker, stream: JStream, event: asyncio.Event, mock, ): - @test_broker.subscriber( + broker = self.get_broker() + + @broker.subscriber( queue, stream=stream, pull_sub=PullSub(1, batch=True), @@ -168,6 +222,6 @@ def subscriber(m): mock(m) event.set() - await test_broker.start() - await test_broker.publish("hello", queue) - subscriber.mock.assert_called_once_with(["hello"]) + async with TestNatsBroker(broker) as br: + await br.publish("hello", queue) + subscriber.mock.assert_called_once_with(["hello"]) diff --git a/tests/brokers/rabbit/__init__.py b/tests/brokers/rabbit/__init__.py index e69de29bb2..ebec43fcd5 100644 --- a/tests/brokers/rabbit/__init__.py +++ b/tests/brokers/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/brokers/rabbit/test_consume.py b/tests/brokers/rabbit/test_consume.py index 56ef7b9bae..30b5cab321 100644 --- a/tests/brokers/rabbit/test_consume.py +++ b/tests/brokers/rabbit/test_consume.py @@ -13,24 +13,28 @@ @pytest.mark.rabbit() class TestConsume(BrokerRealConsumeTestcase): + def get_broker(self, apply_types: bool = False) -> RabbitBroker: + return RabbitBroker(apply_types=apply_types) + @pytest.mark.asyncio() async def test_consume_from_exchange( self, queue: str, exchange: RabbitExchange, - broker: RabbitBroker, event: asyncio.Event, ): - @broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker() + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) def h(m): event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( asyncio.create_task( - broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -44,13 +48,11 @@ async def test_consume_with_get_old( self, queue: str, exchange: RabbitExchange, - broker: RabbitBroker, event: asyncio.Event, ): - await broker.declare_queue(RabbitQueue(queue)) - await broker.declare_exchange(exchange) + consume_broker = self.get_broker() - @broker.subscriber( + @consume_broker.subscriber( queue=RabbitQueue(name=queue, passive=True), exchange=RabbitExchange(name=exchange.name, passive=True), retry=True, @@ -58,13 +60,19 @@ async def test_consume_with_get_old( def h(m): event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.declare_queue(RabbitQueue(queue)) + await br.declare_exchange(exchange) + + await br.start() + await asyncio.wait( ( asyncio.create_task( - broker.publish( - Message(b"hello"), queue=queue, exchange=exchange.name + br.publish( + Message(b"hello"), + queue=queue, + exchange=exchange.name, ) ), asyncio.create_task(event.wait()), @@ -79,22 +87,24 @@ async def test_consume_ack( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): event.set() - async with full_broker: - await 
full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -109,23 +119,25 @@ async def test_consume_manual_ack( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): await msg.ack() event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -139,25 +151,27 @@ async def test_consume_exception_ack( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): try: raise AckMessage() finally: event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -171,24 +185,26 @@ async def test_consume_manual_nack( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): await msg.nack() event.set() raise ValueError() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "nack", spy_decorator(IncomingMessage.nack) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -202,25 +218,27 @@ async def test_consume_exception_nack( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): try: raise NackMessage() finally: event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "nack", 
spy_decorator(IncomingMessage.nack) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -234,24 +252,26 @@ async def test_consume_manual_reject( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): await msg.reject() event.set() raise ValueError() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -265,25 +285,27 @@ async def test_consume_exception_reject( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue=queue, exchange=exchange, retry=1) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): try: raise RejectMessage() finally: event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) ) as m: await asyncio.wait( ( asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task(event.wait()), ), @@ -296,18 +318,20 @@ async def handler(msg: RabbitMessage): async def test_consume_skip_message( self, queue: str, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue) async def handler(msg: RabbitMessage): try: raise SkipMessage() finally: event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + with patch.object( IncomingMessage, "reject", spy_decorator(IncomingMessage.reject) ) as m, patch.object( @@ -317,7 +341,7 @@ async def handler(msg: RabbitMessage): ) as m2: await asyncio.wait( ( - asyncio.create_task(full_broker.publish("hello", queue)), + asyncio.create_task(br.publish("hello", queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -333,26 +357,29 @@ async def test_consume_no_ack( self, queue: str, exchange: RabbitExchange, - full_broker: RabbitBroker, event: asyncio.Event, ): - @full_broker.subscriber(queue, exchange=exchange, retry=1, no_ack=True) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, exchange=exchange, retry=1, no_ack=True) async def handler(msg: RabbitMessage): event.set() - await full_broker.start() - with patch.object( - IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) - ) as m: - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("hello", queue=queue, exchange=exchange) + async with self.patch_broker(consume_broker) as br: + await br.start() + + with 
patch.object( + IncomingMessage, "ack", spy_decorator(IncomingMessage.ack) + ) as m: + await asyncio.wait( + ( + asyncio.create_task( + br.publish("hello", queue=queue, exchange=exchange) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) - m.mock.assert_not_called() + timeout=3, + ) + m.mock.assert_not_called() - assert event.is_set() + assert event.is_set() diff --git a/tests/brokers/rabbit/test_publish.py b/tests/brokers/rabbit/test_publish.py index 0cf9672866..97be60f066 100644 --- a/tests/brokers/rabbit/test_publish.py +++ b/tests/brokers/rabbit/test_publish.py @@ -11,15 +11,21 @@ @pytest.mark.rabbit() class TestPublish(BrokerPublishTestcase): + def get_broker(self, apply_types: bool = False) -> RabbitBroker: + return RabbitBroker(apply_types=apply_types) + @pytest.mark.asyncio() async def test_reply_config( self, - pub_broker: RabbitBroker, queue: str, event, mock, ): - @pub_broker.subscriber(queue + "reply") + pub_broker = self.get_broker() + + reply_queue = queue + "reply" + + @pub_broker.subscriber(reply_queue) async def reply_handler(m): event.set() mock(m) @@ -28,20 +34,18 @@ async def reply_handler(m): async def handler(m): return m - async with pub_broker: + async with self.patch_broker(pub_broker) as br: with patch.object( AioPikaFastProducer, "publish", spy_decorator(AioPikaFastProducer.publish), ) as m: - await pub_broker.start() + await br.start() await asyncio.wait( ( asyncio.create_task( - pub_broker.publish( - "Hello!", queue, reply_to=queue + "reply" - ) + br.publish("Hello!", queue, reply_to=reply_queue) ), asyncio.create_task(event.wait()), ), diff --git a/tests/brokers/rabbit/test_rpc.py b/tests/brokers/rabbit/test_rpc.py index 76b8fc2b68..d0bd80cab7 100644 --- a/tests/brokers/rabbit/test_rpc.py +++ b/tests/brokers/rabbit/test_rpc.py @@ -1,8 +1,10 @@ import pytest +from faststream.rabbit import RabbitBroker from tests.brokers.base.rpc import BrokerRPCTestcase, ReplyAndConsumeForbidden @pytest.mark.rabbit() class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden): - pass + def get_broker(self, apply_types: bool = False) -> RabbitBroker: + return RabbitBroker(apply_types=apply_types) diff --git a/tests/brokers/rabbit/test_test_client.py b/tests/brokers/rabbit/test_test_client.py index 0bb72286e5..e07cbd88c0 100644 --- a/tests/brokers/rabbit/test_test_client.py +++ b/tests/brokers/rabbit/test_test_client.py @@ -1,5 +1,4 @@ import asyncio -from unittest.mock import Mock import pytest @@ -19,8 +18,18 @@ @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + test_class = TestRabbitBroker + + def get_broker(self, apply_types: bool = False) -> RabbitBroker: + return RabbitBroker(apply_types=apply_types) + + def patch_broker(self, broker: RabbitBroker) -> RabbitBroker: + return TestRabbitBroker(broker) + async def test_rpc_conflicts_reply(self, queue): - async with TestRabbitBroker(RabbitBroker()) as br: + broker = self.get_broker() + + async with TestRabbitBroker(broker) as br: with pytest.raises(SetupError): await br.publish( "", @@ -32,10 +41,11 @@ async def test_rpc_conflicts_reply(self, queue): @pytest.mark.rabbit() async def test_with_real_testclient( self, - broker: RabbitBroker, queue: str, event: asyncio.Event, ): + broker = self.get_broker() + @broker.subscriber(queue) def subscriber(m): event.set() @@ -53,86 +63,92 @@ def subscriber(m): async def test_direct( self, - test_broker: RabbitBroker, queue: str, ): - @test_broker.subscriber(queue) + broker = self.get_broker() + + 
@broker.subscriber(queue) async def handler(m): return 1 - @test_broker.subscriber(queue + "1", exchange="test") + @broker.subscriber(queue + "1", exchange="test") async def handler2(m): return 2 - await test_broker.start() - assert await test_broker.publish("", queue, rpc=True) == 1 - assert ( - await test_broker.publish("", queue + "1", exchange="test", rpc=True) == 2 - ) - assert None is await test_broker.publish("", exchange="test2", rpc=True) + async with TestRabbitBroker(broker) as br: + await br.start() + assert await br.publish("", queue, rpc=True) == 1 + assert await br.publish("", queue + "1", exchange="test", rpc=True) == 2 + assert None is await br.publish("", exchange="test2", rpc=True) async def test_fanout( self, - test_broker: RabbitBroker, queue: str, + mock, ): - mock = Mock() + broker = self.get_broker() exch = RabbitExchange("test", type=ExchangeType.FANOUT) - @test_broker.subscriber(queue, exchange=exch) + @broker.subscriber(queue, exchange=exch) async def handler(m): mock() - await test_broker.start() - await test_broker.publish("", exchange=exch, rpc=True) - assert None is await test_broker.publish("", exchange="test2", rpc=True) + async with TestRabbitBroker(broker) as br: + await br.publish("", exchange=exch, rpc=True) + + assert None is await br.publish("", exchange="test2", rpc=True) + + assert mock.call_count == 1 - assert mock.call_count == 1 + async def test_any_topic_routing(self): + broker = self.get_broker() - async def test_any_topic_routing(self, test_broker: RabbitBroker): exch = RabbitExchange("test", type=ExchangeType.TOPIC) - @test_broker.subscriber( + @broker.subscriber( RabbitQueue("test", routing_key="test.*.subj.*"), exchange=exch, ) - def subscriber(): ... + def subscriber(msg): ... - await test_broker.start() - await test_broker.publish("hello", "test.a.subj.b", exchange=exch) - subscriber.mock.assert_called_once_with("hello") + async with TestRabbitBroker(broker) as br: + await br.publish("hello", "test.a.subj.b", exchange=exch) + subscriber.mock.assert_called_once_with("hello") + + async def test_ending_topic_routing(self): + broker = self.get_broker() - async def test_ending_topic_routing(self, test_broker: RabbitBroker): exch = RabbitExchange("test", type=ExchangeType.TOPIC) - @test_broker.subscriber( + @broker.subscriber( RabbitQueue("test", routing_key="test.#"), exchange=exch, ) - def subscriber(): ... + def subscriber(msg): ... + + async with TestRabbitBroker(broker) as br: + await br.publish("hello", "test.a.subj.b", exchange=exch) + subscriber.mock.assert_called_once_with("hello") - await test_broker.start() - await test_broker.publish("hello", "test.a.subj.b", exchange=exch) - subscriber.mock.assert_called_once_with("hello") + async def test_mixed_topic_routing(self): + broker = self.get_broker() - async def test_mixed_topic_routing(self, test_broker: RabbitBroker): exch = RabbitExchange("test", type=ExchangeType.TOPIC) - @test_broker.subscriber( + @broker.subscriber( RabbitQueue("test", routing_key="*.*.subj.#"), exchange=exch, ) - def subscriber(): ... + def subscriber(msg): ... 
- await test_broker.start() - await test_broker.publish("hello", "test.a.subj.b.c", exchange=exch) - subscriber.mock.assert_called_once_with("hello") + async with TestRabbitBroker(broker) as br: + await br.publish("hello", "test.a.subj.b.c", exchange=exch) + subscriber.mock.assert_called_once_with("hello") + + async def test_header(self): + broker = self.get_broker() - async def test_header( - self, - test_broker: RabbitBroker, - ): q1 = RabbitQueue( "test-queue-2", bind_arguments={"key": 2, "key2": 2, "x-match": "any"}, @@ -147,74 +163,65 @@ async def test_header( ) exch = RabbitExchange("exchange", type=ExchangeType.HEADERS) - @test_broker.subscriber(q2, exch) - async def handler2(): + @broker.subscriber(q2, exch) + async def handler2(msg): return 2 - @test_broker.subscriber(q1, exch) - async def handler(): + @broker.subscriber(q1, exch) + async def handler(msg): return 1 - @test_broker.subscriber(q3, exch) - async def handler3(): + @broker.subscriber(q3, exch) + async def handler3(msg): return 3 - await test_broker.start() - assert ( - await test_broker.publish( - exchange=exch, rpc=True, headers={"key": 2, "key2": 2} + async with TestRabbitBroker(broker) as br: + assert ( + await br.publish(exchange=exch, rpc=True, headers={"key": 2, "key2": 2}) + == 2 ) - == 2 - ) - assert ( - await test_broker.publish(exchange=exch, rpc=True, headers={"key": 2}) == 1 - ) - assert await test_broker.publish(exchange=exch, rpc=True, headers={}) == 3 + assert await br.publish(exchange=exch, rpc=True, headers={"key": 2}) == 1 + assert await br.publish(exchange=exch, rpc=True, headers={}) == 3 async def test_consume_manual_ack( self, queue: str, exchange: RabbitExchange, - test_broker: RabbitBroker, ): + broker = self.get_broker(apply_types=True) + consume = asyncio.Event() consume2 = asyncio.Event() consume3 = asyncio.Event() - @test_broker.subscriber(queue=queue, exchange=exchange, retry=1) + @broker.subscriber(queue=queue, exchange=exchange, retry=1) async def handler(msg: RabbitMessage): await msg.raw_message.ack() consume.set() - @test_broker.subscriber(queue=queue + "1", exchange=exchange, retry=1) + @broker.subscriber(queue=queue + "1", exchange=exchange, retry=1) async def handler2(msg: RabbitMessage): await msg.raw_message.nack() consume2.set() raise ValueError() - @test_broker.subscriber(queue=queue + "2", exchange=exchange, retry=1) + @broker.subscriber(queue=queue + "2", exchange=exchange, retry=1) async def handler3(msg: RabbitMessage): await msg.raw_message.reject() consume3.set() raise ValueError() - await test_broker.start() - async with test_broker: - await test_broker.start() + async with TestRabbitBroker(broker) as br: await asyncio.wait( ( asyncio.create_task( - test_broker.publish("hello", queue=queue, exchange=exchange) + br.publish("hello", queue=queue, exchange=exchange) ), asyncio.create_task( - test_broker.publish( - "hello", queue=queue + "1", exchange=exchange - ) + br.publish("hello", queue=queue + "1", exchange=exchange) ), asyncio.create_task( - test_broker.publish( - "hello", queue=queue + "2", exchange=exchange - ) + br.publish("hello", queue=queue + "2", exchange=exchange) ), asyncio.create_task(consume.wait()), asyncio.create_task(consume2.wait()), @@ -238,10 +245,10 @@ async def on_receive(self) -> None: broker = RabbitBroker(middlewares=(Middleware,)) @broker.subscriber(queue) - async def h1(): ... + async def h1(msg): ... @broker.subscriber(queue + "1") - async def h2(): ... + async def h2(msg): ... 
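Taken together, these test-suite changes converge on a single pattern: each testcase builds its own broker via `get_broker()` and wraps it with the corresponding in-memory test client via `patch_broker()`, instead of relying on pre-connected fixtures. A condensed sketch of that shape, assuming the same helpers shown in the diffs (the class and test names here are illustrative, not the real base classes under `tests/brokers/base/`):

```python
import pytest

from faststream.rabbit import RabbitBroker, TestRabbitBroker


class ExampleBrokerTestcase:  # illustrative shape of the refactored testcases
    def get_broker(self, apply_types: bool = False) -> RabbitBroker:
        # every test constructs a fresh, unconnected broker
        return RabbitBroker(apply_types=apply_types)

    def patch_broker(self, broker: RabbitBroker) -> TestRabbitBroker:
        # wrap it with the in-memory test client; real-broker suites override this
        return TestRabbitBroker(broker)

    @pytest.mark.asyncio()
    async def test_example(self, queue: str):
        broker = self.get_broker()

        @broker.subscriber(queue)
        async def handler(msg):
            return msg

        async with self.patch_broker(broker) as br:
            # with the in-memory client, rpc=True returns the handler result directly
            assert await br.publish(1, queue, rpc=True) == 1
```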
async with TestRabbitBroker(broker) as br: await br.publish("", queue) @@ -261,10 +268,10 @@ async def on_receive(self) -> None: broker = RabbitBroker(middlewares=(Middleware,)) @broker.subscriber(queue) - async def h1(): ... + async def h1(msg): ... @broker.subscriber(queue + "1") - async def h2(): ... + async def h2(msg): ... async with TestRabbitBroker(broker, with_real=True) as br: await br.publish("", queue) diff --git a/tests/brokers/redis/__init__.py b/tests/brokers/redis/__init__.py index e69de29bb2..4752ef19b1 100644 --- a/tests/brokers/redis/__init__.py +++ b/tests/brokers/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/brokers/redis/test_consume.py b/tests/brokers/redis/test_consume.py index 8ddad852c8..071467c449 100644 --- a/tests/brokers/redis/test_consume.py +++ b/tests/brokers/redis/test_consume.py @@ -13,25 +13,28 @@ @pytest.mark.redis() @pytest.mark.asyncio() class TestConsume(BrokerRealConsumeTestcase): + def get_broker(self, apply_types: bool = False): + return RedisBroker(apply_types=apply_types) + async def test_consume_native( self, - consume_broker: RedisBroker, event: asyncio.Event, mock: MagicMock, queue: str, ): + consume_broker = self.get_broker() + @consume_broker.subscriber(queue) async def handler(msg): mock(msg) event.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task( - consume_broker._connection.publish(queue, "hello") - ), + asyncio.create_task(br._connection.publish(queue, "hello")), asyncio.create_task(event.wait()), ), timeout=3, @@ -41,20 +44,22 @@ async def handler(msg): async def test_pattern_with_path( self, - consume_broker: RedisBroker, event: asyncio.Event, mock: MagicMock, ): + consume_broker = self.get_broker() + @consume_broker.subscriber("test.{name}") async def handler(msg): mock(msg) event.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", "test.name")), + asyncio.create_task(br.publish("hello", "test.name")), asyncio.create_task(event.wait()), ), timeout=3, @@ -64,20 +69,22 @@ async def handler(msg): async def test_pattern_without_path( self, - consume_broker: RedisBroker, event: asyncio.Event, mock: MagicMock, ): + consume_broker = self.get_broker() + @consume_broker.subscriber(PubSub("test.*", pattern=True)) async def handler(msg): mock(msg) event.set() - async with consume_broker: - await consume_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(consume_broker.publish("hello", "test.name")), + asyncio.create_task(br.publish("hello", "test.name")), asyncio.create_task(event.wait()), ), timeout=3, @@ -89,23 +96,31 @@ async def handler(msg): @pytest.mark.redis() @pytest.mark.asyncio() class TestConsumeList: + def get_broker(self, apply_types: bool = False): + return RedisBroker(apply_types=apply_types) + + def patch_broker(self, broker): + return broker + async def test_consume_list( self, - broker: RedisBroker, event: asyncio.Event, queue: str, mock: MagicMock, ): - @broker.subscriber(list=queue) + consume_broker = self.get_broker() + + @consume_broker.subscriber(list=queue) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) 
as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(broker.publish("hello", list=queue)), + asyncio.create_task(br.publish("hello", list=queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -115,21 +130,23 @@ async def handler(msg): async def test_consume_list_native( self, - broker: RedisBroker, event: asyncio.Event, queue: str, mock: MagicMock, ): - @broker.subscriber(list=queue) + consume_broker = self.get_broker() + + @consume_broker.subscriber(list=queue) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(broker._connection.rpush(queue, "hello")), + asyncio.create_task(br._connection.rpush(queue, "hello")), asyncio.create_task(event.wait()), ), timeout=3, @@ -139,38 +156,45 @@ async def handler(msg): @pytest.mark.slow() async def test_consume_list_batch_with_one( - self, event: asyncio.Event, mock, queue: str, broker: RedisBroker + self, + queue: str, + event: asyncio.Event, + mock, ): - @broker.subscriber( - list=ListSub(queue, batch=True, max_records=1, polling_interval=0.01) + consume_broker = self.get_broker() + + @consume_broker.subscriber( + list=ListSub(queue, batch=True, polling_interval=0.01) ) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() - + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(broker.publish("hi", list=queue)), + asyncio.create_task(br.publish("hi", list=queue)), asyncio.create_task(event.wait()), ), timeout=3, ) - assert event.is_set() - mock.assert_called_once_with(["hi"]) + assert event.is_set() + mock.assert_called_once_with(["hi"]) @pytest.mark.slow() async def test_consume_list_batch_headers( self, queue: str, - full_broker: RedisBroker, event: asyncio.Event, mock, ): - @full_broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( + list=ListSub(queue, batch=True, polling_interval=0.01) + ) def subscriber(m, msg: RedisMessage): check = all( ( @@ -183,42 +207,55 @@ def subscriber(m, msg: RedisMessage): mock(check) event.set() - await full_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("", list=queue, headers={"custom": "1"}) + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( + ( + asyncio.create_task( + br.publish("", list=queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) - assert event.is_set() - mock.assert_called_once_with(True) + assert event.is_set() + mock.assert_called_once_with(True) @pytest.mark.slow() - async def test_consume_list_batch(self, queue: str, broker: RedisBroker): + async def test_consume_list_batch( + self, + queue: str, + ): + consume_broker = self.get_broker(apply_types=True) + msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) + @consume_broker.subscriber( + list=ListSub(queue, batch=True, polling_interval=0.01) + ) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - await broker.publish_batch(1, "hi", list=queue) + await br.publish_batch(1, "hi", list=queue) result, _ = await 
asyncio.wait( (asyncio.create_task(msgs_queue.get()),), timeout=3, ) - assert [{1, "hi"}] == [set(r.result()) for r in result] + assert [{1, "hi"}] == [set(r.result()) for r in result] @pytest.mark.slow() - async def test_consume_list_batch_complex(self, queue: str, broker: RedisBroker): + async def test_consume_list_batch_complex( + self, + queue: str, + ): + consume_broker = self.get_broker(apply_types=True) + from pydantic import BaseModel class Data(BaseModel): @@ -229,15 +266,16 @@ def __hash__(self): msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) + @consume_broker.subscriber( + list=ListSub(queue, batch=True, polling_interval=0.01) + ) async def handler(msg: List[Data]): await msgs_queue.put(msg) - broker._is_apply_types = True - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - await broker.publish_batch(Data(m="hi"), Data(m="again"), list=queue) + await br.publish_batch(Data(m="hi"), Data(m="again"), list=queue) result, _ = await asyncio.wait( (asyncio.create_task(msgs_queue.get()),), @@ -247,17 +285,24 @@ async def handler(msg: List[Data]): assert [{Data(m="hi"), Data(m="again")}] == [set(r.result()) for r in result] @pytest.mark.slow() - async def test_consume_list_batch_native(self, queue: str, broker: RedisBroker): + async def test_consume_list_batch_native( + self, + queue: str, + ): + consume_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(list=ListSub(queue, batch=True, polling_interval=0.01)) + @consume_broker.subscriber( + list=ListSub(queue, batch=True, polling_interval=0.01) + ) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - await broker._connection.rpush(queue, 1, "hi") + await br._connection.rpush(queue, 1, "hi") result, _ = await asyncio.wait( (asyncio.create_task(msgs_queue.get()),), @@ -270,25 +315,32 @@ async def handler(msg): @pytest.mark.redis() @pytest.mark.asyncio() class TestConsumeStream: + def get_broker(self, apply_types: bool = False): + return RedisBroker(apply_types=apply_types) + + def patch_broker(self, broker): + return broker + @pytest.mark.slow() async def test_consume_stream( self, - broker: RedisBroker, event: asyncio.Event, mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=10)) + consume_broker = self.get_broker() + + @consume_broker.subscriber(stream=StreamSub(queue, polling_interval=10)) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(broker.publish("hello", stream=queue)), + asyncio.create_task(br.publish("hello", stream=queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -299,23 +351,24 @@ async def handler(msg): @pytest.mark.slow() async def test_consume_stream_native( self, - broker: RedisBroker, event: asyncio.Event, mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=10)) + consume_broker = self.get_broker() + + @consume_broker.subscriber(stream=StreamSub(queue, polling_interval=10)) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( asyncio.create_task( - 
broker._connection.xadd(queue, {"message": "hello"}) + br._connection.xadd(queue, {"message": "hello"}) ), asyncio.create_task(event.wait()), ), @@ -327,22 +380,25 @@ async def handler(msg): @pytest.mark.slow() async def test_consume_stream_batch( self, - broker: RedisBroker, event: asyncio.Event, mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) + consume_broker = self.get_broker() + + @consume_broker.subscriber( + stream=StreamSub(queue, polling_interval=10, batch=True) + ) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( - asyncio.create_task(broker.publish("hello", stream=queue)), + asyncio.create_task(br.publish("hello", stream=queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -354,11 +410,12 @@ async def handler(msg): async def test_consume_stream_batch_headers( self, queue: str, - full_broker: RedisBroker, event: asyncio.Event, mock, ): - @full_broker.subscriber( + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( stream=StreamSub(queue, polling_interval=10, batch=True) ) def subscriber(m, msg: RedisMessage): @@ -373,26 +430,28 @@ def subscriber(m, msg: RedisMessage): mock(check) event.set() - await full_broker.start() - await asyncio.wait( - ( - asyncio.create_task( - full_broker.publish("", stream=queue, headers={"custom": "1"}) + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( + ( + asyncio.create_task( + br.publish("", stream=queue, headers={"custom": "1"}) + ), + asyncio.create_task(event.wait()), ), - asyncio.create_task(event.wait()), - ), - timeout=3, - ) + timeout=3, + ) - assert event.is_set() - mock.assert_called_once_with(True) + assert event.is_set() + mock.assert_called_once_with(True) @pytest.mark.slow() async def test_consume_stream_batch_complex( self, - broker: RedisBroker, queue, ): + consume_broker = self.get_broker(apply_types=True) + from pydantic import BaseModel class Data(BaseModel): @@ -400,15 +459,16 @@ class Data(BaseModel): msgs_queue = asyncio.Queue(maxsize=1) - @broker.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) + @consume_broker.subscriber( + stream=StreamSub(queue, polling_interval=10, batch=True) + ) async def handler(msg: List[Data]): await msgs_queue.put(msg) - broker._is_apply_types = True - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() - await broker.publish(Data(m="hi"), stream=queue) + await br.publish(Data(m="hi"), stream=queue) result, _ = await asyncio.wait( (asyncio.create_task(msgs_queue.get()),), @@ -420,23 +480,26 @@ async def handler(msg: List[Data]): @pytest.mark.slow() async def test_consume_stream_batch_native( self, - broker: RedisBroker, event: asyncio.Event, mock: MagicMock, queue, ): - @broker.subscriber(stream=StreamSub(queue, polling_interval=10, batch=True)) + consume_broker = self.get_broker() + + @consume_broker.subscriber( + stream=StreamSub(queue, polling_interval=10, batch=True) + ) async def handler(msg): mock(msg) event.set() - async with broker: - await broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() await asyncio.wait( ( asyncio.create_task( - broker._connection.xadd(queue, {"message": "hello"}) + br._connection.xadd(queue, {"message": "hello"}) ), asyncio.create_task(event.wait()), ), @@ -448,43 +511,50 @@ 
async def handler(msg): async def test_consume_group( self, queue: str, - full_broker: RedisBroker, ): - @full_broker.subscriber(stream=StreamSub(queue, group="group", consumer=queue)) + consume_broker = self.get_broker() + + @consume_broker.subscriber( + stream=StreamSub(queue, group="group", consumer=queue) + ) async def handler(msg: RedisMessage): ... - assert next(iter(full_broker._subscribers.values())).last_id == "$" + assert next(iter(consume_broker._subscribers.values())).last_id == "$" async def test_consume_group_with_last_id( self, queue: str, - full_broker: RedisBroker, ): - @full_broker.subscriber( + consume_broker = self.get_broker() + + @consume_broker.subscriber( stream=StreamSub(queue, group="group", consumer=queue, last_id="0") ) async def handler(msg: RedisMessage): ... - assert next(iter(full_broker._subscribers.values())).last_id == "0" + assert next(iter(consume_broker._subscribers.values())).last_id == "0" async def test_consume_nack( self, queue: str, - full_broker: RedisBroker, event: asyncio.Event, ): - @full_broker.subscriber(stream=StreamSub(queue, group="group", consumer=queue)) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( + stream=StreamSub(queue, group="group", consumer=queue) + ) async def handler(msg: RedisMessage): event.set() await msg.nack() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object(Redis, "xack", spy_decorator(Redis.xack)) as m: await asyncio.wait( ( - asyncio.create_task(full_broker.publish("hello", stream=queue)), + asyncio.create_task(br.publish("hello", stream=queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -497,20 +567,23 @@ async def handler(msg: RedisMessage): async def test_consume_ack( self, queue: str, - full_broker: RedisBroker, event: asyncio.Event, ): - @full_broker.subscriber(stream=StreamSub(queue, group="group", consumer=queue)) + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber( + stream=StreamSub(queue, group="group", consumer=queue) + ) async def handler(msg: RedisMessage): event.set() - async with full_broker: - await full_broker.start() + async with self.patch_broker(consume_broker) as br: + await br.start() with patch.object(Redis, "xack", spy_decorator(Redis.xack)) as m: await asyncio.wait( ( - asyncio.create_task(full_broker.publish("hello", stream=queue)), + asyncio.create_task(br.publish("hello", stream=queue)), asyncio.create_task(event.wait()), ), timeout=3, diff --git a/tests/brokers/redis/test_publish.py b/tests/brokers/redis/test_publish.py index 6210c3ddde..2c1f2b96ff 100644 --- a/tests/brokers/redis/test_publish.py +++ b/tests/brokers/redis/test_publish.py @@ -12,16 +12,20 @@ @pytest.mark.redis() @pytest.mark.asyncio() class TestPublish(BrokerPublishTestcase): + def get_broker(self, apply_types: bool = False): + return RedisBroker(apply_types=apply_types) + async def test_list_publisher( self, queue: str, - pub_broker: RedisBroker, event: asyncio.Event, mock: MagicMock, ): + pub_broker = self.get_broker() + @pub_broker.subscriber(list=queue) @pub_broker.publisher(list=queue + "resp") - async def m(): + async def m(msg): return "" @pub_broker.subscriber(list=queue + "resp") @@ -29,11 +33,12 @@ async def resp(msg): event.set() mock(msg) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", list=queue)), 
+ asyncio.create_task(br.publish("", list=queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -42,17 +47,22 @@ async def resp(msg): assert event.is_set() mock.assert_called_once_with("") - async def test_list_publish_batch(self, queue: str, broker: RedisBroker): + async def test_list_publish_batch( + self, + queue: str, + ): + pub_broker = self.get_broker() + msgs_queue = asyncio.Queue(maxsize=2) - @broker.subscriber(list=queue) + @pub_broker.subscriber(list=queue) async def handler(msg): await msgs_queue.put(msg) - async with broker: - await broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() - await broker.publish_batch(1, "hi", list=queue) + await br.publish_batch(1, "hi", list=queue) result, _ = await asyncio.wait( ( @@ -67,15 +77,16 @@ async def handler(msg): async def test_batch_list_publisher( self, queue: str, - pub_broker: RedisBroker, event: asyncio.Event, mock: MagicMock, ): + pub_broker = self.get_broker() + batch_list = ListSub(queue + "resp", batch=True) @pub_broker.subscriber(list=queue) @pub_broker.publisher(list=batch_list) - async def m(): + async def m(msg): return 1, 2, 3 @pub_broker.subscriber(list=batch_list) @@ -83,11 +94,12 @@ async def resp(msg): event.set() mock(msg) - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("", list=queue)), + asyncio.create_task(br.publish("", list=queue)), asyncio.create_task(event.wait()), ), timeout=3, @@ -99,10 +111,11 @@ async def resp(msg): async def test_publisher_with_maxlen( self, queue: str, - pub_broker: RedisBroker, event: asyncio.Event, mock: MagicMock, ): + pub_broker = self.get_broker() + stream = StreamSub(queue + "resp", maxlen=1) @pub_broker.subscriber(stream=queue) @@ -116,11 +129,12 @@ async def resp(msg): mock(msg) with patch.object(Redis, "xadd", spy_decorator(Redis.xadd)) as m: - async with pub_broker: - await pub_broker.start() + async with self.patch_broker(pub_broker) as br: + await br.start() + await asyncio.wait( ( - asyncio.create_task(pub_broker.publish("hi", stream=queue)), + asyncio.create_task(br.publish("hi", stream=queue)), asyncio.create_task(event.wait()), ), timeout=3, diff --git a/tests/brokers/redis/test_rpc.py b/tests/brokers/redis/test_rpc.py index 4006ef7d0f..c149d20d01 100644 --- a/tests/brokers/redis/test_rpc.py +++ b/tests/brokers/redis/test_rpc.py @@ -6,14 +6,20 @@ @pytest.mark.redis() class TestRPC(BrokerRPCTestcase, ReplyAndConsumeForbidden): + def get_broker(self, apply_types: bool = False): + return RedisBroker(apply_types=apply_types) + @pytest.mark.asyncio() - async def test_list_rpc(self, queue: str, rpc_broker: RedisBroker): + async def test_list_rpc(self, queue: str): + rpc_broker = self.get_broker() + @rpc_broker.subscriber(list=queue) async def m(m): # pragma: no cover return "1" - async with rpc_broker: - await rpc_broker.start() - r = await rpc_broker.publish("hello", list=queue, rpc_timeout=3, rpc=True) + async with self.patch_broker(rpc_broker) as br: + await br.start() + + r = await br.publish("hello", list=queue, rpc_timeout=3, rpc=True) assert r == "1" diff --git a/tests/brokers/redis/test_test_client.py b/tests/brokers/redis/test_test_client.py index 951d071fbe..ae6340ad7a 100644 --- a/tests/brokers/redis/test_test_client.py +++ b/tests/brokers/redis/test_test_client.py @@ -10,6 +10,14 @@ @pytest.mark.asyncio() class TestTestclient(BrokerTestclientTestcase): + test_class = TestRedisBroker + + def 
get_broker(self, apply_types: bool = False) -> RedisBroker: + return RedisBroker(apply_types=apply_types) + + def patch_broker(self, broker: RedisBroker) -> TestRedisBroker: + return TestRedisBroker(broker) + async def test_rpc_conflicts_reply(self, queue): async with TestRedisBroker(RedisBroker()) as br: with pytest.raises(SetupError): @@ -23,10 +31,11 @@ async def test_rpc_conflicts_reply(self, queue): @pytest.mark.redis() async def test_with_real_testclient( self, - broker: RedisBroker, queue: str, event: asyncio.Event, ): + broker = self.get_broker() + @broker.subscriber(queue) def subscriber(m): event.set() @@ -89,127 +98,131 @@ async def h2(): ... assert len(routes) == 2 - async def test_pub_sub_pattern( - self, - test_broker: RedisBroker, - ): - @test_broker.subscriber("test.{name}") + async def test_pub_sub_pattern(self): + broker = self.get_broker() + + @broker.subscriber("test.{name}") async def handler(msg): return msg - await test_broker.start() - - assert await test_broker.publish(1, "test.name.useless", rpc=True) == 1 - handler.mock.assert_called_once_with(1) + async with self.patch_broker(broker) as br: + assert await br.publish(1, "test.name.useless", rpc=True) == 1 + handler.mock.assert_called_once_with(1) async def test_list( self, - test_broker: RedisBroker, queue: str, ): - @test_broker.subscriber(list=queue) + broker = self.get_broker() + + @broker.subscriber(list=queue) async def handler(msg): return msg - await test_broker.start() - - assert await test_broker.publish(1, list=queue, rpc=True) == 1 - handler.mock.assert_called_once_with(1) + async with self.patch_broker(broker) as br: + assert await br.publish(1, list=queue, rpc=True) == 1 + handler.mock.assert_called_once_with(1) async def test_batch_pub_by_default_pub( self, - test_broker: RedisBroker, queue: str, ): - @test_broker.subscriber(list=ListSub(queue, batch=True)) - async def m(): + broker = self.get_broker() + + @broker.subscriber(list=ListSub(queue, batch=True)) + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", list=queue) - m.mock.assert_called_once_with(["hello"]) + async with self.patch_broker(broker) as br: + await br.publish("hello", list=queue) + m.mock.assert_called_once_with(["hello"]) async def test_batch_pub_by_pub_batch( self, - test_broker: RedisBroker, queue: str, ): - @test_broker.subscriber(list=ListSub(queue, batch=True)) - async def m(): + broker = self.get_broker() + + @broker.subscriber(list=ListSub(queue, batch=True)) + async def m(msg): pass - await test_broker.start() - await test_broker.publish_batch("hello", list=queue) - m.mock.assert_called_once_with(["hello"]) + async with self.patch_broker(broker) as br: + await br.publish_batch("hello", list=queue) + m.mock.assert_called_once_with(["hello"]) async def test_batch_publisher_mock( self, - test_broker: RedisBroker, queue: str, ): + broker = self.get_broker() + batch_list = ListSub(queue + "1", batch=True) - publisher = test_broker.publisher(list=batch_list) + publisher = broker.publisher(list=batch_list) @publisher - @test_broker.subscriber(queue) - async def m(): + @broker.subscriber(queue) + async def m(msg): return 1, 2, 3 - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with("hello") - publisher.mock.assert_called_once_with([1, 2, 3]) + async with self.patch_broker(broker) as br: + await br.publish("hello", queue) + m.mock.assert_called_once_with("hello") + publisher.mock.assert_called_once_with([1, 2, 3]) async def test_stream( self, - 
test_broker: RedisBroker, queue: str, ): - @test_broker.subscriber(stream=queue) + broker = self.get_broker() + + @broker.subscriber(stream=queue) async def handler(msg): return msg - await test_broker.start() - - assert await test_broker.publish(1, stream=queue, rpc=True) == 1 - handler.mock.assert_called_once_with(1) + async with self.patch_broker(broker) as br: + assert await br.publish(1, stream=queue, rpc=True) == 1 + handler.mock.assert_called_once_with(1) async def test_stream_batch_pub_by_default_pub( self, - test_broker: RedisBroker, queue: str, ): - @test_broker.subscriber(stream=StreamSub(queue, batch=True)) - async def m(): + broker = self.get_broker() + + @broker.subscriber(stream=StreamSub(queue, batch=True)) + async def m(msg): pass - await test_broker.start() - await test_broker.publish("hello", stream=queue) - m.mock.assert_called_once_with(["hello"]) + async with self.patch_broker(broker) as br: + await br.publish("hello", stream=queue) + m.mock.assert_called_once_with(["hello"]) async def test_stream_publisher( self, - test_broker: RedisBroker, queue: str, ): + broker = self.get_broker() + batch_stream = StreamSub(queue + "1") - publisher = test_broker.publisher(stream=batch_stream) + publisher = broker.publisher(stream=batch_stream) @publisher - @test_broker.subscriber(queue) - async def m(): + @broker.subscriber(queue) + async def m(msg): return 1, 2, 3 - await test_broker.start() - await test_broker.publish("hello", queue) - m.mock.assert_called_once_with("hello") - publisher.mock.assert_called_once_with([1, 2, 3]) + async with self.patch_broker(broker) as br: + await br.publish("hello", queue) + m.mock.assert_called_once_with("hello") + publisher.mock.assert_called_once_with([1, 2, 3]) async def test_publish_to_none( self, - test_broker: RedisBroker, queue: str, ): - await test_broker.start() - with pytest.raises(ValueError): # noqa: PT011 - await test_broker.publish("hello") + broker = self.get_broker() + + async with self.patch_broker(broker) as br: + with pytest.raises(ValueError): # noqa: PT011 + await br.publish("hello") diff --git a/tests/conftest.py b/tests/conftest.py index d15d9cb7a2..92778c660a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ import asyncio from unittest.mock import AsyncMock, MagicMock +from uuid import uuid4 import pytest from typer.testing import CliRunner @@ -18,6 +19,11 @@ def pytest_collection_modifyitems(items): item.add_marker("all") +@pytest.fixture() +def queue(): + return str(uuid4()) + + @pytest.fixture() def event(): return asyncio.Event() diff --git a/tests/docs/confluent/__init__.py b/tests/docs/confluent/__init__.py index e69de29bb2..c4a1803708 100644 --- a/tests/docs/confluent/__init__.py +++ b/tests/docs/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/docs/kafka/__init__.py b/tests/docs/kafka/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/docs/kafka/__init__.py +++ b/tests/docs/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/docs/nats/__init__.py b/tests/docs/nats/__init__.py index e69de29bb2..87ead90ee6 100644 --- a/tests/docs/nats/__init__.py +++ b/tests/docs/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/docs/rabbit/__init__.py b/tests/docs/rabbit/__init__.py index e69de29bb2..ebec43fcd5 100644 --- a/tests/docs/rabbit/__init__.py +++ b/tests/docs/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + 
+pytest.importorskip("aio_pika") diff --git a/tests/docs/redis/__init__.py b/tests/docs/redis/__init__.py index e69de29bb2..4752ef19b1 100644 --- a/tests/docs/redis/__init__.py +++ b/tests/docs/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/marks.py b/tests/marks.py index 4a41446988..80bb1cde5c 100644 --- a/tests/marks.py +++ b/tests/marks.py @@ -4,12 +4,22 @@ from faststream._compat import PYDANTIC_V2 -python39 = pytest.mark.skipif(sys.version_info < (3, 9), reason="requires python3.9+") +python39 = pytest.mark.skipif( + sys.version_info < (3, 9), + reason="requires python3.9+", +) python310 = pytest.mark.skipif( - sys.version_info < (3, 10), reason="requires python3.10+" + sys.version_info < (3, 10), + reason="requires python3.10+", ) -pydantic_v1 = pytest.mark.skipif(PYDANTIC_V2, reason="requires PydanticV2") +pydantic_v1 = pytest.mark.skipif( + PYDANTIC_V2, + reason="requires PydanticV2", +) -pydantic_v2 = pytest.mark.skipif(not PYDANTIC_V2, reason="requires PydanticV1") +pydantic_v2 = pytest.mark.skipif( + not PYDANTIC_V2, + reason="requires PydanticV1", +) diff --git a/tests/opentelemetry/__init__.py b/tests/opentelemetry/__init__.py new file mode 100644 index 0000000000..75763c2fee --- /dev/null +++ b/tests/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("opentelemetry") diff --git a/tests/opentelemetry/basic.py b/tests/opentelemetry/basic.py new file mode 100644 index 0000000000..794a09ee6d --- /dev/null +++ b/tests/opentelemetry/basic.py @@ -0,0 +1,357 @@ +import asyncio +from typing import Any, ClassVar, Dict, List, Optional, Tuple, Type, cast +from unittest.mock import Mock + +import pytest +from dirty_equals import IsFloat, IsUUID +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics._internal.point import Metric +from opentelemetry.sdk.metrics.export import InMemoryMetricReader +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import Span, TracerProvider +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.semconv.trace import SpanAttributes as SpanAttr +from opentelemetry.trace import SpanKind + +from faststream.broker.core.usecase import BrokerUsecase +from faststream.opentelemetry.consts import ( + ERROR_TYPE, + MESSAGING_DESTINATION_PUBLISH_NAME, +) +from faststream.opentelemetry.middleware import MessageAction as Action +from faststream.opentelemetry.middleware import TelemetryMiddleware + + +@pytest.mark.asyncio() +class LocalTelemetryTestcase: + messaging_system: str + include_messages_counters: bool + broker_class: Type[BrokerUsecase] + timeout: int = 3 + subscriber_kwargs: ClassVar[Dict[str, Any]] = {} + resource: Resource = Resource.create(attributes={"service.name": "faststream.test"}) + + telemetry_middleware_class: TelemetryMiddleware + + def patch_broker(self, broker: BrokerUsecase) -> BrokerUsecase: + return broker + + def destination_name(self, queue: str) -> str: + return queue + + @staticmethod + def get_spans(exporter: InMemorySpanExporter) -> List[Span]: + spans = cast(Tuple[Span, ...], exporter.get_finished_spans()) + return sorted(spans, key=lambda s: s.start_time) + + @staticmethod + def get_metrics( + reader: InMemoryMetricReader, + ) -> List[Metric]: + """Get sorted metrics. 
+ + Return order: + - messaging.process.duration + - messaging.process.messages + - messaging.publish.duration + - messaging.publish.messages + """ + metrics = reader.get_metrics_data() + metrics = metrics.resource_metrics[0].scope_metrics[0].metrics + metrics = sorted(metrics, key=lambda m: m.name) + return cast(List[Metric], metrics) + + @pytest.fixture() + def tracer_provider(self) -> TracerProvider: + tracer_provider = TracerProvider(resource=self.resource) + return tracer_provider + + @pytest.fixture() + def trace_exporter(self, tracer_provider: TracerProvider) -> InMemorySpanExporter: + exporter = InMemorySpanExporter() + tracer_provider.add_span_processor(SimpleSpanProcessor(exporter)) + return exporter + + @pytest.fixture() + def metric_reader(self) -> InMemoryMetricReader: + return InMemoryMetricReader() + + @pytest.fixture() + def meter_provider(self, metric_reader: InMemoryMetricReader) -> MeterProvider: + return MeterProvider(metric_readers=(metric_reader,), resource=self.resource) + + def assert_span( + self, + span: Span, + action: str, + queue: str, + msg: str, + parent_span_id: Optional[str] = None, + ) -> None: + attrs = span.attributes + assert attrs[SpanAttr.MESSAGING_SYSTEM] == self.messaging_system, attrs[ + SpanAttr.MESSAGING_SYSTEM + ] + assert attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID, attrs[ + SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID + ] + assert span.name == f"{self.destination_name(queue)} {action}", span.name + assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER), span.kind + + if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH): + assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == queue, attrs[ + SpanAttr.MESSAGING_DESTINATION_NAME + ] + + if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS): + assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == queue, attrs[ + MESSAGING_DESTINATION_PUBLISH_NAME + ] + assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsUUID, attrs[ + SpanAttr.MESSAGING_MESSAGE_ID + ] + + if action == Action.PROCESS: + assert attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES] == len( + msg + ), attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES] + assert attrs[SpanAttr.MESSAGING_OPERATION] == action, attrs[ + SpanAttr.MESSAGING_OPERATION + ] + + if action == Action.PUBLISH: + assert attrs[SpanAttr.MESSAGING_OPERATION] == action, attrs[ + SpanAttr.MESSAGING_OPERATION + ] + + if parent_span_id: + assert span.parent.span_id == parent_span_id, span.parent.span_id + + def assert_metrics( + self, + metrics: List[Metric], + count: int = 1, + error_type: Optional[str] = None, + ) -> None: + if self.include_messages_counters: + assert len(metrics) == 4 + proc_dur, proc_msg, pub_dur, pub_msg = metrics + + assert proc_msg.data.data_points[0].value == count + assert pub_msg.data.data_points[0].value == count + + else: + assert len(metrics) == 2 + proc_dur, pub_dur = metrics + + if error_type: + assert proc_dur.data.data_points[0].attributes[ERROR_TYPE] == error_type + + assert proc_dur.data.data_points[0].count == 1 + assert proc_dur.data.data_points[0].sum == IsFloat + + assert pub_dur.data.data_points[0].count == 1 + assert pub_dur.data.data_points[0].sum == IsFloat + + async def test_subscriber_create_publish_process_span( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class(tracer_provider=tracer_provider) + broker = 
self.broker_class(middlewares=(mid,)) + + @broker.subscriber(queue, **self.subscriber_kwargs) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + msg = "start" + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish(msg, queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + create, publish, process = self.get_spans(trace_exporter) + parent_span_id = create.context.span_id + + self.assert_span(create, Action.CREATE, queue, msg) + self.assert_span(publish, Action.PUBLISH, queue, msg, parent_span_id) + self.assert_span(process, Action.PROCESS, queue, msg, parent_span_id) + + assert event.is_set() + mock.assert_called_once_with(msg) + + async def test_chain_subscriber_publisher( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class(tracer_provider=tracer_provider) + broker = self.broker_class(middlewares=(mid,)) + + first_queue = queue + second_queue = queue + "2" + + @broker.subscriber(first_queue, **self.subscriber_kwargs) + @broker.publisher(second_queue) + async def handler1(m): + return m + + @broker.subscriber(second_queue, **self.subscriber_kwargs) + async def handler2(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + msg = "start" + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish(msg, queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + spans = self.get_spans(trace_exporter) + create, pub1, proc1, pub2, proc2 = spans + parent_span_id = create.context.span_id + + self.assert_span(create, Action.CREATE, first_queue, msg) + self.assert_span(pub1, Action.PUBLISH, first_queue, msg, parent_span_id) + self.assert_span(proc1, Action.PROCESS, first_queue, msg, parent_span_id) + self.assert_span(pub2, Action.PUBLISH, second_queue, msg, proc1.context.span_id) + self.assert_span(proc2, Action.PROCESS, second_queue, msg, parent_span_id) + + assert ( + create.start_time + < pub1.start_time + < proc1.start_time + < pub2.start_time + < proc2.start_time + ) + + assert event.is_set() + mock.assert_called_once_with(msg) + + async def test_no_trace_context_create_process_span( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class(tracer_provider=tracer_provider) + broker = self.broker_class(middlewares=(mid,)) + + @broker.subscriber(queue, **self.subscriber_kwargs) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + msg = "start" + + async with broker: + await broker.start() + broker._middlewares = () + tasks = ( + asyncio.create_task(broker.publish(msg, queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + create, process = self.get_spans(trace_exporter) + parent_span_id = create.context.span_id + + self.assert_span(create, Action.CREATE, queue, msg) + self.assert_span(process, Action.PROCESS, queue, msg, parent_span_id) + + assert event.is_set() + mock.assert_called_once_with(msg) + + async def test_metrics( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + meter_provider: MeterProvider, + metric_reader: InMemoryMetricReader, + ): + mid = self.telemetry_middleware_class(meter_provider=meter_provider) + broker = 
self.broker_class(middlewares=(mid,)) + + @broker.subscriber(queue, **self.subscriber_kwargs) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + msg = "start" + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish(msg, queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + metrics = self.get_metrics(metric_reader) + + self.assert_metrics(metrics) + + assert event.is_set() + mock.assert_called_once_with(msg) + + async def test_error_metrics( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + meter_provider: MeterProvider, + metric_reader: InMemoryMetricReader, + ): + mid = self.telemetry_middleware_class(meter_provider=meter_provider) + broker = self.broker_class(middlewares=(mid,)) + expected_value_type = "ValueError" + + @broker.subscriber(queue, **self.subscriber_kwargs) + async def handler(m): + try: + raise ValueError + finally: + mock(m) + event.set() + + broker = self.patch_broker(broker) + msg = "start" + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish(msg, queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + metrics = self.get_metrics(metric_reader) + + self.assert_metrics(metrics, error_type=expected_value_type) + + assert event.is_set() + mock.assert_called_once_with(msg) diff --git a/tests/opentelemetry/confluent/__init__.py b/tests/opentelemetry/confluent/__init__.py new file mode 100644 index 0000000000..c4a1803708 --- /dev/null +++ b/tests/opentelemetry/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/opentelemetry/confluent/test_confluent.py b/tests/opentelemetry/confluent/test_confluent.py new file mode 100644 index 0000000000..3877d488ba --- /dev/null +++ b/tests/opentelemetry/confluent/test_confluent.py @@ -0,0 +1,130 @@ +import asyncio +from typing import Any, ClassVar, Dict, Optional +from unittest.mock import Mock + +import pytest +from dirty_equals import IsStr, IsUUID +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import InMemoryMetricReader +from opentelemetry.sdk.trace import Span, TracerProvider +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.semconv.trace import SpanAttributes as SpanAttr +from opentelemetry.trace import SpanKind + +from faststream.confluent import KafkaBroker +from faststream.confluent.opentelemetry import KafkaTelemetryMiddleware +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME +from faststream.opentelemetry.middleware import MessageAction as Action +from tests.brokers.confluent.test_consume import TestConsume +from tests.brokers.confluent.test_publish import TestPublish + +from ..basic import LocalTelemetryTestcase + + +@pytest.mark.confluent() +class TestTelemetry(LocalTelemetryTestcase): + messaging_system = "kafka" + include_messages_counters = True + timeout: int = 10 + subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} + broker_class = KafkaBroker + telemetry_middleware_class = KafkaTelemetryMiddleware + + def assert_span( + self, + span: Span, + action: str, + queue: str, + msg: str, + parent_span_id: Optional[str] = None, + ) -> None: + attrs = span.attributes + assert attrs[SpanAttr.MESSAGING_SYSTEM] == self.messaging_system + assert 
attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID + assert span.name == f"{self.destination_name(queue)} {action}" + assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER) + + if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH): + assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == queue + + if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS): + assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == queue + assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsStr(regex=r"0-.+") + assert attrs[SpanAttr.MESSAGING_KAFKA_DESTINATION_PARTITION] == 0 + assert attrs[SpanAttr.MESSAGING_KAFKA_MESSAGE_OFFSET] == 0 + + if action == Action.PROCESS: + assert attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES] == len(msg) + assert attrs[SpanAttr.MESSAGING_OPERATION] == action + + if action == Action.PUBLISH: + assert attrs[SpanAttr.MESSAGING_OPERATION] == action + + if parent_span_id: + assert span.parent.span_id == parent_span_id + + async def test_batch( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + meter_provider: MeterProvider, + metric_reader: InMemoryMetricReader, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class( + meter_provider=meter_provider, tracer_provider=tracer_provider + ) + broker = self.broker_class(middlewares=(mid,)) + expected_msg_count = 3 + + @broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish_batch(1, "hi", 3, topic=queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + metrics = self.get_metrics(metric_reader) + spans = self.get_spans(trace_exporter) + _, publish, process = spans + + assert ( + publish.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + assert ( + process.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + self.assert_metrics(metrics, count=expected_msg_count) + + assert event.is_set() + mock.assert_called_once_with([1, "hi", 3]) + + +@pytest.mark.confluent() +class TestPublishWithTelemetry(TestPublish): + def get_broker(self, apply_types: bool = False): + return KafkaBroker( + middlewares=(KafkaTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.confluent() +class TestConsumeWithTelemetry(TestConsume): + def get_broker(self, apply_types: bool = False): + return KafkaBroker( + middlewares=(KafkaTelemetryMiddleware(),), + apply_types=apply_types, + ) diff --git a/tests/opentelemetry/kafka/__init__.py b/tests/opentelemetry/kafka/__init__.py new file mode 100644 index 0000000000..bd6bc708fc --- /dev/null +++ b/tests/opentelemetry/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/opentelemetry/kafka/test_kafka.py b/tests/opentelemetry/kafka/test_kafka.py new file mode 100644 index 0000000000..2142825098 --- /dev/null +++ b/tests/opentelemetry/kafka/test_kafka.py @@ -0,0 +1,128 @@ +import asyncio +from typing import Optional +from unittest.mock import Mock + +import pytest +from dirty_equals import IsStr, IsUUID +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import InMemoryMetricReader +from opentelemetry.sdk.trace import Span, TracerProvider +from opentelemetry.sdk.trace.export.in_memory_span_exporter 
import InMemorySpanExporter +from opentelemetry.semconv.trace import SpanAttributes as SpanAttr +from opentelemetry.trace import SpanKind + +from faststream.kafka import KafkaBroker +from faststream.kafka.opentelemetry import KafkaTelemetryMiddleware +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME +from faststream.opentelemetry.middleware import MessageAction as Action +from tests.brokers.kafka.test_consume import TestConsume +from tests.brokers.kafka.test_publish import TestPublish + +from ..basic import LocalTelemetryTestcase + + +@pytest.mark.kafka() +class TestTelemetry(LocalTelemetryTestcase): + messaging_system = "kafka" + include_messages_counters = True + broker_class = KafkaBroker + telemetry_middleware_class = KafkaTelemetryMiddleware + + def assert_span( + self, + span: Span, + action: str, + queue: str, + msg: str, + parent_span_id: Optional[str] = None, + ) -> None: + attrs = span.attributes + assert attrs[SpanAttr.MESSAGING_SYSTEM] == self.messaging_system + assert attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID + assert span.name == f"{self.destination_name(queue)} {action}" + assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER) + + if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH): + assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == queue + + if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS): + assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == queue + assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsStr(regex=r"0-.+") + assert attrs[SpanAttr.MESSAGING_KAFKA_DESTINATION_PARTITION] == 0 + assert attrs[SpanAttr.MESSAGING_KAFKA_MESSAGE_OFFSET] == 0 + + if action == Action.PROCESS: + assert attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES] == len(msg) + assert attrs[SpanAttr.MESSAGING_OPERATION] == action + + if action == Action.PUBLISH: + assert attrs[SpanAttr.MESSAGING_OPERATION] == action + + if parent_span_id: + assert span.parent.span_id == parent_span_id + + async def test_batch( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + meter_provider: MeterProvider, + metric_reader: InMemoryMetricReader, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class( + meter_provider=meter_provider, tracer_provider=tracer_provider + ) + broker = self.broker_class(middlewares=(mid,)) + expected_msg_count = 3 + + @broker.subscriber(queue, batch=True, **self.subscriber_kwargs) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish_batch(1, "hi", 3, topic=queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + metrics = self.get_metrics(metric_reader) + spans = self.get_spans(trace_exporter) + _, publish, process = spans + + assert ( + publish.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + assert ( + process.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + self.assert_metrics(metrics, count=expected_msg_count) + + assert event.is_set() + mock.assert_called_once_with([1, "hi", 3]) + + +@pytest.mark.kafka() +class TestPublishWithTelemetry(TestPublish): + def get_broker(self, apply_types: bool = False): + return KafkaBroker( + middlewares=(KafkaTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.kafka() +class 
TestConsumeWithTelemetry(TestConsume): + def get_broker(self, apply_types: bool = False): + return KafkaBroker( + middlewares=(KafkaTelemetryMiddleware(),), + apply_types=apply_types, + ) diff --git a/tests/opentelemetry/nats/__init__.py b/tests/opentelemetry/nats/__init__.py new file mode 100644 index 0000000000..87ead90ee6 --- /dev/null +++ b/tests/opentelemetry/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/opentelemetry/nats/test_nats.py b/tests/opentelemetry/nats/test_nats.py new file mode 100644 index 0000000000..b886e46d8f --- /dev/null +++ b/tests/opentelemetry/nats/test_nats.py @@ -0,0 +1,103 @@ +import asyncio +from unittest.mock import Mock + +import pytest +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import InMemoryMetricReader +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.semconv.trace import SpanAttributes as SpanAttr + +from faststream.nats import JStream, NatsBroker, PullSub +from faststream.nats.opentelemetry import NatsTelemetryMiddleware +from tests.brokers.nats.test_consume import TestConsume +from tests.brokers.nats.test_publish import TestPublish + +from ..basic import LocalTelemetryTestcase + + +@pytest.fixture() +def stream(queue): + return JStream(queue) + + +@pytest.mark.nats() +class TestTelemetry(LocalTelemetryTestcase): + messaging_system = "nats" + include_messages_counters = True + broker_class = NatsBroker + telemetry_middleware_class = NatsTelemetryMiddleware + + async def test_batch( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + stream: JStream, + meter_provider: MeterProvider, + metric_reader: InMemoryMetricReader, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class( + meter_provider=meter_provider, tracer_provider=tracer_provider + ) + broker = self.broker_class(middlewares=(mid,)) + expected_msg_count = 3 + + @broker.subscriber( + queue, + stream=stream, + pull_sub=PullSub(3, batch=True), + **self.subscriber_kwargs, + ) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish(1, queue)), + asyncio.create_task(broker.publish("hi", queue)), + asyncio.create_task(broker.publish(3, queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + metrics = self.get_metrics(metric_reader) + proc_dur, proc_msg, pub_dur, pub_msg = metrics + spans = self.get_spans(trace_exporter) + process = spans[-1] + + assert ( + process.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + assert proc_msg.data.data_points[0].value == expected_msg_count + assert pub_msg.data.data_points[0].value == expected_msg_count + assert proc_dur.data.data_points[0].count == 1 + assert pub_dur.data.data_points[0].count == expected_msg_count + + assert event.is_set() + mock.assert_called_once_with([1, "hi", 3]) + + +@pytest.mark.nats() +class TestPublishWithTelemetry(TestPublish): + def get_broker(self, apply_types: bool = False): + return NatsBroker( + middlewares=(NatsTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.nats() +class TestConsumeWithTelemetry(TestConsume): + def get_broker(self, apply_types: bool = False): + return NatsBroker( + middlewares=(NatsTelemetryMiddleware(),), + 
apply_types=apply_types, + ) diff --git a/tests/opentelemetry/rabbit/__init__.py b/tests/opentelemetry/rabbit/__init__.py new file mode 100644 index 0000000000..ebec43fcd5 --- /dev/null +++ b/tests/opentelemetry/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/opentelemetry/rabbit/test_rabbit.py b/tests/opentelemetry/rabbit/test_rabbit.py new file mode 100644 index 0000000000..120ac3cd1c --- /dev/null +++ b/tests/opentelemetry/rabbit/test_rabbit.py @@ -0,0 +1,83 @@ +from typing import Optional + +import pytest +from dirty_equals import IsInt, IsUUID +from opentelemetry.sdk.trace import Span +from opentelemetry.semconv.trace import SpanAttributes as SpanAttr +from opentelemetry.trace import SpanKind + +from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME +from faststream.opentelemetry.middleware import MessageAction as Action +from faststream.rabbit import RabbitBroker, RabbitExchange +from faststream.rabbit.opentelemetry import RabbitTelemetryMiddleware +from tests.brokers.rabbit.test_consume import TestConsume +from tests.brokers.rabbit.test_publish import TestPublish + +from ..basic import LocalTelemetryTestcase + + +@pytest.fixture() +def exchange(queue): + return RabbitExchange(name=queue) + + +@pytest.mark.rabbit() +class TestTelemetry(LocalTelemetryTestcase): + messaging_system = "rabbitmq" + include_messages_counters = False + broker_class = RabbitBroker + telemetry_middleware_class = RabbitTelemetryMiddleware + + def destination_name(self, queue: str) -> str: + return f"default.{queue}" + + def assert_span( + self, + span: Span, + action: str, + queue: str, + msg: str, + parent_span_id: Optional[str] = None, + ) -> None: + attrs = span.attributes + assert attrs[SpanAttr.MESSAGING_SYSTEM] == self.messaging_system + assert attrs[SpanAttr.MESSAGING_MESSAGE_CONVERSATION_ID] == IsUUID + assert attrs[SpanAttr.MESSAGING_RABBITMQ_DESTINATION_ROUTING_KEY] == queue + assert span.name == f"{self.destination_name(queue)} {action}" + assert span.kind in (SpanKind.CONSUMER, SpanKind.PRODUCER) + + if span.kind == SpanKind.PRODUCER and action in (Action.CREATE, Action.PUBLISH): + assert attrs[SpanAttr.MESSAGING_DESTINATION_NAME] == "" + + if span.kind == SpanKind.CONSUMER and action in (Action.CREATE, Action.PROCESS): + assert attrs[MESSAGING_DESTINATION_PUBLISH_NAME] == "" + assert attrs["messaging.rabbitmq.message.delivery_tag"] == IsInt + assert attrs[SpanAttr.MESSAGING_MESSAGE_ID] == IsUUID + + if action == Action.PROCESS: + assert attrs[SpanAttr.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES] == len(msg) + assert attrs[SpanAttr.MESSAGING_OPERATION] == action + + if action == Action.PUBLISH: + assert attrs[SpanAttr.MESSAGING_OPERATION] == action + + if parent_span_id: + assert span.parent.span_id == parent_span_id + + +@pytest.mark.rabbit() +class TestPublishWithTelemetry(TestPublish): + def get_broker(self, apply_types: bool = False): + return RabbitBroker( + middlewares=(RabbitTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.rabbit() +class TestConsumeWithTelemetry(TestConsume): + def get_broker(self, apply_types: bool = False): + return RabbitBroker( + middlewares=(RabbitTelemetryMiddleware(),), + apply_types=apply_types, + ) diff --git a/tests/opentelemetry/redis/__init__.py b/tests/opentelemetry/redis/__init__.py new file mode 100644 index 0000000000..4752ef19b1 --- /dev/null +++ b/tests/opentelemetry/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git 
a/tests/opentelemetry/redis/test_redis.py b/tests/opentelemetry/redis/test_redis.py new file mode 100644 index 0000000000..71e079cbac --- /dev/null +++ b/tests/opentelemetry/redis/test_redis.py @@ -0,0 +1,112 @@ +import asyncio +from unittest.mock import Mock + +import pytest +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import InMemoryMetricReader +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.semconv.trace import SpanAttributes as SpanAttr + +from faststream.redis import ListSub, RedisBroker +from faststream.redis.opentelemetry import RedisTelemetryMiddleware +from tests.brokers.redis.test_consume import ( + TestConsume, + TestConsumeList, + TestConsumeStream, +) +from tests.brokers.redis.test_publish import TestPublish + +from ..basic import LocalTelemetryTestcase + + +@pytest.mark.redis() +class TestTelemetry(LocalTelemetryTestcase): + messaging_system = "redis" + include_messages_counters = True + broker_class = RedisBroker + telemetry_middleware_class = RedisTelemetryMiddleware + + async def test_batch( + self, + event: asyncio.Event, + queue: str, + mock: Mock, + meter_provider: MeterProvider, + metric_reader: InMemoryMetricReader, + tracer_provider: TracerProvider, + trace_exporter: InMemorySpanExporter, + ): + mid = self.telemetry_middleware_class( + meter_provider=meter_provider, tracer_provider=tracer_provider + ) + broker = self.broker_class(middlewares=(mid,)) + expected_msg_count = 3 + + @broker.subscriber(list=ListSub(queue, batch=True), **self.subscriber_kwargs) + async def handler(m): + mock(m) + event.set() + + broker = self.patch_broker(broker) + + async with broker: + await broker.start() + tasks = ( + asyncio.create_task(broker.publish_batch(1, "hi", 3, list=queue)), + asyncio.create_task(event.wait()), + ) + await asyncio.wait(tasks, timeout=self.timeout) + + metrics = self.get_metrics(metric_reader) + spans = self.get_spans(trace_exporter) + _, publish, process = spans + + assert ( + publish.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + assert ( + process.attributes[SpanAttr.MESSAGING_BATCH_MESSAGE_COUNT] + == expected_msg_count + ) + self.assert_metrics(metrics, count=expected_msg_count) + + assert event.is_set() + mock.assert_called_once_with([1, "hi", 3]) + + +@pytest.mark.redis() +class TestPublishWithTelemetry(TestPublish): + def get_broker(self, apply_types: bool = False): + return RedisBroker( + middlewares=(RedisTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.redis() +class TestConsumeWithTelemetry(TestConsume): + def get_broker(self, apply_types: bool = False): + return RedisBroker( + middlewares=(RedisTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.redis() +class TestConsumeListWithTelemetry(TestConsumeList): + def get_broker(self, apply_types: bool = False): + return RedisBroker( + middlewares=(RedisTelemetryMiddleware(),), + apply_types=apply_types, + ) + + +@pytest.mark.redis() +class TestConsumeStreamWithTelemetry(TestConsumeStream): + def get_broker(self, apply_types: bool = False): + return RedisBroker( + middlewares=(RedisTelemetryMiddleware(),), + apply_types=apply_types, + ) From 13e46e9a4b04d8632334aa901002872221c549ce Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Sun, 19 May 2024 12:34:39 +0000 
Subject: [PATCH 16/43] Update Release Notes for 0.5.7 (#1453) Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> --- docs/docs/en/release.md | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index b01f54aa22..ce6828b2fa 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,45 @@ hide: --- # Release Notes +## 0.5.7 + +### What's Changed + +Finally, FastStream supports [OpenTelemetry](https://opentelemetry.io/) in a native way to collect the full trace of your services! Big thanks for @draincoder for that! + +First of all you need to install required dependencies to support OpenTelemetry: + +```bash +pip install faststream[otel] +``` + +Then you can just add a middleware for your broker and that's it! + +```python +from faststream import FastStream +from faststream.nats import NatsBroker +from faststream.nats.opentelemetry import NatsTelemetryMiddleware + +broker = NatsBroker( + middlewares=( + NatsTelemetryMiddleware(), + ) +) +app = FastStream(broker) +``` + +To find detailt information just visit our documentation aboout [telemetry](https://faststream.airt.ai/latest/getting-started/opentelemetry/) + +P.S. The release includes basic OpenTelemetry support - messages tracing & basic metrics. Baggage support and correct spans linking in batch processing case will be added soon. + +* fix: serialize TestClient rpc output to mock the real message by @Lancetnik in https://github.com/airtai/faststream/pull/1452 +* feature (#916): Observability by @draincoder in https://github.com/airtai/faststream/pull/1398 + +### New Contributors +* @draincoder made their first contribution in https://github.com/airtai/faststream/pull/1398 + +**Full Changelog**: https://github.com/airtai/faststream/compare/0.5.6...0.5.7 + ## 0.5.6 ### What's Changed From 2d224ee5fc95fd0bc2eaeb0c446afd784b2a1d0b Mon Sep 17 00:00:00 2001 From: andreaimprovised <1066590+andreaimprovised@users.noreply.github.com> Date: Mon, 20 May 2024 22:13:06 -0700 Subject: [PATCH 17/43] fix: when headers() returns None in AsyncConfluentParser, replace it with an empty tuple (#1460) --- faststream/confluent/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py index e743c96e6b..a093a16da7 100644 --- a/faststream/confluent/parser.py +++ b/faststream/confluent/parser.py @@ -20,7 +20,7 @@ async def parse_message( message: "Message", ) -> "StreamMessage[Message]": """Parses a Kafka message.""" - headers = _parse_msg_headers(message.headers()) + headers = _parse_msg_headers(message.headers() or ()) body = message.value() offset = message.offset() @@ -53,7 +53,7 @@ async def parse_message_batch( for m in message: body.append(m.value()) - batch_headers.append(_parse_msg_headers(m.headers())) + batch_headers.append(_parse_msg_headers(m.headers() or ())) headers = next(iter(batch_headers), {}) From 6c667ff6caf420c1de4f2b94e075ac930060e7e0 Mon Sep 17 00:00:00 2001 From: sheldy <85823514+sheldygg@users.noreply.github.com> Date: Wed, 22 May 2024 23:20:35 +0200 Subject: [PATCH 18/43] Implement Kv/Obj watch. (#1383) * Implement Kv/Obj watch. * Ruff * Override AsyncAPI schema for kv/obj watchers. Remove `kv_watch`/`obj_watch` from basic class. * Remove comments * refactor: split NATS Subscribers to specific classes * Tests: 1. AsyncAPI (KV, OS) 2. broker.key_value and broker.object_storage cache instances 3. 
KV and OS watcher * Fix tests * Fix OS/KV declarer test * Update subscriber signature for Router and FastAPI integration * chore: merge main * fix: correct tests * lint: fix mypy * docs: gen API * docs: update References * docs: update nats kv and os examples * tests: fix nats os example test * docs: update nats kv and os documentation * chore: bump dependencies --------- Co-authored-by: Nikita Pastukhov Co-authored-by: Pastukhov Nikita --- docs/create_api_docs.py | 2 +- docs/docs/SUMMARY.md | 58 +- .../subscriber/factory/create_subscriber.md} | 2 +- .../subscriber/factory/create_subscriber.md | 11 + .../usecase/BatchHandler.md => KvWatch.md} | 2 +- .../usecase/DefaultHandler.md => ObjWatch.md} | 2 +- .../nats/helpers/KVBucketDeclarer.md | 11 + .../nats/helpers/OSBucketDeclarer.md | 11 + .../bucket_declarer/KVBucketDeclarer.md | 11 + .../obj_storage_declarer/OSBucketDeclarer.md} | 2 +- .../helpers/object_builder/StreamBuilder.md | 11 + .../faststream/nats/message/NatsKvMessage.md | 11 + .../faststream/nats/message/NatsObjMessage.md | 11 + .../en/api/faststream/nats/parser/KvParser.md | 11 + .../api/faststream/nats/parser/ObjParser.md | 11 + .../en/api/faststream/nats/schemas/KvWatch.md | 11 + .../api/faststream/nats/schemas/ObjWatch.md | 11 + .../nats/schemas/kv_watch/KvWatch.md | 11 + .../nats/schemas/obj_watch/ObjWatch.md | 11 + .../AsyncAPIBatchPullStreamSubscriber.md | 11 + .../AsyncAPIConcurrentCoreSubscriber.md | 11 + .../AsyncAPIConcurrentPullStreamSubscriber.md | 11 + .../AsyncAPIConcurrentPushStreamSubscriber.md | 11 + .../asyncapi/AsyncAPICoreSubscriber.md | 11 + .../AsyncAPIKeyValueWatchSubscriber.md | 11 + .../AsyncAPIObjStoreWatchSubscriber.md | 11 + .../asyncapi/AsyncAPIPullStreamSubscriber.md | 11 + .../asyncapi/AsyncAPIStreamSubscriber.md | 11 + .../subscriber/factory/create_subscriber.md | 11 + .../subscription/UnsubscribeAdapter.md | 11 + .../subscription/Unsubscriptable.md | 11 + .../nats/subscriber/subscription/Watchable.md | 11 + .../usecase/BatchPullStreamSubscriber.md | 11 + .../usecase/ConcurrentCoreSubscriber.md | 11 + .../usecase/ConcurrentPullStreamSubscriber.md | 11 + .../usecase/ConcurrentPushStreamSubscriber.md | 11 + .../nats/subscriber/usecase/CoreSubscriber.md | 11 + .../usecase/KeyValueWatchSubscriber.md | 11 + .../usecase/ObjStoreWatchSubscriber.md | 11 + .../usecase/PullStreamSubscriber.md | 11 + .../usecase/PushStreamSubscription.md | 11 + .../subscriber/factory/create_subscriber.md | 11 + .../subscriber/factory/create_subscriber.md | 11 + docs/docs/en/nats/jetstream/key-value.md | 44 +- docs/docs/en/nats/jetstream/object.md | 50 +- docs/docs/en/release.md | 2 +- docs/docs_src/nats/js/key_value.py | 31 +- docs/docs_src/nats/js/object.py | 37 +- examples/nats/e06_key_value.py | 28 +- examples/nats/e07_object_storage.py | 34 +- faststream/__about__.py | 2 +- faststream/broker/core/usecase.py | 1 + faststream/broker/subscriber/proto.py | 6 - faststream/confluent/broker/registrator.py | 4 +- faststream/confluent/subscriber/asyncapi.py | 139 --- faststream/confluent/subscriber/factory.py | 148 +++ faststream/kafka/broker/registrator.py | 4 +- faststream/kafka/subscriber/asyncapi.py | 175 +--- faststream/kafka/subscriber/factory.py | 183 ++++ faststream/nats/__init__.py | 4 +- faststream/nats/annotations.py | 27 +- faststream/nats/broker/broker.py | 160 ++- faststream/nats/broker/registrator.py | 33 +- faststream/nats/fastapi/fastapi.py | 12 +- faststream/nats/helpers.py | 35 - faststream/nats/helpers/__init__.py | 9 + 
faststream/nats/helpers/bucket_declarer.py | 57 ++ .../nats/helpers/obj_storage_declarer.py | 50 + faststream/nats/helpers/object_builder.py | 27 + faststream/nats/message.py | 21 +- faststream/nats/opentelemetry/provider.py | 9 +- faststream/nats/parser.py | 32 +- faststream/nats/publisher/asyncapi.py | 8 +- faststream/nats/router.py | 12 +- faststream/nats/schemas/__init__.py | 4 + faststream/nats/schemas/kv_watch.py | 70 ++ faststream/nats/schemas/obj_watch.py | 73 ++ faststream/nats/schemas/pull_sub.py | 27 +- faststream/nats/subscriber/asyncapi.py | 204 ++-- faststream/nats/subscriber/factory.py | 271 +++++ faststream/nats/subscriber/subscription.py | 26 + faststream/nats/subscriber/usecase.py | 923 ++++++++++++++---- faststream/nats/testing.py | 13 +- faststream/opentelemetry/middleware.py | 14 +- faststream/rabbit/broker/registrator.py | 3 +- faststream/rabbit/subscriber/asyncapi.py | 45 +- faststream/rabbit/subscriber/factory.py | 42 + faststream/redis/broker/registrator.py | 4 +- faststream/redis/schemas/proto.py | 4 - faststream/redis/subscriber/asyncapi.py | 115 +-- faststream/redis/subscriber/factory.py | 119 +++ faststream/redis/subscriber/usecase.py | 4 +- faststream/redis/testing.py | 4 +- faststream/testing/broker.py | 3 + pyproject.toml | 11 +- tests/asyncapi/{ => kafka}/test_app.py | 0 tests/asyncapi/nats/test_kv_schema.py | 14 + tests/asyncapi/nats/test_obj_schema.py | 14 + tests/brokers/nats/test_consume.py | 72 +- tests/brokers/nats/test_kv_declarer_cache.py | 23 + tests/brokers/nats/test_os_declarer_cache.py | 24 + tests/cli/rabbit/__init__.py | 3 + tests/cli/{ => rabbit}/test_app.py | 5 +- tests/cli/{utils => rabbit}/test_logs.py | 11 +- tests/cli/test_asyncapi_docs.py | 6 + tests/cli/test_publish.py | 114 +-- tests/cli/utils/test_imports.py | 7 + .../asyncapi_customization/__init__.py | 3 + .../getting_started/cli/confluent/__init__.py | 3 + .../{ => confluent}/test_confluent_context.py | 0 .../getting_started/cli/kafka/__init__.py | 3 + .../cli/{ => kafka}/test_kafka_context.py | 0 .../docs/getting_started/cli/nats/__init__.py | 3 + .../cli/{ => nats}/test_nats_context.py | 0 .../getting_started/cli/rabbit/__init__.py | 3 + .../cli/{ => rabbit}/test_rabbit_context.py | 0 .../getting_started/cli/redis/__init__.py | 3 + .../cli/{ => redis}/test_redis_context.py | 0 .../docs/getting_started/config/test_usage.py | 3 +- .../getting_started/context/test_annotated.py | 24 +- .../docs/getting_started/context/test_base.py | 22 +- .../docs/getting_started/context/test_cast.py | 22 +- .../context/test_custom_global.py | 22 +- .../context/test_custom_local.py | 22 +- .../context/test_default_arguments.py | 22 +- .../context/test_existed_context.py | 22 +- .../context/test_fields_access.py | 22 +- .../getting_started/context/test_initial.py | 14 +- .../context/test_manual_local_context.py | 22 +- .../getting_started/context/test_nested.py | 4 +- .../dependencies/basic/test_depends.py | 22 +- .../dependencies/basic/test_nested_depends.py | 22 +- .../dependencies/test_basic.py | 4 +- .../dependencies/test_class.py | 4 +- .../dependencies/test_global.py | 4 +- .../dependencies/test_global_broker.py | 4 +- .../dependencies/test_sub_dep.py | 4 +- .../dependencies/test_yield.py | 4 +- .../docs/getting_started/index/test_basic.py | 22 +- .../getting_started/lifespan/test_basic.py | 29 +- .../docs/getting_started/lifespan/test_ml.py | 22 +- .../lifespan/test_ml_context.py | 22 +- .../getting_started/lifespan/test_testing.py | 83 +- .../getting_started/publishing/test_broker.py | 22 +- 
.../publishing/test_broker_context.py | 22 +- .../publishing/test_decorator.py | 22 +- .../getting_started/publishing/test_direct.py | 78 +- .../getting_started/publishing/test_object.py | 78 +- .../docs/getting_started/routers/test_base.py | 22 +- .../getting_started/routers/test_delay.py | 22 +- .../routers/test_delay_equal.py | 22 +- .../serialization/test_parser.py | 22 +- .../subscription/test_annotated.py | 52 +- .../subscription/test_filter.py | 22 +- .../subscription/test_pydantic.py | 22 +- .../getting_started/subscription/test_real.py | 150 ++- .../subscription/test_testing.py | 158 ++- tests/docs/index/test_basic.py | 22 +- tests/docs/index/test_dependencies.py | 4 +- tests/docs/index/test_pydantic.py | 114 ++- tests/docs/integration/fastapi/test_base.py | 22 +- .../docs/integration/fastapi/test_depends.py | 22 +- .../docs/integration/fastapi/test_multiple.py | 13 + .../fastapi/test_multiple_lifespan.py | 13 + tests/docs/integration/fastapi/test_send.py | 22 +- .../docs/integration/fastapi/test_startup.py | 22 +- tests/docs/integration/fastapi/test_test.py | 62 +- tests/docs/integration/http/test_fastapi.py | 4 +- tests/docs/nats/js/test_kv.py | 2 +- tests/docs/nats/js/test_object.py | 21 +- .../examples/fastapi_integration/test_app.py | 30 +- tests/examples/kafka/__init__.py | 3 + tests/examples/nats/__init__.py | 3 + tests/examples/nats/test_e06_key_value.py | 2 +- .../examples/nats/test_e07_object_storage.py | 21 +- tests/examples/rabbit/__init__.py | 3 + tests/examples/redis/__init__.py | 3 + tests/examples/router/__init__.py | 3 + tests/examples/test_e01_basic_consume.py | 7 +- tests/examples/test_e02_1_basic_publisher.py | 7 +- tests/examples/test_e02_2_basic_publisher.py | 7 +- tests/examples/test_e02_3_basic_publisher.py | 7 +- tests/examples/test_e03_miltiple_pubsub.py | 19 +- tests/examples/test_e04_msg_filter.py | 7 +- tests/examples/test_e05_rpc_request.py | 7 +- tests/examples/test_e06_manual_ack.py | 7 +- tests/examples/test_e07_ack_immediately.py | 7 +- tests/examples/test_e08_testing.py | 12 +- tests/examples/test_e09_testing_mocks.py | 12 +- tests/examples/test_e10_middlewares.py | 7 +- tests/examples/test_e11_settings.py | 7 +- tests/marks.py | 65 ++ tests/opentelemetry/__init__.py | 2 +- tests/utils/context/test_headers.py | 5 +- tests/utils/context/test_path.py | 69 +- 195 files changed, 4409 insertions(+), 1586 deletions(-) rename docs/docs/en/api/faststream/{nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md => confluent/subscriber/factory/create_subscriber.md} (63%) create mode 100644 docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md rename docs/docs/en/api/faststream/nats/{subscriber/usecase/BatchHandler.md => KvWatch.md} (67%) rename docs/docs/en/api/faststream/nats/{subscriber/usecase/DefaultHandler.md => ObjWatch.md} (67%) create mode 100644 docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md create mode 100644 docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md create mode 100644 docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md rename docs/docs/en/api/faststream/nats/{subscriber/asyncapi/AsyncAPIDefaultSubscriber.md => helpers/obj_storage_declarer/OSBucketDeclarer.md} (63%) create mode 100644 docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md create mode 100644 docs/docs/en/api/faststream/nats/message/NatsKvMessage.md create mode 100644 docs/docs/en/api/faststream/nats/message/NatsObjMessage.md create mode 100644 
docs/docs/en/api/faststream/nats/parser/KvParser.md create mode 100644 docs/docs/en/api/faststream/nats/parser/ObjParser.md create mode 100644 docs/docs/en/api/faststream/nats/schemas/KvWatch.md create mode 100644 docs/docs/en/api/faststream/nats/schemas/ObjWatch.md create mode 100644 docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md create mode 100644 docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md create mode 100644 docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md create mode 100644 docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md create mode 100644 docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md create mode 100644 faststream/confluent/subscriber/factory.py create mode 100644 faststream/kafka/subscriber/factory.py delete mode 100644 faststream/nats/helpers.py create mode 100644 faststream/nats/helpers/__init__.py create mode 100644 faststream/nats/helpers/bucket_declarer.py create mode 100644 faststream/nats/helpers/obj_storage_declarer.py create mode 100644 faststream/nats/helpers/object_builder.py create mode 100644 faststream/nats/schemas/kv_watch.py create mode 100644 faststream/nats/schemas/obj_watch.py create mode 100644 faststream/nats/subscriber/factory.py create mode 100644 faststream/nats/subscriber/subscription.py create mode 100644 faststream/rabbit/subscriber/factory.py create mode 
100644 faststream/redis/subscriber/factory.py rename tests/asyncapi/{ => kafka}/test_app.py (100%) create mode 100644 tests/asyncapi/nats/test_kv_schema.py create mode 100644 tests/asyncapi/nats/test_obj_schema.py create mode 100644 tests/brokers/nats/test_kv_declarer_cache.py create mode 100644 tests/brokers/nats/test_os_declarer_cache.py create mode 100644 tests/cli/rabbit/__init__.py rename tests/cli/{ => rabbit}/test_app.py (97%) rename tests/cli/{utils => rabbit}/test_logs.py (83%) create mode 100644 tests/docs/getting_started/cli/confluent/__init__.py rename tests/docs/getting_started/cli/{ => confluent}/test_confluent_context.py (100%) create mode 100644 tests/docs/getting_started/cli/kafka/__init__.py rename tests/docs/getting_started/cli/{ => kafka}/test_kafka_context.py (100%) create mode 100644 tests/docs/getting_started/cli/nats/__init__.py rename tests/docs/getting_started/cli/{ => nats}/test_nats_context.py (100%) create mode 100644 tests/docs/getting_started/cli/rabbit/__init__.py rename tests/docs/getting_started/cli/{ => rabbit}/test_rabbit_context.py (100%) create mode 100644 tests/docs/getting_started/cli/redis/__init__.py rename tests/docs/getting_started/cli/{ => redis}/test_redis_context.py (100%) diff --git a/docs/create_api_docs.py b/docs/create_api_docs.py index e22a25d236..883d3ef787 100644 --- a/docs/create_api_docs.py +++ b/docs/create_api_docs.py @@ -78,7 +78,7 @@ def _import_module(name: str) -> Optional[ModuleType]: public_api_modules = [ m for m in modules - if m.__file__.replace(str(repo_path) + "/", "") in PUBLIC_API_FILES + if m and m.__file__.replace(str(repo_path) + "/", "") in PUBLIC_API_FILES ] return public_api_modules diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 61f070b7bf..e09163b08a 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -143,10 +143,12 @@ search: - [DiscardPolicy](public_api/faststream/nats/DiscardPolicy.md) - [ExternalStream](public_api/faststream/nats/ExternalStream.md) - [JStream](public_api/faststream/nats/JStream.md) + - [KvWatch](public_api/faststream/nats/KvWatch.md) - [NatsBroker](public_api/faststream/nats/NatsBroker.md) - [NatsPublisher](public_api/faststream/nats/NatsPublisher.md) - [NatsRoute](public_api/faststream/nats/NatsRoute.md) - [NatsRouter](public_api/faststream/nats/NatsRouter.md) + - [ObjWatch](public_api/faststream/nats/ObjWatch.md) - [Placement](public_api/faststream/nats/Placement.md) - [PullSub](public_api/faststream/nats/PullSub.md) - [RePublish](public_api/faststream/nats/RePublish.md) @@ -476,6 +478,8 @@ search: - [AsyncAPIBatchSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIBatchSubscriber.md) - [AsyncAPIDefaultSubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md) - [AsyncAPISubscriber](api/faststream/confluent/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/confluent/subscriber/factory/create_subscriber.md) - usecase - [BatchSubscriber](api/faststream/confluent/subscriber/usecase/BatchSubscriber.md) - [DefaultSubscriber](api/faststream/confluent/subscriber/usecase/DefaultSubscriber.md) @@ -560,6 +564,8 @@ search: - [AsyncAPIBatchSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIBatchSubscriber.md) - [AsyncAPIDefaultSubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md) - [AsyncAPISubscriber](api/faststream/kafka/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - 
[create_subscriber](api/faststream/kafka/subscriber/factory/create_subscriber.md) - usecase - [BatchSubscriber](api/faststream/kafka/subscriber/usecase/BatchSubscriber.md) - [DefaultSubscriber](api/faststream/kafka/subscriber/usecase/DefaultSubscriber.md) @@ -583,10 +589,12 @@ search: - [DiscardPolicy](api/faststream/nats/DiscardPolicy.md) - [ExternalStream](api/faststream/nats/ExternalStream.md) - [JStream](api/faststream/nats/JStream.md) + - [KvWatch](api/faststream/nats/KvWatch.md) - [NatsBroker](api/faststream/nats/NatsBroker.md) - [NatsPublisher](api/faststream/nats/NatsPublisher.md) - [NatsRoute](api/faststream/nats/NatsRoute.md) - [NatsRouter](api/faststream/nats/NatsRouter.md) + - [ObjWatch](api/faststream/nats/ObjWatch.md) - [Placement](api/faststream/nats/Placement.md) - [PullSub](api/faststream/nats/PullSub.md) - [RePublish](api/faststream/nats/RePublish.md) @@ -611,10 +619,20 @@ search: - fastapi - [NatsRouter](api/faststream/nats/fastapi/fastapi/NatsRouter.md) - helpers + - [KVBucketDeclarer](api/faststream/nats/helpers/KVBucketDeclarer.md) + - [OSBucketDeclarer](api/faststream/nats/helpers/OSBucketDeclarer.md) - [StreamBuilder](api/faststream/nats/helpers/StreamBuilder.md) + - bucket_declarer + - [KVBucketDeclarer](api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md) + - obj_storage_declarer + - [OSBucketDeclarer](api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md) + - object_builder + - [StreamBuilder](api/faststream/nats/helpers/object_builder/StreamBuilder.md) - message - [NatsBatchMessage](api/faststream/nats/message/NatsBatchMessage.md) + - [NatsKvMessage](api/faststream/nats/message/NatsKvMessage.md) - [NatsMessage](api/faststream/nats/message/NatsMessage.md) + - [NatsObjMessage](api/faststream/nats/message/NatsObjMessage.md) - opentelemetry - [NatsTelemetryMiddleware](api/faststream/nats/opentelemetry/NatsTelemetryMiddleware.md) - middleware @@ -627,8 +645,10 @@ search: - parser - [BatchParser](api/faststream/nats/parser/BatchParser.md) - [JsParser](api/faststream/nats/parser/JsParser.md) + - [KvParser](api/faststream/nats/parser/KvParser.md) - [NatsBaseParser](api/faststream/nats/parser/NatsBaseParser.md) - [NatsParser](api/faststream/nats/parser/NatsParser.md) + - [ObjParser](api/faststream/nats/parser/ObjParser.md) - publisher - asyncapi - [AsyncAPIPublisher](api/faststream/nats/publisher/asyncapi/AsyncAPIPublisher.md) @@ -643,24 +663,50 @@ search: - [NatsRouter](api/faststream/nats/router/NatsRouter.md) - schemas - [JStream](api/faststream/nats/schemas/JStream.md) + - [KvWatch](api/faststream/nats/schemas/KvWatch.md) + - [ObjWatch](api/faststream/nats/schemas/ObjWatch.md) - [PullSub](api/faststream/nats/schemas/PullSub.md) - js_stream - [JStream](api/faststream/nats/schemas/js_stream/JStream.md) - [compile_nats_wildcard](api/faststream/nats/schemas/js_stream/compile_nats_wildcard.md) - [is_subject_match_wildcard](api/faststream/nats/schemas/js_stream/is_subject_match_wildcard.md) + - kv_watch + - [KvWatch](api/faststream/nats/schemas/kv_watch/KvWatch.md) + - obj_watch + - [ObjWatch](api/faststream/nats/schemas/obj_watch/ObjWatch.md) - pull_sub - [PullSub](api/faststream/nats/schemas/pull_sub/PullSub.md) - security - [parse_security](api/faststream/nats/security/parse_security.md) - subscriber - asyncapi - - [AsyncAPIBatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md) - - [AsyncAPIDefaultSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md) + - 
[AsyncAPIBatchPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md) + - [AsyncAPIConcurrentCoreSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md) + - [AsyncAPIConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md) + - [AsyncAPIConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md) + - [AsyncAPICoreSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md) + - [AsyncAPIKeyValueWatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md) + - [AsyncAPIObjStoreWatchSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md) + - [AsyncAPIPullStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md) + - [AsyncAPIStreamSubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md) - [AsyncAPISubscriber](api/faststream/nats/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/nats/subscriber/factory/create_subscriber.md) + - subscription + - [UnsubscribeAdapter](api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md) + - [Unsubscriptable](api/faststream/nats/subscriber/subscription/Unsubscriptable.md) + - [Watchable](api/faststream/nats/subscriber/subscription/Watchable.md) - usecase - - [BatchHandler](api/faststream/nats/subscriber/usecase/BatchHandler.md) - - [DefaultHandler](api/faststream/nats/subscriber/usecase/DefaultHandler.md) + - [BatchPullStreamSubscriber](api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md) + - [ConcurrentCoreSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md) + - [ConcurrentPullStreamSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md) + - [ConcurrentPushStreamSubscriber](api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md) + - [CoreSubscriber](api/faststream/nats/subscriber/usecase/CoreSubscriber.md) + - [KeyValueWatchSubscriber](api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md) - [LogicSubscriber](api/faststream/nats/subscriber/usecase/LogicSubscriber.md) + - [ObjStoreWatchSubscriber](api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md) + - [PullStreamSubscriber](api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md) + - [PushStreamSubscription](api/faststream/nats/subscriber/usecase/PushStreamSubscription.md) - testing - [FakeProducer](api/faststream/nats/testing/FakeProducer.md) - [PatchedMessage](api/faststream/nats/testing/PatchedMessage.md) @@ -743,6 +789,8 @@ search: - subscriber - asyncapi - [AsyncAPISubscriber](api/faststream/rabbit/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/rabbit/subscriber/factory/create_subscriber.md) - usecase - [LogicSubscriber](api/faststream/rabbit/subscriber/usecase/LogicSubscriber.md) - testing @@ -848,6 +896,8 @@ search: - [AsyncAPIStreamBatchSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamBatchSubscriber.md) - [AsyncAPIStreamSubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPIStreamSubscriber.md) - [AsyncAPISubscriber](api/faststream/redis/subscriber/asyncapi/AsyncAPISubscriber.md) + - factory + - [create_subscriber](api/faststream/redis/subscriber/factory/create_subscriber.md) - usecase - 
[BatchListSubscriber](api/faststream/redis/subscriber/usecase/BatchListSubscriber.md) - [BatchStreamSubscriber](api/faststream/redis/subscriber/usecase/BatchStreamSubscriber.md) diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md b/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md similarity index 63% rename from docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md rename to docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md index 5702a67c39..ce811a99d9 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchSubscriber.md +++ b/docs/docs/en/api/faststream/confluent/subscriber/factory/create_subscriber.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.asyncapi.AsyncAPIBatchSubscriber +::: faststream.confluent.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..d9e5fcb4a4 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchHandler.md b/docs/docs/en/api/faststream/nats/KvWatch.md similarity index 67% rename from docs/docs/en/api/faststream/nats/subscriber/usecase/BatchHandler.md rename to docs/docs/en/api/faststream/nats/KvWatch.md index ae5d6858db..1527be51fd 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchHandler.md +++ b/docs/docs/en/api/faststream/nats/KvWatch.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.usecase.BatchHandler +::: faststream.nats.KvWatch diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/DefaultHandler.md b/docs/docs/en/api/faststream/nats/ObjWatch.md similarity index 67% rename from docs/docs/en/api/faststream/nats/subscriber/usecase/DefaultHandler.md rename to docs/docs/en/api/faststream/nats/ObjWatch.md index ebe246e5f6..50102ecf31 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/usecase/DefaultHandler.md +++ b/docs/docs/en/api/faststream/nats/ObjWatch.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.usecase.DefaultHandler +::: faststream.nats.ObjWatch diff --git a/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md new file mode 100644 index 0000000000..b24feaada6 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/KVBucketDeclarer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.KVBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md b/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md new file mode 100644 index 0000000000..3ee16a3f24 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/OSBucketDeclarer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.OSBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md 
b/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md new file mode 100644 index 0000000000..fe0eaec17f --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/bucket_declarer/KVBucketDeclarer.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.bucket_declarer.KVBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md b/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md similarity index 63% rename from docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md rename to docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md index 92b674793d..b7663051c8 100644 --- a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIDefaultSubscriber.md +++ b/docs/docs/en/api/faststream/nats/helpers/obj_storage_declarer/OSBucketDeclarer.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.nats.subscriber.asyncapi.AsyncAPIDefaultSubscriber +::: faststream.nats.helpers.obj_storage_declarer.OSBucketDeclarer diff --git a/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md b/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md new file mode 100644 index 0000000000..024daf2d14 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/helpers/object_builder/StreamBuilder.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.helpers.object_builder.StreamBuilder diff --git a/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md b/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md new file mode 100644 index 0000000000..5ac6ed9f41 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/message/NatsKvMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.message.NatsKvMessage diff --git a/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md b/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md new file mode 100644 index 0000000000..3671628da4 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/message/NatsObjMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.message.NatsObjMessage diff --git a/docs/docs/en/api/faststream/nats/parser/KvParser.md b/docs/docs/en/api/faststream/nats/parser/KvParser.md new file mode 100644 index 0000000000..acba65e133 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/parser/KvParser.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.parser.KvParser diff --git a/docs/docs/en/api/faststream/nats/parser/ObjParser.md b/docs/docs/en/api/faststream/nats/parser/ObjParser.md new file mode 100644 index 0000000000..50ff5d0e18 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/parser/ObjParser.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.parser.ObjParser diff --git a/docs/docs/en/api/faststream/nats/schemas/KvWatch.md b/docs/docs/en/api/faststream/nats/schemas/KvWatch.md new file mode 100644 index 
0000000000..ce99738043 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/KvWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.KvWatch diff --git a/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md b/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md new file mode 100644 index 0000000000..51c3628e5e --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/ObjWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.ObjWatch diff --git a/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md b/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md new file mode 100644 index 0000000000..ce07fa305d --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/kv_watch/KvWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.kv_watch.KvWatch diff --git a/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md b/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md new file mode 100644 index 0000000000..55831b8a6a --- /dev/null +++ b/docs/docs/en/api/faststream/nats/schemas/obj_watch/ObjWatch.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.schemas.obj_watch.ObjWatch diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md new file mode 100644 index 0000000000..15bceeedbc --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIBatchPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIBatchPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md new file mode 100644 index 0000000000..f88e14f817 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentCoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentCoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md new file mode 100644 index 0000000000..b5ebf86f93 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md new file mode 100644 index 
0000000000..7bb4a6e088 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIConcurrentPushStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIConcurrentPushStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md new file mode 100644 index 0000000000..8819adebab --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPICoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPICoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md new file mode 100644 index 0000000000..b006854b0b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIKeyValueWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIKeyValueWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md new file mode 100644 index 0000000000..0a9157ed55 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIObjStoreWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIObjStoreWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md new file mode 100644 index 0000000000..e9650bef94 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md new file mode 100644 index 0000000000..6d448d3af5 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/asyncapi/AsyncAPIStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.asyncapi.AsyncAPIStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..0e132c3394 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: 
faststream.nats.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md new file mode 100644 index 0000000000..455885671f --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/subscription/UnsubscribeAdapter.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.subscription.UnsubscribeAdapter diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md new file mode 100644 index 0000000000..c94cb1b731 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/subscription/Unsubscriptable.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.subscription.Unsubscriptable diff --git a/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md b/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md new file mode 100644 index 0000000000..67638258ea --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/subscription/Watchable.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.subscription.Watchable diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md new file mode 100644 index 0000000000..dfb1c43575 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/BatchPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.BatchPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md new file mode 100644 index 0000000000..e1f100c043 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentCoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ConcurrentCoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md new file mode 100644 index 0000000000..c1b7207285 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ConcurrentPullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md new file mode 100644 index 0000000000..ffa2e0c37b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ConcurrentPushStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API 
+# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ConcurrentPushStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md new file mode 100644 index 0000000000..8ddb0b8c04 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/CoreSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.CoreSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md new file mode 100644 index 0000000000..778557ee2b --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/KeyValueWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.KeyValueWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md new file mode 100644 index 0000000000..ad15f32931 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/ObjStoreWatchSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.ObjStoreWatchSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md new file mode 100644 index 0000000000..30f30a893f --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/PullStreamSubscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.PullStreamSubscriber diff --git a/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md b/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md new file mode 100644 index 0000000000..bb29bbb9c2 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/subscriber/usecase/PushStreamSubscription.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.subscriber.usecase.PushStreamSubscription diff --git a/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..79c7082931 --- /dev/null +++ b/docs/docs/en/api/faststream/rabbit/subscriber/factory/create_subscriber.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.subscriber.factory.create_subscriber diff --git a/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md b/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md new file mode 100644 index 0000000000..d5cf7eadc8 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/subscriber/factory/create_subscriber.md @@ -0,0 
+1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.subscriber.factory.create_subscriber diff --git a/docs/docs/en/nats/jetstream/key-value.md b/docs/docs/en/nats/jetstream/key-value.md index a5bbc0f1ef..2ca8d70add 100644 --- a/docs/docs/en/nats/jetstream/key-value.md +++ b/docs/docs/en/nats/jetstream/key-value.md @@ -24,42 +24,34 @@ This interface provides you with rich abilities to use it like a regular *KV* st ## FastStream Details -**FastStream** has no native interfaces to this *NatsJS* functionality (yet), but it allows you to get access into the inner `JetStream` object to create it manually. +**FastStream** has some useful methods to help you interact with the **NATS Key-Value** feature. -First of all, you need to create a *Key-Value* storage object and pass it into the context: +First of all, you need to create a *Key-Value* storage object and put some value into it: -```python linenums="1" hl_lines="12-13" -{! docs_src/nats/js/key_value.py [ln:5-8,11-13,22-27] !} +```python linenums="1" hl_lines="9-10" +{! docs_src/nats/js/key_value.py [ln:1-5,12-16] !} ``` !!! tip - We placed this code in `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook. - - Also, we call `#!python await broker.connect()` method manually to establish the connection to be able to create a storage. + `#!python broker.key_value(bucket="bucket")` is an idempotent method. It means that it stores all already created storages in memory and does not make a new request to **NATS** if you call it for the same bucket again. --- -Next, we are ready to use this object right in our handlers. - -Let's create an annotated object to shorten context object access: - -```python linenums="1" hl_lines="4" -{! docs_src/nats/js/key_value.py [ln:1-3,9] !} -``` - -And just use it in a handler: +Then we are able to use the returned `key_value` object as a regular NATS one. But if you want to watch for changes to some key in the bucket, **FastStream** allows you to do it via the regular `@broker.subscriber` interface: -```python linenums="1" hl_lines="4 6-8" -{! docs_src/nats/js/key_value.py [ln:4,14-19] !} +```python linenums="1" hl_lines="1" +{! docs_src/nats/js/key_value.py [ln:8-10] !} ``` -Finally, let's test our code behavior by putting something into the KV storage and sending a message: +Also, if you need more detailed settings for your **Key-Value Storage**, we have the `KvWatch` object for it: -```python linenums="1" hl_lines="3-4" -{! docs_src/nats/js/key_value.py [ln:30-33] !} -``` +```python linenums="1" hl_lines="5" +from faststream.nats import NatsBroker, KvWatch -??? example "Full listing" - ```python linenums="1" - {!> docs_src/nats/js/key_value.py !} - ``` +@broker.subscriber( + "key", + kv_watch=KvWatch("bucket", declare=False), +) +async def handler(msg: str): + ... +``` \ No newline at end of file
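Putting the new pieces together, a minimal sketch of the Key-Value watch flow introduced by this patch could look like the snippet below; the bucket and key names are illustrative, not taken from the repository:

```python
from faststream import FastStream, Logger
from faststream.nats import NatsBroker

broker = NatsBroker()
app = FastStream(broker)


# Watch the "key" entry of the "bucket" Key-Value storage.
@broker.subscriber("key", kv_watch="bucket")
async def on_key_change(msg: str, logger: Logger):
    logger.info(msg)


@app.after_startup
async def put_value():
    # broker.key_value() is idempotent: the bucket object is cached and reused.
    key_value = await broker.key_value(bucket="bucket")
    await key_value.put("key", b"Hello!")
```

For finer control, `kv_watch=KvWatch("bucket", declare=False)` can be passed instead of the plain bucket name, as the documentation above shows.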
+``` \ No newline at end of file diff --git a/docs/docs/en/nats/jetstream/object.md b/docs/docs/en/nats/jetstream/object.md index 2e1501f53a..33aa1055df 100644 --- a/docs/docs/en/nats/jetstream/object.md +++ b/docs/docs/en/nats/jetstream/object.md @@ -22,45 +22,47 @@ The main difference between *KV* and *Object* storages is that in the *Object* s ## FastStream Details -**FastStream** has no native interfaces to this *NatsJS* functionality (yet), but it allows you to access the inner `JetStream` object to create in manually. +**FastStream** has some useful methods to help you with **Object Storage NATS** feature interacting. -First of all, you need to create an *Object* storage object and pass in to the context: +First of all, you need to create a *Object Storage* object and put some value to it: -```python linenums="1" hl_lines="12-13" -{! docs_src/nats/js/object.py [ln:7-10,13-15,24-29] !} +```python linenums="1" hl_lines="11-12" +{! docs_src/nats/js/object.py [ln:1-2,3,5,7-10,23-26] !} ``` !!! tip - We placed this code in the `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook. + * [`BytesIO`](https://docs.python.org/3/library/io.html#binary-i-o){.external-link target="_blank"} - is a *Readable* object used to emulate a file opened for reading. - Also, we call `#!python await broker.connect()` method manually to establish the connection to be able to create a storage. + * `#!python broker.object_storage(bucket="example-bucket")` is an idempotent method. It means that it stores all already created storages in memory and do not make new request to **NATS** if your are trying to call it for the same bucket. --- -Next, we are ready to use this object right in the our handlers. +Then we are able to use returned `object_storage` object as a regular NATS one. But, if you want to watch by any new files in the bucket, **FastStream** allows you to make it via regular `@broker.subscriber` interface: -Let's create an Annotated object to shorten `Context` object access: - -```python linenums="1" hl_lines="4" -{! docs_src/nats/js/object.py [ln:3-5,11] !} +```python linenums="1" hl_lines="1" +@broker.subscriber("example-bucket", obj_watch=True) +async def handler(filename: str): + assert filename == "file.txt" ``` -And just use it in a handler: +**NATS** deliveres you just a filename (and some more metainformation you can get access via `message.raw_message`) because files can be any size. The framework should protect your service from memory overflow, so we can't upload whole file content right to the memo. By you can make it manually the following way: -```python linenums="1" hl_lines="6 8-9" -{! docs_src/nats/js/object.py [ln:1-2,6,16-21] !} +```python linenums="1" hl_lines="1 6 10-11" +{! docs_src/nats/js/object.py [ln:6-7,12-20] !} ``` -Finally, let's test our code behavior by putting something into the *Object storage* and sending a message: +!!! note + `faststream.nats.annotations.ObjectStorage` is a your current bucket, so you need no to put it to context manually. -```python linenums="1" hl_lines="3-4" -{! docs_src/nats/js/object.py [ln:32-35] !} -``` +Also, if you want more detail settings for you **Object Storage**, we have `ObjWatch` object for it: -!!! 
tip - [`BytesIO`](https://docs.python.org/3/library/io.html#binary-i-o){.external-link target="_blank"} - is a *Readable* object used to emulate a file opened for reading. +```python linenums="1" hl_lines="5" +from faststream.nats import NatsBroker, ObjWatch -??? example "Full listing" - ```python linenums="1" - {!> docs_src/nats/js/object.py !} - ``` +@broker.subscriber( + "example-bucket", + obj_watch=ObjWatch(declare=False), +) +async def handler(filename: str): + ... +``` \ No newline at end of file diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index ce6828b2fa..8fac6fd678 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -39,7 +39,7 @@ broker = NatsBroker( app = FastStream(broker) ``` -To find detailt information just visit our documentation aboout [telemetry](https://faststream.airt.ai/latest/getting-started/opentelemetry/) +To find detailed information just visit our documentation about [telemetry](https://faststream.airt.ai/latest/getting-started/opentelemetry/) P.S. The release includes basic OpenTelemetry support - messages tracing & basic metrics. Baggage support and correct spans linking in batch processing case will be added soon. diff --git a/docs/docs_src/nats/js/key_value.py b/docs/docs_src/nats/js/key_value.py index 333272eadf..8127109cf3 100644 --- a/docs/docs_src/nats/js/key_value.py +++ b/docs/docs_src/nats/js/key_value.py @@ -1,33 +1,16 @@ -from nats.js.kv import KeyValue as KV -from typing_extensions import Annotated - -from faststream import Logger -from faststream import Context, FastStream, Logger +from faststream import FastStream from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -KeyValue = Annotated[KV, Context("kv")] broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, kv: KeyValue, logger: Logger): - logger.info(msg) - kv_data = await kv.get("key") - assert kv_data.value == b"Hello!" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - kv = await broker.stream.create_key_value(bucket="bucket") - context.set_global("kv", kv) +@broker.subscriber("key", kv_watch="bucket") +async def handler(msg: str): + assert msg == "Hello!"
@app.after_startup -async def test_send(kv: KeyValue): - await kv.put("key", b"Hello!") - await broker.publish("Hi!", "subject") +async def setup_broker(): + key_value = await broker.key_value(bucket="bucket") + await key_value.put("key", b"Hello!") diff --git a/docs/docs_src/nats/js/object.py b/docs/docs_src/nats/js/object.py index d072aac7fb..916d068dab 100644 --- a/docs/docs_src/nats/js/object.py +++ b/docs/docs_src/nats/js/object.py @@ -1,35 +1,26 @@ from io import BytesIO -from nats.js.object_store import ObjectStore as OS -from typing_extensions import Annotated - +from faststream import FastStream from faststream import Logger -from faststream import Context, FastStream from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -ObjectStorage = Annotated[OS, Context("OS")] +from faststream.nats.annotations import ObjectStorage broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, os: ObjectStorage, logger: Logger): - logger.info(msg) - obj = await os.get("file") - assert obj.data == b"File mock" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - os = await broker.stream.create_object_store("bucket") - context.set_global("OS", os) +@broker.subscriber("example-bucket", obj_watch=True) +async def handler( + filename: str, + storage: ObjectStorage, + logger: Logger, +): + assert filename == "file.txt" + file = await storage.get(filename) + logger.info(file.data) @app.after_startup -async def test_send(os: ObjectStorage): - await os.put("file", BytesIO(b"File mock")) - await broker.publish("Hi!", "subject") +async def test_send(): + object_storage = await broker.object_storage("example-bucket") + await object_storage.put("file.txt", BytesIO(b"File mock")) diff --git a/examples/nats/e06_key_value.py b/examples/nats/e06_key_value.py index 60df373cc4..23d7a0d734 100644 --- a/examples/nats/e06_key_value.py +++ b/examples/nats/e06_key_value.py @@ -1,32 +1,16 @@ -from nats.js.kv import KeyValue as KV -from typing_extensions import Annotated - -from faststream import Context, FastStream, Logger +from faststream import FastStream from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -KeyValue = Annotated[KV, Context("kv")] broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, kv: KeyValue, logger: Logger): - logger.info(msg) - kv_data = await kv.get("key") - assert kv_data.value == b"Hello!" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - kv = await broker.stream.create_key_value(bucket="bucket") - context.set_global("kv", kv) +@broker.subscriber("key", kv_watch="bucket") +async def handler(msg: str): + assert msg == "Hello!" 
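+
+# Here `kv_watch="bucket"` makes the handler watch the "key" entry of the
+# "bucket" Key-Value storage, so it receives the value put in the hook below.
+# Note: Key-Value storage is a JetStream feature, so this example assumes a
+# NATS server running with JetStream enabled.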
@app.after_startup -async def test_send(kv: KeyValue): +async def setup_broker(): + kv = await broker.key_value(bucket="bucket") await kv.put("key", b"Hello!") - await broker.publish("Hi!", "subject") diff --git a/examples/nats/e07_object_storage.py b/examples/nats/e07_object_storage.py index 079409d52c..55e801928e 100644 --- a/examples/nats/e07_object_storage.py +++ b/examples/nats/e07_object_storage.py @@ -1,35 +1,21 @@ from io import BytesIO -from nats.js.object_store import ObjectStore as OS -from typing_extensions import Annotated - -from faststream import Context, FastStream, Logger +from faststream import FastStream, Logger from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -ObjectStorage = Annotated[OS, Context("OS")] +from faststream.nats.annotations import ObjectStorage broker = NatsBroker() app = FastStream(broker) -@broker.subscriber("subject") -async def handler(msg: str, os: ObjectStorage, logger: Logger): - logger.info(msg) - obj = await os.get("file") - assert obj.data == b"File mock" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - os = await broker.stream.create_object_store("bucket") - context.set_global("OS", os) +@broker.subscriber("example-bucket", obj_watch=True) +async def handler(filename: str, storage: ObjectStorage, logger: Logger): + assert filename == "file.txt" + file = await storage.get(filename) + logger.info(file.data) @app.after_startup -async def test_send(os: ObjectStorage): - await os.put("file", BytesIO(b"File mock")) - - await broker.publish("Hi!", "subject") +async def test_send(): + os = await broker.object_storage("example-bucket") + await os.put("file.txt", BytesIO(b"File mock")) diff --git a/faststream/__about__.py b/faststream/__about__.py index 6a9efa082f..7a4bf92d8e 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.7" +__version__ = "0.5.8" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/broker/core/usecase.py b/faststream/broker/core/usecase.py index 439230ad8a..c226850ace 100644 --- a/faststream/broker/core/usecase.py +++ b/faststream/broker/core/usecase.py @@ -172,6 +172,7 @@ def __init__( self._connection = None self._producer = None + # TODO: remove useless middleware filter if not is_test_env(): self._middlewares = ( CriticalLogMiddleware(self.logger, log_level), diff --git a/faststream/broker/subscriber/proto.py b/faststream/broker/subscriber/proto.py index 545c5fc169..47bd42b44d 100644 --- a/faststream/broker/subscriber/proto.py +++ b/faststream/broker/subscriber/proto.py @@ -38,12 +38,6 @@ class SubscriberProto( @abstractmethod def add_middleware(self, middleware: "BrokerMiddleware[MsgType]") -> None: ... - @staticmethod - @abstractmethod - def create() -> "SubscriberProto[MsgType]": - """Abstract factory to create a real Subscriber.""" - ... 
- @abstractmethod def get_log_context( self, diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py index 6306d10bd9..6d71a21046 100644 --- a/faststream/confluent/broker/registrator.py +++ b/faststream/confluent/broker/registrator.py @@ -18,7 +18,7 @@ from faststream.broker.core.abc import ABCBroker from faststream.broker.utils import default_filter from faststream.confluent.publisher.asyncapi import AsyncAPIPublisher -from faststream.confluent.subscriber.asyncapi import AsyncAPISubscriber +from faststream.confluent.subscriber.factory import create_subscriber from faststream.exceptions import SetupError if TYPE_CHECKING: @@ -1234,7 +1234,7 @@ def subscriber( raise SetupError("You should install `group_id` with manual commit mode") subscriber = super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( *topics, batch=batch, batch_timeout_ms=batch_timeout_ms, diff --git a/faststream/confluent/subscriber/asyncapi.py b/faststream/confluent/subscriber/asyncapi.py index d31bfa05f2..7ec3ffb965 100644 --- a/faststream/confluent/subscriber/asyncapi.py +++ b/faststream/confluent/subscriber/asyncapi.py @@ -1,16 +1,9 @@ from typing import ( TYPE_CHECKING, Dict, - Iterable, - Literal, - Optional, Tuple, - Union, - overload, ) -from typing_extensions import override - from faststream.asyncapi.schema import ( Channel, ChannelBinding, @@ -29,10 +22,6 @@ if TYPE_CHECKING: from confluent_kafka import Message as ConfluentMsg - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.types import AnyDict class AsyncAPISubscriber(LogicSubscriber[MsgType]): @@ -67,134 +56,6 @@ def get_schema(self) -> Dict[str, Channel]: return channels - @overload # type: ignore[override] - @staticmethod - def create( - *topics: str, - batch: Literal[True], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIBatchSubscriber": ... - - @overload - @staticmethod - def create( - *topics: str, - batch: Literal[False], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIDefaultSubscriber": ... - - @overload - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: ... 
- - @override - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - connection_data: "AnyDict", - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: - if batch: - return AsyncAPIBatchSubscriber( - *topics, - batch_timeout_ms=batch_timeout_ms, - max_records=max_records, - group_id=group_id, - connection_data=connection_data, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIDefaultSubscriber( - *topics, - group_id=group_id, - connection_data=connection_data, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - class AsyncAPIDefaultSubscriber( DefaultSubscriber, diff --git a/faststream/confluent/subscriber/factory.py b/faststream/confluent/subscriber/factory.py new file mode 100644 index 0000000000..b7b6b6ca61 --- /dev/null +++ b/faststream/confluent/subscriber/factory.py @@ -0,0 +1,148 @@ +from typing import ( + TYPE_CHECKING, + Iterable, + Literal, + Optional, + Tuple, + Union, + overload, +) + +from faststream.confluent.subscriber.asyncapi import ( + AsyncAPIBatchSubscriber, + AsyncAPIDefaultSubscriber, +) + +if TYPE_CHECKING: + from confluent_kafka import Message as ConfluentMsg + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.types import AnyDict + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[True], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIBatchSubscriber": ... + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[False], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIDefaultSubscriber": ... 
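+
+# The `Literal[True]` / `Literal[False]` overloads let type checkers narrow
+# the returned subscriber class from the value of the `batch` flag.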
+ + +@overload +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: ... + + +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + connection_data: "AnyDict", + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConfluentMsg, Tuple[ConfluentMsg, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: + if batch: + return AsyncAPIBatchSubscriber( + *topics, + batch_timeout_ms=batch_timeout_ms, + max_records=max_records, + group_id=group_id, + connection_data=connection_data, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + else: + return AsyncAPIDefaultSubscriber( + *topics, + group_id=group_id, + connection_data=connection_data, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/kafka/broker/registrator.py b/faststream/kafka/broker/registrator.py index afc69a459c..0633032c06 100644 --- a/faststream/kafka/broker/registrator.py +++ b/faststream/kafka/broker/registrator.py @@ -20,7 +20,7 @@ from faststream.broker.core.abc import ABCBroker from faststream.broker.utils import default_filter from faststream.kafka.publisher.asyncapi import AsyncAPIPublisher -from faststream.kafka.subscriber.asyncapi import AsyncAPISubscriber +from faststream.kafka.subscriber.factory import create_subscriber if TYPE_CHECKING: from aiokafka import ConsumerRecord, TopicPartition @@ -1394,7 +1394,7 @@ def subscriber( "AsyncAPIBatchSubscriber", ]: subscriber = super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( *topics, batch=batch, batch_timeout_ms=batch_timeout_ms, diff --git a/faststream/kafka/subscriber/asyncapi.py b/faststream/kafka/subscriber/asyncapi.py index ec31001633..9adb8dad3c 100644 --- a/faststream/kafka/subscriber/asyncapi.py +++ b/faststream/kafka/subscriber/asyncapi.py @@ -1,16 +1,9 @@ from typing import ( TYPE_CHECKING, Dict, - Iterable, - Literal, - Optional, Tuple, - Union, - overload, ) -from typing_extensions import override - from faststream.asyncapi.schema import ( Channel, ChannelBinding, @@ -21,7 +14,6 @@ from faststream.asyncapi.schema.bindings import kafka from faststream.asyncapi.utils import resolve_payloads from faststream.broker.types import MsgType -from faststream.exceptions import SetupError from faststream.kafka.subscriber.usecase import ( BatchSubscriber, DefaultSubscriber, @@ -29,12 +21,7 @@ ) if TYPE_CHECKING: - 
from aiokafka import ConsumerRecord, TopicPartition - from aiokafka.abc import ConsumerRebalanceListener - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.types import AnyDict + from aiokafka import ConsumerRecord class AsyncAPISubscriber(LogicSubscriber[MsgType]): @@ -69,166 +56,6 @@ def get_schema(self) -> Dict[str, Channel]: return channels - @overload # type: ignore[override] - @staticmethod - def create( - *topics: str, - batch: Literal[True], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIBatchSubscriber": ... - - @overload - @staticmethod - def create( - *topics: str, - batch: Literal[False], - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPIDefaultSubscriber": ... - - @overload - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: ... - - @override - @staticmethod - def create( - *topics: str, - batch: bool, - batch_timeout_ms: int, - max_records: Optional[int], - # Kafka information - group_id: Optional[str], - listener: Optional["ConsumerRebalanceListener"], - pattern: Optional[str], - connection_args: "AnyDict", - partitions: Iterable["TopicPartition"], - is_manual: bool, - # Subscriber args - no_ack: bool, - retry: bool, - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable[ - "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" - ], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: - if is_manual and not group_id: - raise SetupError("You should install `group_id` with manual commit mode") - - if not topics and not partitions and not pattern: - raise SetupError( - "You should provide either `topics` or `partitions` or `pattern`." 
- ) - elif topics and partitions: - raise SetupError("You can't provide both `topics` and `partitions`.") - elif topics and pattern: - raise SetupError("You can't provide both `topics` and `pattern`.") - elif pattern and partitions: - raise SetupError("You can't provide both `pattern` and `partitions`.") - - if batch: - return AsyncAPIBatchSubscriber( - *topics, - batch_timeout_ms=batch_timeout_ms, - max_records=max_records, - group_id=group_id, - listener=listener, - pattern=pattern, - connection_args=connection_args, - partitions=partitions, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIDefaultSubscriber( - *topics, - group_id=group_id, - listener=listener, - pattern=pattern, - connection_args=connection_args, - partitions=partitions, - is_manual=is_manual, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - class AsyncAPIDefaultSubscriber( DefaultSubscriber, diff --git a/faststream/kafka/subscriber/factory.py b/faststream/kafka/subscriber/factory.py new file mode 100644 index 0000000000..fb5de4bf1a --- /dev/null +++ b/faststream/kafka/subscriber/factory.py @@ -0,0 +1,183 @@ +from typing import ( + TYPE_CHECKING, + Iterable, + Literal, + Optional, + Tuple, + Union, + overload, +) + +from faststream.exceptions import SetupError +from faststream.kafka.subscriber.asyncapi import ( + AsyncAPIBatchSubscriber, + AsyncAPIDefaultSubscriber, +) + +if TYPE_CHECKING: + from aiokafka import ConsumerRecord, TopicPartition + from aiokafka.abc import ConsumerRebalanceListener + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.types import AnyDict + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[True], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIBatchSubscriber": ... + + +@overload +def create_subscriber( + *topics: str, + batch: Literal[False], + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> "AsyncAPIDefaultSubscriber": ... 
+ + +@overload +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: ... + + +def create_subscriber( + *topics: str, + batch: bool, + batch_timeout_ms: int, + max_records: Optional[int], + # Kafka information + group_id: Optional[str], + listener: Optional["ConsumerRebalanceListener"], + pattern: Optional[str], + connection_args: "AnyDict", + partitions: Iterable["TopicPartition"], + is_manual: bool, + # Subscriber args + no_ack: bool, + retry: bool, + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable[ + "BrokerMiddleware[Union[ConsumerRecord, Tuple[ConsumerRecord, ...]]]" + ], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPIDefaultSubscriber", + "AsyncAPIBatchSubscriber", +]: + if is_manual and not group_id: + raise SetupError("You must use `group_id` with manual commit mode.") + + if not topics and not partitions and not pattern: + raise SetupError( + "You should provide either `topics` or `partitions` or `pattern`." + ) + elif topics and partitions: + raise SetupError("You can't provide both `topics` and `partitions`.") + elif topics and pattern: + raise SetupError("You can't provide both `topics` and `pattern`.") + elif partitions and pattern: + raise SetupError("You can't provide both `partitions` and `pattern`.") + + if batch: + return AsyncAPIBatchSubscriber( + *topics, + batch_timeout_ms=batch_timeout_ms, + max_records=max_records, + group_id=group_id, + listener=listener, + pattern=pattern, + connection_args=connection_args, + partitions=partitions, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIDefaultSubscriber( + *topics, + group_id=group_id, + listener=listener, + pattern=pattern, + connection_args=connection_args, + partitions=partitions, + is_manual=is_manual, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/nats/__init__.py b/faststream/nats/__init__.py index 2c41d1bb04..72ba1a2876 100644 --- a/faststream/nats/__init__.py +++ b/faststream/nats/__init__.py @@ -16,7 +16,7 @@ from faststream.nats.annotations import NatsMessage from faststream.nats.broker.broker import NatsBroker from faststream.nats.router import NatsPublisher, NatsRoute, NatsRouter -from faststream.nats.schemas import JStream, PullSub +from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.nats.testing import TestNatsBroker from faststream.testing.app import TestApp @@ -25,6 +25,8 @@ "NatsBroker", "JStream", "PullSub", + "KvWatch", + "ObjWatch", 
"NatsRoute", "NatsRouter", "NatsPublisher", diff --git a/faststream/nats/annotations.py b/faststream/nats/annotations.py index 11a4bd5fb5..dabbcaa39a 100644 --- a/faststream/nats/annotations.py +++ b/faststream/nats/annotations.py @@ -1,11 +1,14 @@ -from nats.aio.client import Client as NatsClient -from nats.js.client import JetStreamContext +from nats.aio.client import Client as _NatsClient +from nats.js.client import JetStreamContext as _JetStream +from nats.js.object_store import ObjectStore as _ObjectStore from typing_extensions import Annotated from faststream.annotations import ContextRepo, Logger, NoCast -from faststream.nats.broker import NatsBroker as NB -from faststream.nats.message import NatsMessage as NM -from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer +from faststream.nats.broker import NatsBroker as _Broker +from faststream.nats.message import NatsMessage as _Message +from faststream.nats.publisher.producer import NatsFastProducer as _CoreProducer +from faststream.nats.publisher.producer import NatsJSFastProducer as _JsProducer +from faststream.nats.subscriber.usecase import OBJECT_STORAGE_CONTEXT_KEY from faststream.utils.context import Context __all__ = ( @@ -16,11 +19,13 @@ "NatsBroker", "Client", "JsClient", + "ObjectStorage", ) -NatsMessage = Annotated[NM, Context("message")] -NatsBroker = Annotated[NB, Context("broker")] -Client = Annotated[NatsClient, Context("broker._connection")] -JsClient = Annotated[JetStreamContext, Context("broker._stream")] -NatsProducer = Annotated[NatsFastProducer, Context("broker._producer")] -NatsJsProducer = Annotated[NatsJSFastProducer, Context("broker._js_producer")] +ObjectStorage = Annotated[_ObjectStore, Context(OBJECT_STORAGE_CONTEXT_KEY)] +NatsMessage = Annotated[_Message, Context("message")] +NatsBroker = Annotated[_Broker, Context("broker")] +Client = Annotated[_NatsClient, Context("broker._connection")] +JsClient = Annotated[_JetStream, Context("broker._stream")] +NatsProducer = Annotated[_CoreProducer, Context("broker._producer")] +NatsJsProducer = Annotated[_JsProducer, Context("broker._js_producer")] diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py index a2e196a535..e6806172c3 100644 --- a/faststream/nats/broker/broker.py +++ b/faststream/nats/broker/broker.py @@ -33,6 +33,7 @@ from faststream.broker.message import gen_cor_id from faststream.nats.broker.logging import NatsLoggingBroker from faststream.nats.broker.registrator import NatsRegistrator +from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer from faststream.nats.publisher.producer import NatsFastProducer, NatsJSFastProducer from faststream.nats.security import parse_security from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber @@ -51,7 +52,10 @@ SignatureCallback, ) from nats.aio.msg import Msg + from nats.js.api import Placement, RePublish, StorageType from nats.js.client import JetStreamContext + from nats.js.kv import KeyValue + from nats.js.object_store import ObjectStore from typing_extensions import TypedDict, Unpack from faststream.asyncapi import schema as asyncapi @@ -218,6 +222,8 @@ class NatsBroker( _producer: Optional["NatsFastProducer"] _js_producer: Optional["NatsJSFastProducer"] + _kv_declarer: Optional["KVBucketDeclarer"] + _os_declarer: Optional["OSBucketDeclarer"] def __init__( self, @@ -541,6 +547,8 @@ def __init__( # JS options self.stream = None self._js_producer = None + self._kv_declarer = None + self._os_declarer = None @override async def 
connect( # type: ignore[override] @@ -583,6 +591,9 @@ async def _connect(self, **kwargs: Any) -> "Client": parser=self._parser, ) + self._kv_declarer = KVBucketDeclarer(stream) + self._os_declarer = OSBucketDeclarer(stream) + return connection async def _close( @@ -609,44 +620,50 @@ async def start(self) -> None: assert self.stream, "Broker should be started already" # nosec B101 assert self._producer, "Broker should be started already" # nosec B101 - # TODO: filter by already running handlers after TestClient refactor - for handler in self._subscribers.values(): - stream = handler.stream + for stream in filter( + lambda x: x.declare, + self._stream_builder.objects.values(), + ): + try: + await self.stream.add_stream( + config=stream.config, + subjects=stream.subjects, + ) - log_context = handler.get_log_context(None) + except BadRequestError as e: # noqa: PERF203 + log_context = AsyncAPISubscriber.build_log_context( + message=None, + subject="", + queue="", + stream=stream.name, + ) + + if ( + e.description + == "stream name already in use with a different configuration" + ): + old_config = (await self.stream.stream_info(stream.name)).config - if stream is not None and stream.declare: - try: # pragma: no branch - await self.stream.add_stream( + self._log(str(e), logging.WARNING, log_context) + await self.stream.update_stream( config=stream.config, - subjects=stream.subjects, + subjects=tuple( + set(old_config.subjects or ()).union(stream.subjects) + ), ) - except BadRequestError as e: - if ( - e.description - == "stream name already in use with a different configuration" - ): - old_config = (await self.stream.stream_info(stream.name)).config - - self._log(str(e), logging.WARNING, log_context) - await self.stream.update_stream( - config=stream.config, - subjects=tuple( - set(old_config.subjects or ()).union(stream.subjects) - ), - ) + else: # pragma: no cover + self._log(str(e), logging.ERROR, log_context, exc_info=e) - else: # pragma: no cover - self._log(str(e), logging.ERROR, log_context, exc_info=e) - - finally: - # prevent from double declaration - stream.declare = False + finally: + # prevent from double declaration + stream.declare = False + # TODO: filter by already running handlers after TestClient refactor + for handler in self._subscribers.values(): self._log( f"`{handler.call_name}` waiting for messages", - extra=log_context, + extra=handler.get_log_context(None), ) await handler.start() @@ -750,11 +767,30 @@ def setup_subscriber( # type: ignore[override] self, subscriber: "AsyncAPISubscriber", ) -> None: - connection: Union["Client", "JetStreamContext", None] = None + connection: Union[ + "Client", + "JetStreamContext", + KVBucketDeclarer, + OSBucketDeclarer, + None, + ] = None + + if getattr(subscriber, "kv_watch", None): + connection = self._kv_declarer + + elif getattr(subscriber, "obj_watch", None): + connection = self._os_declarer + + elif getattr(subscriber, "stream", None): + connection = self.stream - connection = self._connection if subscriber.stream is None else self.stream + else: + connection = self._connection - return super().setup_subscriber(subscriber, connection=connection) + return super().setup_subscriber( + subscriber, + connection=connection, + ) @override def setup_publisher( # type: ignore[override] @@ -772,6 +808,66 @@ def setup_publisher( # type: ignore[override] super().setup_publisher(publisher, producer=producer) + async def key_value( + self, + bucket: str, + *, + description: Optional[str] = None, + max_value_size: Optional[int] = None, + history: 
int = 1, + ttl: Optional[float] = None, # in seconds + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + republish: Optional["RePublish"] = None, + direct: Optional[bool] = None, + # custom + declare: bool = True, + ) -> "KeyValue": + assert self._kv_declarer, "Broker should be connected already." # nosec B101 + + return await self._kv_declarer.create_key_value( + bucket=bucket, + description=description, + max_value_size=max_value_size, + history=history, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + republish=republish, + direct=direct, + declare=declare, + ) + + async def object_storage( + self, + bucket: str, + *, + description: Optional[str] = None, + ttl: Optional[float] = None, + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + # custom + declare: bool = True, + ) -> "ObjectStore": + assert self._os_declarer, "Broker should be connected already." # nosec B101 + + return await self._os_declarer.create_object_store( + bucket=bucket, + description=description, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + declare=declare, + ) + def _log_connection_broken( self, error_cb: Optional["ErrorCallback"] = None, diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py index c0670e12c1..9fe73a3386 100644 --- a/faststream/nats/broker/registrator.py +++ b/faststream/nats/broker/registrator.py @@ -1,12 +1,17 @@ from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union, cast +from fast_depends.dependencies import Depends from nats.js import api from typing_extensions import Annotated, Doc, deprecated, override from faststream.broker.core.abc import ABCBroker +from faststream.broker.types import CustomCallable from faststream.broker.utils import default_filter +from faststream.nats.helpers import StreamBuilder from faststream.nats.publisher.asyncapi import AsyncAPIPublisher +from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber +from faststream.nats.subscriber.factory import create_subscriber if TYPE_CHECKING: from fast_depends.dependencies import Depends @@ -19,7 +24,6 @@ SubscriberMiddleware, ) from faststream.nats.message import NatsBatchMessage, NatsMessage - from faststream.nats.schemas import JStream, PullSub class NatsRegistrator(ABCBroker["Msg"]): @@ -28,6 +32,11 @@ class NatsRegistrator(ABCBroker["Msg"]): _subscribers: Dict[int, "AsyncAPISubscriber"] _publishers: Dict[int, "AsyncAPIPublisher"] + def __init__(self, **kwargs: Any) -> None: + self._stream_builder = StreamBuilder() + + super().__init__(**kwargs) + @override def subscriber( # type: ignore[override] self, @@ -102,12 +111,20 @@ def subscriber( # type: ignore[override] ] = None, # pull arguments pull_sub: Annotated[ - Optional["PullSub"], + Union[bool, "PullSub"], Doc( "NATS Pull consumer parameters container. " "Should be used with `stream` only." 
), + ] = False, + kv_watch: Annotated[ + Union[str, "KvWatch", None], + Doc("KeyValue watch parameters container."), ] = None, + obj_watch: Annotated[ + Union[bool, "ObjWatch"], + Doc("ObjecStore watch parameters container."), + ] = False, inbox_prefix: Annotated[ bytes, Doc( @@ -187,14 +204,19 @@ def subscriber( # type: ignore[override] You can use it as a handler decorator `@broker.subscriber(...)`. """ + if stream := self._stream_builder.create(stream): + stream.add_subject(subject) + subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( # type: ignore[arg-type] + create_subscriber( subject=subject, queue=queue, stream=stream, - pull_sub=pull_sub, + pull_sub=PullSub.validate(pull_sub), + kv_watch=KvWatch.validate(kv_watch), + obj_watch=ObjWatch.validate(obj_watch), max_workers=max_workers, # extra args pending_msgs_limit=pending_msgs_limit, @@ -295,6 +317,9 @@ def publisher( # type: ignore[override] Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`. """ + if stream := self._stream_builder.create(stream): + stream.add_subject(subject) + publisher = cast( AsyncAPIPublisher, super().publisher( diff --git a/faststream/nats/fastapi/fastapi.py b/faststream/nats/fastapi/fastapi.py index 4da5f51889..7ea3a2a5df 100644 --- a/faststream/nats/fastapi/fastapi.py +++ b/faststream/nats/fastapi/fastapi.py @@ -66,7 +66,7 @@ SubscriberMiddleware, ) from faststream.nats.message import NatsBatchMessage, NatsMessage - from faststream.nats.schemas import JStream, PullSub + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.security import BaseSecurity from faststream.types import AnyDict, LoggerProto @@ -651,6 +651,14 @@ def subscriber( # type: ignore[override] "Should be used with `stream` only." ), ] = None, + kv_watch: Annotated[ + Union[str, "KvWatch", None], + Doc("KeyValue watch parameters container."), + ] = None, + obj_watch: Annotated[ + Union[bool, "ObjWatch"], + Doc("ObjecStore watch parameters container."), + ] = False, inbox_prefix: Annotated[ bytes, Doc( @@ -866,6 +874,8 @@ def subscriber( # type: ignore[override] deliver_policy=deliver_policy, headers_only=headers_only, pull_sub=pull_sub, + kv_watch=kv_watch, + obj_watch=obj_watch, inbox_prefix=inbox_prefix, ack_first=ack_first, stream=stream, diff --git a/faststream/nats/helpers.py b/faststream/nats/helpers.py deleted file mode 100644 index c77dd16c5c..0000000000 --- a/faststream/nats/helpers.py +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Dict, Optional, Union - -from faststream.nats.schemas.js_stream import JStream - - -class StreamBuilder: - """A class to build streams.""" - - streams: Dict[str, JStream] - - def __init__(self) -> None: - """Initialize the stream builder.""" - self.streams = {} - - def stream( - self, - name: Union[str, JStream, None], - ) -> Optional[JStream]: - """Get a stream. - - Args: - *args: The arguments. - name: The stream name. - declare: Whether to declare the stream. - **kwargs: The keyword arguments. 
- """ - stream = JStream.validate(name) - - if stream is not None: - stream = self.streams[stream.name] = self.streams.get(stream.name, stream) - - return stream - - -stream_builder = StreamBuilder() diff --git a/faststream/nats/helpers/__init__.py b/faststream/nats/helpers/__init__.py new file mode 100644 index 0000000000..28b3479a7b --- /dev/null +++ b/faststream/nats/helpers/__init__.py @@ -0,0 +1,9 @@ +from faststream.nats.helpers.bucket_declarer import KVBucketDeclarer +from faststream.nats.helpers.obj_storage_declarer import OSBucketDeclarer +from faststream.nats.helpers.object_builder import StreamBuilder + +__all__ = ( + "KVBucketDeclarer", + "StreamBuilder", + "OSBucketDeclarer", +) diff --git a/faststream/nats/helpers/bucket_declarer.py b/faststream/nats/helpers/bucket_declarer.py new file mode 100644 index 0000000000..916b706254 --- /dev/null +++ b/faststream/nats/helpers/bucket_declarer.py @@ -0,0 +1,57 @@ +from typing import TYPE_CHECKING, Dict, Optional + +from nats.js.api import KeyValueConfig + +if TYPE_CHECKING: + from nats.js import JetStreamContext + from nats.js.api import Placement, RePublish, StorageType + from nats.js.kv import KeyValue + + +class KVBucketDeclarer: + buckets: Dict[str, "KeyValue"] + + def __init__(self, connection: "JetStreamContext") -> None: + self._connection = connection + self.buckets = {} + + async def create_key_value( + self, + bucket: str, + *, + description: Optional[str] = None, + max_value_size: Optional[int] = None, + history: int = 1, + ttl: Optional[float] = None, # in seconds + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + republish: Optional["RePublish"] = None, + direct: Optional[bool] = None, + # custom + declare: bool = True, + ) -> "KeyValue": + if (key_value := self.buckets.get(bucket)) is None: + if declare: + key_value = await self._connection.create_key_value( + config=KeyValueConfig( + bucket=bucket, + description=description, + max_value_size=max_value_size, + history=history, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + republish=republish, + direct=direct, + ) + ) + else: + key_value = await self._connection.key_value(bucket) + + self.buckets[bucket] = key_value + + return key_value diff --git a/faststream/nats/helpers/obj_storage_declarer.py b/faststream/nats/helpers/obj_storage_declarer.py new file mode 100644 index 0000000000..1d2ae50715 --- /dev/null +++ b/faststream/nats/helpers/obj_storage_declarer.py @@ -0,0 +1,50 @@ +from typing import TYPE_CHECKING, Dict, Optional + +from nats.js.api import ObjectStoreConfig + +if TYPE_CHECKING: + from nats.js import JetStreamContext + from nats.js.api import Placement, StorageType + from nats.js.object_store import ObjectStore + + +class OSBucketDeclarer: + buckets: Dict[str, "ObjectStore"] + + def __init__(self, connection: "JetStreamContext") -> None: + self._connection = connection + self.buckets = {} + + async def create_object_store( + self, + bucket: str, + *, + description: Optional[str] = None, + ttl: Optional[float] = None, + max_bytes: Optional[int] = None, + storage: Optional["StorageType"] = None, + replicas: int = 1, + placement: Optional["Placement"] = None, + # custom + declare: bool = True, + ) -> "ObjectStore": + if (object_store := self.buckets.get(bucket)) is None: + if declare: + object_store = await self._connection.create_object_store( + bucket=bucket, + config=ObjectStoreConfig( + bucket=bucket, + 
description=description, + ttl=ttl, + max_bytes=max_bytes, + storage=storage, + replicas=replicas, + placement=placement, + ), + ) + else: + object_store = await self._connection.object_store(bucket) + + self.buckets[bucket] = object_store + + return object_store diff --git a/faststream/nats/helpers/object_builder.py b/faststream/nats/helpers/object_builder.py new file mode 100644 index 0000000000..5d40a44da6 --- /dev/null +++ b/faststream/nats/helpers/object_builder.py @@ -0,0 +1,27 @@ +from typing import Dict, Optional, Union + +from faststream.nats.schemas import JStream + + +class StreamBuilder: + """A class to build streams.""" + + __slots__ = ("objects",) + + objects: Dict[str, "JStream"] + + def __init__(self) -> None: + """Initialize the builder.""" + self.objects = {} + + def create( + self, + name: Union[str, "JStream", None], + ) -> Optional["JStream"]: + """Get an object.""" + stream = JStream.validate(name) + + if stream is not None: + stream = self.objects[stream.name] = self.objects.get(stream.name, stream) + + return stream diff --git a/faststream/nats/message.py b/faststream/nats/message.py index 821cebbb71..ee54ef2caa 100644 --- a/faststream/nats/message.py +++ b/faststream/nats/message.py @@ -1,12 +1,13 @@ -from typing import TYPE_CHECKING, List, Union +from typing import List, Union -from faststream.broker.message import StreamMessage +from nats.aio.msg import Msg +from nats.js.api import ObjectInfo +from nats.js.kv import KeyValue -if TYPE_CHECKING: - from nats.aio.msg import Msg +from faststream.broker.message import StreamMessage -class NatsMessage(StreamMessage["Msg"]): +class NatsMessage(StreamMessage[Msg]): """A class to represent a NATS message.""" async def ack(self) -> None: @@ -34,7 +35,7 @@ async def in_progress(self) -> None: await self.raw_message.in_progress() -class NatsBatchMessage(StreamMessage[List["Msg"]]): +class NatsBatchMessage(StreamMessage[List[Msg]]): """A class to represent a NATS batch message.""" async def ack(self) -> None: @@ -73,3 +74,11 @@ async def in_progress(self) -> None: self.raw_message, ): await m.in_progress() + + +class NatsKvMessage(StreamMessage[KeyValue.Entry]): + pass + + +class NatsObjMessage(StreamMessage[ObjectInfo]): + pass diff --git a/faststream/nats/opentelemetry/provider.py b/faststream/nats/opentelemetry/provider.py index 7aefafed2c..7c33a7d76b 100644 --- a/faststream/nats/opentelemetry/provider.py +++ b/faststream/nats/opentelemetry/provider.py @@ -1,5 +1,6 @@ from typing import TYPE_CHECKING, List, Optional, Sequence, Union, overload +from nats.aio.msg import Msg from opentelemetry.semconv.trace import SpanAttributes from faststream.__about__ import SERVICE_NAME @@ -8,8 +9,6 @@ from faststream.opentelemetry.consts import MESSAGING_DESTINATION_PUBLISH_NAME if TYPE_CHECKING: - from nats.aio.msg import Msg - from faststream.broker.message import StreamMessage from faststream.types import AnyDict @@ -107,8 +106,12 @@ def telemetry_attributes_provider_factory( ) -> Union[ NatsTelemetrySettingsProvider, NatsBatchTelemetrySettingsProvider, + None, ]: if isinstance(msg, Sequence): return NatsBatchTelemetrySettingsProvider() - else: + elif isinstance(msg, Msg) or msg is None: return NatsTelemetrySettingsProvider() + else: + # KeyValue and Object Storage watch cases + return None diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py index 940ae70426..ef18834eca 100644 --- a/faststream/nats/parser.py +++ b/faststream/nats/parser.py @@ -1,11 +1,18 @@ -from typing import TYPE_CHECKING, Dict, List, Optional +from 
typing import TYPE_CHECKING, Any, Dict, List, Optional from faststream.broker.message import StreamMessage, decode_message, gen_cor_id -from faststream.nats.message import NatsBatchMessage, NatsMessage +from faststream.nats.message import ( + NatsBatchMessage, + NatsKvMessage, + NatsMessage, + NatsObjMessage, +) from faststream.nats.schemas.js_stream import compile_nats_wildcard if TYPE_CHECKING: from nats.aio.msg import Msg + from nats.js.api import ObjectInfo + from nats.js.kv import KeyValue from faststream.types import AnyDict, DecodedMessage @@ -36,7 +43,7 @@ def get_path( @staticmethod async def decode_message( - msg: "StreamMessage[Msg]", + msg: "StreamMessage[Any]", ) -> "DecodedMessage": return decode_message(msg) @@ -139,3 +146,22 @@ async def decode_batch( data.append(decode_message(one_msg)) return data + + +class KvParser(NatsBaseParser): + async def parse_message( + self, msg: "KeyValue.Entry" + ) -> StreamMessage["KeyValue.Entry"]: + return NatsKvMessage( + raw_message=msg, + body=msg.value, + path=self.get_path(msg.key) or {}, + ) + + +class ObjParser(NatsBaseParser): + async def parse_message(self, msg: "ObjectInfo") -> StreamMessage["ObjectInfo"]: + return NatsObjMessage( + raw_message=msg, + body=msg.name, + ) diff --git a/faststream/nats/publisher/asyncapi.py b/faststream/nats/publisher/asyncapi.py index 094e1eebb4..1546b675f8 100644 --- a/faststream/nats/publisher/asyncapi.py +++ b/faststream/nats/publisher/asyncapi.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional from typing_extensions import override @@ -11,7 +11,6 @@ ) from faststream.asyncapi.schema.bindings import nats from faststream.asyncapi.utils import resolve_payloads -from faststream.nats.helpers import stream_builder from faststream.nats.publisher.usecase import LogicPublisher if TYPE_CHECKING: @@ -58,7 +57,7 @@ def create( # type: ignore[override] subject: str, reply_to: str, headers: Optional[Dict[str, str]], - stream: Union[str, "JStream", None], + stream: Optional["JStream"], timeout: Optional[float], # Publisher args broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -69,9 +68,6 @@ def create( # type: ignore[override] description_: Optional[str], include_in_schema: bool, ) -> "AsyncAPIPublisher": - if stream := stream_builder.stream(stream): - stream.add_subject(subject) - return cls( subject=subject, reply_to=reply_to, diff --git a/faststream/nats/router.py b/faststream/nats/router.py index 65f07947a9..74215d3e78 100644 --- a/faststream/nats/router.py +++ b/faststream/nats/router.py @@ -28,7 +28,7 @@ SubscriberMiddleware, ) from faststream.nats.message import NatsBatchMessage, NatsMessage - from faststream.nats.schemas import JStream, PullSub + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.types import SendableMessage @@ -205,6 +205,14 @@ def __init__( "Should be used with `stream` only." 
), ] = None, + kv_watch: Annotated[ + Union[str, "KvWatch", None], + Doc("KeyValue watch parameters container."), + ] = None, + obj_watch: Annotated[ + Union[bool, "ObjWatch"], + Doc("ObjecStore watch parameters container."), + ] = False, inbox_prefix: Annotated[ bytes, Doc( @@ -295,6 +303,8 @@ def __init__( deliver_policy=deliver_policy, headers_only=headers_only, pull_sub=pull_sub, + kv_watch=kv_watch, + obj_watch=obj_watch, inbox_prefix=inbox_prefix, ack_first=ack_first, stream=stream, diff --git a/faststream/nats/schemas/__init__.py b/faststream/nats/schemas/__init__.py index 24ca18db99..ca9f56f48d 100644 --- a/faststream/nats/schemas/__init__.py +++ b/faststream/nats/schemas/__init__.py @@ -1,7 +1,11 @@ from faststream.nats.schemas.js_stream import JStream +from faststream.nats.schemas.kv_watch import KvWatch +from faststream.nats.schemas.obj_watch import ObjWatch from faststream.nats.schemas.pull_sub import PullSub __all__ = ( "JStream", "PullSub", + "KvWatch", + "ObjWatch", ) diff --git a/faststream/nats/schemas/kv_watch.py b/faststream/nats/schemas/kv_watch.py new file mode 100644 index 0000000000..a1f50fce96 --- /dev/null +++ b/faststream/nats/schemas/kv_watch.py @@ -0,0 +1,70 @@ +from typing import Optional + +from typing_extensions import Annotated, Doc + +from faststream.broker.schemas import NameRequired + + +class KvWatch(NameRequired): + """A class to represent a NATS kv watch subscription.""" + + __slots__ = ( + "bucket", + "headers_only", + "include_history", + "ignore_deletes", + "meta_only", + "inactive_threshold", + "timeout", + "declare", + ) + + def __init__( + self, + bucket: Annotated[ + str, + Doc("Bucket name."), + ], + headers_only: Annotated[ + bool, + Doc("Whether to receive only headers."), + ] = False, + include_history: Annotated[ + bool, + Doc("Whether to include history."), + ] = False, + ignore_deletes: Annotated[ + bool, + Doc("Whether to ignore deletes."), + ] = False, + meta_only: Annotated[ + bool, + Doc("Whether to receive only metadata."), + ] = False, + inactive_threshold: Annotated[ + Optional[float], + Doc("Inactivity threshold."), + ] = None, + timeout: Annotated[ + Optional[float], + Doc("Timeout in seconds."), + ] = 5.0, + # custom + declare: Annotated[ + bool, + Doc("Whether to create bucket automatically or just connect to it."), + ] = True, + ) -> None: + super().__init__(bucket) + + self.headers_only = headers_only + self.include_history = include_history + self.ignore_deletes = ignore_deletes + self.meta_only = meta_only + self.inactive_threshold = inactive_threshold + self.timeout = timeout + + self.declare = declare + + def __hash__(self) -> int: + return hash(self.name) diff --git a/faststream/nats/schemas/obj_watch.py b/faststream/nats/schemas/obj_watch.py new file mode 100644 index 0000000000..998bef9482 --- /dev/null +++ b/faststream/nats/schemas/obj_watch.py @@ -0,0 +1,73 @@ +from typing import Literal, Optional, Union, overload + +from typing_extensions import Annotated, Doc + + +class ObjWatch: + """A class to represent a NATS object storage watch subscription.""" + + __slots__ = ( + "ignore_deletes", + "include_history", + "meta_only", + "timeout", + "declare", + ) + + def __init__( + self, + ignore_deletes: Annotated[ + bool, + Doc("Ignore delete events."), + ] = False, + include_history: Annotated[ + bool, + Doc("Include history."), + ] = False, + meta_only: Annotated[ + bool, + Doc("Only metadata."), + ] = False, + timeout: Annotated[ + float, + Doc("The timeout for the watch."), + ] = 5.0, + # custom + declare: Annotated[ + 
bool, + Doc( + "Whether to create object storage automatically or just connect to it." + ), + ] = True, + ) -> None: + self.ignore_deletes = ignore_deletes + self.include_history = include_history + self.meta_only = meta_only + self.timeout = timeout + + self.declare = declare + + @overload + @classmethod + def validate(cls, value: Literal[True]) -> "ObjWatch": ... + + @overload + @classmethod + def validate(cls, value: Literal[False]) -> None: ... + + @overload + @classmethod + def validate(cls, value: "ObjWatch") -> "ObjWatch": ... + + @overload + @classmethod + def validate(cls, value: Union[bool, "ObjWatch"]) -> Optional["ObjWatch"]: ... + + @classmethod + def validate(cls, value: Union[bool, "ObjWatch"]) -> Optional["ObjWatch"]: + if value is True: + return ObjWatch() + elif value is False: + return None + else: + return value diff --git a/faststream/nats/schemas/pull_sub.py b/faststream/nats/schemas/pull_sub.py index fbe12b0a8a..7544d17b74 100644 --- a/faststream/nats/schemas/pull_sub.py +++ b/faststream/nats/schemas/pull_sub.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Literal, Optional, Union, overload from typing_extensions import Annotated, Doc @@ -34,3 +34,28 @@ def __init__( self.batch_size = batch_size self.batch = batch self.timeout = timeout + + @overload + @classmethod + def validate(cls, value: Literal[True]) -> "PullSub": ... + + @overload + @classmethod + def validate(cls, value: Literal[False]) -> None: ... + + @overload + @classmethod + def validate(cls, value: "PullSub") -> "PullSub": ... + + @overload + @classmethod + def validate(cls, value: Union[bool, "PullSub"]) -> Optional["PullSub"]: ... + + @classmethod + def validate(cls, value: Union[bool, "PullSub"]) -> Optional["PullSub"]: + if value is True: + return PullSub() + elif value is False: + return None + else: + return value diff --git a/faststream/nats/subscriber/asyncapi.py b/faststream/nats/subscriber/asyncapi.py index 7e5a6d4312..ad0edb0bca 100644 --- a/faststream/nats/subscriber/asyncapi.py +++ b/faststream/nats/subscriber/asyncapi.py @@ -1,13 +1,5 @@ -from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union +from typing import Any, Dict -from nats.aio.subscription import ( - DEFAULT_SUB_PENDING_BYTES_LIMIT, - DEFAULT_SUB_PENDING_MSGS_LIMIT, -) -from nats.js.client import ( - DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, - DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, -) from typing_extensions import override from faststream.asyncapi.schema import ( @@ -19,22 +11,19 @@ ) from faststream.asyncapi.schema.bindings import nats from faststream.asyncapi.utils import resolve_payloads -from faststream.exceptions import SetupError -from faststream.nats.helpers import stream_builder from faststream.nats.subscriber.usecase import ( - BatchHandler, - DefaultHandler, + BatchPullStreamSubscriber, + ConcurrentCoreSubscriber, + ConcurrentPullStreamSubscriber, + ConcurrentPushStreamSubscriber, + CoreSubscriber, + KeyValueWatchSubscriber, LogicSubscriber, + ObjStoreWatchSubscriber, + PullStreamSubscriber, + PushStreamSubscription, ) -if TYPE_CHECKING: - from fast_depends.dependencies import Depends - from nats.js import api - - from faststream.broker.types import BrokerMiddleware - from faststream.nats.schemas import JStream, PullSub - from faststream.types import AnyDict - class AsyncAPISubscriber(LogicSubscriber[Any]): """A class to represent a NATS handler.""" @@ -60,137 +49,64 @@ def get_schema(self) -> Dict[str, Channel]: bindings=ChannelBinding( nats=nats.ChannelBinding( subject=self.subject, - 
queue=self.queue or None, + queue=getattr(self, "queue", "") or None, ) ), ) } + +class AsyncAPICoreSubscriber(AsyncAPISubscriber, CoreSubscriber): + """One-message core consumer with AsyncAPI methods.""" + + +class AsyncAPIConcurrentCoreSubscriber(AsyncAPISubscriber, ConcurrentCoreSubscriber): + """One-message core concurrent consumer with AsyncAPI methods.""" + + +class AsyncAPIStreamSubscriber(AsyncAPISubscriber, PushStreamSubscription): + """One-message JS Push consumer with AsyncAPI methods.""" + + +class AsyncAPIConcurrentPushStreamSubscriber( + AsyncAPISubscriber, ConcurrentPushStreamSubscriber +): + """One-message JS Push concurrent consumer with AsyncAPI methods.""" + + +class AsyncAPIPullStreamSubscriber(AsyncAPISubscriber, PullStreamSubscriber): + """One-message JS Pull consumer with AsyncAPI methods.""" + + +class AsyncAPIConcurrentPullStreamSubscriber( + AsyncAPISubscriber, ConcurrentPullStreamSubscriber +): + """One-message JS Pull concurrent consumer with AsyncAPI methods.""" + + +class AsyncAPIBatchPullStreamSubscriber(AsyncAPISubscriber, BatchPullStreamSubscriber): + """Batch-message Pull consumer with AsyncAPI methods.""" + + +class AsyncAPIKeyValueWatchSubscriber(AsyncAPISubscriber, KeyValueWatchSubscriber): + """KeyValueWatch consumer with AsyncAPI methods.""" + @override - @staticmethod - def create( # type: ignore[override] - *, - subject: str, - queue: str, - pending_msgs_limit: Optional[int], - pending_bytes_limit: Optional[int], - # Core args - max_msgs: int, - # JS args - durable: Optional[str], - config: Optional["api.ConsumerConfig"], - ordered_consumer: bool, - idle_heartbeat: Optional[float], - flow_control: bool, - deliver_policy: Optional["api.DeliverPolicy"], - headers_only: Optional[bool], - # pull args - pull_sub: Optional["PullSub"], - inbox_prefix: bytes, - # custom args - ack_first: bool, - max_workers: int, - stream: Union[str, "JStream", None], - # Subscriber args - no_ack: bool, - retry: Union[bool, int], - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[Any]"], - # AsyncAPI information - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> Union[ - "AsyncAPIDefaultSubscriber", - "AsyncAPIBatchSubscriber", - ]: - if stream := stream_builder.stream(stream): - stream.add_subject(subject) - - if pull_sub is not None and stream is None: - raise SetupError("Pull subscriber can be used only with a stream") - - if stream: - # TODO: pull & queue warning - # TODO: push & durable warning - - extra_options: AnyDict = { - "pending_msgs_limit": pending_msgs_limit - or DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, - "pending_bytes_limit": pending_bytes_limit - or DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, - "durable": durable, - "stream": stream.name, - "config": config, - } - - if pull_sub is not None: - extra_options.update({"inbox_prefix": inbox_prefix}) - - else: - extra_options.update( - { - "ordered_consumer": ordered_consumer, - "idle_heartbeat": idle_heartbeat, - "flow_control": flow_control, - "deliver_policy": deliver_policy, - "headers_only": headers_only, - "manual_ack": not ack_first, - } - ) - - else: - extra_options = { - "pending_msgs_limit": pending_msgs_limit - or DEFAULT_SUB_PENDING_MSGS_LIMIT, - "pending_bytes_limit": pending_bytes_limit - or DEFAULT_SUB_PENDING_BYTES_LIMIT, - "max_msgs": max_msgs, - } - - if getattr(pull_sub, "batch", False): - return AsyncAPIBatchSubscriber( - extra_options=extra_options, - # basic args - pull_sub=pull_sub, - subject=subject, - queue=queue, - 
stream=stream, - # Subscriber args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI information - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) + def get_name(self) -> str: + return "" - else: - return AsyncAPIDefaultSubscriber( - max_workers=max_workers, - extra_options=extra_options, - # basic args - pull_sub=pull_sub, - subject=subject, - queue=queue, - stream=stream, - # Subscriber args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI information - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) + @override + def get_schema(self) -> Dict[str, Channel]: + return {} -class AsyncAPIDefaultSubscriber(DefaultHandler, AsyncAPISubscriber): - """One-message consumer with AsyncAPI methods.""" +class AsyncAPIObjStoreWatchSubscriber(AsyncAPISubscriber, ObjStoreWatchSubscriber): + """ObjStoreWatch consumer with AsyncAPI methods.""" + @override + def get_name(self) -> str: + return "" -class AsyncAPIBatchSubscriber(BatchHandler, AsyncAPISubscriber): - """Batch-message consumer with AsyncAPI methods.""" + @override + def get_schema(self) -> Dict[str, Channel]: + return {} diff --git a/faststream/nats/subscriber/factory.py b/faststream/nats/subscriber/factory.py new file mode 100644 index 0000000000..590598a2dd --- /dev/null +++ b/faststream/nats/subscriber/factory.py @@ -0,0 +1,271 @@ +from typing import TYPE_CHECKING, Any, Iterable, Optional, Union + +from nats.aio.subscription import ( + DEFAULT_SUB_PENDING_BYTES_LIMIT, + DEFAULT_SUB_PENDING_MSGS_LIMIT, +) +from nats.js.client import ( + DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, + DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, +) + +from faststream.exceptions import SetupError +from faststream.nats.subscriber.asyncapi import ( + AsyncAPIBatchPullStreamSubscriber, + AsyncAPIConcurrentCoreSubscriber, + AsyncAPIConcurrentPullStreamSubscriber, + AsyncAPIConcurrentPushStreamSubscriber, + AsyncAPICoreSubscriber, + AsyncAPIKeyValueWatchSubscriber, + AsyncAPIObjStoreWatchSubscriber, + AsyncAPIPullStreamSubscriber, + AsyncAPIStreamSubscriber, +) + +if TYPE_CHECKING: + from fast_depends.dependencies import Depends + from nats.js import api + + from faststream.broker.types import BrokerMiddleware + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub + from faststream.types import AnyDict + + +def create_subscriber( + *, + subject: str, + queue: str, + pending_msgs_limit: Optional[int], + pending_bytes_limit: Optional[int], + # Core args + max_msgs: int, + # JS args + durable: Optional[str], + config: Optional["api.ConsumerConfig"], + ordered_consumer: bool, + idle_heartbeat: Optional[float], + flow_control: bool, + deliver_policy: Optional["api.DeliverPolicy"], + headers_only: Optional[bool], + # pull args + pull_sub: Optional["PullSub"], + kv_watch: Optional["KvWatch"], + obj_watch: Optional["ObjWatch"], + inbox_prefix: bytes, + # custom args + ack_first: bool, + max_workers: int, + stream: Optional["JStream"], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[Any]"], + # AsyncAPI information + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> Union[ + "AsyncAPICoreSubscriber", + "AsyncAPIConcurrentCoreSubscriber", + "AsyncAPIStreamSubscriber", + 
"AsyncAPIConcurrentPushStreamSubscriber", + "AsyncAPIPullStreamSubscriber", + "AsyncAPIConcurrentPullStreamSubscriber", + "AsyncAPIBatchPullStreamSubscriber", + "AsyncAPIKeyValueWatchSubscriber", + "AsyncAPIObjStoreWatchSubscriber", +]: + if pull_sub is not None and stream is None: + raise SetupError("Pull subscriber can be used only with a stream") + + if stream: + # TODO: pull & queue warning + # TODO: push & durable warning + + extra_options: AnyDict = { + "pending_msgs_limit": pending_msgs_limit + or DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, + "pending_bytes_limit": pending_bytes_limit + or DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, + "durable": durable, + "stream": stream.name, + "config": config, + } + + if pull_sub is not None: + extra_options.update({"inbox_prefix": inbox_prefix}) + + else: + extra_options.update( + { + "ordered_consumer": ordered_consumer, + "idle_heartbeat": idle_heartbeat, + "flow_control": flow_control, + "deliver_policy": deliver_policy, + "headers_only": headers_only, + "manual_ack": not ack_first, + } + ) + + else: + extra_options = { + "pending_msgs_limit": pending_msgs_limit or DEFAULT_SUB_PENDING_MSGS_LIMIT, + "pending_bytes_limit": pending_bytes_limit + or DEFAULT_SUB_PENDING_BYTES_LIMIT, + "max_msgs": max_msgs, + } + + if obj_watch is not None: + return AsyncAPIObjStoreWatchSubscriber( + subject=subject, + obj_watch=obj_watch, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + if kv_watch is not None: + return AsyncAPIKeyValueWatchSubscriber( + subject=subject, + kv_watch=kv_watch, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + elif stream is None: + if max_workers > 1: + return AsyncAPIConcurrentCoreSubscriber( + max_workers=max_workers, + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPICoreSubscriber( + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + if max_workers > 1: + if pull_sub is not None: + return AsyncAPIConcurrentPullStreamSubscriber( + max_workers=max_workers, + pull_sub=pull_sub, + stream=stream, + subject=subject, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIConcurrentPushStreamSubscriber( + max_workers=max_workers, + stream=stream, + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + 
include_in_schema=include_in_schema, + ) + + else: + if pull_sub is not None: + if pull_sub.batch: + return AsyncAPIBatchPullStreamSubscriber( + pull_sub=pull_sub, + stream=stream, + subject=subject, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIPullStreamSubscriber( + pull_sub=pull_sub, + stream=stream, + subject=subject, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + return AsyncAPIStreamSubscriber( + stream=stream, + subject=subject, + queue=queue, + # basic args + extra_options=extra_options, + # Subscriber args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI information + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/nats/subscriber/subscription.py b/faststream/nats/subscriber/subscription.py new file mode 100644 index 0000000000..4bc994842b --- /dev/null +++ b/faststream/nats/subscriber/subscription.py @@ -0,0 +1,26 @@ +from typing import Any, Generic, Optional, Protocol, TypeVar + + +class Unsubscriptable(Protocol): + async def unsubscribe(self) -> None: ... + + +class Watchable(Protocol): + async def stop(self) -> None: ... + + async def updates(self, timeout: float) -> Optional[Any]: ... 
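+
+# NATS KeyValue/ObjectStore watchers have no `unsubscribe()` method; they
+# expose `stop()` and `updates()` instead (the `Watchable` protocol above).
+# The adapter below wraps such a watcher so that `LogicSubscriber.close()`
+# can treat every kind of subscription uniformly as an `Unsubscriptable`.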
+ + +WatchableT = TypeVar("WatchableT", bound=Watchable) + + +class UnsubscribeAdapter(Unsubscriptable, Generic[WatchableT]): + __slots__ = ("obj",) + + obj: WatchableT + + def __init__(self, subscription: WatchableT) -> None: + self.obj = subscription + + async def unsubscribe(self) -> None: + await self.obj.stop() diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index 55a8340a7d..f668ac387e 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -1,11 +1,13 @@ import asyncio from abc import abstractmethod from contextlib import suppress +from functools import cached_property from typing import ( TYPE_CHECKING, Any, Awaitable, Callable, + Coroutine, Dict, Iterable, List, @@ -18,16 +20,29 @@ import anyio from fast_depends.dependencies import Depends from nats.errors import ConnectionClosedError, TimeoutError +from nats.js.api import ObjectInfo +from nats.js.kv import KeyValue from typing_extensions import Annotated, Doc, override from faststream.broker.message import StreamMessage from faststream.broker.publisher.fake import FakePublisher from faststream.broker.subscriber.usecase import SubscriberUsecase from faststream.broker.types import CustomCallable, MsgType -from faststream.exceptions import NOT_CONNECTED_YET, SetupError -from faststream.nats.parser import BatchParser, JsParser, NatsParser +from faststream.exceptions import NOT_CONNECTED_YET +from faststream.nats.parser import ( + BatchParser, + JsParser, + KvParser, + NatsParser, + ObjParser, +) from faststream.nats.schemas.js_stream import compile_nats_wildcard +from faststream.nats.subscriber.subscription import ( + UnsubscribeAdapter, + Unsubscriptable, +) from faststream.types import AnyDict, LoggerProto, SendableMessage +from faststream.utils.context.repository import context if TYPE_CHECKING: from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream @@ -35,6 +50,7 @@ from nats.aio.msg import Msg from nats.aio.subscription import Subscription from nats.js import JetStreamContext + from nats.js.object_store import ObjectStore from faststream.broker.message import StreamMessage from faststream.broker.publisher.proto import ProducerProto @@ -42,19 +58,15 @@ AsyncCallable, BrokerMiddleware, ) - from faststream.nats.schemas import JStream, PullSub + from faststream.nats.helpers import KVBucketDeclarer, OSBucketDeclarer + from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.types import Decorator class LogicSubscriber(SubscriberUsecase[MsgType]): """A class to represent a NATS handler.""" - subscription: Union[ - None, - "Subscription", - "JetStreamContext.PushSubscription", - "JetStreamContext.PullSubscription", - ] + subscription: Optional[Unsubscriptable] producer: Optional["ProducerProto"] _connection: Union["Client", "JetStreamContext", None] @@ -63,9 +75,6 @@ def __init__( *, subject: str, extra_options: Optional[AnyDict], - queue: str, - stream: Optional["JStream"], - pull_sub: Optional["PullSub"], # Subscriber args default_parser: "AsyncCallable", default_decoder: "AsyncCallable", @@ -78,13 +87,8 @@ def __init__( description_: Optional[str], include_in_schema: bool, ) -> None: - _, path = compile_nats_wildcard(subject) - - self.subject = path - self.queue = queue + self.subject = subject - self.stream = stream - self.pull_sub = pull_sub self.extra_options = extra_options or {} super().__init__( @@ -104,7 +108,6 @@ def __init__( self._connection = None self.subscription = None 
self.producer = None - self.tasks: List["asyncio.Task[Any]"] = [] @override def setup( # type: ignore[override] @@ -140,6 +143,12 @@ def setup( # type: ignore[override] _call_decorators=_call_decorators, ) + @cached_property + def clear_subject(self) -> str: + """Compile `test.{name}` to `test.*` subject.""" + _, path = compile_nats_wildcard(self.subject) + return path + async def start(self) -> None: """Create NATS subscription and start consume tasks.""" assert self._connection, NOT_CONNECTED_YET # nosec B101 @@ -154,60 +163,21 @@ async def close(self) -> None: await self.subscription.unsubscribe() self.subscription = None - for task in self.tasks: - if not task.done(): - task.cancel() - self.tasks = [] - @abstractmethod async def _create_subscription( self, *, - connection: Union["Client", "JetStreamContext"], + connection: Union[ + "Client", "JetStreamContext", "KVBucketDeclarer", "OSBucketDeclarer" + ], ) -> None: """Create NATS subscription object to consume messages.""" raise NotImplementedError() - def _make_response_publisher( - self, - message: Annotated[ - "StreamMessage[Any]", - Doc("Message requiring reply"), - ], - ) -> Sequence[FakePublisher]: - """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" - if not message.reply_to or self._producer is None: - return () - - return ( - FakePublisher( - self._producer.publish, - publish_kwargs={ - "subject": message.reply_to, - }, - ), - ) - - def __hash__(self) -> int: - return self.get_routing_hash(self.subject) - - @staticmethod - def get_routing_hash( - subject: Annotated[ - str, - Doc("NATS subject to consume messages"), - ], - ) -> int: - """Get handler hash by outer data. - - Using to find handler in `broker.handlers` dictionary. - """ - return hash(subject) - @staticmethod def build_log_context( message: Annotated[ - Optional["StreamMessage[Any]"], + Optional["StreamMessage[MsgType]"], Doc("Message which we are building context for"), ], subject: Annotated[ @@ -220,77 +190,64 @@ def build_log_context( Doc("Using queue group name"), ] = "", stream: Annotated[ - Optional["JStream"], + str, Doc("Stream object we are listening"), - ] = None, + ] = "", ) -> Dict[str, str]: """Static method to build log context out of `self.consume` scope.""" return { "subject": subject, "queue": queue, - "stream": getattr(stream, "name", ""), + "stream": stream, "message_id": getattr(message, "message_id", ""), } - def get_log_context( - self, - message: Annotated[ - Optional["StreamMessage[Any]"], - Doc("Message which we are building context for"), - ], - ) -> Dict[str, str]: - """Log context factory using in `self.consume` scope.""" - return self.build_log_context( - message=message, - subject=self.subject, - queue=self.queue, - stream=self.stream, - ) - def add_prefix(self, prefix: str) -> None: """Include Subscriber in router.""" self.subject = "".join((prefix, self.subject)) + def __hash__(self) -> int: + return self.get_routing_hash(self.subject) -class DefaultHandler(LogicSubscriber["Msg"]): - """One-message consumer class.""" + @staticmethod + def get_routing_hash( + subject: Annotated[ + str, + Doc("NATS subject to consume messages"), + ], + ) -> int: + """Get handler hash by outer data. - send_stream: "MemoryObjectSendStream[Msg]" - receive_stream: "MemoryObjectReceiveStream[Msg]" + Using to find handler in `broker.handlers` dictionary. 
+ """ + return hash(subject) + +class _DefaultSubscriber(LogicSubscriber[MsgType]): def __init__( self, *, - max_workers: int, - # default args subject: str, - queue: str, - stream: Optional["JStream"], - pull_sub: Optional["PullSub"], + # default args extra_options: Optional[AnyDict], # Subscriber args + default_parser: "AsyncCallable", + default_decoder: "AsyncCallable", no_ack: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], - broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], # AsyncAPI args title_: Optional[str], description_: Optional[str], include_in_schema: bool, ) -> None: - parser_: Union[NatsParser, JsParser] = ( - NatsParser(pattern=subject) if stream is None else JsParser(pattern=subject) - ) - super().__init__( subject=subject, - queue=queue, - stream=stream, - pull_sub=pull_sub, extra_options=extra_options, # subscriber args - default_parser=parser_.parse_message, - default_decoder=parser_.decode_message, + default_parser=default_parser, + default_decoder=default_decoder, # Propagated args no_ack=no_ack, retry=retry, @@ -302,6 +259,70 @@ def __init__( include_in_schema=include_in_schema, ) + def _make_response_publisher( + self, + message: Annotated[ + "StreamMessage[Any]", + Doc("Message requiring reply"), + ], + ) -> Sequence[FakePublisher]: + """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" + if not message.reply_to or self._producer is None: + return () + + return ( + FakePublisher( + self._producer.publish, + publish_kwargs={ + "subject": message.reply_to, + }, + ), + ) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[MsgType]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + ) + + +class _TasksMixin(LogicSubscriber[Any]): + def __init__(self, **kwargs: Any) -> None: + self.tasks: List["asyncio.Task[Any]"] = [] + + super().__init__(**kwargs) + + def add_task(self, coro: Coroutine[Any, Any, Any]) -> None: + self.tasks.append(asyncio.create_task(coro)) + + async def close(self) -> None: + """Clean up handler subscription, cancel consume task in graceful mode.""" + await super().close() + + for task in self.tasks: + if not task.done(): + task.cancel() + + self.tasks = [] + + +class _ConcurrentMixin(_TasksMixin): + send_stream: "MemoryObjectSendStream[Msg]" + receive_stream: "MemoryObjectReceiveStream[Msg]" + + def __init__( + self, + *, + max_workers: int, + **kwargs: Any, + ) -> None: self.max_workers = max_workers self.send_stream, self.receive_stream = anyio.create_memory_object_stream( @@ -309,38 +330,10 @@ def __init__( ) self.limiter = anyio.Semaphore(max_workers) - async def _create_subscription( - self, - *, - connection: Union["Client", "JetStreamContext"], - ) -> None: - """Create NATS subscription and start consume task.""" - cb: Callable[["Msg"], Awaitable[Any]] - if self.max_workers > 1: - self.tasks.append(asyncio.create_task(self._serve_consume_queue())) - cb = self.__put_msg - else: - cb = self.consume - - if self.pull_sub is not None: - connection = cast("JetStreamContext", connection) - - if self.stream is None: - raise SetupError("Pull subscriber can be used only with a stream") - - self.subscription = await connection.pull_subscribe( - subject=self.subject, - **self.extra_options, - ) - 
self.tasks.append(asyncio.create_task(self._consume_pull(cb=cb))) - - else: - self.subscription = await connection.subscribe( - subject=self.subject, - queue=self.queue, - cb=cb, # type: ignore[arg-type] - **self.extra_options, - ) + super().__init__(**kwargs) + + def start_consume_task(self) -> None: + self.add_task(self._serve_consume_queue()) async def _serve_consume_queue( self, @@ -351,31 +344,9 @@ async def _serve_consume_queue( """ async with anyio.create_task_group() as tg: async for msg in self.receive_stream: - tg.start_soon(self.__consume_msg, msg) - - async def _consume_pull( - self, - cb: Callable[["Msg"], Awaitable[SendableMessage]], - ) -> None: - """Endless task consuming messages using NATS Pull subscriber.""" - assert self.pull_sub # nosec B101 - - sub = cast("JetStreamContext.PullSubscription", self.subscription) - - while self.running: # pragma: no branch - messages = [] - with suppress(TimeoutError, ConnectionClosedError): - messages = await sub.fetch( - batch=self.pull_sub.batch_size, - timeout=self.pull_sub.timeout, - ) - - if messages: - async with anyio.create_task_group() as tg: - for msg in messages: - tg.start_soon(cb, msg) + tg.start_soon(self._consume_msg, msg) - async def __consume_msg( + async def _consume_msg( self, msg: "Msg", ) -> None: @@ -383,17 +354,14 @@ async def __consume_msg( async with self.limiter: await self.consume(msg) - async def __put_msg(self, msg: "Msg") -> None: + async def _put_msg(self, msg: "Msg") -> None: """Proxy method to put msg into in-memory queue with semaphore block.""" async with self.limiter: await self.send_stream.send(msg) -class BatchHandler(LogicSubscriber[List["Msg"]]): - """Batch-message consumer class.""" - - pull_sub: "PullSub" - stream: "JStream" +class CoreSubscriber(_DefaultSubscriber["Msg"]): + subscription: Optional["Subscription"] def __init__( self, @@ -401,30 +369,27 @@ def __init__( # default args subject: str, queue: str, - stream: Optional["JStream"], - pull_sub: Optional["PullSub"], extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], - broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], # AsyncAPI args title_: Optional[str], description_: Optional[str], include_in_schema: bool, ) -> None: - parser = BatchParser(pattern=subject) + parser_ = NatsParser(pattern=subject) + + self.queue = queue super().__init__( subject=subject, - queue=queue, - stream=stream, - pull_sub=pull_sub, extra_options=extra_options, # subscriber args - default_parser=parser.parse_batch, - default_decoder=parser.decode_batch, + default_parser=parser_.parse_message, + default_decoder=parser_.decode_message, # Propagated args no_ack=no_ack, retry=retry, @@ -440,27 +405,613 @@ def __init__( async def _create_subscription( # type: ignore[override] self, *, - connection: "JetStreamContext", + connection: "Client", ) -> None: """Create NATS subscription and start consume task.""" - self.subscription = await connection.pull_subscribe( - subject=self.subject, + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self.consume, **self.extra_options, ) - self.tasks.append(asyncio.create_task(self._consume_pull())) - async def _consume_pull(self) -> None: - """Endless task consuming messages using NATS Pull subscriber.""" - assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[Msg]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + queue=self.queue, + ) - sub = cast("JetStreamContext.PullSubscription", self.subscription) - while self.running: # pragma: no branch - with suppress(TimeoutError, ConnectionClosedError): - messages = await sub.fetch( - batch=self.pull_sub.batch_size, - timeout=self.pull_sub.timeout, - ) +class ConcurrentCoreSubscriber(_ConcurrentMixin, CoreSubscriber): + def __init__( + self, + *, + max_workers: int, + # default args + subject: str, + queue: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + super().__init__( + max_workers=max_workers, + # basic args + subject=subject, + queue=queue, + extra_options=extra_options, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) - if messages: - await self.consume(messages) + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "Client", + ) -> None: + """Create NATS subscription and start consume task.""" + self.start_consume_task() + + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self._put_msg, + **self.extra_options, + ) + + +class _StreamSubscriber(_DefaultSubscriber["Msg"]): + def __init__( + self, + *, + stream: "JStream", + # default args + subject: str, + queue: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser_ = JsParser(pattern=subject) + + self.queue = queue + self.stream = stream + + super().__init__( + subject=subject, + extra_options=extra_options, + # subscriber args + default_parser=parser_.parse_message, + default_decoder=parser_.decode_message, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[Msg]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + queue=self.queue, + stream=self.stream.name, + ) + + +class PushStreamSubscription(_StreamSubscriber): + subscription: Optional["JetStreamContext.PushSubscription"] + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.subscription = await connection.subscribe( + 
subject=self.clear_subject, + queue=self.queue, + cb=self.consume, + **self.extra_options, + ) + + +class ConcurrentPushStreamSubscriber(_ConcurrentMixin, _StreamSubscriber): + subscription: Optional["JetStreamContext.PushSubscription"] + + def __init__( + self, + *, + max_workers: int, + stream: "JStream", + # default args + subject: str, + queue: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + super().__init__( + max_workers=max_workers, + # basic args + stream=stream, + subject=subject, + queue=queue, + extra_options=extra_options, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.start_consume_task() + + self.subscription = await connection.subscribe( + subject=self.clear_subject, + queue=self.queue, + cb=self._put_msg, + **self.extra_options, + ) + + +class PullStreamSubscriber(_TasksMixin, _StreamSubscriber): + subscription: Optional["JetStreamContext.PullSubscription"] + + def __init__( + self, + *, + pull_sub: "PullSub", + stream: "JStream", + # default args + subject: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + self.pull_sub = pull_sub + + super().__init__( + # basic args + stream=stream, + subject=subject, + extra_options=extra_options, + queue="", + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.subscription = await connection.pull_subscribe( + subject=self.clear_subject, + **self.extra_options, + ) + self.add_task(self._consume_pull(cb=self.consume)) + + async def _consume_pull( + self, + cb: Callable[["Msg"], Awaitable[SendableMessage]], + ) -> None: + """Endless task consuming messages using NATS Pull subscriber.""" + assert self.subscription # nosec B101 + + while self.running: # pragma: no branch + messages = [] + with suppress(TimeoutError, ConnectionClosedError): + messages = await self.subscription.fetch( + batch=self.pull_sub.batch_size, + timeout=self.pull_sub.timeout, + ) + + if messages: + async with anyio.create_task_group() as tg: + for msg in messages: + tg.start_soon(cb, msg) + + +class ConcurrentPullStreamSubscriber(_ConcurrentMixin, PullStreamSubscriber): + def __init__( + self, + *, + max_workers: int, + # default args + pull_sub: "PullSub", + stream: "JStream", + subject: str, + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: 
Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[Msg]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + super().__init__( + max_workers=max_workers, + # basic args + pull_sub=pull_sub, + stream=stream, + subject=subject, + extra_options=extra_options, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.start_consume_task() + + self.subscription = await connection.pull_subscribe( + subject=self.clear_subject, + **self.extra_options, + ) + self.add_task(self._consume_pull(cb=self._put_msg)) + + +class BatchPullStreamSubscriber(_TasksMixin, _DefaultSubscriber[List["Msg"]]): + """Batch-message consumer class.""" + + subscription: Optional["JetStreamContext.PullSubscription"] + + def __init__( + self, + *, + # default args + subject: str, + stream: "JStream", + pull_sub: "PullSub", + extra_options: Optional[AnyDict], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser = BatchParser(pattern=subject) + + self.stream = stream + self.pull_sub = pull_sub + + super().__init__( + subject=subject, + extra_options=extra_options, + # subscriber args + default_parser=parser.parse_batch, + default_decoder=parser.decode_batch, + # Propagated args + no_ack=no_ack, + retry=retry, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "JetStreamContext", + ) -> None: + """Create NATS subscription and start consume task.""" + self.subscription = await connection.pull_subscribe( + subject=self.clear_subject, + **self.extra_options, + ) + self.add_task(self._consume_pull()) + + async def _consume_pull(self) -> None: + """Endless task consuming messages using NATS Pull subscriber.""" + assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + + while self.running: # pragma: no branch + with suppress(TimeoutError, ConnectionClosedError): + messages = await self.subscription.fetch( + batch=self.pull_sub.batch_size, + timeout=self.pull_sub.timeout, + ) + + if messages: + await self.consume(messages) + + +class KeyValueWatchSubscriber(_TasksMixin, LogicSubscriber[KeyValue.Entry]): + subscription: Optional["UnsubscribeAdapter[KeyValue.KeyWatcher]"] + + def __init__( + self, + *, + subject: str, + kv_watch: "KvWatch", + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[KeyValue.Entry]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser = KvParser(pattern=subject) + self.kv_watch = kv_watch + + super().__init__( + subject=subject, + extra_options=None, + no_ack=True, + retry=False, + default_parser=parser.parse_message, + default_decoder=parser.decode_message, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "KVBucketDeclarer", + ) -> None: + bucket = await connection.create_key_value( + bucket=self.kv_watch.name, + declare=self.kv_watch.declare, + ) + + self.subscription = UnsubscribeAdapter["KeyValue.KeyWatcher"]( + await bucket.watch( + keys=self.clear_subject, + headers_only=self.kv_watch.headers_only, + include_history=self.kv_watch.include_history, + ignore_deletes=self.kv_watch.ignore_deletes, + meta_only=self.kv_watch.meta_only, + # inactive_threshold=self.kv_watch.inactive_threshold + ) + ) + + self.add_task(self._consume_watch()) + + async def _consume_watch(self) -> None: + assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + + key_watcher = self.subscription.obj + + while self.running: + with suppress(ConnectionClosedError, TimeoutError): + message = cast( + Optional["KeyValue.Entry"], + await key_watcher.updates(self.kv_watch.timeout), # type: ignore[no-untyped-call] + ) + + if message: + await self.consume(message) + + def _make_response_publisher( + self, + message: Annotated[ + "StreamMessage[KeyValue.Entry]", + Doc("Message requiring reply"), + ], + ) -> Sequence[FakePublisher]: + """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" + return () + + def __hash__(self) -> int: + return hash(self.kv_watch) + hash(self.subject) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[KeyValue.Entry]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + stream=self.kv_watch.name, + ) + + +OBJECT_STORAGE_CONTEXT_KEY = "__object_storage" + + +class ObjStoreWatchSubscriber(_TasksMixin, LogicSubscriber[ObjectInfo]): + subscription: Optional["UnsubscribeAdapter[ObjectStore.ObjectWatcher]"] + + def __init__( + self, + *, + subject: str, + obj_watch: "ObjWatch", + broker_dependencies: Iterable[Depends], + broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, + ) -> None: + parser = ObjParser(pattern="") + + self.obj_watch = obj_watch + + super().__init__( + subject=subject, + extra_options=None, + no_ack=True, + retry=False, + default_parser=parser.parse_message, + default_decoder=parser.decode_message, + broker_middlewares=broker_middlewares, + broker_dependencies=broker_dependencies, + # AsyncAPI args + description_=description_, + title_=title_, + include_in_schema=include_in_schema, + ) + + @override + async def _create_subscription( # type: ignore[override] + self, + *, + connection: "OSBucketDeclarer", + ) -> None: + self.bucket = await connection.create_object_store( + bucket=self.subject, + declare=self.obj_watch.declare, + ) + + self.subscription = UnsubscribeAdapter["ObjectStore.ObjectWatcher"]( + await self.bucket.watch( + ignore_deletes=self.obj_watch.ignore_deletes, + include_history=self.obj_watch.include_history, + meta_only=self.obj_watch.meta_only, + ) + ) + + self.add_task(self._consume_watch()) + + async def _consume_watch(self) -> None: + assert self.subscription, "You should call `create_subscription` at first." 
# nosec B101 + + obj_watch = self.subscription.obj + + while self.running: + with suppress(TimeoutError): + message = cast( + Optional["ObjectInfo"], + await obj_watch.updates(self.obj_watch.timeout), # type: ignore[no-untyped-call] + ) + + if message: + with context.scope(OBJECT_STORAGE_CONTEXT_KEY, self.bucket): + await self.consume(message) + + def _make_response_publisher( + self, + message: Annotated[ + "StreamMessage[ObjectInfo]", + Doc("Message requiring reply"), + ], + ) -> Sequence[FakePublisher]: + """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" + return () + + def __hash__(self) -> int: + return hash(self.subject) + + def get_log_context( + self, + message: Annotated[ + Optional["StreamMessage[ObjectInfo]"], + Doc("Message which we are building context for"), + ], + ) -> Dict[str, str]: + """Log context factory using in `self.consume` scope.""" + return self.build_log_context( + message=message, + subject=self.subject, + ) diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index 6681ba5b14..5a9190dfd7 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -9,7 +9,7 @@ from faststream.nats.broker import NatsBroker from faststream.nats.publisher.producer import NatsFastProducer from faststream.nats.schemas.js_stream import is_subject_match_wildcard -from faststream.nats.subscriber.asyncapi import AsyncAPISubscriber +from faststream.nats.subscriber.usecase import LogicSubscriber from faststream.testing.broker import TestBroker, call_handler if TYPE_CHECKING: @@ -55,7 +55,7 @@ def remove_publisher_fake_subscriber( broker: NatsBroker, publisher: "AsyncAPIPublisher" ) -> None: broker._subscribers.pop( - AsyncAPISubscriber.get_routing_hash(publisher.subject), None + LogicSubscriber.get_routing_hash(publisher.subject), None ) @@ -91,12 +91,15 @@ async def publish( # type: ignore[override] ) for handler in self.broker._subscribers.values(): # pragma: no branch - if stream and getattr(handler.stream, "name", None) != stream: + if stream and ( + not (handler_stream := getattr(handler, "stream", None)) + or stream != handler_stream.name + ): continue - if is_subject_match_wildcard(subject, handler.subject): + if is_subject_match_wildcard(subject, handler.clear_subject): msg: Union[List[PatchedMessage], PatchedMessage] - if getattr(handler.pull_sub, "batch", False): + if (pull := getattr(handler, "pull_sub", None)) and pull.batch: msg = [incoming] else: msg = incoming diff --git a/faststream/opentelemetry/middleware.py b/faststream/opentelemetry/middleware.py index 9a4ad34c10..7bb0519c68 100644 --- a/faststream/opentelemetry/middleware.py +++ b/faststream/opentelemetry/middleware.py @@ -102,7 +102,9 @@ def __init__( self, *, tracer: "Tracer", - settings_provider_factory: Callable[[Any], TelemetrySettingsProvider[Any]], + settings_provider_factory: Callable[ + [Any], Optional[TelemetrySettingsProvider[Any]] + ], metrics_container: _MetricsContainer, msg: Optional[Any] = None, ) -> None: @@ -121,7 +123,8 @@ async def publish_scope( *args: Any, **kwargs: Any, ) -> Any: - provider = self.__settings_provider + if (provider := self.__settings_provider) is None: + return await call_next(msg, *args, **kwargs) headers = kwargs.pop("headers", {}) or {} current_context = context.get_current() @@ -182,7 +185,8 @@ async def consume_scope( call_next: "AsyncFuncAny", msg: "StreamMessage[Any]", ) -> Any: - provider = self.__settings_provider + if (provider := self.__settings_provider) is None: + return await call_next(msg) 
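+        # Past this point a settings provider is guaranteed to exist: if the
+        # factory returned `None`, the guard above already passed the message
+        # through untouched.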
current_context = propagate.extract(msg.headers) destination_name = provider.get_consume_destination_name(msg) @@ -258,7 +262,9 @@ class TelemetryMiddleware: def __init__( self, *, - settings_provider_factory: Callable[[Any], TelemetrySettingsProvider[Any]], + settings_provider_factory: Callable[ + [Any], Optional[TelemetrySettingsProvider[Any]] + ], tracer_provider: Optional["TracerProvider"] = None, meter_provider: Optional["MeterProvider"] = None, meter: Optional["Meter"] = None, diff --git a/faststream/rabbit/broker/registrator.py b/faststream/rabbit/broker/registrator.py index e55d06a30a..e13b7b5261 100644 --- a/faststream/rabbit/broker/registrator.py +++ b/faststream/rabbit/broker/registrator.py @@ -11,6 +11,7 @@ RabbitQueue, ) from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber +from faststream.rabbit.subscriber.factory import create_subscriber if TYPE_CHECKING: from aio_pika import IncomingMessage # noqa: F401 @@ -117,7 +118,7 @@ def subscriber( # type: ignore[override] subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( queue=RabbitQueue.validate(queue), exchange=RabbitExchange.validate(exchange), consume_args=consume_args, diff --git a/faststream/rabbit/subscriber/asyncapi.py b/faststream/rabbit/subscriber/asyncapi.py index 8e8ddcb5db..158d343dd1 100644 --- a/faststream/rabbit/subscriber/asyncapi.py +++ b/faststream/rabbit/subscriber/asyncapi.py @@ -1,6 +1,4 @@ -from typing import TYPE_CHECKING, Dict, Iterable, Optional, Union - -from typing_extensions import override +from typing import Dict from faststream.asyncapi.schema import ( Channel, @@ -12,17 +10,9 @@ ) from faststream.asyncapi.schema.bindings import amqp from faststream.asyncapi.utils import resolve_payloads -from faststream.rabbit.schemas import RabbitExchange, RabbitQueue, ReplyConfig from faststream.rabbit.subscriber.usecase import LogicSubscriber from faststream.rabbit.utils import is_routing_exchange -if TYPE_CHECKING: - from aio_pika import IncomingMessage - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.types import AnyDict - class AsyncAPISubscriber(LogicSubscriber): """AsyncAPI-compatible Rabbit Subscriber class.""" @@ -83,36 +73,3 @@ def get_schema(self) -> Dict[str, Channel]: ), ) } - - @override - @classmethod - def create( # type: ignore[override] - cls, - *, - queue: RabbitQueue, - exchange: Optional["RabbitExchange"], - consume_args: Optional["AnyDict"], - reply_config: Optional["ReplyConfig"], - # Subscriber args - no_ack: bool, - retry: Union[bool, int], - broker_dependencies: Iterable["Depends"], - broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], - # AsyncAPI args - title_: Optional[str], - description_: Optional[str], - include_in_schema: bool, - ) -> "AsyncAPISubscriber": - return cls( - queue=queue, - exchange=exchange, - consume_args=consume_args, - reply_config=reply_config, - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) diff --git a/faststream/rabbit/subscriber/factory.py b/faststream/rabbit/subscriber/factory.py new file mode 100644 index 0000000000..f0ee6b752a --- /dev/null +++ b/faststream/rabbit/subscriber/factory.py @@ -0,0 +1,42 @@ +from typing import TYPE_CHECKING, Iterable, Optional, Union + +from faststream.rabbit.schemas import RabbitExchange, RabbitQueue, ReplyConfig 
+from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber + +if TYPE_CHECKING: + from aio_pika import IncomingMessage + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.types import AnyDict + + +def create_subscriber( + *, + queue: RabbitQueue, + exchange: Optional["RabbitExchange"], + consume_args: Optional["AnyDict"], + reply_config: Optional["ReplyConfig"], + # Subscriber args + no_ack: bool, + retry: Union[bool, int], + broker_dependencies: Iterable["Depends"], + broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], + # AsyncAPI args + title_: Optional[str], + description_: Optional[str], + include_in_schema: bool, +) -> AsyncAPISubscriber: + return AsyncAPISubscriber( + queue=queue, + exchange=exchange, + consume_args=consume_args, + reply_config=reply_config, + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) diff --git a/faststream/redis/broker/registrator.py b/faststream/redis/broker/registrator.py index 4e06b3872b..7a643d189a 100644 --- a/faststream/redis/broker/registrator.py +++ b/faststream/redis/broker/registrator.py @@ -7,6 +7,7 @@ from faststream.redis.message import UnifyRedisDict from faststream.redis.publisher.asyncapi import AsyncAPIPublisher from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber +from faststream.redis.subscriber.factory import SubsciberType, create_subscriber if TYPE_CHECKING: from fast_depends.dependencies import Depends @@ -20,7 +21,6 @@ from faststream.redis.message import UnifyRedisMessage from faststream.redis.publisher.asyncapi import PublisherType from faststream.redis.schemas import ListSub, PubSub, StreamSub - from faststream.redis.subscriber.asyncapi import SubsciberType from faststream.types import AnyDict @@ -104,7 +104,7 @@ def subscriber( # type: ignore[override] subscriber = cast( AsyncAPISubscriber, super().subscriber( - AsyncAPISubscriber.create( + create_subscriber( channel=channel, list=list, stream=stream, diff --git a/faststream/redis/schemas/proto.py b/faststream/redis/schemas/proto.py index 28e50cf9ba..2521a1a0a3 100644 --- a/faststream/redis/schemas/proto.py +++ b/faststream/redis/schemas/proto.py @@ -17,10 +17,6 @@ def channel_binding(self) -> "redis.ChannelBinding": ... @abstractmethod def get_payloads(self) -> Any: ... - @staticmethod - @abstractmethod - def create() -> Any: ... 
- def validate_options( *, diff --git a/faststream/redis/subscriber/asyncapi.py b/faststream/redis/subscriber/asyncapi.py index dd2fbe7e56..36171b247b 100644 --- a/faststream/redis/subscriber/asyncapi.py +++ b/faststream/redis/subscriber/asyncapi.py @@ -1,6 +1,4 @@ -from typing import TYPE_CHECKING, Dict, Iterable, Optional, Union - -from typing_extensions import TypeAlias, override +from typing import Dict from faststream.asyncapi.schema import ( Channel, @@ -11,9 +9,8 @@ ) from faststream.asyncapi.schema.bindings import redis from faststream.asyncapi.utils import resolve_payloads -from faststream.exceptions import SetupError -from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub -from faststream.redis.schemas.proto import RedisAsyncAPIProtocol, validate_options +from faststream.redis.schemas import ListSub, StreamSub +from faststream.redis.schemas.proto import RedisAsyncAPIProtocol from faststream.redis.subscriber.usecase import ( BatchListSubscriber, BatchStreamSubscriber, @@ -23,20 +20,6 @@ StreamSubscriber, ) -if TYPE_CHECKING: - from fast_depends.dependencies import Depends - - from faststream.broker.types import BrokerMiddleware - from faststream.redis.message import UnifyRedisDict - -SubsciberType: TypeAlias = Union[ - "AsyncAPIChannelSubscriber", - "AsyncAPIStreamBatchSubscriber", - "AsyncAPIStreamSubscriber", - "AsyncAPIListBatchSubscriber", - "AsyncAPIListSubscriber", -] - class AsyncAPISubscriber(LogicSubscriber, RedisAsyncAPIProtocol): """A class to represent a Redis handler.""" @@ -62,98 +45,6 @@ def get_schema(self) -> Dict[str, Channel]: ) } - @override - @staticmethod - def create( # type: ignore[override] - *, - channel: Union["PubSub", str, None], - list: Union["ListSub", str, None], - stream: Union["StreamSub", str, None], - # Subscriber args - no_ack: bool = False, - retry: bool = False, - broker_dependencies: Iterable["Depends"] = (), - broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"] = (), - # AsyncAPI args - title_: Optional[str] = None, - description_: Optional[str] = None, - include_in_schema: bool = True, - ) -> SubsciberType: - validate_options(channel=channel, list=list, stream=stream) - - if (channel_sub := PubSub.validate(channel)) is not None: - return AsyncAPIChannelSubscriber( - channel=channel_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - - elif (stream_sub := StreamSub.validate(stream)) is not None: - if stream_sub.batch: - return AsyncAPIStreamBatchSubscriber( - stream=stream_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIStreamSubscriber( - stream=stream_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - - elif (list_sub := ListSub.validate(list)) is not None: - if list_sub.batch: - return AsyncAPIListBatchSubscriber( - list=list_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - 
description_=description_, - include_in_schema=include_in_schema, - ) - else: - return AsyncAPIListSubscriber( - list=list_sub, - # basic args - no_ack=no_ack, - retry=retry, - broker_dependencies=broker_dependencies, - broker_middlewares=broker_middlewares, - # AsyncAPI args - title_=title_, - description_=description_, - include_in_schema=include_in_schema, - ) - - else: - raise SetupError(INCORRECT_SETUP_MSG) - class AsyncAPIChannelSubscriber(ChannelSubscriber, AsyncAPISubscriber): def get_name(self) -> str: diff --git a/faststream/redis/subscriber/factory.py b/faststream/redis/subscriber/factory.py new file mode 100644 index 0000000000..da5fe02898 --- /dev/null +++ b/faststream/redis/subscriber/factory.py @@ -0,0 +1,119 @@ +from typing import TYPE_CHECKING, Iterable, Optional, Union + +from typing_extensions import TypeAlias + +from faststream.exceptions import SetupError +from faststream.redis.schemas import INCORRECT_SETUP_MSG, ListSub, PubSub, StreamSub +from faststream.redis.schemas.proto import validate_options +from faststream.redis.subscriber.asyncapi import ( + AsyncAPIChannelSubscriber, + AsyncAPIListBatchSubscriber, + AsyncAPIListSubscriber, + AsyncAPIStreamBatchSubscriber, + AsyncAPIStreamSubscriber, +) + +if TYPE_CHECKING: + from fast_depends.dependencies import Depends + + from faststream.broker.types import BrokerMiddleware + from faststream.redis.message import UnifyRedisDict + +SubsciberType: TypeAlias = Union[ + "AsyncAPIChannelSubscriber", + "AsyncAPIStreamBatchSubscriber", + "AsyncAPIStreamSubscriber", + "AsyncAPIListBatchSubscriber", + "AsyncAPIListSubscriber", +] + + +def create_subscriber( + *, + channel: Union["PubSub", str, None], + list: Union["ListSub", str, None], + stream: Union["StreamSub", str, None], + # Subscriber args + no_ack: bool = False, + retry: bool = False, + broker_dependencies: Iterable["Depends"] = (), + broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"] = (), + # AsyncAPI args + title_: Optional[str] = None, + description_: Optional[str] = None, + include_in_schema: bool = True, +) -> SubsciberType: + validate_options(channel=channel, list=list, stream=stream) + + if (channel_sub := PubSub.validate(channel)) is not None: + return AsyncAPIChannelSubscriber( + channel=channel_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + elif (stream_sub := StreamSub.validate(stream)) is not None: + if stream_sub.batch: + return AsyncAPIStreamBatchSubscriber( + stream=stream_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + else: + return AsyncAPIStreamSubscriber( + stream=stream_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + elif (list_sub := ListSub.validate(list)) is not None: + if list_sub.batch: + return AsyncAPIListBatchSubscriber( + list=list_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + 
include_in_schema=include_in_schema, + ) + else: + return AsyncAPIListSubscriber( + list=list_sub, + # basic args + no_ack=no_ack, + retry=retry, + broker_dependencies=broker_dependencies, + broker_middlewares=broker_middlewares, + # AsyncAPI args + title_=title_, + description_=description_, + include_in_schema=include_in_schema, + ) + + else: + raise SetupError(INCORRECT_SETUP_MSG) diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py index 58b2745b14..7919f384f7 100644 --- a/faststream/redis/subscriber/usecase.py +++ b/faststream/redis/subscriber/usecase.py @@ -1,5 +1,5 @@ import asyncio -from abc import ABC, abstractmethod +from abc import abstractmethod from contextlib import suppress from copy import deepcopy from typing import ( @@ -57,7 +57,7 @@ Offset: TypeAlias = bytes -class LogicSubscriber(ABC, SubscriberUsecase[UnifyRedisDict]): +class LogicSubscriber(SubscriberUsecase[UnifyRedisDict]): """A class to represent a Redis handler.""" _client: Optional["Redis[bytes]"] diff --git a/faststream/redis/testing.py b/faststream/redis/testing.py index 1beedc4dc0..2931bf76e2 100644 --- a/faststream/redis/testing.py +++ b/faststream/redis/testing.py @@ -18,7 +18,7 @@ from faststream.redis.parser import RawMessage from faststream.redis.publisher.producer import RedisFastProducer from faststream.redis.schemas import INCORRECT_SETUP_MSG -from faststream.redis.subscriber.asyncapi import AsyncAPISubscriber +from faststream.redis.subscriber.factory import create_subscriber from faststream.testing.broker import TestBroker, call_handler if TYPE_CHECKING: @@ -66,7 +66,7 @@ def remove_publisher_fake_subscriber( publisher: "AsyncAPIPublisher", ) -> None: broker._subscribers.pop( - hash(AsyncAPISubscriber.create(**publisher.subscriber_property)), + hash(create_subscriber(**publisher.subscriber_property)), None, ) diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py index f8925210a4..249e5c6846 100644 --- a/faststream/testing/broker.py +++ b/faststream/testing/broker.py @@ -69,6 +69,8 @@ def __init__( self.connect_only = connect_only async def __aenter__(self) -> Broker: + # TODO: remove useless middlewares filter + middlewares = tuple( filter( lambda x: not isinstance(x, CriticalLogMiddleware), @@ -90,6 +92,7 @@ async def __aenter__(self) -> Broker: async def __aexit__(self, *args: Any) -> None: await self._ctx.__aexit__(*args) + # TODO: remove useless middlewares filter middlewares: Tuple["BrokerMiddleware[Any]", ...] 
= ( CriticalLogMiddleware( # type: ignore[arg-type] logger=self.broker.logger, diff --git a/pyproject.toml b/pyproject.toml index 0ef08d8502..17a90227f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ otel = ["opentelemetry-sdk>=1.24.0,<2.0.0"] optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel]"] devdocs = [ - "mkdocs-material==9.5.21", + "mkdocs-material==9.5.24", "mkdocs-static-i18n==1.2.3", "mdx-include==1.4.2", "mkdocstrings[python]==0.25.1", @@ -88,7 +88,7 @@ devdocs = [ "mike==2.1.1", # versioning "mkdocs-minify-plugin==0.8.0", "mkdocs-macros-plugin==1.0.5", # includes with variables - "mkdocs-glightbox==0.3.7", # img zoom + "mkdocs-glightbox==0.4.0", # img zoom "pillow", # required for mkdocs-glightbo "cairosvg", # required for mkdocs-glightbo "requests", # using in CI, do not pin it @@ -99,6 +99,7 @@ types = [ "faststream[optionals]", "mypy==1.10.0", # mypy extensions + "types-Deprecated", "types-PyYAML", "types-setuptools", "types-ujson", @@ -112,14 +113,14 @@ lint = [ "faststream[types]", "ruff==0.4.4", "bandit==1.7.8", - "semgrep==1.70.0", + "semgrep==1.73.0", "codespell==2.2.6", ] test-core = [ "coverage[toml]==7.5.1", - "pytest==8.2.0", - "pytest-asyncio==0.23.6", + "pytest==8.2.1", + "pytest-asyncio==0.23.7", "dirty-equals==0.7.1.post0", ] diff --git a/tests/asyncapi/test_app.py b/tests/asyncapi/kafka/test_app.py similarity index 100% rename from tests/asyncapi/test_app.py rename to tests/asyncapi/kafka/test_app.py diff --git a/tests/asyncapi/nats/test_kv_schema.py b/tests/asyncapi/nats/test_kv_schema.py new file mode 100644 index 0000000000..4b0edc1847 --- /dev/null +++ b/tests/asyncapi/nats/test_kv_schema.py @@ -0,0 +1,14 @@ +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.nats import NatsBroker + + +def test_kv_schema(): + broker = NatsBroker() + + @broker.subscriber("test", kv_watch="test") + async def handle(): ... + + schema = get_app_schema(FastStream(broker)).to_jsonable() + + assert schema["channels"] == {} diff --git a/tests/asyncapi/nats/test_obj_schema.py b/tests/asyncapi/nats/test_obj_schema.py new file mode 100644 index 0000000000..f7546cbc22 --- /dev/null +++ b/tests/asyncapi/nats/test_obj_schema.py @@ -0,0 +1,14 @@ +from faststream import FastStream +from faststream.asyncapi.generate import get_app_schema +from faststream.nats import NatsBroker + + +def test_obj_schema(): + broker = NatsBroker() + + @broker.subscriber("test", obj_watch=True) + async def handle(): ... 
+ + schema = get_app_schema(FastStream(broker)).to_jsonable() + + assert schema["channels"] == {} diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py index 5318fb2a69..60ac90a7f3 100644 --- a/tests/brokers/nats/test_consume.py +++ b/tests/brokers/nats/test_consume.py @@ -276,5 +276,73 @@ def subscriber(m, msg: NatsMessage): timeout=3, ) - assert event.is_set() - mock.assert_called_once_with(True) + assert event.is_set() + mock.assert_called_once_with(True) + + @pytest.mark.asyncio() + async def test_consume_kv( + self, + queue: str, + event: asyncio.Event, + mock, + ): + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, kv_watch=queue + "1") + async def handler(m): + mock(m) + event.set() + + async with self.patch_broker(consume_broker) as br: + await br.start() + bucket = await br.key_value(queue + "1") + + await asyncio.wait( + ( + asyncio.create_task( + bucket.put( + queue, + b"world", + ) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_with(b"world") + + @pytest.mark.asyncio() + async def test_consume_os( + self, + queue: str, + event: asyncio.Event, + mock, + ): + consume_broker = self.get_broker(apply_types=True) + + @consume_broker.subscriber(queue, obj_watch=True) + async def handler(filename: str): + event.set() + mock(filename) + + async with self.patch_broker(consume_broker) as br: + await br.start() + bucket = await br.object_storage(queue) + + await asyncio.wait( + ( + asyncio.create_task( + bucket.put( + "hello", + b"world", + ) + ), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with("hello") diff --git a/tests/brokers/nats/test_kv_declarer_cache.py b/tests/brokers/nats/test_kv_declarer_cache.py new file mode 100644 index 0000000000..1c85be3a5a --- /dev/null +++ b/tests/brokers/nats/test_kv_declarer_cache.py @@ -0,0 +1,23 @@ +from unittest.mock import patch + +import pytest +from nats.js import JetStreamContext + +from faststream.nats import NatsBroker +from tests.tools import spy_decorator + + +@pytest.mark.asyncio() +@pytest.mark.nats() +async def test_kv_storage_cache(): + broker = NatsBroker() + await broker.connect() + with patch.object( + JetStreamContext, + "create_key_value", + spy_decorator(JetStreamContext.create_key_value), + ) as m: + await broker.key_value("test") + await broker.key_value("test") + assert broker._kv_declarer.buckets["test"] + m.mock.assert_called_once() diff --git a/tests/brokers/nats/test_os_declarer_cache.py b/tests/brokers/nats/test_os_declarer_cache.py new file mode 100644 index 0000000000..d7107e27e0 --- /dev/null +++ b/tests/brokers/nats/test_os_declarer_cache.py @@ -0,0 +1,24 @@ +from unittest.mock import patch + +import pytest +from nats.js import JetStreamContext + +from faststream.nats import NatsBroker +from tests.tools import spy_decorator + + +@pytest.mark.asyncio() +@pytest.mark.nats() +async def test_object_storage_cache(): + broker = NatsBroker() + await broker.connect() + + with patch.object( + JetStreamContext, + "create_object_store", + spy_decorator(JetStreamContext.create_object_store), + ) as m: + await broker.object_storage("test") + await broker.object_storage("test") + assert broker._os_declarer.buckets["test"] + m.mock.assert_called_once() diff --git a/tests/cli/rabbit/__init__.py b/tests/cli/rabbit/__init__.py new file mode 100644 index 0000000000..ebec43fcd5 --- /dev/null +++ b/tests/cli/rabbit/__init__.py @@ -0,0 +1,3 @@ 
+import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/cli/test_app.py b/tests/cli/rabbit/test_app.py similarity index 97% rename from tests/cli/test_app.py rename to tests/cli/rabbit/test_app.py index 6795d8fd94..00944b7bb5 100644 --- a/tests/cli/test_app.py +++ b/tests/cli/rabbit/test_app.py @@ -10,10 +10,9 @@ from faststream import FastStream, TestApp from faststream._compat import IS_WINDOWS from faststream.log import logger -from faststream.rabbit import RabbitBroker -def test_init(app: FastStream, broker: RabbitBroker): +def test_init(app: FastStream, broker): assert app.broker is broker assert app.logger is logger @@ -26,7 +25,7 @@ def test_init_without_logger(app_without_logger: FastStream): assert app_without_logger.logger is None -def test_set_broker(broker: RabbitBroker, app_without_broker: FastStream): +def test_set_broker(broker, app_without_broker: FastStream): assert app_without_broker.broker is None app_without_broker.set_broker(broker) assert app_without_broker.broker is broker diff --git a/tests/cli/utils/test_logs.py b/tests/cli/rabbit/test_logs.py similarity index 83% rename from tests/cli/utils/test_logs.py rename to tests/cli/rabbit/test_logs.py index 384e71954d..79e140da99 100644 --- a/tests/cli/utils/test_logs.py +++ b/tests/cli/rabbit/test_logs.py @@ -8,15 +8,12 @@ @pytest.mark.parametrize( - ("level", "broker"), + "level", ( # noqa: PT007 - pytest.param(logging.ERROR, RabbitBroker(), id=str(logging.ERROR)), - *( - pytest.param(level, RabbitBroker(), id=level) - for level in LogLevels.__members__ - ), + pytest.param(logging.ERROR, id=str(logging.ERROR)), + *(pytest.param(level, id=level) for level in LogLevels.__members__), *( - pytest.param(level, RabbitBroker(), id=str(level)) + pytest.param(level, id=str(level)) for level in LogLevels.__members__.values() ), ), diff --git a/tests/cli/test_asyncapi_docs.py b/tests/cli/test_asyncapi_docs.py index 7ae5eb625d..816710c9ad 100644 --- a/tests/cli/test_asyncapi_docs.py +++ b/tests/cli/test_asyncapi_docs.py @@ -14,12 +14,14 @@ serve_cmd, ) from faststream.cli.main import cli +from tests.marks import require_aiokafka GEN_JSON_CMD = gen_json_cmd.split(" ")[1:-1] GEN_YAML_CMD = gen_yaml_cmd.split(" ")[1:-1] SERVE_CMD = serve_cmd.split(" ")[1:-1] +@require_aiokafka def test_gen_asyncapi_json_for_kafka_app(runner: CliRunner, kafka_basic_project: Path): r = runner.invoke( cli, [*GEN_JSON_CMD, "--out", "schema.json", str(kafka_basic_project)] @@ -36,6 +38,7 @@ def test_gen_asyncapi_json_for_kafka_app(runner: CliRunner, kafka_basic_project: schema_path.unlink() +@require_aiokafka def test_gen_asyncapi_yaml_for_kafka_app(runner: CliRunner, kafka_basic_project: Path): r = runner.invoke(cli, GEN_YAML_CMD + [str(kafka_basic_project)]) # noqa: RUF005 assert r.exit_code == 0 @@ -56,6 +59,7 @@ def test_gen_wrong_path(runner: CliRunner): assert "No such file or directory" in r.stdout +@require_aiokafka def test_serve_asyncapi_docs( runner: CliRunner, kafka_basic_project: Path, @@ -70,6 +74,7 @@ def test_serve_asyncapi_docs( mock.assert_called_once() +@require_aiokafka @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_serve_asyncapi_json_schema( runner: CliRunner, @@ -90,6 +95,7 @@ def test_serve_asyncapi_json_schema( schema_path.unlink() +@require_aiokafka @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows") def test_serve_asyncapi_yaml_schema( runner: CliRunner, diff --git a/tests/cli/test_publish.py b/tests/cli/test_publish.py index 980dfb4a8a..7e2aa367ea 
100644 --- a/tests/cli/test_publish.py +++ b/tests/cli/test_publish.py @@ -1,55 +1,38 @@ from unittest.mock import AsyncMock, patch -import pytest from dirty_equals import IsPartialDict -from typer.testing import CliRunner from faststream import FastStream from faststream.cli.main import cli as faststream_app -from faststream.confluent import KafkaBroker as ConfluentBroker -from faststream.confluent.publisher.producer import AsyncConfluentFastProducer -from faststream.kafka import KafkaBroker -from faststream.kafka.publisher.producer import AioKafkaFastProducer -from faststream.nats import NatsBroker -from faststream.nats.publisher.producer import NatsFastProducer -from faststream.rabbit import RabbitBroker -from faststream.rabbit.publisher.producer import AioPikaFastProducer -from faststream.redis import RedisBroker -from faststream.redis.publisher.producer import RedisFastProducer - -# Initialize the CLI runner -runner = CliRunner() - - -@pytest.fixture() -def mock_app(request): - app = FastStream() - broker_type = request.param["broker_type"] - producer_type = request.param["producer_type"] +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) + +def get_mock_app(broker_type, producer_type) -> FastStream: broker = broker_type() broker.connect = AsyncMock() - mock_producer = AsyncMock(spec=producer_type) mock_producer.publish = AsyncMock() broker._producer = mock_producer + return FastStream(broker) - app.broker = broker - return app +@require_redis +def test_publish_command_with_redis_options(runner): + from faststream.redis import RedisBroker + from faststream.redis.publisher.producer import RedisFastProducer -@pytest.mark.parametrize( - "mock_app", - [ - { - "broker_type": RedisBroker, - "producer_type": RedisFastProducer, - } - ], - indirect=True, -) -def test_publish_command_with_redis_options(mock_app): - with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): + mock_app = get_mock_app(RedisBroker, RedisFastProducer) + + with patch( + "faststream.cli.main.import_from_string", + return_value=(None, mock_app), + ): result = runner.invoke( faststream_app, [ @@ -82,17 +65,13 @@ def test_publish_command_with_redis_options(mock_app): ) -@pytest.mark.parametrize( - "mock_app", - [ - { - "broker_type": ConfluentBroker, - "producer_type": AsyncConfluentFastProducer, - } - ], - indirect=True, -) -def test_publish_command_with_confluent_options(mock_app): +@require_confluent +def test_publish_command_with_confluent_options(runner): + from faststream.confluent import KafkaBroker as ConfluentBroker + from faststream.confluent.publisher.producer import AsyncConfluentFastProducer + + mock_app = get_mock_app(ConfluentBroker, AsyncConfluentFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, @@ -116,12 +95,13 @@ def test_publish_command_with_confluent_options(mock_app): ) -@pytest.mark.parametrize( - "mock_app", - [{"broker_type": KafkaBroker, "producer_type": AioKafkaFastProducer}], - indirect=True, -) -def test_publish_command_with_kafka_options(mock_app): +@require_aiokafka +def test_publish_command_with_kafka_options(runner): + from faststream.kafka import KafkaBroker + from faststream.kafka.publisher.producer import AioKafkaFastProducer + + mock_app = get_mock_app(KafkaBroker, AioKafkaFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( 
faststream_app, @@ -145,12 +125,13 @@ def test_publish_command_with_kafka_options(mock_app): ) -@pytest.mark.parametrize( - "mock_app", - [{"broker_type": NatsBroker, "producer_type": NatsFastProducer}], - indirect=True, -) -def test_publish_command_with_nats_options(mock_app): +@require_nats +def test_publish_command_with_nats_options(runner): + from faststream.nats import NatsBroker + from faststream.nats.publisher.producer import NatsFastProducer + + mock_app = get_mock_app(NatsBroker, NatsFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, @@ -178,12 +159,13 @@ def test_publish_command_with_nats_options(mock_app): ) -@pytest.mark.parametrize( - "mock_app", - [{"broker_type": RabbitBroker, "producer_type": AioPikaFastProducer}], - indirect=True, -) -def test_publish_command_with_rabbit_options(mock_app: AsyncMock): +@require_aiopika +def test_publish_command_with_rabbit_options(runner): + from faststream.rabbit import RabbitBroker + from faststream.rabbit.publisher.producer import AioPikaFastProducer + + mock_app = get_mock_app(RabbitBroker, AioPikaFastProducer) + with patch("faststream.cli.main.import_from_string", return_value=(None, mock_app)): result = runner.invoke( faststream_app, diff --git a/tests/cli/utils/test_imports.py b/tests/cli/utils/test_imports.py index 3c7597c951..f97e26c0ff 100644 --- a/tests/cli/utils/test_imports.py +++ b/tests/cli/utils/test_imports.py @@ -5,6 +5,7 @@ from faststream.app import FastStream from faststream.cli.utils.imports import get_app_path, import_from_string, import_object +from tests.marks import require_aiokafka, require_aiopika, require_nats def test_import_wrong(): @@ -54,6 +55,9 @@ def test_import_from_string_import_wrong(): pytest.param("examples.rabbit.topic:app", "examples/rabbit/topic.py"), ), ) +@require_nats +@require_aiopika +@require_aiokafka def test_import_from_string(test_input, exp_module): module, app = import_from_string(test_input) assert isinstance(app, FastStream) @@ -80,6 +84,9 @@ def test_import_from_string(test_input, exp_module): ), ), ) +@require_nats +@require_aiopika +@require_aiokafka def test_import_module(test_input, exp_module): module, app = import_from_string(test_input) assert isinstance(app, FastStream) diff --git a/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py b/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py +++ b/tests/docs/getting_started/asyncapi/asyncapi_customization/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/docs/getting_started/cli/confluent/__init__.py b/tests/docs/getting_started/cli/confluent/__init__.py new file mode 100644 index 0000000000..c4a1803708 --- /dev/null +++ b/tests/docs/getting_started/cli/confluent/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("confluent_kafka") diff --git a/tests/docs/getting_started/cli/test_confluent_context.py b/tests/docs/getting_started/cli/confluent/test_confluent_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_confluent_context.py rename to tests/docs/getting_started/cli/confluent/test_confluent_context.py diff --git a/tests/docs/getting_started/cli/kafka/__init__.py b/tests/docs/getting_started/cli/kafka/__init__.py new file mode 100644 index 0000000000..bd6bc708fc --- /dev/null +++ 
b/tests/docs/getting_started/cli/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/docs/getting_started/cli/test_kafka_context.py b/tests/docs/getting_started/cli/kafka/test_kafka_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_kafka_context.py rename to tests/docs/getting_started/cli/kafka/test_kafka_context.py diff --git a/tests/docs/getting_started/cli/nats/__init__.py b/tests/docs/getting_started/cli/nats/__init__.py new file mode 100644 index 0000000000..87ead90ee6 --- /dev/null +++ b/tests/docs/getting_started/cli/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/docs/getting_started/cli/test_nats_context.py b/tests/docs/getting_started/cli/nats/test_nats_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_nats_context.py rename to tests/docs/getting_started/cli/nats/test_nats_context.py diff --git a/tests/docs/getting_started/cli/rabbit/__init__.py b/tests/docs/getting_started/cli/rabbit/__init__.py new file mode 100644 index 0000000000..ebec43fcd5 --- /dev/null +++ b/tests/docs/getting_started/cli/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/docs/getting_started/cli/test_rabbit_context.py b/tests/docs/getting_started/cli/rabbit/test_rabbit_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_rabbit_context.py rename to tests/docs/getting_started/cli/rabbit/test_rabbit_context.py diff --git a/tests/docs/getting_started/cli/redis/__init__.py b/tests/docs/getting_started/cli/redis/__init__.py new file mode 100644 index 0000000000..4752ef19b1 --- /dev/null +++ b/tests/docs/getting_started/cli/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/docs/getting_started/cli/test_redis_context.py b/tests/docs/getting_started/cli/redis/test_redis_context.py similarity index 100% rename from tests/docs/getting_started/cli/test_redis_context.py rename to tests/docs/getting_started/cli/redis/test_redis_context.py diff --git a/tests/docs/getting_started/config/test_usage.py b/tests/docs/getting_started/config/test_usage.py index cdceaf9e8d..2ae34dda33 100644 --- a/tests/docs/getting_started/config/test_usage.py +++ b/tests/docs/getting_started/config/test_usage.py @@ -1,8 +1,9 @@ -from tests.marks import pydantic_v2 +from tests.marks import pydantic_v2, require_aiopika from tests.mocks import mock_pydantic_settings_env @pydantic_v2 +@require_aiopika def test_exists_and_valid(): with mock_pydantic_settings_env({"url": "localhost:9092"}): from docs.docs_src.getting_started.config.usage import settings diff --git a/tests/docs/getting_started/context/test_annotated.py b/tests/docs/getting_started/context/test_annotated.py index b800ad1956..b85bc816bc 100644 --- a/tests/docs/getting_started/context/test_annotated.py +++ b/tests/docs/getting_started/context/test_annotated.py @@ -1,20 +1,24 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker -from tests.marks import python39 +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @python39 @pytest.mark.asyncio() +@require_aiokafka async def 
test_annotated_kafka(): from docs.docs_src.getting_started.context.kafka.annotated import ( base_handler, broker, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -24,11 +28,13 @@ async def test_annotated_kafka(): @python39 @pytest.mark.asyncio() +@require_confluent async def test_annotated_confluent(): from docs.docs_src.getting_started.context.confluent.annotated import ( base_handler, broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -38,11 +44,13 @@ async def test_annotated_confluent(): @python39 @pytest.mark.asyncio() +@require_aiopika async def test_annotated_rabbit(): from docs.docs_src.getting_started.context.rabbit.annotated import ( base_handler, broker, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") @@ -52,11 +60,13 @@ async def test_annotated_rabbit(): @python39 @pytest.mark.asyncio() +@require_nats async def test_annotated_nats(): from docs.docs_src.getting_started.context.nats.annotated import ( base_handler, broker, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test") @@ -66,11 +76,13 @@ async def test_annotated_nats(): @python39 @pytest.mark.asyncio() +@require_redis async def test_annotated_redis(): from docs.docs_src.getting_started.context.redis.annotated import ( base_handler, broker, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test") diff --git a/tests/docs/getting_started/context/test_base.py b/tests/docs/getting_started/context/test_base.py index 03e9984f24..5b24fdd827 100644 --- a/tests/docs/getting_started/context/test_base.py +++ b/tests/docs/getting_started/context/test_base.py @@ -1,15 +1,19 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_base_kafka(): from docs.docs_src.getting_started.context.kafka.base import base_handler, broker + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -18,11 +22,13 @@ async def test_base_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_base_confluent(): from docs.docs_src.getting_started.context.confluent.base import ( base_handler, broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test") @@ -31,8 +37,10 @@ async def test_base_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_base_rabbit(): from docs.docs_src.getting_started.context.rabbit.base import base_handler, broker + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") @@ -41,8 +49,10 @@ async def test_base_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_base_nats(): from docs.docs_src.getting_started.context.nats.base import base_handler, broker + from 
faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test") @@ -51,8 +61,10 @@ async def test_base_nats(): @pytest.mark.asyncio() +@require_redis async def test_base_redis(): from docs.docs_src.getting_started.context.redis.base import base_handler, broker + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test") diff --git a/tests/docs/getting_started/context/test_cast.py b/tests/docs/getting_started/context/test_cast.py index 6ade96b33e..f99769603e 100644 --- a/tests/docs/getting_started/context/test_cast.py +++ b/tests/docs/getting_started/context/test_cast.py @@ -1,19 +1,23 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_cast_kafka(): from docs.docs_src.getting_started.context.kafka.cast import ( broker, handle, handle_int, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -26,12 +30,14 @@ async def test_cast_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_cast_confluent(): from docs.docs_src.getting_started.context.confluent.cast import ( broker, handle, handle_int, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -44,12 +50,14 @@ async def test_cast_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_cast_rabbit(): from docs.docs_src.getting_started.context.rabbit.cast import ( broker, handle, handle_int, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -62,12 +70,14 @@ async def test_cast_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_cast_nats(): from docs.docs_src.getting_started.context.nats.cast import ( broker, handle, handle_int, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -80,12 +90,14 @@ async def test_cast_nats(): @pytest.mark.asyncio() +@require_redis async def test_cast_redis(): from docs.docs_src.getting_started.context.redis.cast import ( broker, handle, handle_int, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_custom_global.py b/tests/docs/getting_started/context/test_custom_global.py index b9ec555cfe..0541980245 100644 --- a/tests/docs/getting_started/context/test_custom_global.py +++ b/tests/docs/getting_started/context/test_custom_global.py @@ -1,20 +1,24 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) 
@pytest.mark.asyncio() +@require_aiokafka async def test_custom_global_context_kafka(): from docs.docs_src.getting_started.context.kafka.custom_global_context import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-topic") @@ -23,12 +27,14 @@ async def test_custom_global_context_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_custom_global_context_confluent(): from docs.docs_src.getting_started.context.confluent.custom_global_context import ( app, broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-topic") @@ -37,12 +43,14 @@ async def test_custom_global_context_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_custom_global_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.custom_global_context import ( app, broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-queue") @@ -51,12 +59,14 @@ async def test_custom_global_context_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_custom_global_context_nats(): from docs.docs_src.getting_started.context.nats.custom_global_context import ( app, broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-subject") @@ -65,12 +75,14 @@ async def test_custom_global_context_nats(): @pytest.mark.asyncio() +@require_redis async def test_custom_global_context_redis(): from docs.docs_src.getting_started.context.redis.custom_global_context import ( app, broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br, TestApp(app): await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_custom_local.py b/tests/docs/getting_started/context/test_custom_local.py index 8672c6dd65..e2cf2421c0 100644 --- a/tests/docs/getting_started/context/test_custom_local.py +++ b/tests/docs/getting_started/context/test_custom_local.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_custom_local_context_kafka(): from docs.docs_src.getting_started.context.kafka.custom_local_context import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -21,11 +25,13 @@ async def test_custom_local_context_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_custom_local_context_confluent(): from docs.docs_src.getting_started.context.confluent.custom_local_context import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -34,11 +40,13 @@ async def test_custom_local_context_confluent(): @pytest.mark.asyncio() +@require_aiopika async def 
test_custom_local_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.custom_local_context import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -47,11 +55,13 @@ async def test_custom_local_context_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_custom_local_context_nats(): from docs.docs_src.getting_started.context.nats.custom_local_context import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -60,11 +70,13 @@ async def test_custom_local_context_nats(): @pytest.mark.asyncio() +@require_redis async def test_custom_local_context_redis(): from docs.docs_src.getting_started.context.redis.custom_local_context import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_default_arguments.py b/tests/docs/getting_started/context/test_default_arguments.py index f583f09f9b..969baf881f 100644 --- a/tests/docs/getting_started/context/test_default_arguments.py +++ b/tests/docs/getting_started/context/test_default_arguments.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_default_arguments_kafka(): from docs.docs_src.getting_started.context.kafka.default_arguments import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -21,11 +25,13 @@ async def test_default_arguments_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_default_arguments_confluent(): from docs.docs_src.getting_started.context.confluent.default_arguments import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -34,11 +40,13 @@ async def test_default_arguments_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_default_arguments_rabbit(): from docs.docs_src.getting_started.context.rabbit.default_arguments import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -47,11 +55,13 @@ async def test_default_arguments_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_default_arguments_nats(): from docs.docs_src.getting_started.context.nats.default_arguments import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -60,11 +70,13 @@ async def test_default_arguments_nats(): @pytest.mark.asyncio() +@require_redis async def test_default_arguments_redis(): from docs.docs_src.getting_started.context.redis.default_arguments import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") 
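The recurring change in the doc-test diffs above and below is the same: module-level broker imports are replaced with per-test lazy imports guarded by require_* marks from tests.marks, so tests for a broker whose optional client library is missing are skipped instead of failing at import time. tests/marks.py itself is not shown in this part of the patch, so the following is only a sketch of how such marks could be built on pytest.mark.skipif; the helper name _installed is hypothetical.

import importlib.util

import pytest


def _installed(module: str) -> bool:
    # True when the optional client library can be imported in this environment.
    return importlib.util.find_spec(module) is not None


# Hypothetical definitions: the real tests/marks.py may differ.
require_aiokafka = pytest.mark.skipif(not _installed("aiokafka"), reason="aiokafka is not installed")
require_aiopika = pytest.mark.skipif(not _installed("aio_pika"), reason="aio-pika is not installed")
require_confluent = pytest.mark.skipif(not _installed("confluent_kafka"), reason="confluent-kafka is not installed")
require_nats = pytest.mark.skipif(not _installed("nats"), reason="nats-py is not installed")
require_redis = pytest.mark.skipif(not _installed("redis"), reason="redis is not installed")

Combined with the package-level pytest.importorskip(...) guards added in the new __init__.py files, this keeps broker-specific doc tests runnable when only a subset of the optional dependencies is installed.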
diff --git a/tests/docs/getting_started/context/test_existed_context.py b/tests/docs/getting_started/context/test_existed_context.py index 5b13128849..e0b0eebafc 100644 --- a/tests/docs/getting_started/context/test_existed_context.py +++ b/tests/docs/getting_started/context/test_existed_context.py @@ -1,17 +1,21 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_existed_context_kafka(): from docs.docs_src.getting_started.context.kafka.existed_context import ( broker_object, ) + from faststream.kafka import TestKafkaBroker @broker_object.subscriber("response") async def resp(): ... @@ -24,10 +28,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_confluent async def test_existed_context_confluent(): from docs.docs_src.getting_started.context.confluent.existed_context import ( broker_object, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @broker_object.subscriber("response") async def resp(): ... @@ -40,10 +46,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_aiopika async def test_existed_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.existed_context import ( broker_object, ) + from faststream.rabbit import TestRabbitBroker @broker_object.subscriber("response") async def resp(): ... @@ -56,10 +64,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_nats async def test_existed_context_nats(): from docs.docs_src.getting_started.context.nats.existed_context import ( broker_object, ) + from faststream.nats import TestNatsBroker @broker_object.subscriber("response") async def resp(): ... @@ -72,10 +82,12 @@ async def resp(): ... @pytest.mark.asyncio() +@require_redis async def test_existed_context_redis(): from docs.docs_src.getting_started.context.redis.existed_context import ( broker_object, ) + from faststream.redis import TestRedisBroker @broker_object.subscriber("response") async def resp(): ... 
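Referring back to the faststream/redis/subscriber/factory.py file introduced near the start of this patch: the former AsyncAPISubscriber.create() staticmethod becomes a standalone create_subscriber() factory, and faststream/redis/testing.py now resolves fake subscribers via create_subscriber(**publisher.subscriber_property). A minimal usage sketch under that assumption follows; the argument values are illustrative, and only the call shape and signature come from the patch.

from faststream.redis.subscriber.factory import create_subscriber

# The factory validates the channel/list/stream combination and raises
# SetupError(INCORRECT_SETUP_MSG) if none of them resolves to a subscriber.
subscriber = create_subscriber(
    channel=None,
    list="logs",  # a string is validated into a ListSub; a non-batch list yields AsyncAPIListSubscriber
    stream=None,
)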
diff --git a/tests/docs/getting_started/context/test_fields_access.py b/tests/docs/getting_started/context/test_fields_access.py index ab8a47d65b..f584a86089 100644 --- a/tests/docs/getting_started/context/test_fields_access.py +++ b/tests/docs/getting_started/context/test_fields_access.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fields_access_kafka(): from docs.docs_src.getting_started.context.kafka.fields_access import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic", headers={"user": "John"}) @@ -21,11 +25,13 @@ async def test_fields_access_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_fields_access_confluent(): from docs.docs_src.getting_started.context.confluent.fields_access import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic", headers={"user": "John"}) @@ -34,11 +40,13 @@ async def test_fields_access_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_fields_access_rabbit(): from docs.docs_src.getting_started.context.rabbit.fields_access import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue", headers={"user": "John"}) @@ -47,11 +55,13 @@ async def test_fields_access_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_fields_access_nats(): from docs.docs_src.getting_started.context.nats.fields_access import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject", headers={"user": "John"}) @@ -60,11 +70,13 @@ async def test_fields_access_nats(): @pytest.mark.asyncio() +@require_redis async def test_fields_access_redis(): from docs.docs_src.getting_started.context.redis.fields_access import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel", headers={"user": "John"}) diff --git a/tests/docs/getting_started/context/test_initial.py b/tests/docs/getting_started/context/test_initial.py index b4a4a9dbbe..2ea77e2ebd 100644 --- a/tests/docs/getting_started/context/test_initial.py +++ b/tests/docs/getting_started/context/test_initial.py @@ -1,11 +1,19 @@ import pytest from faststream import context -from tests.marks import python39 +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() @python39 +@require_aiokafka async def test_kafka(): from docs.docs_src.getting_started.context.kafka.initial import broker from faststream.kafka import TestKafkaBroker @@ -20,6 +28,7 @@ async def test_kafka(): @pytest.mark.asyncio() @python39 +@require_confluent async def test_confluent(): from docs.docs_src.getting_started.context.confluent.initial import broker from faststream.confluent import TestKafkaBroker @@ -34,6 
+43,7 @@ async def test_confluent(): @pytest.mark.asyncio() @python39 +@require_aiopika async def test_rabbit(): from docs.docs_src.getting_started.context.rabbit.initial import broker from faststream.rabbit import TestRabbitBroker @@ -48,6 +58,7 @@ async def test_rabbit(): @pytest.mark.asyncio() @python39 +@require_nats async def test_nats(): from docs.docs_src.getting_started.context.nats.initial import broker from faststream.nats import TestNatsBroker @@ -62,6 +73,7 @@ async def test_nats(): @pytest.mark.asyncio() @python39 +@require_redis async def test_redis(): from docs.docs_src.getting_started.context.redis.initial import broker from faststream.redis import TestRedisBroker diff --git a/tests/docs/getting_started/context/test_manual_local_context.py b/tests/docs/getting_started/context/test_manual_local_context.py index 9442d1166c..f7829575b9 100644 --- a/tests/docs/getting_started/context/test_manual_local_context.py +++ b/tests/docs/getting_started/context/test_manual_local_context.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_manual_local_context_kafka(): from docs.docs_src.getting_started.context.kafka.manual_local_context import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -21,11 +25,13 @@ async def test_manual_local_context_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_manual_local_context_confluent(): from docs.docs_src.getting_started.context.confluent.manual_local_context import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("Hi!", "test-topic") @@ -34,11 +40,13 @@ async def test_manual_local_context_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_manual_local_context_rabbit(): from docs.docs_src.getting_started.context.rabbit.manual_local_context import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test-queue") @@ -47,11 +55,13 @@ async def test_manual_local_context_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_manual_local_context_nats(): from docs.docs_src.getting_started.context.nats.manual_local_context import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("Hi!", "test-subject") @@ -60,11 +70,13 @@ async def test_manual_local_context_nats(): @pytest.mark.asyncio() +@require_redis async def test_manual_local_context_redis(): from docs.docs_src.getting_started.context.redis.manual_local_context import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("Hi!", "test-channel") diff --git a/tests/docs/getting_started/context/test_nested.py b/tests/docs/getting_started/context/test_nested.py index 11e6932084..9cf13d0252 100644 --- a/tests/docs/getting_started/context/test_nested.py +++ b/tests/docs/getting_started/context/test_nested.py 
@@ -1,11 +1,13 @@ import pytest -from faststream.rabbit import TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test(): from docs.docs_src.getting_started.context.nested import broker, handler + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("Hi!", "test") diff --git a/tests/docs/getting_started/dependencies/basic/test_depends.py b/tests/docs/getting_started/dependencies/basic/test_depends.py index 724fab875c..0c46d6a967 100644 --- a/tests/docs/getting_started/dependencies/basic/test_depends.py +++ b/tests/docs/getting_started/dependencies/basic/test_depends.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_depends_kafka(): from docs.docs_src.getting_started.dependencies.basic.kafka.depends import ( broker, handler, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker): await broker.publish({}, "test") @@ -20,11 +24,13 @@ async def test_depends_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_depends_confluent(): from docs.docs_src.getting_started.dependencies.basic.confluent.depends import ( broker, handler, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker): await broker.publish({}, "test") @@ -32,11 +38,13 @@ async def test_depends_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_depends_rabbit(): from docs.docs_src.getting_started.dependencies.basic.rabbit.depends import ( broker, handler, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker): await broker.publish({}, "test") @@ -44,11 +52,13 @@ async def test_depends_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_depends_nats(): from docs.docs_src.getting_started.dependencies.basic.nats.depends import ( broker, handler, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker): await broker.publish({}, "test") @@ -56,11 +66,13 @@ async def test_depends_nats(): @pytest.mark.asyncio() +@require_redis async def test_depends_redis(): from docs.docs_src.getting_started.dependencies.basic.redis.depends import ( broker, handler, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker): await broker.publish({}, "test") diff --git a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py index 51578c24a9..e2ffabd2f1 100644 --- a/tests/docs/getting_started/dependencies/basic/test_nested_depends.py +++ b/tests/docs/getting_started/dependencies/basic/test_nested_depends.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) 
@pytest.mark.asyncio() +@require_aiokafka async def test_nested_depends_kafka(): from docs.docs_src.getting_started.dependencies.basic.kafka.nested_depends import ( broker, handler, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker): await broker.publish({}, "test") @@ -20,11 +24,13 @@ async def test_nested_depends_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_nested_depends_confluent(): from docs.docs_src.getting_started.dependencies.basic.confluent.nested_depends import ( broker, handler, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker): await broker.publish({}, "test") @@ -32,11 +38,13 @@ async def test_nested_depends_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_nested_depends_rabbit(): from docs.docs_src.getting_started.dependencies.basic.rabbit.nested_depends import ( broker, handler, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker): await broker.publish({}, "test") @@ -44,11 +52,13 @@ async def test_nested_depends_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_nested_depends_nats(): from docs.docs_src.getting_started.dependencies.basic.nats.nested_depends import ( broker, handler, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker): await broker.publish({}, "test") @@ -56,11 +66,13 @@ async def test_nested_depends_nats(): @pytest.mark.asyncio() +@require_redis async def test_nested_depends_redis(): from docs.docs_src.getting_started.dependencies.basic.redis.nested_depends import ( broker, handler, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker): await broker.publish({}, "test") diff --git a/tests/docs/getting_started/dependencies/test_basic.py b/tests/docs/getting_started/dependencies/test_basic.py index 81ff933677..a1daf5538e 100644 --- a/tests/docs/getting_started/dependencies/test_basic.py +++ b/tests/docs/getting_started/dependencies/test_basic.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_basic_kafka(): from docs.docs_src.getting_started.dependencies.basic_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_class.py b/tests/docs/getting_started/dependencies/test_class.py index 1ee3207650..a9e3e0517a 100644 --- a/tests/docs/getting_started/dependencies/test_class.py +++ b/tests/docs/getting_started/dependencies/test_class.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_basic_kafka(): from docs.docs_src.getting_started.dependencies.class_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_global.py b/tests/docs/getting_started/dependencies/test_global.py index 05e10097ce..8da64084b1 100644 --- a/tests/docs/getting_started/dependencies/test_global.py +++ b/tests/docs/getting_started/dependencies/test_global.py @@ -1,16 +1,18 @@ import pytest 
from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_global_kafka(): from docs.docs_src.getting_started.dependencies.global_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_global_broker.py b/tests/docs/getting_started/dependencies/test_global_broker.py index 406e359cf1..c944070513 100644 --- a/tests/docs/getting_started/dependencies/test_global_broker.py +++ b/tests/docs/getting_started/dependencies/test_global_broker.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_global_broker_kafka(): from docs.docs_src.getting_started.dependencies.global_broker_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_sub_dep.py b/tests/docs/getting_started/dependencies/test_sub_dep.py index d3fc3b591a..8847ab9064 100644 --- a/tests/docs/getting_started/dependencies/test_sub_dep.py +++ b/tests/docs/getting_started/dependencies/test_sub_dep.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_sub_dep_kafka(): from docs.docs_src.getting_started.dependencies.sub_dep_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with( diff --git a/tests/docs/getting_started/dependencies/test_yield.py b/tests/docs/getting_started/dependencies/test_yield.py index 5ca173d924..84886692d6 100644 --- a/tests/docs/getting_started/dependencies/test_yield.py +++ b/tests/docs/getting_started/dependencies/test_yield.py @@ -1,16 +1,18 @@ import pytest from faststream import TestApp -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_yield_kafka(): from docs.docs_src.getting_started.dependencies.yield_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/index/test_basic.py b/tests/docs/getting_started/index/test_basic.py index 945ae94971..5341408c8d 100644 --- a/tests/docs/getting_started/index/test_basic.py +++ b/tests/docs/getting_started/index/test_basic.py @@ -1,15 +1,19 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_quickstart_index_kafka(): from docs.docs_src.getting_started.index.base_kafka import base_handler, broker + from faststream.kafka import TestKafkaBroker async with 
TestKafkaBroker(broker) as br: await br.publish("", "test") @@ -18,8 +22,10 @@ async def test_quickstart_index_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_quickstart_index_confluent(): from docs.docs_src.getting_started.index.base_confluent import base_handler, broker + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish("", "test") @@ -28,8 +34,10 @@ async def test_quickstart_index_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_quickstart_index_rabbit(): from docs.docs_src.getting_started.index.base_rabbit import base_handler, broker + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish("", "test") @@ -38,8 +46,10 @@ async def test_quickstart_index_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_quickstart_index_nats(): from docs.docs_src.getting_started.index.base_nats import base_handler, broker + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish("", "test") @@ -48,8 +58,10 @@ async def test_quickstart_index_nats(): @pytest.mark.asyncio() +@require_redis async def test_quickstart_index_redis(): from docs.docs_src.getting_started.index.base_redis import base_handler, broker + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish("", "test") diff --git a/tests/docs/getting_started/lifespan/test_basic.py b/tests/docs/getting_started/lifespan/test_basic.py index 21ba14a72e..82f0227e59 100644 --- a/tests/docs/getting_started/lifespan/test_basic.py +++ b/tests/docs/getting_started/lifespan/test_basic.py @@ -1,18 +1,23 @@ import pytest from faststream import TestApp, context -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker -from tests.marks import pydantic_v2 +from tests.marks import ( + pydantic_v2, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) from tests.mocks import mock_pydantic_settings_env @pydantic_v2 @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_basic_lifespan(): + from faststream.rabbit import TestRabbitBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.rabbit.basic import app, broker @@ -22,7 +27,10 @@ async def test_rabbit_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_basic_lifespan(): + from faststream.kafka import TestKafkaBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.kafka.basic import app, broker @@ -32,7 +40,10 @@ async def test_kafka_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_confluent async def test_confluent_basic_lifespan(): + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.confluent.basic import app, broker @@ -42,7 +53,10 @@ async def test_confluent_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_nats async def test_nats_basic_lifespan(): + from faststream.nats import TestNatsBroker + with mock_pydantic_settings_env({"host": "localhost"}): from 
docs.docs_src.getting_started.lifespan.nats.basic import app, broker @@ -52,7 +66,10 @@ async def test_nats_basic_lifespan(): @pydantic_v2 @pytest.mark.asyncio() +@require_redis async def test_redis_basic_lifespan(): + from faststream.redis import TestRedisBroker + with mock_pydantic_settings_env({"host": "localhost"}): from docs.docs_src.getting_started.lifespan.redis.basic import app, broker diff --git a/tests/docs/getting_started/lifespan/test_ml.py b/tests/docs/getting_started/lifespan/test_ml.py index 8c4502c856..f63d6bbed7 100644 --- a/tests/docs/getting_started/lifespan/test_ml.py +++ b/tests/docs/getting_started/lifespan/test_ml.py @@ -1,16 +1,20 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_ml_lifespan(): from docs.docs_src.getting_started.lifespan.rabbit.ml import app, broker, predict + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -19,8 +23,10 @@ async def test_rabbit_ml_lifespan(): @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_ml_lifespan(): from docs.docs_src.getting_started.lifespan.kafka.ml import app, broker, predict + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -29,8 +35,10 @@ async def test_kafka_ml_lifespan(): @pytest.mark.asyncio() +@require_confluent async def test_confluent_ml_lifespan(): from docs.docs_src.getting_started.lifespan.confluent.ml import app, broker, predict + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -39,8 +47,10 @@ async def test_confluent_ml_lifespan(): @pytest.mark.asyncio() +@require_nats async def test_nats_ml_lifespan(): from docs.docs_src.getting_started.lifespan.nats.ml import app, broker, predict + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -49,8 +59,10 @@ async def test_nats_ml_lifespan(): @pytest.mark.asyncio() +@require_redis async def test_redis_ml_lifespan(): from docs.docs_src.getting_started.lifespan.redis.ml import app, broker, predict + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) diff --git a/tests/docs/getting_started/lifespan/test_ml_context.py b/tests/docs/getting_started/lifespan/test_ml_context.py index 612fcd2c91..dc7cb57d6a 100644 --- a/tests/docs/getting_started/lifespan/test_ml_context.py +++ b/tests/docs/getting_started/lifespan/test_ml_context.py @@ -1,20 +1,24 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import 
TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_ml_lifespan(): from docs.docs_src.getting_started.lifespan.rabbit.ml_context import ( app, broker, predict, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -23,12 +27,14 @@ async def test_rabbit_ml_lifespan(): @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_ml_lifespan(): from docs.docs_src.getting_started.lifespan.kafka.ml_context import ( app, broker, predict, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -37,12 +43,14 @@ async def test_kafka_ml_lifespan(): @pytest.mark.asyncio() +@require_confluent async def test_confluent_ml_lifespan(): from docs.docs_src.getting_started.lifespan.confluent.ml_context import ( app, broker, predict, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -51,12 +59,14 @@ async def test_confluent_ml_lifespan(): @pytest.mark.asyncio() +@require_nats async def test_nats_ml_lifespan(): from docs.docs_src.getting_started.lifespan.nats.ml_context import ( app, broker, predict, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) @@ -65,12 +75,14 @@ async def test_nats_ml_lifespan(): @pytest.mark.asyncio() +@require_redis async def test_redis_ml_lifespan(): from docs.docs_src.getting_started.lifespan.redis.ml_context import ( app, broker, predict, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): assert {"result": 42.0} == await broker.publish(1.0, "test", rpc=True) diff --git a/tests/docs/getting_started/lifespan/test_testing.py b/tests/docs/getting_started/lifespan/test_testing.py index cfe19fca37..42dbf0dd58 100644 --- a/tests/docs/getting_started/lifespan/test_testing.py +++ b/tests/docs/getting_started/lifespan/test_testing.py @@ -1,22 +1,65 @@ -from docs.docs_src.getting_started.lifespan.confluent.testing import ( - test_lifespan as _test_lifespan_confluent, -) -from docs.docs_src.getting_started.lifespan.kafka.testing import ( - test_lifespan as _test_lifespan_k, -) -from docs.docs_src.getting_started.lifespan.nats.testing import ( - test_lifespan as _test_lifespan_n, -) -from docs.docs_src.getting_started.lifespan.rabbit.testing import ( - test_lifespan as _test_lifespan_r, -) -from docs.docs_src.getting_started.lifespan.redis.testing import ( - test_lifespan as _test_lifespan_red, +import pytest + +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) -from tests.marks import python39 -test_lifespan_red = python39(_test_lifespan_red) -test_lifespan_r = python39(_test_lifespan_r) -test_lifespan_n = python39(_test_lifespan_n) -test_lifespan_k = python39(_test_lifespan_k) -test_lifespan_confluent = python39(_test_lifespan_confluent) + +@pytest.mark.asyncio() +@python39 +@require_redis +async def test_lifespan_redis(): + from 
docs.docs_src.getting_started.lifespan.redis.testing import ( + test_lifespan as _test_lifespan_red, + ) + + await _test_lifespan_red() + + +@pytest.mark.asyncio() +@python39 +@require_confluent +async def test_lifespan_confluent(): + from docs.docs_src.getting_started.lifespan.confluent.testing import ( + test_lifespan as _test_lifespan_confluent, + ) + + await _test_lifespan_confluent() + + +@pytest.mark.asyncio() +@python39 +@require_aiokafka +async def test_lifespan_kafka(): + from docs.docs_src.getting_started.lifespan.kafka.testing import ( + test_lifespan as _test_lifespan_k, + ) + + await _test_lifespan_k() + + +@pytest.mark.asyncio() +@python39 +@require_aiopika +async def test_lifespan_rabbit(): + from docs.docs_src.getting_started.lifespan.rabbit.testing import ( + test_lifespan as _test_lifespan_r, + ) + + await _test_lifespan_r() + + +@pytest.mark.asyncio() +@python39 +@require_nats +async def test_lifespan_nats(): + from docs.docs_src.getting_started.lifespan.nats.testing import ( + test_lifespan as _test_lifespan_n, + ) + + await _test_lifespan_n() diff --git a/tests/docs/getting_started/publishing/test_broker.py b/tests/docs/getting_started/publishing/test_broker.py index 2b28895b92..794564df71 100644 --- a/tests/docs/getting_started/publishing/test_broker.py +++ b/tests/docs/getting_started/publishing/test_broker.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_broker_kafka(): from docs.docs_src.getting_started.publishing.kafka.broker import ( app, @@ -16,6 +19,7 @@ async def test_broker_kafka(): handle, handle_next, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -23,6 +27,7 @@ async def test_broker_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_broker_confluent(): from docs.docs_src.getting_started.publishing.confluent.broker import ( app, @@ -30,6 +35,7 @@ async def test_broker_confluent(): handle, handle_next, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -37,6 +43,7 @@ async def test_broker_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_broker_rabbit(): from docs.docs_src.getting_started.publishing.rabbit.broker import ( app, @@ -44,6 +51,7 @@ async def test_broker_rabbit(): handle, handle_next, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -51,6 +59,7 @@ async def test_broker_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_broker_nats(): from docs.docs_src.getting_started.publishing.nats.broker import ( app, @@ -58,6 +67,7 @@ async def test_broker_nats(): handle, handle_next, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -65,6 +75,7 @@ async def test_broker_nats(): @pytest.mark.asyncio() +@require_redis async def test_broker_redis(): from 
docs.docs_src.getting_started.publishing.redis.broker import ( app, @@ -72,6 +83,7 @@ async def test_broker_redis(): handle, handle_next, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/publishing/test_broker_context.py b/tests/docs/getting_started/publishing/test_broker_context.py index 60deb460e9..aa8d0f194b 100644 --- a/tests/docs/getting_started/publishing/test_broker_context.py +++ b/tests/docs/getting_started/publishing/test_broker_context.py @@ -1,21 +1,25 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() @pytest.mark.kafka() +@require_aiokafka async def test_broker_context_kafka(): from docs.docs_src.getting_started.publishing.kafka.broker_context import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) @@ -24,12 +28,14 @@ async def test_broker_context_kafka(): @pytest.mark.asyncio() @pytest.mark.confluent() +@require_confluent async def test_broker_context_confluent(): from docs.docs_src.getting_started.publishing.confluent.broker_context import ( app, broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker, with_real=True), TestApp(app): await handle.wait_call(5) @@ -38,12 +44,14 @@ async def test_broker_context_confluent(): @pytest.mark.asyncio() @pytest.mark.nats() +@require_nats async def test_broker_context_nats(): from docs.docs_src.getting_started.publishing.nats.broker_context import ( app, broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) @@ -52,12 +60,14 @@ async def test_broker_context_nats(): @pytest.mark.asyncio() @pytest.mark.rabbit() +@require_aiopika async def test_broker_context_rabbit(): from docs.docs_src.getting_started.publishing.rabbit.broker_context import ( app, broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) @@ -66,12 +76,14 @@ async def test_broker_context_rabbit(): @pytest.mark.asyncio() @pytest.mark.redis() +@require_redis async def test_broker_context_redis(): from docs.docs_src.getting_started.publishing.redis.broker_context import ( app, broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker, with_real=True), TestApp(app): await handle.wait_call(3) diff --git a/tests/docs/getting_started/publishing/test_decorator.py b/tests/docs/getting_started/publishing/test_decorator.py index e4ad61195c..66e860e234 100644 --- a/tests/docs/getting_started/publishing/test_decorator.py +++ b/tests/docs/getting_started/publishing/test_decorator.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from 
faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_decorator_kafka(): from docs.docs_src.getting_started.publishing.kafka.decorator import ( app, @@ -16,6 +19,7 @@ async def test_decorator_kafka(): handle, handle_next, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -24,6 +28,7 @@ async def test_decorator_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_decorator_confluent(): from docs.docs_src.getting_started.publishing.confluent.decorator import ( app, @@ -31,6 +36,7 @@ async def test_decorator_confluent(): handle, handle_next, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -39,6 +45,7 @@ async def test_decorator_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_decorator_rabbit(): from docs.docs_src.getting_started.publishing.rabbit.decorator import ( app, @@ -46,6 +53,7 @@ async def test_decorator_rabbit(): handle, handle_next, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -54,6 +62,7 @@ async def test_decorator_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_decorator_nats(): from docs.docs_src.getting_started.publishing.nats.decorator import ( app, @@ -61,6 +70,7 @@ async def test_decorator_nats(): handle, handle_next, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @@ -69,6 +79,7 @@ async def test_decorator_nats(): @pytest.mark.asyncio() +@require_redis async def test_decorator_redis(): from docs.docs_src.getting_started.publishing.redis.decorator import ( app, @@ -76,6 +87,7 @@ async def test_decorator_redis(): handle, handle_next, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/publishing/test_direct.py b/tests/docs/getting_started/publishing/test_direct.py index 535a52c1c8..1bbb8e142a 100644 --- a/tests/docs/getting_started/publishing/test_direct.py +++ b/tests/docs/getting_started/publishing/test_direct.py @@ -1,23 +1,59 @@ -from docs.docs_src.getting_started.publishing.confluent.direct_testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.publishing.kafka.direct_testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.publishing.nats.direct_testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.publishing.rabbit.direct_testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.publishing.redis.direct_testing import ( - test_handle as test_handle_red, -) +import pytest -__all__ = ( - "test_handle_r", - "test_handle_k", - "test_handle_n", - "test_handle_red", - "test_handle_confluent", +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from 
docs.docs_src.getting_started.publishing.kafka.direct_testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.publishing.confluent.direct_testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.publishing.rabbit.direct_testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.publishing.nats.direct_testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.publishing.redis.direct_testing import ( + test_handle as test_handle_red, + ) + + await test_handle_red() diff --git a/tests/docs/getting_started/publishing/test_object.py b/tests/docs/getting_started/publishing/test_object.py index d2bdc6b07c..d270cac965 100644 --- a/tests/docs/getting_started/publishing/test_object.py +++ b/tests/docs/getting_started/publishing/test_object.py @@ -1,23 +1,59 @@ -from docs.docs_src.getting_started.publishing.confluent.object_testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.publishing.kafka.object_testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.publishing.nats.object_testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.publishing.rabbit.object_testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.publishing.redis.object_testing import ( - test_handle as test_handle_red, -) +import pytest -__all__ = ( - "test_handle_k", - "test_handle_r", - "test_handle_n", - "test_handle_red", - "test_handle_confluent", +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.publishing.kafka.object_testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.publishing.confluent.object_testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.publishing.rabbit.object_testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.publishing.nats.object_testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.publishing.redis.object_testing import ( + test_handle as test_handle_red, + ) + + await test_handle_red() diff --git a/tests/docs/getting_started/routers/test_base.py b/tests/docs/getting_started/routers/test_base.py index e23e788d0e..24004e71e0 100644 --- a/tests/docs/getting_started/routers/test_base.py +++ b/tests/docs/getting_started/routers/test_base.py @@ -1,14 +1,17 @@ import pytest 
from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_base_router_kafka(): from docs.docs_src.getting_started.routers.kafka.router import ( app, @@ -16,6 +19,7 @@ async def test_base_router_kafka(): handle, handle_response, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -23,6 +27,7 @@ async def test_base_router_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_base_router_confluent(): from docs.docs_src.getting_started.routers.confluent.router import ( app, @@ -30,6 +35,7 @@ async def test_base_router_confluent(): handle, handle_response, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -37,6 +43,7 @@ async def test_base_router_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_base_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.router import ( app, @@ -44,6 +51,7 @@ async def test_base_router_rabbit(): handle, handle_response, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -51,6 +59,7 @@ async def test_base_router_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_base_router_nats(): from docs.docs_src.getting_started.routers.nats.router import ( app, @@ -58,6 +67,7 @@ async def test_base_router_nats(): handle, handle_response, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -65,6 +75,7 @@ async def test_base_router_nats(): @pytest.mark.asyncio() +@require_redis async def test_base_router_redis(): from docs.docs_src.getting_started.routers.redis.router import ( app, @@ -72,6 +83,7 @@ async def test_base_router_redis(): handle, handle_response, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) diff --git a/tests/docs/getting_started/routers/test_delay.py b/tests/docs/getting_started/routers/test_delay.py index 171aaaef4c..a951584837 100644 --- a/tests/docs/getting_started/routers/test_delay.py +++ b/tests/docs/getting_started/routers/test_delay.py @@ -1,19 +1,23 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_delay_router_kafka(): from docs.docs_src.getting_started.routers.kafka.router_delay import ( app, broker, ) + from faststream.kafka import 
TestKafkaBroker async with TestKafkaBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -24,11 +28,13 @@ async def test_delay_router_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_delay_router_confluent(): from docs.docs_src.getting_started.routers.confluent.router_delay import ( app, broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -39,11 +45,13 @@ async def test_delay_router_confluent(): @pytest.mark.asyncio() +@require_aiopika async def test_delay_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.router_delay import ( app, broker, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -54,11 +62,13 @@ async def test_delay_router_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_delay_router_nats(): from docs.docs_src.getting_started.routers.nats.router_delay import ( app, broker, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ @@ -69,11 +79,13 @@ async def test_delay_router_nats(): @pytest.mark.asyncio() +@require_redis async def test_delay_router_redis(): from docs.docs_src.getting_started.routers.redis.router_delay import ( app, broker, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br, TestApp(app): next(iter(br._subscribers.values())).calls[ diff --git a/tests/docs/getting_started/routers/test_delay_equal.py b/tests/docs/getting_started/routers/test_delay_equal.py index b0337d5cce..9f68b9edc5 100644 --- a/tests/docs/getting_started/routers/test_delay_equal.py +++ b/tests/docs/getting_started/routers/test_delay_equal.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_delay_router_kafka(): from docs.docs_src.getting_started.routers.kafka.delay_equal import ( app, @@ -17,6 +20,7 @@ async def test_delay_router_kafka(): from docs.docs_src.getting_started.routers.kafka.router_delay import ( broker as control_broker, ) + from faststream.kafka import TestKafkaBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -30,6 +34,7 @@ async def test_delay_router_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_delay_router_confluent(): from docs.docs_src.getting_started.routers.confluent.delay_equal import ( app, @@ -38,6 +43,7 @@ async def test_delay_router_confluent(): from docs.docs_src.getting_started.routers.confluent.router_delay import ( broker as control_broker, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -51,6 +57,7 @@ async def test_delay_router_confluent(): @pytest.mark.asyncio() 
+@require_aiopika async def test_delay_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.delay_equal import ( app, @@ -59,6 +66,7 @@ async def test_delay_router_rabbit(): from docs.docs_src.getting_started.routers.rabbit.router_delay import ( broker as control_broker, ) + from faststream.rabbit import TestRabbitBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -72,6 +80,7 @@ async def test_delay_router_rabbit(): @pytest.mark.asyncio() +@require_nats async def test_delay_router_nats(): from docs.docs_src.getting_started.routers.nats.delay_equal import ( app, @@ -80,6 +89,7 @@ async def test_delay_router_nats(): from docs.docs_src.getting_started.routers.nats.router_delay import ( broker as control_broker, ) + from faststream.nats import TestNatsBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() @@ -93,6 +103,7 @@ async def test_delay_router_nats(): @pytest.mark.asyncio() +@require_redis async def test_delay_router_redis(): from docs.docs_src.getting_started.routers.redis.delay_equal import ( app, @@ -101,6 +112,7 @@ async def test_delay_router_redis(): from docs.docs_src.getting_started.routers.redis.router_delay import ( broker as control_broker, ) + from faststream.redis import TestRedisBroker assert broker._subscribers.keys() == control_broker._subscribers.keys() assert broker._publishers.keys() == control_broker._publishers.keys() diff --git a/tests/docs/getting_started/serialization/test_parser.py b/tests/docs/getting_started/serialization/test_parser.py index 371558cc41..5285d7ee9b 100644 --- a/tests/docs/getting_started/serialization/test_parser.py +++ b/tests/docs/getting_started/serialization/test_parser.py @@ -1,68 +1,80 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_nats async def test_parser_nats(): from docs.docs_src.getting_started.serialization.parser_nats import ( app, broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_aiokafka async def test_parser_kafka(): from docs.docs_src.getting_started.serialization.parser_kafka import ( app, broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_confluent async def test_parser_confluent(): from docs.docs_src.getting_started.serialization.parser_confluent import ( app, broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_aiopika async def test_parser_rabbit(): from docs.docs_src.getting_started.serialization.parser_rabbit import ( app, broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): 
handle.mock.assert_called_once_with("") @pytest.mark.asyncio() +@require_redis async def test_parser_redis(): from docs.docs_src.getting_started.serialization.parser_redis import ( app, broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with("") diff --git a/tests/docs/getting_started/subscription/test_annotated.py b/tests/docs/getting_started/subscription/test_annotated.py index 0511ccf3b3..21c1ed758d 100644 --- a/tests/docs/getting_started/subscription/test_annotated.py +++ b/tests/docs/getting_started/subscription/test_annotated.py @@ -1,96 +1,98 @@ import pytest from pydantic import ValidationError -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker -from tests.marks import python39 +from tests.marks import ( + python39, + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() @python39 class BaseCase: async def test_handle(self, setup): - broker, handle = setup + broker, handle, test_class = setup - async with self.test_class(broker) as br: + async with test_class(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test") handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) assert handle.mock is None async def test_validation_error(self, setup): - broker, handle = setup + broker, handle, test_class = setup - async with self.test_class(broker) as br: + async with test_class(broker) as br: with pytest.raises(ValidationError): await br.publish("wrong message", "test") handle.mock.assert_called_once_with("wrong message") +@require_aiokafka class TestKafka(BaseCase): - test_class = TestKafkaBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.kafka.pydantic_annotated_fields import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker - return (broker, handle) + return (broker, handle, TestKafkaBroker) +@require_confluent class TestConfluent(BaseCase): - test_class = TestConfluentKafkaBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.confluent.pydantic_annotated_fields import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker - return (broker, handle) + return (broker, handle, TestConfluentKafkaBroker) +@require_aiopika class TestRabbit(BaseCase): - test_class = TestRabbitBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.rabbit.pydantic_annotated_fields import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker - return (broker, handle) + return (broker, handle, TestRabbitBroker) +@require_nats class TestNats(BaseCase): - test_class = TestNatsBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.nats.pydantic_annotated_fields import ( broker, handle, ) + from faststream.nats import TestNatsBroker - return (broker, handle) + return (broker, handle, TestNatsBroker) +@require_redis class TestRedis(BaseCase): - test_class = TestRedisBroker - @pytest.fixture(scope="class") def setup(self): from docs.docs_src.getting_started.subscription.redis.pydantic_annotated_fields import ( broker, handle, ) + from faststream.redis 
import TestRedisBroker - return (broker, handle) + return (broker, handle, TestRedisBroker) diff --git a/tests/docs/getting_started/subscription/test_filter.py b/tests/docs/getting_started/subscription/test_filter.py index d7c5475af4..1cd9588f55 100644 --- a/tests/docs/getting_started/subscription/test_filter.py +++ b/tests/docs/getting_started/subscription/test_filter.py @@ -1,14 +1,17 @@ import pytest from faststream import TestApp -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_kafka_filtering(): from docs.docs_src.getting_started.subscription.kafka.filter import ( app, @@ -16,6 +19,7 @@ async def test_kafka_filtering(): default_handler, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -23,6 +27,7 @@ async def test_kafka_filtering(): @pytest.mark.asyncio() +@require_confluent async def test_confluent_filtering(): from docs.docs_src.getting_started.subscription.confluent.filter import ( app, @@ -30,6 +35,7 @@ async def test_confluent_filtering(): default_handler, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -37,6 +43,7 @@ async def test_confluent_filtering(): @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_filtering(): from docs.docs_src.getting_started.subscription.rabbit.filter import ( app, @@ -44,6 +51,7 @@ async def test_rabbit_filtering(): default_handler, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -51,6 +59,7 @@ async def test_rabbit_filtering(): @pytest.mark.asyncio() +@require_nats async def test_nats_filtering(): from docs.docs_src.getting_started.subscription.nats.filter import ( app, @@ -58,6 +67,7 @@ async def test_nats_filtering(): default_handler, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) @@ -65,6 +75,7 @@ async def test_nats_filtering(): @pytest.mark.asyncio() +@require_redis async def test_redis_filtering(): from docs.docs_src.getting_started.subscription.redis.filter import ( app, @@ -72,6 +83,7 @@ async def test_redis_filtering(): default_handler, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker), TestApp(app): handle.mock.assert_called_once_with({"name": "John", "user_id": 1}) diff --git a/tests/docs/getting_started/subscription/test_pydantic.py b/tests/docs/getting_started/subscription/test_pydantic.py index c17a4bc4c4..51344f3ffe 100644 --- a/tests/docs/getting_started/subscription/test_pydantic.py +++ b/tests/docs/getting_started/subscription/test_pydantic.py @@ -1,18 +1,22 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from 
faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiopika async def test_pydantic_model_rabbit(): from docs.docs_src.getting_started.subscription.rabbit.pydantic_model import ( broker, handle, ) + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-queue") @@ -20,11 +24,13 @@ async def test_pydantic_model_rabbit(): @pytest.mark.asyncio() +@require_aiokafka async def test_pydantic_model_kafka(): from docs.docs_src.getting_started.subscription.kafka.pydantic_model import ( broker, handle, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-topic") @@ -32,11 +38,13 @@ async def test_pydantic_model_kafka(): @pytest.mark.asyncio() +@require_confluent async def test_pydantic_model_confluent(): from docs.docs_src.getting_started.subscription.confluent.pydantic_model import ( broker, handle, ) + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-topic") @@ -44,11 +52,13 @@ async def test_pydantic_model_confluent(): @pytest.mark.asyncio() +@require_nats async def test_pydantic_model_nats(): from docs.docs_src.getting_started.subscription.nats.pydantic_model import ( broker, handle, ) + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-subject") @@ -56,11 +66,13 @@ async def test_pydantic_model_nats(): @pytest.mark.asyncio() +@require_redis async def test_pydantic_model_redis(): from docs.docs_src.getting_started.subscription.redis.pydantic_model import ( broker, handle, ) + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish({"name": "John", "user_id": 1}, "test-channel") diff --git a/tests/docs/getting_started/subscription/test_real.py b/tests/docs/getting_started/subscription/test_real.py index 415bde53c2..74a93869f7 100644 --- a/tests/docs/getting_started/subscription/test_real.py +++ b/tests/docs/getting_started/subscription/test_real.py @@ -1,47 +1,119 @@ import pytest -from docs.docs_src.getting_started.subscription.confluent.real_testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.subscription.confluent.real_testing import ( - test_validation_error as test_validation_error_confluent, -) -from docs.docs_src.getting_started.subscription.kafka.real_testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.subscription.kafka.real_testing import ( - test_validation_error as test_validation_error_k, -) -from docs.docs_src.getting_started.subscription.nats.real_testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.subscription.nats.real_testing import ( - test_validation_error as test_validation_error_n, -) -from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( - test_validation_error as test_validation_error_r, -) -from docs.docs_src.getting_started.subscription.redis.real_testing import ( - test_handle as 
test_handle_red, -) -from docs.docs_src.getting_started.subscription.redis.real_testing import ( - test_validation_error as test_validation_error_red, +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) -pytest.mark.kafka(test_handle_k) -pytest.mark.kafka(test_validation_error_k) -pytest.mark.confluent(test_handle_confluent) -pytest.mark.confluent(test_validation_error_confluent) +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.subscription.kafka.real_testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_validate_kafka(): + from docs.docs_src.getting_started.subscription.kafka.real_testing import ( + test_validation_error as test_validation_error_k, + ) + + await test_validation_error_k() + + +@pytest.mark.confluent() +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.subscription.confluent.real_testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.confluent() +@require_confluent +async def test_validate_confluent(): + from docs.docs_src.getting_started.subscription.confluent.real_testing import ( + test_validation_error as test_validation_error_confluent, + ) + + await test_validation_error_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_validate_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.real_testing import ( + test_validation_error as test_validation_error_r, + ) + + await test_validation_error_r() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.subscription.nats.real_testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_validate_nats(): + from docs.docs_src.getting_started.subscription.nats.real_testing import ( + test_validation_error as test_validation_error_n, + ) + + await test_validation_error_n() + + +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.subscription.redis.real_testing import ( + test_handle as test_handle_red, + ) + + await test_handle_red() -pytest.mark.rabbit(test_handle_r) -pytest.mark.rabbit(test_validation_error_r) -pytest.mark.nats(test_handle_n) -pytest.mark.nats(test_validation_error_n) +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_validate_redis(): + from docs.docs_src.getting_started.subscription.redis.real_testing import ( + test_validation_error as test_validation_error_red, + ) -pytest.mark.redis(test_handle_red) -pytest.mark.redis(test_validation_error_red) + await test_validation_error_red() diff --git a/tests/docs/getting_started/subscription/test_testing.py b/tests/docs/getting_started/subscription/test_testing.py index a1f87099af..5f46d5561e 100644 --- a/tests/docs/getting_started/subscription/test_testing.py +++ 
b/tests/docs/getting_started/subscription/test_testing.py @@ -1,43 +1,119 @@ -from docs.docs_src.getting_started.subscription.confluent.testing import ( - test_handle as test_handle_confluent, -) -from docs.docs_src.getting_started.subscription.confluent.testing import ( - test_validation_error as test_validation_error_confluent, -) -from docs.docs_src.getting_started.subscription.kafka.testing import ( - test_handle as test_handle_k, -) -from docs.docs_src.getting_started.subscription.kafka.testing import ( - test_validation_error as test_validation_error_k, -) -from docs.docs_src.getting_started.subscription.nats.testing import ( - test_handle as test_handle_n, -) -from docs.docs_src.getting_started.subscription.nats.testing import ( - test_validation_error as test_validation_error_n, -) -from docs.docs_src.getting_started.subscription.rabbit.testing import ( - test_handle as test_handle_r, -) -from docs.docs_src.getting_started.subscription.rabbit.testing import ( - test_validation_error as test_validation_error_r, -) -from docs.docs_src.getting_started.subscription.redis.testing import ( - test_handle as test_handle_rd, -) -from docs.docs_src.getting_started.subscription.redis.testing import ( - test_validation_error as test_validation_error_rd, -) +import pytest -__all__ = ( - "test_handle_r", - "test_validation_error_r", - "test_handle_rd", - "test_validation_error_rd", - "test_handle_k", - "test_validation_error_k", - "test_handle_confluent", - "test_validation_error_confluent", - "test_handle_n", - "test_validation_error_n", +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_handle_kafka(): + from docs.docs_src.getting_started.subscription.kafka.testing import ( + test_handle as test_handle_k, + ) + + await test_handle_k() + + +@pytest.mark.kafka() +@pytest.mark.asyncio() +@require_aiokafka +async def test_validate_kafka(): + from docs.docs_src.getting_started.subscription.kafka.testing import ( + test_validation_error as test_validation_error_k, + ) + + await test_validation_error_k() + + +@pytest.mark.confluent() +@pytest.mark.asyncio() +@require_confluent +async def test_handle_confluent(): + from docs.docs_src.getting_started.subscription.confluent.testing import ( + test_handle as test_handle_confluent, + ) + + await test_handle_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.confluent() +@require_confluent +async def test_validate_confluent(): + from docs.docs_src.getting_started.subscription.confluent.testing import ( + test_validation_error as test_validation_error_confluent, + ) + + await test_validation_error_confluent() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_handle_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.testing import ( + test_handle as test_handle_r, + ) + + await test_handle_r() + + +@pytest.mark.asyncio() +@pytest.mark.rabbit() +@require_aiopika +async def test_validate_rabbit(): + from docs.docs_src.getting_started.subscription.rabbit.testing import ( + test_validation_error as test_validation_error_r, + ) + + await test_validation_error_r() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_handle_nats(): + from docs.docs_src.getting_started.subscription.nats.testing import ( + test_handle as test_handle_n, + ) + + await test_handle_n() + + +@pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async 
def test_validate_nats(): + from docs.docs_src.getting_started.subscription.nats.testing import ( + test_validation_error as test_validation_error_n, + ) + + await test_validation_error_n() + + +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_handle_redis(): + from docs.docs_src.getting_started.subscription.redis.testing import ( + test_handle as test_handle_rd, + ) + + await test_handle_rd() + + +@pytest.mark.asyncio() +@pytest.mark.redis() +@require_redis +async def test_validate_redis(): + from docs.docs_src.getting_started.subscription.redis.testing import ( + test_validation_error as test_validation_error_rd, + ) + + await test_validation_error_rd() diff --git a/tests/docs/index/test_basic.py b/tests/docs/index/test_basic.py index da4d6d246e..b495a0384f 100644 --- a/tests/docs/index/test_basic.py +++ b/tests/docs/index/test_basic.py @@ -1,15 +1,19 @@ import pytest -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_index_kafka_base(): from docs.docs_src.index.kafka.basic import broker, handle_msg + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-topic") @@ -22,8 +26,10 @@ async def test_index_kafka_base(): @pytest.mark.asyncio() +@require_confluent async def test_index_confluent_base(): from docs.docs_src.index.confluent.basic import broker, handle_msg + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-topic") @@ -36,8 +42,10 @@ async def test_index_confluent_base(): @pytest.mark.asyncio() +@require_aiopika async def test_index_rabbit_base(): from docs.docs_src.index.rabbit.basic import broker, handle_msg + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-queue") @@ -50,8 +58,10 @@ async def test_index_rabbit_base(): @pytest.mark.asyncio() +@require_nats async def test_index_nats_base(): from docs.docs_src.index.nats.basic import broker, handle_msg + from faststream.nats import TestNatsBroker async with TestNatsBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-subject") @@ -64,8 +74,10 @@ async def test_index_nats_base(): @pytest.mark.asyncio() +@require_redis async def test_index_redis_base(): from docs.docs_src.index.redis.basic import broker, handle_msg + from faststream.redis import TestRedisBroker async with TestRedisBroker(broker) as br: await br.publish({"user": "John", "user_id": 1}, "in-channel") diff --git a/tests/docs/index/test_dependencies.py b/tests/docs/index/test_dependencies.py index 81cd4d9bb5..b2494469f6 100644 --- a/tests/docs/index/test_dependencies.py +++ b/tests/docs/index/test_dependencies.py @@ -1,11 +1,13 @@ import pytest -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_index_dep(): from docs.docs_src.index.dependencies import base_handler, broker + from faststream.kafka import TestKafkaBroker data = { "user": "John", 
diff --git a/tests/docs/index/test_pydantic.py b/tests/docs/index/test_pydantic.py index 7a8e2a6251..426a104d5d 100644 --- a/tests/docs/index/test_pydantic.py +++ b/tests/docs/index/test_pydantic.py @@ -1,23 +1,93 @@ -from docs.docs_src.index.confluent.test import test_correct as test_confluent_correct -from docs.docs_src.index.confluent.test import test_invalid as test_confluent_invalid -from docs.docs_src.index.kafka.test import test_correct as test_k_correct -from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid -from docs.docs_src.index.nats.test import test_correct as test_n_correct -from docs.docs_src.index.nats.test import test_invalid as test_n_invalid -from docs.docs_src.index.rabbit.test import test_correct as test_r_correct -from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid -from docs.docs_src.index.redis.test import test_correct as test_red_correct -from docs.docs_src.index.redis.test import test_invalid as test_red_invalid - -__all__ = ( - "test_k_correct", - "test_k_invalid", - "test_confluent_correct", - "test_confluent_invalid", - "test_r_correct", - "test_r_invalid", - "test_n_correct", - "test_n_invalid", - "test_red_correct", - "test_red_invalid", +import pytest + +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_kafka_correct(): + from docs.docs_src.index.kafka.test import test_correct as test_k_correct + + await test_k_correct() + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_kafka_invalid(): + from docs.docs_src.index.kafka.test import test_invalid as test_k_invalid + + await test_k_invalid() + + +@pytest.mark.asyncio() +@require_confluent +async def test_confluent_correct(): + from docs.docs_src.index.confluent.test import ( + test_correct as test_confluent_correct, + ) + + await test_confluent_correct() + + +@pytest.mark.asyncio() +@require_confluent +async def test_confluent_invalid(): + from docs.docs_src.index.confluent.test import ( + test_invalid as test_confluent_invalid, + ) + + await test_confluent_invalid() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_rabbit_correct(): + from docs.docs_src.index.rabbit.test import test_correct as test_r_correct + + await test_r_correct() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_rabbit_invalid(): + from docs.docs_src.index.rabbit.test import test_invalid as test_r_invalid + + await test_r_invalid() + + +@pytest.mark.asyncio() +@require_nats +async def test_nats_correct(): + from docs.docs_src.index.nats.test import test_correct as test_n_correct + + await test_n_correct() + + +@pytest.mark.asyncio() +@require_nats +async def test_nats_invalid(): + from docs.docs_src.index.nats.test import test_invalid as test_n_invalid + + await test_n_invalid() + + +@pytest.mark.asyncio() +@require_redis +async def test_redis_correct(): + from docs.docs_src.index.redis.test import test_correct as test_red_correct + + await test_red_correct() + + +@pytest.mark.asyncio() +@require_redis +async def test_redis_invalid(): + from docs.docs_src.index.redis.test import test_invalid as test_red_invalid + + await test_red_invalid() diff --git a/tests/docs/integration/fastapi/test_base.py b/tests/docs/integration/fastapi/test_base.py index 982cdc378e..d6871fe04a 100644 --- a/tests/docs/integration/fastapi/test_base.py +++ b/tests/docs/integration/fastapi/test_base.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient 
import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_base(): from docs.docs_src.integrations.fastapi.kafka.base import app, hello, router + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(router.broker) as br: with TestClient(app) as client: @@ -26,8 +30,10 @@ async def test_fastapi_kafka_base(): @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_base(): from docs.docs_src.integrations.fastapi.confluent.base import app, hello, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker async with TestConfluentKafkaBroker(router.broker) as br: with TestClient(app) as client: @@ -43,8 +49,10 @@ async def test_fastapi_confluent_base(): @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_base(): from docs.docs_src.integrations.fastapi.rabbit.base import app, hello, router + from faststream.rabbit import TestRabbitBroker async with TestRabbitBroker(router.broker) as br: with TestClient(app) as client: @@ -60,8 +68,10 @@ async def test_fastapi_rabbit_base(): @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_base(): from docs.docs_src.integrations.fastapi.nats.base import app, hello, router + from faststream.nats import TestNatsBroker async with TestNatsBroker(router.broker) as br: with TestClient(app) as client: @@ -77,8 +87,10 @@ async def test_fastapi_nats_base(): @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_base(): from docs.docs_src.integrations.fastapi.redis.base import app, hello, router + from faststream.redis import TestRedisBroker async with TestRedisBroker(router.broker) as br: with TestClient(app) as client: diff --git a/tests/docs/integration/fastapi/test_depends.py b/tests/docs/integration/fastapi/test_depends.py index ae160bb622..2a7b917e8a 100644 --- a/tests/docs/integration/fastapi/test_depends.py +++ b/tests/docs/integration/fastapi/test_depends.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_depends(): from docs.docs_src.integrations.fastapi.kafka.depends import app, router + from faststream.kafka import TestKafkaBroker @router.subscriber("test") async def handler(): ... @@ -23,8 +27,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_depends(): from docs.docs_src.integrations.fastapi.confluent.depends import app, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @router.subscriber("test") async def handler(): ... @@ -37,8 +43,10 @@ async def handler(): ... 
@pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_depends(): from docs.docs_src.integrations.fastapi.rabbit.depends import app, router + from faststream.rabbit import TestRabbitBroker @router.subscriber("test") async def handler(): ... @@ -51,8 +59,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_depends(): from docs.docs_src.integrations.fastapi.nats.depends import app, router + from faststream.nats import TestNatsBroker @router.subscriber("test") async def handler(): ... @@ -65,8 +75,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_depends(): from docs.docs_src.integrations.fastapi.redis.depends import app, router + from faststream.redis import TestRedisBroker @router.subscriber("test") async def handler(): ... diff --git a/tests/docs/integration/fastapi/test_multiple.py b/tests/docs/integration/fastapi/test_multiple.py index c3252682fe..8077d7b9fa 100644 --- a/tests/docs/integration/fastapi/test_multiple.py +++ b/tests/docs/integration/fastapi/test_multiple.py @@ -1,6 +1,14 @@ import pytest from fastapi.testclient import TestClient +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) + class BaseCase: def test_running(self, data): @@ -18,6 +26,7 @@ def test_running(self, data): @pytest.mark.kafka() +@require_aiokafka class TestKafka(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -27,6 +36,7 @@ def data(self): @pytest.mark.confluent() +@require_confluent class TestConfluent(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -39,6 +49,7 @@ def data(self): @pytest.mark.nats() +@require_nats class TestNats(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -48,6 +59,7 @@ def data(self): @pytest.mark.rabbit() +@require_aiopika class TestRabbit(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -57,6 +69,7 @@ def data(self): @pytest.mark.redis() +@require_redis class TestRedis(BaseCase): @pytest.fixture(scope="class") def data(self): diff --git a/tests/docs/integration/fastapi/test_multiple_lifespan.py b/tests/docs/integration/fastapi/test_multiple_lifespan.py index 48099a376f..72dc782c51 100644 --- a/tests/docs/integration/fastapi/test_multiple_lifespan.py +++ b/tests/docs/integration/fastapi/test_multiple_lifespan.py @@ -1,6 +1,14 @@ import pytest from fastapi.testclient import TestClient +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) + class BaseCase: def test_running(self, data): @@ -27,6 +35,7 @@ async def handler2(): ... 
@pytest.mark.kafka() +@require_aiokafka class TestKafka(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -40,6 +49,7 @@ def data(self): @pytest.mark.confluent() +@require_confluent class TestConfluent(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -53,6 +63,7 @@ def data(self): @pytest.mark.nats() +@require_nats class TestNats(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -66,6 +77,7 @@ def data(self): @pytest.mark.rabbit() +@require_aiopika class TestRabbit(BaseCase): @pytest.fixture(scope="class") def data(self): @@ -79,6 +91,7 @@ def data(self): @pytest.mark.redis() +@require_redis class TestRedis(BaseCase): @pytest.fixture(scope="class") def data(self): diff --git a/tests/docs/integration/fastapi/test_send.py b/tests/docs/integration/fastapi/test_send.py index b8f1fca7d8..f1b3dde966 100644 --- a/tests/docs/integration/fastapi/test_send.py +++ b/tests/docs/integration/fastapi/test_send.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_send(): from docs.docs_src.integrations.fastapi.kafka.send import app, router + from faststream.kafka import TestKafkaBroker @router.subscriber("test") async def handler(): ... @@ -23,8 +27,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_send(): from docs.docs_src.integrations.fastapi.confluent.send import app, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @router.subscriber("test") async def handler(): ... @@ -37,8 +43,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_send(): from docs.docs_src.integrations.fastapi.rabbit.send import app, router + from faststream.rabbit import TestRabbitBroker @router.subscriber("test") async def handler(): ... @@ -51,8 +59,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_send(): from docs.docs_src.integrations.fastapi.nats.send import app, router + from faststream.nats import TestNatsBroker @router.subscriber("test") async def handler(): ... @@ -65,8 +75,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_send(): from docs.docs_src.integrations.fastapi.redis.send import app, router + from faststream.redis import TestRedisBroker @router.subscriber("test") async def handler(): ... 
diff --git a/tests/docs/integration/fastapi/test_startup.py b/tests/docs/integration/fastapi/test_startup.py index d4e80b8851..3c68863171 100644 --- a/tests/docs/integration/fastapi/test_startup.py +++ b/tests/docs/integration/fastapi/test_startup.py @@ -1,16 +1,20 @@ import pytest from fastapi.testclient import TestClient -from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker -from faststream.kafka import TestKafkaBroker -from faststream.nats import TestNatsBroker -from faststream.rabbit import TestRabbitBroker -from faststream.redis import TestRedisBroker +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, +) @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_kafka_startup(): from docs.docs_src.integrations.fastapi.kafka.startup import app, hello, router + from faststream.kafka import TestKafkaBroker @router.subscriber("test") async def handler(): ... @@ -21,8 +25,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_confluent async def test_fastapi_confluent_startup(): from docs.docs_src.integrations.fastapi.confluent.startup import app, hello, router + from faststream.confluent import TestKafkaBroker as TestConfluentKafkaBroker @router.subscriber("test") async def handler(): ... @@ -33,8 +39,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_aiopika async def test_fastapi_rabbit_startup(): from docs.docs_src.integrations.fastapi.rabbit.startup import app, hello, router + from faststream.rabbit import TestRabbitBroker @router.subscriber("test") async def handler(): ... @@ -45,8 +53,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_nats async def test_fastapi_nats_startup(): from docs.docs_src.integrations.fastapi.nats.startup import app, hello, router + from faststream.nats import TestNatsBroker @router.subscriber("test") async def handler(): ... @@ -57,8 +67,10 @@ async def handler(): ... @pytest.mark.asyncio() +@require_redis async def test_fastapi_redis_startup(): from docs.docs_src.integrations.fastapi.redis.startup import app, hello, router + from faststream.redis import TestRedisBroker @router.subscriber("test") async def handler(): ... 
diff --git a/tests/docs/integration/fastapi/test_test.py b/tests/docs/integration/fastapi/test_test.py index 0d5544e8d4..992335ff77 100644 --- a/tests/docs/integration/fastapi/test_test.py +++ b/tests/docs/integration/fastapi/test_test.py @@ -1,15 +1,49 @@ -from docs.docs_src.integrations.fastapi.confluent.test import ( - test_router as test_confluent, -) -from docs.docs_src.integrations.fastapi.kafka.test import test_router as test_k -from docs.docs_src.integrations.fastapi.nats.test import test_router as test_n -from docs.docs_src.integrations.fastapi.rabbit.test import test_router as test_r -from docs.docs_src.integrations.fastapi.redis.test import test_router as test_red - -__all__ = ( - "test_k", - "test_r", - "test_n", - "test_red", - "test_confluent", +import pytest + +from tests.marks import ( + require_aiokafka, + require_aiopika, + require_confluent, + require_nats, + require_redis, ) + + +@pytest.mark.asyncio() +@require_aiokafka +async def test_kafka(): + from docs.docs_src.integrations.fastapi.kafka.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_confluent +async def test_confluent(): + from docs.docs_src.integrations.fastapi.confluent.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_aiopika +async def test_rabbit(): + from docs.docs_src.integrations.fastapi.rabbit.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_nats +async def test_nats(): + from docs.docs_src.integrations.fastapi.nats.test import test_router + + await test_router() + + +@pytest.mark.asyncio() +@require_redis +async def test_redis(): + from docs.docs_src.integrations.fastapi.redis.test import test_router + + await test_router() diff --git a/tests/docs/integration/http/test_fastapi.py b/tests/docs/integration/http/test_fastapi.py index 5875810de4..54267dbdb6 100644 --- a/tests/docs/integration/http/test_fastapi.py +++ b/tests/docs/integration/http/test_fastapi.py @@ -1,16 +1,18 @@ import pytest from fastapi.testclient import TestClient -from faststream.kafka import TestKafkaBroker +from tests.marks import require_aiokafka @pytest.mark.asyncio() +@require_aiokafka async def test_fastapi_raw_integration(): from docs.docs_src.integrations.http_frameworks_integrations.fastapi import ( app, base_handler, broker, ) + from faststream.kafka import TestKafkaBroker async with TestKafkaBroker(broker): with TestClient(app) as client: diff --git a/tests/docs/nats/js/test_kv.py b/tests/docs/nats/js/test_kv.py index 55a8a4ed4f..2f582a1569 100644 --- a/tests/docs/nats/js/test_kv.py +++ b/tests/docs/nats/js/test_kv.py @@ -11,4 +11,4 @@ async def test_basic(): async with TestNatsBroker(broker, with_real=True), TestApp(app): await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with(b"Hello!") diff --git a/tests/docs/nats/js/test_object.py b/tests/docs/nats/js/test_object.py index 535fae7ff8..b65905d4c6 100644 --- a/tests/docs/nats/js/test_object.py +++ b/tests/docs/nats/js/test_object.py @@ -9,6 +9,23 @@ async def test_basic(): from docs.docs_src.nats.js.object import app, broker, handler - async with TestNatsBroker(broker, with_real=True), TestApp(app): + async with TestNatsBroker(broker, with_real=True): + await broker.start() + + os = await broker.object_storage("example-bucket") + try: + existed_files = await os.list() + except Exception: + existed_files = () + + call = True + for file in existed_files: + if file.name == "file.txt": + call = False + + if 
call: + async with TestApp(app): + pass + await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with("file.txt") diff --git a/tests/examples/fastapi_integration/test_app.py b/tests/examples/fastapi_integration/test_app.py index 2859777861..c186d046be 100644 --- a/tests/examples/fastapi_integration/test_app.py +++ b/tests/examples/fastapi_integration/test_app.py @@ -1,7 +1,25 @@ -from examples.fastapi_integration.testing import broker, test_handler, test_incorrect +import pytest -__all__ = ( - "test_incorrect", - "test_handler", - "broker", -) +from tests.marks import require_aiopika + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handler(): + from examples.fastapi_integration.testing import router + from examples.fastapi_integration.testing import test_handler as test_ + from faststream.rabbit import TestRabbitBroker + + async with TestRabbitBroker(router.broker) as br: + await test_(br) + + +@pytest.mark.asyncio() +@require_aiopika +async def test_incorrect(): + from examples.fastapi_integration.testing import router + from examples.fastapi_integration.testing import test_incorrect as test_ + from faststream.rabbit import TestRabbitBroker + + async with TestRabbitBroker(router.broker) as br: + await test_(br) diff --git a/tests/examples/kafka/__init__.py b/tests/examples/kafka/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/examples/kafka/__init__.py +++ b/tests/examples/kafka/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/examples/nats/__init__.py b/tests/examples/nats/__init__.py index e69de29bb2..87ead90ee6 100644 --- a/tests/examples/nats/__init__.py +++ b/tests/examples/nats/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("nats") diff --git a/tests/examples/nats/test_e06_key_value.py b/tests/examples/nats/test_e06_key_value.py index cf7bc25b97..e170569312 100644 --- a/tests/examples/nats/test_e06_key_value.py +++ b/tests/examples/nats/test_e06_key_value.py @@ -11,4 +11,4 @@ async def test_basic(): async with TestNatsBroker(broker, with_real=True), TestApp(app): await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with(b"Hello!") diff --git a/tests/examples/nats/test_e07_object_storage.py b/tests/examples/nats/test_e07_object_storage.py index 7d32d7d8a4..1310f71a12 100644 --- a/tests/examples/nats/test_e07_object_storage.py +++ b/tests/examples/nats/test_e07_object_storage.py @@ -9,6 +9,23 @@ async def test_basic(): from examples.nats.e07_object_storage import app, broker, handler - async with TestNatsBroker(broker, with_real=True), TestApp(app): + async with TestNatsBroker(broker, with_real=True): + await broker.start() + + os = await broker.object_storage("example-bucket") + try: + existed_files = await os.list() + except Exception: + existed_files = () + + call = True + for file in existed_files: + if file.name == "file.txt": + call = False + + if call: + async with TestApp(app): + pass + await handler.wait_call(3.0) - handler.mock.assert_called_once_with("Hi!") + handler.mock.assert_called_once_with("file.txt") diff --git a/tests/examples/rabbit/__init__.py b/tests/examples/rabbit/__init__.py index e69de29bb2..ebec43fcd5 100644 --- a/tests/examples/rabbit/__init__.py +++ b/tests/examples/rabbit/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aio_pika") diff --git a/tests/examples/redis/__init__.py b/tests/examples/redis/__init__.py index e69de29bb2..4752ef19b1 
100644 --- a/tests/examples/redis/__init__.py +++ b/tests/examples/redis/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("redis") diff --git a/tests/examples/router/__init__.py b/tests/examples/router/__init__.py index e69de29bb2..bd6bc708fc 100644 --- a/tests/examples/router/__init__.py +++ b/tests/examples/router/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("aiokafka") diff --git a/tests/examples/test_e01_basic_consume.py b/tests/examples/test_e01_basic_consume.py index 722034f226..e9dd550bb3 100644 --- a/tests/examples/test_e01_basic_consume.py +++ b/tests/examples/test_e01_basic_consume.py @@ -1,11 +1,14 @@ import pytest -from examples.e01_basic_consume import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e01_basic_consume import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e02_1_basic_publisher.py b/tests/examples/test_e02_1_basic_publisher.py index ff310197db..dee4068ae3 100644 --- a/tests/examples/test_e02_1_basic_publisher.py +++ b/tests/examples/test_e02_1_basic_publisher.py @@ -1,11 +1,14 @@ import pytest -from examples.e02_1_basic_publisher import app, broker, handle, handle_response -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e02_1_basic_publisher import app, broker, handle, handle_response + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response.wait_call(3) diff --git a/tests/examples/test_e02_2_basic_publisher.py b/tests/examples/test_e02_2_basic_publisher.py index c85e245d09..93e1a09579 100644 --- a/tests/examples/test_e02_2_basic_publisher.py +++ b/tests/examples/test_e02_2_basic_publisher.py @@ -1,11 +1,14 @@ import pytest -from examples.e02_2_basic_publisher import app, broker, handle, handle_response -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e02_2_basic_publisher import app, broker, handle, handle_response + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response.wait_call(3) diff --git a/tests/examples/test_e02_3_basic_publisher.py b/tests/examples/test_e02_3_basic_publisher.py index 9e6ce2baec..9e8f40a57d 100644 --- a/tests/examples/test_e02_3_basic_publisher.py +++ b/tests/examples/test_e02_3_basic_publisher.py @@ -1,11 +1,14 @@ import pytest -from examples.e02_3_basic_publisher import app, broker, handle, handle_response -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e02_3_basic_publisher import app, broker, handle, handle_response + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response.wait_call(3) diff --git a/tests/examples/test_e03_miltiple_pubsub.py b/tests/examples/test_e03_miltiple_pubsub.py index a7713f5268..65ee6ed165 
100644 --- a/tests/examples/test_e03_miltiple_pubsub.py +++ b/tests/examples/test_e03_miltiple_pubsub.py @@ -1,17 +1,20 @@ import pytest -from examples.e03_miltiple_pubsub import ( - app, - broker, - handle, - handle_response_1, - handle_response_2, -) -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e03_miltiple_pubsub import ( + app, + broker, + handle, + handle_response_1, + handle_response_2, + ) + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) await handle_response_1.wait_call(3) diff --git a/tests/examples/test_e04_msg_filter.py b/tests/examples/test_e04_msg_filter.py index 71ed93d08d..79b5ba1225 100644 --- a/tests/examples/test_e04_msg_filter.py +++ b/tests/examples/test_e04_msg_filter.py @@ -1,11 +1,14 @@ import pytest -from examples.e04_msg_filter import app, broker, handle_json, handle_other_messages -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e04_msg_filter import app, broker, handle_json, handle_other_messages + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle_json.wait_call(3) await handle_other_messages.wait_call(3) diff --git a/tests/examples/test_e05_rpc_request.py b/tests/examples/test_e05_rpc_request.py index 8afa9589e3..6a763b5bf6 100644 --- a/tests/examples/test_e05_rpc_request.py +++ b/tests/examples/test_e05_rpc_request.py @@ -1,11 +1,14 @@ import pytest -from examples.e05_rpc_request import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e05_rpc_request import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e06_manual_ack.py b/tests/examples/test_e06_manual_ack.py index 98d8c44589..70d096dbb3 100644 --- a/tests/examples/test_e06_manual_ack.py +++ b/tests/examples/test_e06_manual_ack.py @@ -1,11 +1,14 @@ import pytest -from examples.e06_manual_ack import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e06_manual_ack import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e07_ack_immediately.py b/tests/examples/test_e07_ack_immediately.py index ab97748daf..393c275dff 100644 --- a/tests/examples/test_e07_ack_immediately.py +++ b/tests/examples/test_e07_ack_immediately.py @@ -1,11 +1,14 @@ import pytest -from examples.e07_ack_immediately import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e07_ack_immediately import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e08_testing.py 
b/tests/examples/test_e08_testing.py index e9184d3342..a54ba447c7 100644 --- a/tests/examples/test_e08_testing.py +++ b/tests/examples/test_e08_testing.py @@ -1,3 +1,11 @@ -from examples.e08_testing import test_handle +import pytest -__all__ = ("test_handle",) +from tests.marks import require_aiopika + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle(): + from examples.e08_testing import test_handle as _test + + await _test() diff --git a/tests/examples/test_e09_testing_mocks.py b/tests/examples/test_e09_testing_mocks.py index 83f82b2b37..04718322a6 100644 --- a/tests/examples/test_e09_testing_mocks.py +++ b/tests/examples/test_e09_testing_mocks.py @@ -1,3 +1,11 @@ -from examples.e09_testing_mocks import test_handle +import pytest -__all__ = ("test_handle",) +from tests.marks import require_aiopika + + +@pytest.mark.asyncio() +@require_aiopika +async def test_handle(): + from examples.e09_testing_mocks import test_handle as _test + + await _test() diff --git a/tests/examples/test_e10_middlewares.py b/tests/examples/test_e10_middlewares.py index b635260802..4fdb3f15e6 100644 --- a/tests/examples/test_e10_middlewares.py +++ b/tests/examples/test_e10_middlewares.py @@ -1,11 +1,14 @@ import pytest -from examples.e10_middlewares import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e10_middlewares import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/examples/test_e11_settings.py b/tests/examples/test_e11_settings.py index 146bab8b64..3483b57597 100644 --- a/tests/examples/test_e11_settings.py +++ b/tests/examples/test_e11_settings.py @@ -1,11 +1,14 @@ import pytest -from examples.e11_settings import app, broker, handle -from faststream.rabbit import TestApp, TestRabbitBroker +from tests.marks import require_aiopika @pytest.mark.asyncio() +@require_aiopika async def test_example(): + from examples.e11_settings import app, broker, handle + from faststream.rabbit import TestApp, TestRabbitBroker + async with TestRabbitBroker(broker), TestApp(app): await handle.wait_call(3) diff --git a/tests/marks.py b/tests/marks.py index 80bb1cde5c..07bde035b0 100644 --- a/tests/marks.py +++ b/tests/marks.py @@ -23,3 +23,68 @@ not PYDANTIC_V2, reason="requires PydanticV1", ) + + +try: + from faststream.confluent import KafkaBroker +except ImportError: + HAS_CONFLUENT = False +else: + HAS_CONFLUENT = True + +require_confluent = pytest.mark.skipif( + not HAS_CONFLUENT, + reason="requires confluent-kafka", +) + + +try: + from faststream.kafka import KafkaBroker # noqa: F401 +except ImportError: + HAS_AIOKAFKA = False +else: + HAS_AIOKAFKA = True + +require_aiokafka = pytest.mark.skipif( + not HAS_AIOKAFKA, + reason="requires aiokafka", +) + + +try: + from faststream.rabbit import RabbitBroker # noqa: F401 +except ImportError: + HAS_AIOPIKA = False +else: + HAS_AIOPIKA = True + +require_aiopika = pytest.mark.skipif( + not HAS_AIOPIKA, + reason="requires aio-pika", +) + + +try: + from faststream.redis import RedisBroker # noqa: F401 +except ImportError: + HAS_REDIS = False +else: + HAS_REDIS = True + +require_redis = pytest.mark.skipif( + not HAS_REDIS, + reason="requires redis", +) + + +try: + from faststream.nats import NatsBroker # noqa: F401 +except ImportError: + HAS_NATS = False +else: + HAS_NATS = True + 
+require_nats = pytest.mark.skipif( + not HAS_NATS, + reason="requires nats-py", +) diff --git a/tests/opentelemetry/__init__.py b/tests/opentelemetry/__init__.py index 75763c2fee..20d03f3611 100644 --- a/tests/opentelemetry/__init__.py +++ b/tests/opentelemetry/__init__.py @@ -1,3 +1,3 @@ import pytest -pytest.importorskip("opentelemetry") +pytest.importorskip("opentelemetry.sdk") diff --git a/tests/utils/context/test_headers.py b/tests/utils/context/test_headers.py index 068e8b8bd4..bccf0b0362 100644 --- a/tests/utils/context/test_headers.py +++ b/tests/utils/context/test_headers.py @@ -1,11 +1,14 @@ import pytest from faststream import Header -from faststream.nats import NatsBroker, TestNatsBroker +from tests.marks import require_nats @pytest.mark.asyncio() +@require_nats async def test_nats_headers(): + from faststream.nats import NatsBroker, TestNatsBroker + broker = NatsBroker() @broker.subscriber("in") diff --git a/tests/utils/context/test_path.py b/tests/utils/context/test_path.py index beff946592..babf557b58 100644 --- a/tests/utils/context/test_path.py +++ b/tests/utils/context/test_path.py @@ -1,19 +1,17 @@ +import asyncio +from unittest.mock import Mock + import pytest from faststream import Path -from faststream.nats import NatsBroker, PullSub, TestNatsBroker -from faststream.rabbit import ( - ExchangeType, - RabbitBroker, - RabbitExchange, - RabbitQueue, - TestRabbitBroker, -) -from faststream.redis import RedisBroker, TestRedisBroker +from tests.marks import require_aiopika, require_nats, require_redis @pytest.mark.asyncio() +@require_nats async def test_nats_path(): + from faststream.nats import NatsBroker, TestNatsBroker + broker = NatsBroker() @broker.subscriber("in.{name}.{id}") @@ -38,7 +36,48 @@ async def h( @pytest.mark.asyncio() +@pytest.mark.nats() +@require_nats +async def test_nats_kv_path( + queue: str, + event: asyncio.Event, + mock: Mock, +): + from faststream.nats import NatsBroker + + broker = NatsBroker() + + @broker.subscriber("in.{name}.{id}", kv_watch=queue) + async def h( + msg: int, + name: str = Path(), + id_: int = Path("id"), + ): + mock(msg == 1 and name == "john" and id_ == 1) + event.set() + + async with broker: + await broker.start() + + kv = await broker.key_value(queue) + + await asyncio.wait( + ( + asyncio.create_task(kv.put("in.john.1", b"1")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(True) + + +@pytest.mark.asyncio() +@require_nats async def test_nats_batch_path(): + from faststream.nats import NatsBroker, PullSub, TestNatsBroker + broker = NatsBroker() @broker.subscriber("in.{name}.{id}", stream="test", pull_sub=PullSub(batch=True)) @@ -63,7 +102,10 @@ async def h( @pytest.mark.asyncio() +@require_redis async def test_redis_path(): + from faststream.redis import RedisBroker, TestRedisBroker + broker = RedisBroker() @broker.subscriber("in.{name}.{id}") @@ -88,7 +130,16 @@ async def h( @pytest.mark.asyncio() +@require_aiopika async def test_rabbit_path(): + from faststream.rabbit import ( + ExchangeType, + RabbitBroker, + RabbitExchange, + RabbitQueue, + TestRabbitBroker, + ) + broker = RabbitBroker() @broker.subscriber( From 82853dbe78b4c6a35e4be7685f3a59d3885f45ed Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Thu, 23 May 2024 22:03:50 +0300 Subject: [PATCH 19/43] feat: add subscriber no-reply option (#1461) --- faststream/broker/publisher/usecase.py | 1 - faststream/broker/subscriber/usecase.py | 18 +- faststream/confluent/broker/registrator.py | 529 
++++++++++++------ faststream/confluent/fastapi/fastapi.py | 475 ++++++++++------ faststream/confluent/router.py | 133 +++-- faststream/confluent/subscriber/factory.py | 6 + faststream/confluent/subscriber/usecase.py | 11 +- faststream/kafka/broker/broker.py | 156 ++++-- faststream/kafka/broker/registrator.py | 607 ++++++++++++++------- faststream/kafka/fastapi/fastapi.py | 97 +++- faststream/kafka/publisher/usecase.py | 30 +- faststream/kafka/router.py | 151 +++-- faststream/kafka/subscriber/factory.py | 6 + faststream/kafka/subscriber/usecase.py | 8 +- faststream/nats/broker/registrator.py | 9 +- faststream/nats/fastapi/fastapi.py | 7 + faststream/nats/router.py | 7 + faststream/nats/subscriber/factory.py | 8 + faststream/nats/subscriber/usecase.py | 28 +- faststream/nats/testing.py | 3 +- faststream/rabbit/broker/registrator.py | 7 + faststream/rabbit/fastapi/router.py | 7 + faststream/rabbit/router.py | 7 + faststream/rabbit/subscriber/factory.py | 2 + faststream/rabbit/subscriber/usecase.py | 4 +- faststream/redis/broker/broker.py | 1 - faststream/redis/broker/registrator.py | 7 + faststream/redis/fastapi/fastapi.py | 7 + faststream/redis/router.py | 7 + faststream/redis/subscriber/factory.py | 6 + faststream/redis/subscriber/usecase.py | 21 +- tests/brokers/base/publish.py | 80 ++- tests/brokers/rabbit/test_publish.py | 6 +- 33 files changed, 1718 insertions(+), 734 deletions(-) diff --git a/faststream/broker/publisher/usecase.py b/faststream/broker/publisher/usecase.py index 46bb96ef2a..1bdbc74513 100644 --- a/faststream/broker/publisher/usecase.py +++ b/faststream/broker/publisher/usecase.py @@ -20,7 +20,6 @@ from faststream.asyncapi.utils import to_camelcase from faststream.broker.publisher.proto import PublisherProto from faststream.broker.types import ( - BrokerMiddleware, MsgType, P_HandlerParams, T_HandlerReturn, diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py index a2e9d1aa58..e5ee6fadea 100644 --- a/faststream/broker/subscriber/usecase.py +++ b/faststream/broker/subscriber/usecase.py @@ -21,7 +21,6 @@ from faststream.asyncapi.abc import AsyncAPIOperation from faststream.asyncapi.message import parse_handler_params from faststream.asyncapi.utils import to_camelcase -from faststream.broker.publisher.proto import ProducerProto from faststream.broker.subscriber.call_item import HandlerItem from faststream.broker.subscriber.proto import SubscriberProto from faststream.broker.types import ( @@ -40,6 +39,7 @@ from faststream.broker.message import StreamMessage from faststream.broker.middlewares import BaseMiddleware + from faststream.broker.publisher.proto import BasePublisherProto, ProducerProto from faststream.broker.types import ( AsyncCallable, BrokerMiddleware, @@ -93,6 +93,7 @@ def __init__( self, *, no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], @@ -108,6 +109,7 @@ def __init__( self._default_parser = default_parser self._default_decoder = default_decoder + self._no_reply = no_reply # Watcher args self._no_ack = no_ack self._retry = retry @@ -139,7 +141,7 @@ def setup( # type: ignore[override] self, *, logger: Optional["LoggerProto"], - producer: Optional[ProducerProto], + producer: Optional["ProducerProto"], graceful_timeout: Optional[float], extra_context: "AnyDict", # broker options @@ -338,7 +340,7 @@ async def consume(self, msg: MsgType) -> Any: ) for p in chain( - self._make_response_publisher(message), + 
self.__get_reponse_publisher(message), h.handler._publishers, ): await p.publish( @@ -358,6 +360,16 @@ async def consume(self, msg: MsgType) -> Any: return None + def __get_reponse_publisher( + self, + message: "StreamMessage[MsgType]", + ) -> Iterable["BasePublisherProto"]: + if not message.reply_to or self._no_reply: + return () + + else: + return self._make_response_publisher(message) + def get_log_context( self, message: Optional["StreamMessage[MsgType]"], diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py index 6d71a21046..277a77ef69 100644 --- a/faststream/confluent/broker/registrator.py +++ b/faststream/confluent/broker/registrator.py @@ -66,12 +66,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -89,16 +91,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -107,19 +112,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -128,43 +137,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. 
This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -174,22 +193,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -197,11 +220,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -210,11 +235,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -224,35 +251,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -276,7 +311,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. 
- """), + """ + ), ] = "read_uncommitted", batch: Annotated[ Literal[True], @@ -284,12 +320,14 @@ def subscriber( ], batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -331,6 +369,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -358,12 +402,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -381,16 +427,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -399,19 +448,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -420,43 +473,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. 
This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -466,22 +529,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -489,11 +556,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -502,11 +571,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -516,35 +587,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -568,7 +647,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. 
- """), + """ + ), ] = "read_uncommitted", batch: Annotated[ Literal[False], @@ -576,12 +656,14 @@ def subscriber( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -623,6 +705,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -650,12 +738,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -673,16 +763,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -691,19 +784,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -712,43 +809,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. 
This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -758,22 +865,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -781,11 +892,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -794,11 +907,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -808,35 +923,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -860,7 +983,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. 
- """), + """ + ), ] = "read_uncommitted", batch: Annotated[ bool, @@ -868,12 +992,14 @@ def subscriber( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -915,6 +1041,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -945,12 +1077,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -968,16 +1102,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -986,19 +1123,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -1007,43 +1148,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. 
This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -1053,22 +1204,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -1076,11 +1231,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -1089,11 +1246,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -1103,35 +1262,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -1155,7 +1322,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. 
- """), + """ + ), ] = "read_uncommitted", batch: Annotated[ bool, @@ -1163,12 +1331,14 @@ def subscriber( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -1210,6 +1380,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -1264,6 +1440,7 @@ def subscriber( is_manual=not auto_commit, # subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=self._middlewares, broker_dependencies=self._dependencies, @@ -1301,7 +1478,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1309,14 +1487,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -1371,7 +1552,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1379,14 +1561,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -1441,7 +1626,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1449,14 +1635,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -1514,7 +1703,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1522,14 +1712,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). 
Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], diff --git a/faststream/confluent/fastapi/fastapi.py b/faststream/confluent/fastapi/fastapi.py index 2a494a7712..1897243d6c 100644 --- a/faststream/confluent/fastapi/fastapi.py +++ b/faststream/confluent/fastapi/fastapi.py @@ -414,12 +414,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -437,16 +439,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -455,19 +460,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -476,43 +485,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. 
- """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -522,22 +541,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -545,11 +568,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -558,11 +583,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -572,35 +599,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -624,7 +659,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch: Annotated[ Literal[False], @@ -632,12 +668,14 @@ def subscriber( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. 
If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -679,6 +717,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -829,12 +873,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -852,16 +898,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -870,19 +919,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -891,43 +944,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. 
- """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -937,22 +1000,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -960,11 +1027,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -973,11 +1042,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -987,35 +1058,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -1039,7 +1118,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch: Annotated[ Literal[True], @@ -1047,12 +1127,14 @@ def subscriber( ], batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. 
If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -1236,12 +1318,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -1259,16 +1343,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -1277,19 +1364,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -1298,43 +1389,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -1344,22 +1445,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. 
- """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -1367,11 +1472,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -1380,11 +1487,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -1394,35 +1503,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -1446,7 +1563,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch: Annotated[ bool, @@ -1454,12 +1572,14 @@ def subscriber( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -1501,6 +1621,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -1654,12 +1780,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -1677,16 +1805,19 @@ def subscriber( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -1695,19 +1826,23 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -1716,43 +1851,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -1762,22 +1907,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. 
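A hypothetical sketch of registering the same kind of handler through the FastAPI-integration router whose `subscriber()` signature is updated here. The router class and lifespan wiring follow the FastStream FastAPI plugin pattern; the topic and group names are assumptions.

    from fastapi import FastAPI

    from faststream.confluent.fastapi import KafkaRouter

    router = KafkaRouter("localhost:9092")


    @router.subscriber("orders", group_id="order-processors", no_reply=True)
    async def handle_order(body: dict) -> None:
        ...


    app = FastAPI(lifespan=router.lifespan_context)
    app.include_router(router)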
If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -1785,11 +1934,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -1798,11 +1949,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -1812,35 +1965,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -1864,7 +2025,8 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch: Annotated[ bool, @@ -1872,12 +2034,14 @@ def subscriber( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -1919,6 +2083,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -2096,6 +2266,7 @@ def subscriber( filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/confluent/router.py b/faststream/confluent/router.py index 33480a12ea..f24a40e263 100644 --- a/faststream/confluent/router.py +++ b/faststream/confluent/router.py @@ -48,7 +48,8 @@ def __init__( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -56,14 +57,17 @@ def __init__( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -146,12 +150,14 @@ def __init__( ] = (), group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -169,16 +175,19 @@ def __init__( ] = None, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -187,19 +196,23 @@ def __init__( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -208,43 +221,53 @@ def __init__( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. 
- """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence[str], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -254,22 +277,26 @@ def __init__( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = ("roundrobin",), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -277,11 +304,13 @@ def __init__( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -290,11 +319,13 @@ def __init__( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -304,35 +335,43 @@ def __init__( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. 
- """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -356,7 +395,8 @@ def __init__( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch: Annotated[ bool, @@ -364,12 +404,14 @@ def __init__( ] = False, batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -411,6 +453,12 @@ def __init__( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -461,6 +509,7 @@ def __init__( decoder=decoder, middlewares=middlewares, filter=filter, + no_reply=no_reply, # AsyncAPI args title=title, description=description, diff --git a/faststream/confluent/subscriber/factory.py b/faststream/confluent/subscriber/factory.py index b7b6b6ca61..f1d001b888 100644 --- a/faststream/confluent/subscriber/factory.py +++ b/faststream/confluent/subscriber/factory.py @@ -33,6 +33,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConfluentMsg, ...]]"], @@ -55,6 +56,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[ConfluentMsg]"], @@ -77,6 +79,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable[ @@ -103,6 +106,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable[ @@ -125,6 +129,7 @@ def create_subscriber( connection_data=connection_data, is_manual=is_manual, no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -139,6 +144,7 @@ def create_subscriber( connection_data=connection_data, is_manual=is_manual, no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, diff --git a/faststream/confluent/subscriber/usecase.py b/faststream/confluent/subscriber/usecase.py index 28f7ece4e7..dde949848f 100644 --- a/faststream/confluent/subscriber/usecase.py +++ b/faststream/confluent/subscriber/usecase.py @@ -57,6 +57,7 @@ def __init__( default_parser: "AsyncCallable", default_decoder: "AsyncCallable", no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], @@ -70,6 +71,7 @@ def __init__( default_decoder=default_decoder, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -156,9 +158,10 @@ async def close(self) -> None: 
self.task = None def _make_response_publisher( - self, message: "StreamMessage[Any]" + self, + message: "StreamMessage[Any]", ) -> Sequence[FakePublisher]: - if not message.reply_to or self._producer is None: + if self._producer is None: return () return ( @@ -226,6 +229,7 @@ def __init__( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[Message]"], @@ -244,6 +248,7 @@ def __init__( default_decoder=AsyncConfluentParser.decode_message, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -285,6 +290,7 @@ def __init__( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[Tuple[Message, ...]]"], @@ -306,6 +312,7 @@ def __init__( default_decoder=AsyncConfluentParser.decode_message_batch, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py index de0b6980f1..42cc4f281b 100644 --- a/faststream/kafka/broker/broker.py +++ b/faststream/kafka/broker/broker.py @@ -67,20 +67,24 @@ class KafkaInitKwargs(TypedDict, total=False): ] metadata_max_age_ms: Annotated[ int, - Doc(""" + Doc( + """ The period of time in milliseconds after which we force a refresh of metadata even if we haven't seen any partition leadership changes to proactively discover any new brokers or partitions. - """), + """ + ), ] connections_max_idle_ms: Annotated[ int, - Doc(""" + Doc( + """ Close idle connections after the number of milliseconds specified by this config. Specifying `None` will disable idle checks. - """), + """ + ), ] sasl_kerberos_service_name: str sasl_kerberos_domain_name: Optional[str] @@ -91,18 +95,21 @@ class KafkaInitKwargs(TypedDict, total=False): loop: Optional[AbstractEventLoop] client_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ A name for this client. This string is passed in each request to servers and can be used to identify specific server-side log entries that correspond to this client. Also submitted to :class:`~.consumer.group_coordinator.GroupCoordinator` for logging with respect to consumer group administration. - """), + """ + ), ] # publisher args acks: Annotated[ Union[Literal[0, 1, -1, "all"], object], - Doc(""" + Doc( + """ One of ``0``, ``1``, ``all``. The number of acknowledgments the producer requires the leader to have received before considering a request complete. This controls the durability of records that are @@ -127,7 +134,8 @@ class KafkaInitKwargs(TypedDict, total=False): If unset, defaults to ``acks=1``. If `enable_idempotence` is :data:`True` defaults to ``acks=all``. - """), + """ + ), ] key_serializer: Annotated[ Optional[Callable[[Any], bytes]], @@ -139,26 +147,31 @@ class KafkaInitKwargs(TypedDict, total=False): ] compression_type: Annotated[ Optional[Literal["gzip", "snappy", "lz4", "zstd"]], - Doc(""" + Doc( + """ The compression type for all data generated bythe producer. Compression is of full batches of data, so the efficacy of batching will also impact the compression ratio (more batching means better compression). - """), + """ + ), ] max_batch_size: Annotated[ int, - Doc(""" + Doc( + """ Maximum size of buffered data per partition. 
After this amount `send` coroutine will block until batch is drained. - """), + """ + ), ] partitioner: Annotated[ Callable[ [bytes, List[Partition], List[Partition]], Partition, ], - Doc(""" + Doc( + """ Callable used to determine which partition each message is assigned to. Called (after key serialization): ``partitioner(key_bytes, all_partitions, available_partitions)``. @@ -167,21 +180,25 @@ class KafkaInitKwargs(TypedDict, total=False): messages with the same key are assigned to the same partition. When a key is :data:`None`, the message is delivered to a random partition (filtered to partitions with available leaders only, if possible). - """), + """ + ), ] max_request_size: Annotated[ int, - Doc(""" + Doc( + """ The maximum size of a request. This is also effectively a cap on the maximum record size. Note that the server has its own cap on record size which may be different from this. This setting will limit the number of record batches the producer will send in a single request to avoid sending huge requests. - """), + """ + ), ] linger_ms: Annotated[ int, - Doc(""" + Doc( + """ The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster @@ -190,19 +207,22 @@ class KafkaInitKwargs(TypedDict, total=False): This setting accomplishes this by adding a small amount of artificial delay; that is, if first request is processed faster, than `linger_ms`, producer will wait ``linger_ms - process_time``. - """), + """ + ), ] send_backoff_ms: int enable_idempotence: Annotated[ bool, - Doc(""" + Doc( + """ When set to `True`, the producer will ensure that exactly one copy of each message is written in the stream. If `False`, producer retries due to broker failures, etc., may write duplicates of the retried message in the stream. Note that enabling idempotence acks to set to ``all``. If it is not explicitly set by the user it will be chosen. - """), + """ + ), ] transactional_id: Optional[str] transaction_timeout_ms: int @@ -219,14 +239,16 @@ def __init__( self, bootstrap_servers: Annotated[ Union[str, Iterable[str]], - Doc(""" + Doc( + """ A `host[:port]` string (or list of `host[:port]` strings) that the consumer should contact to bootstrap initial cluster metadata. This does not have to be the full node list. It just needs to have at least one broker that will respond to a Metadata API Request. Default port is 9092. - """), + """ + ), ] = "localhost", *, # both @@ -240,20 +262,24 @@ def __init__( ] = 100, metadata_max_age_ms: Annotated[ int, - Doc(""" + Doc( + """ The period of time in milliseconds after which we force a refresh of metadata even if we haven't seen any partition leadership changes to proactively discover any new brokers or partitions. - """), + """ + ), ] = 5 * 60 * 1000, connections_max_idle_ms: Annotated[ int, - Doc(""" + Doc( + """ Close idle connections after the number of milliseconds specified by this config. Specifying `None` will disable idle checks. - """), + """ + ), ] = 9 * 60 * 1000, sasl_kerberos_service_name: str = "kafka", sasl_kerberos_domain_name: Optional[str] = None, @@ -264,18 +290,21 @@ def __init__( loop: Optional["AbstractEventLoop"] = None, client_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ A name for this client. This string is passed in each request to servers and can be used to identify specific server-side log entries that correspond to this client. 
Also submitted to :class:`~.consumer.group_coordinator.GroupCoordinator` for logging with respect to consumer group administration. - """), + """ + ), ] = SERVICE_NAME, # publisher args acks: Annotated[ Union[Literal[0, 1, -1, "all"], object], - Doc(""" + Doc( + """ One of ``0``, ``1``, ``all``. The number of acknowledgments the producer requires the leader to have received before considering a request complete. This controls the durability of records that are @@ -300,7 +329,8 @@ def __init__( If unset, defaults to ``acks=1``. If `enable_idempotence` is :data:`True` defaults to ``acks=all``. - """), + """ + ), ] = _missing, key_serializer: Annotated[ Optional[Callable[[Any], bytes]], @@ -312,26 +342,31 @@ def __init__( ] = None, compression_type: Annotated[ Optional[Literal["gzip", "snappy", "lz4", "zstd"]], - Doc(""" + Doc( + """ The compression type for all data generated bythe producer. Compression is of full batches of data, so the efficacy of batching will also impact the compression ratio (more batching means better compression). - """), + """ + ), ] = None, max_batch_size: Annotated[ int, - Doc(""" + Doc( + """ Maximum size of buffered data per partition. After this amount `send` coroutine will block until batch is drained. - """), + """ + ), ] = 16 * 1024, partitioner: Annotated[ Callable[ [bytes, List[Partition], List[Partition]], Partition, ], - Doc(""" + Doc( + """ Callable used to determine which partition each message is assigned to. Called (after key serialization): ``partitioner(key_bytes, all_partitions, available_partitions)``. @@ -340,21 +375,25 @@ def __init__( messages with the same key are assigned to the same partition. When a key is :data:`None`, the message is delivered to a random partition (filtered to partitions with available leaders only, if possible). - """), + """ + ), ] = DefaultPartitioner(), max_request_size: Annotated[ int, - Doc(""" + Doc( + """ The maximum size of a request. This is also effectively a cap on the maximum record size. Note that the server has its own cap on record size which may be different from this. This setting will limit the number of record batches the producer will send in a single request to avoid sending huge requests. - """), + """ + ), ] = 1024 * 1024, linger_ms: Annotated[ int, - Doc(""" + Doc( + """ The producer groups together any records that arrive in between request transmissions into a single batched request. Normally this occurs only under load when records arrive faster @@ -363,19 +402,22 @@ def __init__( This setting accomplishes this by adding a small amount of artificial delay; that is, if first request is processed faster, than `linger_ms`, producer will wait ``linger_ms - process_time``. - """), + """ + ), ] = 0, send_backoff_ms: int = 100, enable_idempotence: Annotated[ bool, - Doc(""" + Doc( + """ When set to `True`, the producer will ensure that exactly one copy of each message is written in the stream. If `False`, producer retries due to broker failures, etc., may write duplicates of the retried message in the stream. Note that enabling idempotence acks to set to ``all``. If it is not explicitly set by the user it will be chosen. - """), + """ + ), ] = False, transactional_id: Optional[str] = None, transaction_timeout_ms: int = 60 * 1000, @@ -632,7 +674,8 @@ async def publish( # type: ignore[override] *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. 
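A sketch (editor's illustration; all values arbitrary) combining several of the producer parameters documented above with a keyed publish. Every keyword used here appears in the `KafkaBroker` constructor or `publish()` signature shown in this diff.

    import asyncio

    from faststream.kafka import KafkaBroker


    async def main() -> None:
        broker = KafkaBroker(
            "localhost:9092",
            acks="all",               # wait for the full set of in-sync replicas
            enable_idempotence=True,  # exactly-one-copy writes; requires acks="all"
            compression_type="gzip",  # compress whole batches of records
            linger_ms=5,              # small artificial delay to let batches fill
        )
        async with broker:
            await broker.publish(
                {"user_id": 1, "event": "login"},
                topic="events",
                key=b"user-1",        # identical keys land on the same partition
                headers={"source": "auth-service"},
            )


    asyncio.run(main())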
If partition is `None` (and producer's partitioner config is left as default), @@ -640,21 +683,26 @@ async def publish( # type: ignore[override] partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, timestamp_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ Epoch milliseconds (from Jan 1 1970 UTC) to use as the message timestamp. Defaults to current time. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -708,17 +756,21 @@ async def publish_batch( ], partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, timestamp_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ Epoch milliseconds (from Jan 1 1970 UTC) to use as the message timestamp. Defaults to current time. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], diff --git a/faststream/kafka/broker/registrator.py b/faststream/kafka/broker/registrator.py index 0633032c06..1cb3fa38e2 100644 --- a/faststream/kafka/broker/registrator.py +++ b/faststream/kafka/broker/registrator.py @@ -23,7 +23,7 @@ from faststream.kafka.subscriber.factory import create_subscriber if TYPE_CHECKING: - from aiokafka import ConsumerRecord, TopicPartition + from aiokafka import TopicPartition from aiokafka.abc import ConsumerRebalanceListener from aiokafka.coordinator.assignors.abstract import AbstractPartitionAssignor from fast_depends.dependencies import Depends @@ -48,8 +48,8 @@ class KafkaRegistrator( ABCBroker[ Union[ - "ConsumerRecord", - Tuple["ConsumerRecord", ...], + ConsumerRecord, + Tuple[ConsumerRecord, ...], ] ] ): @@ -77,12 +77,14 @@ def subscriber( ] = False, group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -100,7 +102,8 @@ def subscriber( ] = None, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -109,28 +112,34 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. 
The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -139,43 +148,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence["AbstractPartitionAssignor"], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -185,22 +204,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = (RoundRobinPartitionAssignor,), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -208,11 +231,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -221,11 +246,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -235,35 +262,43 @@ def subscriber( should be set no higher than 1/3 of that value. 
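# A sketch of a subscriber using the consumer-group options documented above.
# Topic and group names are placeholders; the timeouts only illustrate the
# documented "heartbeat at roughly 1/3 of the session timeout" relationship.
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")


@broker.subscriber(
    "orders",
    group_id="order-workers",      # join a group: enables committed offsets
    auto_offset_reset="earliest",  # a brand-new group starts from the oldest data
    auto_commit=True,              # commit offsets periodically in the background
    session_timeout_ms=10_000,
    heartbeat_interval_ms=3_000,
)
async def handle_order(body: str) -> None:
    ...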
It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -287,16 +322,19 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -304,7 +342,8 @@ def subscriber( ] = None, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -326,20 +365,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -377,6 +421,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -408,12 +458,14 @@ def subscriber( ], group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -431,7 +483,8 @@ def subscriber( ] = None, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -440,28 +493,34 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. 
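# Sketches of a batch subscriber and an explicit-partition subscriber, using
# the `batch`, `max_records`, `batch_timeout_ms` and `partitions` options
# described above; `topics`/`pattern` and `partitions` cannot be combined at
# the same time. Names are placeholders.
from typing import List

from aiokafka import TopicPartition

from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")


@broker.subscriber("metrics", batch=True, max_records=100, batch_timeout_ms=200)
async def handle_batch(batch: List[str]) -> None:
    # up to 100 records, or whatever arrived within the 200 ms window
    ...


@broker.subscriber(partitions=[TopicPartition("orders", 0)])
async def handle_pinned(body: str) -> None:
    # explicit assignment: consumes only partition 0 of "orders"
    ...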
- """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -470,43 +529,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence["AbstractPartitionAssignor"], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -516,22 +585,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = (RoundRobinPartitionAssignor,), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -539,11 +612,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. 
The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -552,11 +627,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -566,35 +643,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -618,16 +703,19 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -635,7 +723,8 @@ def subscriber( ] = None, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -657,20 +746,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -708,6 +802,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -739,12 +839,14 @@ def subscriber( ] = False, group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -762,7 +864,8 @@ def subscriber( ] = None, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -771,28 +874,34 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -801,43 +910,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence["AbstractPartitionAssignor"], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -847,22 +966,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. 
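# A sketch of the `no_reply` flag added in the hunk above: by default a
# handler's return value is published back to the incoming message's
# `reply_to` destination, and `no_reply=True` switches that auto-response off.
# Topic names are placeholders.
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")


@broker.subscriber("commands")                  # replies when `reply_to` is set
async def handle_command(body: str) -> str:
    return "done"


@broker.subscriber("audit-log", no_reply=True)  # never auto-responds
async def audit(body: str) -> str:
    return "ignored by the reply machinery"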
- """), + """ + ), ] = (RoundRobinPartitionAssignor,), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -870,11 +993,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -883,11 +1008,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -897,35 +1024,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -949,16 +1084,19 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -966,7 +1104,8 @@ def subscriber( ] = None, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -988,20 +1127,25 @@ def subscriber( to subscribe. 
It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -1039,6 +1183,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -1073,12 +1223,14 @@ def subscriber( ] = False, group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -1096,7 +1248,8 @@ def subscriber( ] = None, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -1105,28 +1258,34 @@ def subscriber( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. - """), + """ + ), ] = 500, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -1135,43 +1294,53 @@ def subscriber( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. 
- """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence["AbstractPartitionAssignor"], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -1181,22 +1350,26 @@ def subscriber( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = (RoundRobinPartitionAssignor,), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -1204,11 +1377,13 @@ def subscriber( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -1217,11 +1392,13 @@ def subscriber( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. Heartbeats are used to ensure @@ -1231,35 +1408,43 @@ def subscriber( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. 
- """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -1283,16 +1468,19 @@ def subscriber( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -1300,7 +1488,8 @@ def subscriber( ] = None, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -1322,20 +1511,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -1373,6 +1567,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -1427,6 +1627,7 @@ def subscriber( is_manual=not auto_commit, # subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=self._middlewares, broker_dependencies=self._dependencies, @@ -1465,7 +1666,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1473,14 +1675,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -1535,7 +1740,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1543,14 +1749,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. 
- """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -1605,7 +1814,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1613,14 +1823,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -1678,7 +1891,8 @@ def publisher( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -1686,14 +1900,17 @@ def publisher( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], diff --git a/faststream/kafka/fastapi/fastapi.py b/faststream/kafka/fastapi/fastapi.py index 541940d79e..18884edd12 100644 --- a/faststream/kafka/fastapi/fastapi.py +++ b/faststream/kafka/fastapi/fastapi.py @@ -890,7 +890,8 @@ def subscriber( ] = False, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -912,20 +913,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -963,6 +969,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -1379,7 +1391,8 @@ def subscriber( ], listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -1401,20 +1414,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. 
- """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -1452,6 +1470,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -1868,7 +1892,8 @@ def subscriber( ] = False, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -1890,20 +1915,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -1941,6 +1971,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -2360,7 +2396,8 @@ def subscriber( ] = False, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -2382,20 +2419,25 @@ def subscriber( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Iterable["TopicPartition"], - Doc(""" + Doc( + """ An explicit partitions list to assign. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -2433,6 +2475,12 @@ def subscriber( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -2613,6 +2661,7 @@ def subscriber( filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/kafka/publisher/usecase.py b/faststream/kafka/publisher/usecase.py index 66a3ed5b38..b254334a61 100644 --- a/faststream/kafka/publisher/usecase.py +++ b/faststream/kafka/publisher/usecase.py @@ -110,7 +110,8 @@ async def publish( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. 
If partition is `None` (and producer's partitioner config is left as default), @@ -118,21 +119,26 @@ async def publish( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, timestamp_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ Epoch milliseconds (from Jan 1 1970 UTC) to use as the message timestamp. Defaults to current time. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -205,17 +211,21 @@ async def publish( # type: ignore[override] ] = "", partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, timestamp_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ Epoch milliseconds (from Jan 1 1970 UTC) to use as the message timestamp. Defaults to current time. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], diff --git a/faststream/kafka/router.py b/faststream/kafka/router.py index 44540ee4d5..cef54442c8 100644 --- a/faststream/kafka/router.py +++ b/faststream/kafka/router.py @@ -51,7 +51,8 @@ def __init__( *, key: Annotated[ Union[bytes, Any, None], - Doc(""" + Doc( + """ A key to associate with the message. Can be used to determine which partition to send the message to. If partition is `None` (and producer's partitioner config is left as default), @@ -59,14 +60,17 @@ def __init__( partition (but if key is `None`, partition is chosen randomly). Must be type `bytes`, or be serializable to bytes via configured `key_serializer`. - """), + """ + ), ] = None, partition: Annotated[ Optional[int], - Doc(""" + Doc( + """ Specify a partition. If not set, the partition will be selected using the configured `partitioner`. - """), + """ + ), ] = None, headers: Annotated[ Optional[Dict[str, str]], @@ -156,12 +160,14 @@ def __init__( ] = False, group_id: Annotated[ Optional[str], - Doc(""" + Doc( + """ Name of the consumer group to join for dynamic partition assignment (if enabled), and to use for fetching and committing offsets. If `None`, auto-partition assignment (via group coordinator) and offset commits are disabled. - """), + """ + ), ] = None, key_deserializer: Annotated[ Optional[Callable[[bytes], Any]], @@ -179,7 +185,8 @@ def __init__( ] = None, fetch_max_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data the server should return for a fetch request. This is not an absolute maximum, if the first message in the first non-empty partition of the fetch @@ -188,28 +195,34 @@ def __init__( performs fetches to multiple brokers in parallel so memory usage will depend on the number of brokers containing partitions for the topic. - """), + """ + ), ] = 50 * 1024 * 1024, fetch_min_bytes: Annotated[ int, - Doc(""" + Doc( + """ Minimum amount of data the server should return for a fetch request, otherwise wait up to `fetch_max_wait_ms` for more data to accumulate. - """), + """ + ), ] = 1, fetch_max_wait_ms: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of time in milliseconds the server will block before answering the fetch request if there isn't sufficient data to immediately satisfy the requirement given by `fetch_min_bytes`. 
- """), + """ + ), ] = 500, max_partition_fetch_bytes: Annotated[ int, - Doc(""" + Doc( + """ The maximum amount of data per-partition the server will return. The maximum total memory used for a request ``= #partitions * max_partition_fetch_bytes``. @@ -218,43 +231,53 @@ def __init__( send messages larger than the consumer can fetch. If that happens, the consumer can get stuck trying to fetch a large message on a certain partition. - """), + """ + ), ] = 1 * 1024 * 1024, auto_offset_reset: Annotated[ Literal["latest", "earliest", "none"], - Doc(""" + Doc( + """ A policy for resetting offsets on `OffsetOutOfRangeError` errors: * `earliest` will move to the oldest available message * `latest` will move to the most recent * `none` will raise an exception so you can handle this case - """), + """ + ), ] = "latest", auto_commit: Annotated[ bool, - Doc(""" + Doc( + """ If `True` the consumer's offset will be periodically committed in the background. - """), + """ + ), ] = True, auto_commit_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds between automatic - offset commits, if `auto_commit` is `True`."""), + offset commits, if `auto_commit` is `True`.""" + ), ] = 5 * 1000, check_crcs: Annotated[ bool, - Doc(""" + Doc( + """ Automatically check the CRC32 of the records consumed. This ensures no on-the-wire or on-disk corruption to the messages occurred. This check adds some overhead, so it may be disabled in cases seeking extreme performance. - """), + """ + ), ] = True, partition_assignment_strategy: Annotated[ Sequence["AbstractPartitionAssignor"], - Doc(""" + Doc( + """ List of objects to use to distribute partition ownership amongst consumer instances when group management is used. This preference is implicit in the order @@ -264,22 +287,26 @@ def __init__( one. The coordinator will choose the old assignment strategy until all members have been updated. Then it will choose the new strategy. - """), + """ + ), ] = (RoundRobinPartitionAssignor,), max_poll_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum allowed time between calls to consume messages in batches. If this interval is exceeded the consumer is considered failed and the group will rebalance in order to reassign the partitions to another consumer group member. If API methods block waiting for messages, that time does not count against this timeout. - """), + """ + ), ] = 5 * 60 * 1000, rebalance_timeout_ms: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum time server will wait for this consumer to rejoin the group in a case of rebalance. In Java client this behaviour is bound to `max.poll.interval.ms` configuration, @@ -287,11 +314,13 @@ def __init__( decouple this setting to allow finer tuning by users that use `ConsumerRebalanceListener` to delay rebalacing. Defaults to ``session_timeout_ms`` - """), + """ + ), ] = None, session_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Client group session and failure detection timeout. The consumer sends periodic heartbeats (`heartbeat.interval.ms`) to indicate its liveness to the broker. @@ -300,11 +329,13 @@ def __init__( group and trigger a rebalance. The allowed range is configured with the **broker** configuration properties `group.min.session.timeout.ms` and `group.max.session.timeout.ms`. - """), + """ + ), ] = 10 * 1000, heartbeat_interval_ms: Annotated[ int, - Doc(""" + Doc( + """ The expected time in milliseconds between heartbeats to the consumer coordinator when using Kafka's group management feature. 
Heartbeats are used to ensure @@ -314,35 +345,43 @@ def __init__( should be set no higher than 1/3 of that value. It can be adjusted even lower to control the expected time for normal rebalances. - """), + """ + ), ] = 3 * 1000, consumer_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Maximum wait timeout for background fetching routine. Mostly defines how fast the system will see rebalance and request new data for new partitions. - """), + """ + ), ] = 200, max_poll_records: Annotated[ Optional[int], - Doc(""" + Doc( + """ The maximum number of records returned in a single call by batch consumer. Has no limit by default. - """), + """ + ), ] = None, exclude_internal_topics: Annotated[ bool, - Doc(""" + Doc( + """ Whether records from internal topics (such as offsets) should be exposed to the consumer. If set to True the only way to receive records from an internal topic is subscribing to it. - """), + """ + ), ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], - Doc(""" + Doc( + """ Controls how to read messages written transactionally. @@ -366,16 +405,19 @@ def __init__( to the high watermark when there are in flight transactions. Further, when in `read_committed` the seek_to_end method will return the LSO. See method docs below. - """), + """ + ), ] = "read_uncommitted", batch_timeout_ms: Annotated[ int, - Doc(""" + Doc( + """ Milliseconds spent waiting if data is not available in the buffer. If 0, returns immediately with any records that are available currently in the buffer, else returns empty. - """), + """ + ), ] = 200, max_records: Annotated[ Optional[int], @@ -383,7 +425,8 @@ def __init__( ] = None, listener: Annotated[ Optional["ConsumerRebalanceListener"], - Doc(""" + Doc( + """ Optionally include listener callback, which will be called before and after each rebalance operation. @@ -405,19 +448,24 @@ def __init__( to subscribe. It is guaranteed, however, that the partitions revoked/assigned through this interface are from topics subscribed in this call. - """), + """ + ), ] = None, pattern: Annotated[ Optional[str], - Doc(""" + Doc( + """ Pattern to match available topics. You must provide either topics or pattern, but not both. - """), + """ + ), ] = None, partitions: Annotated[ Optional[Iterable["TopicPartition"]], - Doc(""" + Doc( + """ A topic and partition tuple. You can't use 'topics' and 'partitions' in the same time. - """), + """ + ), ] = (), # broker args dependencies: Annotated[ @@ -455,6 +503,12 @@ def __init__( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI args title: Annotated[ Optional[str], @@ -508,6 +562,7 @@ def __init__( decoder=decoder, middlewares=middlewares, filter=filter, + no_reply=no_reply, # AsyncAPI args title=title, description=description, diff --git a/faststream/kafka/subscriber/factory.py b/faststream/kafka/subscriber/factory.py index fb5de4bf1a..0f504667f4 100644 --- a/faststream/kafka/subscriber/factory.py +++ b/faststream/kafka/subscriber/factory.py @@ -38,6 +38,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[Tuple[ConsumerRecord, ...]]"], @@ -63,6 +64,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"], @@ -88,6 +90,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable[ @@ -117,6 +120,7 @@ def create_subscriber( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable[ @@ -156,6 +160,7 @@ def create_subscriber( partitions=partitions, is_manual=is_manual, no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -174,6 +179,7 @@ def create_subscriber( partitions=partitions, is_manual=is_manual, no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py index 650bae75d1..fa01a11fcb 100644 --- a/faststream/kafka/subscriber/usecase.py +++ b/faststream/kafka/subscriber/usecase.py @@ -65,6 +65,7 @@ def __init__( default_parser: "AsyncCallable", default_decoder: "AsyncCallable", no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], @@ -78,6 +79,7 @@ def __init__( default_decoder=default_decoder, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -179,7 +181,7 @@ def _make_response_publisher( self, message: "StreamMessage[Any]", ) -> Sequence[FakePublisher]: - if not message.reply_to or self._producer is None: + if self._producer is None: return () return ( @@ -295,6 +297,7 @@ def __init__( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[ConsumerRecord]"], @@ -316,6 +319,7 @@ def __init__( default_decoder=AioKafkaParser.decode_message, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -345,6 +349,7 @@ def __init__( is_manual: bool, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable[ @@ -371,6 +376,7 @@ def __init__( default_decoder=AioKafkaParser.decode_message_batch, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, diff --git 
a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py index 9fe73a3386..ca6b84d4d4 100644 --- a/faststream/nats/broker/registrator.py +++ b/faststream/nats/broker/registrator.py @@ -1,11 +1,9 @@ from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Union, cast -from fast_depends.dependencies import Depends from nats.js import api from typing_extensions import Annotated, Doc, deprecated, override from faststream.broker.core.abc import ABCBroker -from faststream.broker.types import CustomCallable from faststream.broker.utils import default_filter from faststream.nats.helpers import StreamBuilder from faststream.nats.publisher.asyncapi import AsyncAPIPublisher @@ -183,6 +181,12 @@ def subscriber( # type: ignore[override] bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -233,6 +237,7 @@ def subscriber( # type: ignore[override] ack_first=ack_first, # subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=self._middlewares, broker_dependencies=self._dependencies, diff --git a/faststream/nats/fastapi/fastapi.py b/faststream/nats/fastapi/fastapi.py index 7ea3a2a5df..4010c5f02a 100644 --- a/faststream/nats/fastapi/fastapi.py +++ b/faststream/nats/fastapi/fastapi.py @@ -717,6 +717,12 @@ def subscriber( # type: ignore[override] bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -886,6 +892,7 @@ def subscriber( # type: ignore[override] max_workers=max_workers, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/nats/router.py b/faststream/nats/router.py index 74215d3e78..ace895ba59 100644 --- a/faststream/nats/router.py +++ b/faststream/nats/router.py @@ -271,6 +271,12 @@ def __init__( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -317,6 +323,7 @@ def __init__( filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/nats/subscriber/factory.py b/faststream/nats/subscriber/factory.py index 590598a2dd..2ae7c9b820 100644 --- a/faststream/nats/subscriber/factory.py +++ b/faststream/nats/subscriber/factory.py @@ -58,6 +58,7 @@ def create_subscriber( stream: Optional["JStream"], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[Any]"], @@ -148,6 +149,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -165,6 +167,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -186,6 +189,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -205,6 +209,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -225,6 +230,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -243,6 +249,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -261,6 +268,7 @@ def create_subscriber( extra_options=extra_options, # Subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index f668ac387e..76ae509052 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -24,7 +24,6 @@ from nats.js.kv import KeyValue from typing_extensions import Annotated, Doc, override -from faststream.broker.message import StreamMessage from faststream.broker.publisher.fake import FakePublisher from faststream.broker.subscriber.usecase import SubscriberUsecase from faststream.broker.types import CustomCallable, MsgType @@ -79,6 +78,7 @@ def __init__( default_parser: "AsyncCallable", default_decoder: "AsyncCallable", no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], @@ -96,6 +96,7 @@ def __init__( default_decoder=default_decoder, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -234,6 +235,7 @@ def __init__( default_parser: "AsyncCallable", default_decoder: "AsyncCallable", no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[MsgType]"], @@ -250,6 +252,7 @@ def __init__( 
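# The same flag propagated to the NATS entrypoints in the hunks above. With an
# RPC publish the requester receives the handler's return value; with
# `no_reply=True` the handler never auto-responds, so such a request would
# time out. Subject names and the server URL are placeholders.
import asyncio

from faststream.nats import NatsBroker

broker = NatsBroker("nats://localhost:4222")


@broker.subscriber("math.square")
async def square(n: int) -> int:
    return n * n                       # published back to the requester


@broker.subscriber("audit", no_reply=True)
async def audit(body: str) -> str:
    return "never auto-published"      # suppressed by no_reply=True


async def main() -> None:
    await broker.start()               # connect and start consuming
    reply = await broker.publish(3, "math.square", rpc=True)
    print(reply)
    await broker.close()


asyncio.run(main())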
default_decoder=default_decoder, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -261,13 +264,10 @@ def __init__( def _make_response_publisher( self, - message: Annotated[ - "StreamMessage[Any]", - Doc("Message requiring reply"), - ], + message: "StreamMessage[Any]", ) -> Sequence[FakePublisher]: """Create FakePublisher object to use it as one of `publishers` in `self.consume` scope.""" - if not message.reply_to or self._producer is None: + if self._producer is None: return () return ( @@ -372,6 +372,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -392,6 +393,7 @@ def __init__( default_decoder=parser_.decode_message, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -441,6 +443,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -457,6 +460,7 @@ def __init__( extra_options=extra_options, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -494,6 +498,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -515,6 +520,7 @@ def __init__( default_decoder=parser_.decode_message, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -572,6 +578,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -589,6 +596,7 @@ def __init__( extra_options=extra_options, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -628,6 +636,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -646,6 +655,7 @@ def __init__( queue="", # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -701,6 +711,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[Msg]"], @@ -718,6 +729,7 @@ def __init__( extra_options=extra_options, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -758,6 +770,7 @@ def __init__( extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], @@ -779,6 +792,7 @@ def 
__init__( default_decoder=parser.decode_batch, # Propagated args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -838,6 +852,7 @@ def __init__( subject=subject, extra_options=None, no_ack=True, + no_reply=True, retry=False, default_parser=parser.parse_message, default_decoder=parser.decode_message, @@ -942,6 +957,7 @@ def __init__( subject=subject, extra_options=None, no_ack=True, + no_reply=True, retry=False, default_parser=parser.parse_message, default_decoder=parser.decode_message, diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index 5a9190dfd7..34230cb788 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -145,7 +145,8 @@ async def ack(self) -> None: pass async def ack_sync( - self, timeout: float = 1 + self, + timeout: float = 1, ) -> "PatchedMessage": # pragma: no cover return self diff --git a/faststream/rabbit/broker/registrator.py b/faststream/rabbit/broker/registrator.py index e13b7b5261..0c0f99df70 100644 --- a/faststream/rabbit/broker/registrator.py +++ b/faststream/rabbit/broker/registrator.py @@ -98,6 +98,12 @@ def subscriber( # type: ignore[override] bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -125,6 +131,7 @@ def subscriber( # type: ignore[override] reply_config=reply_config, # subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=self._middlewares, broker_dependencies=self._dependencies, diff --git a/faststream/rabbit/fastapi/router.py b/faststream/rabbit/fastapi/router.py index 6d13beabae..d0445badfb 100644 --- a/faststream/rabbit/fastapi/router.py +++ b/faststream/rabbit/fastapi/router.py @@ -528,6 +528,12 @@ def subscriber( # type: ignore[override] bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -684,6 +690,7 @@ def subscriber( # type: ignore[override] filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/rabbit/router.py b/faststream/rabbit/router.py index 0890433347..18f6747b9d 100644 --- a/faststream/rabbit/router.py +++ b/faststream/rabbit/router.py @@ -250,6 +250,12 @@ def __init__( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -281,6 +287,7 @@ def __init__( filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/rabbit/subscriber/factory.py b/faststream/rabbit/subscriber/factory.py index f0ee6b752a..0683d2d62f 100644 --- a/faststream/rabbit/subscriber/factory.py +++ b/faststream/rabbit/subscriber/factory.py @@ -19,6 +19,7 @@ def create_subscriber( reply_config: Optional["ReplyConfig"], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], @@ -33,6 +34,7 @@ def create_subscriber( consume_args=consume_args, reply_config=reply_config, no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, diff --git a/faststream/rabbit/subscriber/usecase.py b/faststream/rabbit/subscriber/usecase.py index c0700dcc82..e518d3ca37 100644 --- a/faststream/rabbit/subscriber/usecase.py +++ b/faststream/rabbit/subscriber/usecase.py @@ -55,6 +55,7 @@ def __init__( reply_config: Optional["ReplyConfig"], # Subscriber args no_ack: bool, + no_reply: bool, retry: Union[bool, int], broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], @@ -70,6 +71,7 @@ def __init__( default_decoder=parser.decode_message, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -173,7 +175,7 @@ def _make_response_publisher( self, message: "StreamMessage[Any]", ) -> Sequence["FakePublisher"]: - if not message.reply_to or self._producer is None: + if self._producer is None: return () return ( diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py index 3164c7a01b..93bea0a7f4 100644 --- a/faststream/redis/broker/broker.py +++ b/faststream/redis/broker/broker.py @@ -13,7 +13,6 @@ ) from urllib.parse import urlparse -from fast_depends.dependencies import Depends from redis.asyncio.client import Redis from redis.asyncio.connection import ( Connection, diff --git a/faststream/redis/broker/registrator.py b/faststream/redis/broker/registrator.py index 7a643d189a..8038214f64 100644 --- a/faststream/redis/broker/registrator.py +++ b/faststream/redis/broker/registrator.py @@ -84,6 +84,12 @@ def subscriber( # type: ignore[override] bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -110,6 +116,7 @@ def subscriber( # type: ignore[override] stream=stream, # subscriber args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=self._middlewares, broker_dependencies=self._dependencies, diff --git a/faststream/redis/fastapi/fastapi.py b/faststream/redis/fastapi/fastapi.py index 5d45d2c47d..7a8de1b18d 100644 --- a/faststream/redis/fastapi/fastapi.py +++ b/faststream/redis/fastapi/fastapi.py @@ -488,6 +488,12 @@ def subscriber( # type: ignore[override] bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." 
+ ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -648,6 +654,7 @@ def subscriber( # type: ignore[override] filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/redis/router.py b/faststream/redis/router.py index 635f86083e..38964c2c59 100644 --- a/faststream/redis/router.py +++ b/faststream/redis/router.py @@ -163,6 +163,12 @@ def __init__( bool, Doc("Whether to disable **FastStream** autoacknowledgement logic or not."), ] = False, + no_reply: Annotated[ + bool, + Doc( + "Whether to disable **FastStream** RPC and Reply To auto responses or not." + ), + ] = False, # AsyncAPI information title: Annotated[ Optional[str], @@ -193,6 +199,7 @@ def __init__( filter=filter, retry=retry, no_ack=no_ack, + no_reply=no_reply, title=title, description=description, include_in_schema=include_in_schema, diff --git a/faststream/redis/subscriber/factory.py b/faststream/redis/subscriber/factory.py index da5fe02898..ee0ae84c9b 100644 --- a/faststream/redis/subscriber/factory.py +++ b/faststream/redis/subscriber/factory.py @@ -35,6 +35,7 @@ def create_subscriber( stream: Union["StreamSub", str, None], # Subscriber args no_ack: bool = False, + no_reply: bool = False, retry: bool = False, broker_dependencies: Iterable["Depends"] = (), broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"] = (), @@ -50,6 +51,7 @@ def create_subscriber( channel=channel_sub, # basic args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -65,6 +67,7 @@ def create_subscriber( stream=stream_sub, # basic args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -78,6 +81,7 @@ def create_subscriber( stream=stream_sub, # basic args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -93,6 +97,7 @@ def create_subscriber( list=list_sub, # basic args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -106,6 +111,7 @@ def create_subscriber( list=list_sub, # basic args no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py index 7919f384f7..5dee39ef52 100644 --- a/faststream/redis/subscriber/usecase.py +++ b/faststream/redis/subscriber/usecase.py @@ -69,6 +69,7 @@ def __init__( default_decoder: "AsyncCallable", # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -82,6 +83,7 @@ def __init__( default_decoder=default_decoder, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -129,9 +131,10 @@ def setup( # type: ignore[override] ) def _make_response_publisher( - self, message: "BrokerStreamMessage[UnifyRedisDict]" + self, + message: "BrokerStreamMessage[UnifyRedisDict]", ) -> Sequence[FakePublisher]: - if not message.reply_to or self._producer is None: + if self._producer is None: return () return ( @@ -207,6 +210,7 @@ def __init__( channel: "PubSub", # Subscriber args no_ack: bool, + no_reply: bool, 
retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -221,6 +225,7 @@ def __init__( default_decoder=parser.decode_message, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -296,6 +301,7 @@ def __init__( default_decoder: "AsyncCallable", # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -309,6 +315,7 @@ def __init__( default_decoder=default_decoder, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -360,6 +367,7 @@ def __init__( list: ListSub, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -375,6 +383,7 @@ def __init__( default_decoder=parser.decode_message, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -407,6 +416,7 @@ def __init__( list: ListSub, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -422,6 +432,7 @@ def __init__( default_decoder=parser.decode_message, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -459,6 +470,7 @@ def __init__( default_decoder: "AsyncCallable", # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -472,6 +484,7 @@ def __init__( default_decoder=default_decoder, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -605,6 +618,7 @@ def __init__( stream: StreamSub, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -620,6 +634,7 @@ def __init__( default_decoder=parser.decode_message, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, @@ -672,6 +687,7 @@ def __init__( stream: StreamSub, # Subscriber args no_ack: bool, + no_reply: bool, retry: bool, broker_dependencies: Iterable["Depends"], broker_middlewares: Iterable["BrokerMiddleware[UnifyRedisDict]"], @@ -687,6 +703,7 @@ def __init__( default_decoder=parser.decode_message, # Propagated options no_ack=no_ack, + no_reply=no_reply, retry=retry, broker_middlewares=broker_middlewares, broker_dependencies=broker_dependencies, diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py index 327f31627b..974b12d8cf 100644 --- a/tests/brokers/base/publish.py +++ b/tests/brokers/base/publish.py @@ -9,8 +9,8 @@ import pytest from pydantic import BaseModel +from faststream import BaseMiddleware from faststream._compat import dump_json, model_to_json -from faststream.annotations import Logger from faststream.broker.core.usecase import BrokerUsecase @@ -147,17 +147,17 @@ def patch_broker(self, broker: BrokerUsecase[Any, Any]) 
-> BrokerUsecase[Any, An ) async def test_serialize( self, - mock: Mock, queue: str, message, message_type, expected_message, - event, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @pub_broker.subscriber(queue, **self.subscriber_kwargs) - async def handler(m: message_type, logger: Logger): + async def handler(m: message_type): event.set() mock(m) @@ -178,14 +178,14 @@ async def handler(m: message_type, logger: Logger): @pytest.mark.asyncio() async def test_unwrap_dict( self, - mock: Mock, queue: str, - event, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @pub_broker.subscriber(queue, **self.subscriber_kwargs) - async def m(a: int, b: int, logger: Logger): + async def m(a: int, b: int): event.set() mock({"a": a, "b": b}) @@ -217,7 +217,7 @@ async def test_unwrap_list( pub_broker = self.get_broker(apply_types=True) @pub_broker.subscriber(queue, **self.subscriber_kwargs) - async def m(a: int, b: int, *args: Tuple[int, ...], logger: Logger): + async def m(a: int, b: int, *args: Tuple[int, ...]): event.set() mock({"a": a, "b": b, "args": args}) @@ -238,8 +238,8 @@ async def m(a: int, b: int, *args: Tuple[int, ...], logger: Logger): async def test_base_publisher( self, queue: str, - event, - mock, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @@ -270,8 +270,8 @@ async def resp(msg): async def test_publisher_object( self, queue: str, - event, - mock, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @@ -304,8 +304,8 @@ async def resp(msg): async def test_publish_manual( self, queue: str, - event, - mock, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @@ -337,7 +337,7 @@ async def resp(msg): async def test_multiple_publishers( self, queue: str, - mock, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @@ -380,7 +380,7 @@ async def resp2(msg): async def test_reusable_publishers( self, queue: str, - mock, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @@ -427,8 +427,8 @@ async def resp(): async def test_reply_to( self, queue: str, - event, - mock, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) @@ -457,12 +457,52 @@ async def handler(m): assert event.is_set() mock.assert_called_with("Hello!") + @pytest.mark.asyncio() + async def test_no_reply( + self, + queue: str, + event: asyncio.Event, + mock: Mock, + ): + class Mid(BaseMiddleware): + async def after_processed(self, *args: Any, **kwargs: Any): + event.set() + + return await super().after_processed(*args, **kwargs) + + pub_broker = self.get_broker(apply_types=True) + pub_broker.add_middleware(Mid) + + @pub_broker.subscriber(queue + "reply", **self.subscriber_kwargs) + async def reply_handler(m): + mock(m) + + @pub_broker.subscriber(queue, no_reply=True, **self.subscriber_kwargs) + async def handler(m): + return m + + async with self.patch_broker(pub_broker) as br: + await br.start() + + await asyncio.wait( + ( + asyncio.create_task( + br.publish("Hello!", queue, reply_to=queue + "reply") + ), + asyncio.create_task(event.wait()), + ), + timeout=self.timeout, + ) + + assert event.is_set() + assert not mock.called + @pytest.mark.asyncio() async def test_publisher_after_start( self, queue: str, - event, - mock, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker(apply_types=True) diff --git a/tests/brokers/rabbit/test_publish.py 
b/tests/brokers/rabbit/test_publish.py index 97be60f066..7e1986246e 100644 --- a/tests/brokers/rabbit/test_publish.py +++ b/tests/brokers/rabbit/test_publish.py @@ -1,5 +1,5 @@ import asyncio -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest @@ -18,8 +18,8 @@ def get_broker(self, apply_types: bool = False) -> RabbitBroker: async def test_reply_config( self, queue: str, - event, - mock, + event: asyncio.Event, + mock: Mock, ): pub_broker = self.get_broker() From f6b33364f97c7a0b424d137e8e2ed2036d107509 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Thu, 23 May 2024 19:53:49 +0000 Subject: [PATCH 20/43] Update Release Notes for 0.5.8 (#1462) Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> --- docs/docs/en/release.md | 78 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 8fac6fd678..1a40be40f1 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,84 @@ hide: --- # Release Notes +## 0.5.8 + +### What's Changed + +This is the time for a new **NATS** features! **FastStream** supports **NATS Key-Value** and **Object Storage** subscribption features in a native way now (big thx for @sheldygg)! + +1. KeyValue creation and watching API added (you can read updated [documentation section](https://faststream.airt.ai/latest/nats/jetstream/key-value/) for changes): + + ```python + from faststream import FastStream, Logger + from faststream.nats import NatsBroker + + broker = NatsBroker() + app = FastStream(broker) + + @broker.subscriber("some-key", kv_watch="bucket") + async def handler(msg: int, logger: Logger): + logger.info(msg) + + @app.after_startup + async def test(): + kv = await broker.key_value("bucket") + await kv.put("some-key", b"1") + ``` + +2. ObjectStore API added as well (you can read updated [documentation section](https://faststream.airt.ai/latest/nats/jetstream/object/) for changes): + + ```python + from faststream import FastStream, Logger + from faststream.nats import NatsBroker + + broker = NatsBroker() + app = FastStream(broker) + + @broker.subscriber("file-bucket", obj_watch=True) + async def handler(filename: str, logger: Logger): + logger.info(filename) + + @app.after_startup + async def test(): + object_store = await broker.object_storage("file-bucket") + await object_store.put("some-file.txt", b"1") + ``` + +3. Also now you can use just `pull_sub=True` instead of `pull_sub=PullSub()` in basic case: + + ```python + from faststream import FastStream, Logger + from faststream.nats import NatsBroker + + broker = NatsBroker() + app = FastStream(broker) + + @broker.subscriber("test", stream="stream", pull_sub=True) + async def handler(msg, logger: Logger): + logger.info(msg) + ``` + +Finally, we have a new feature, related to all brokers: special flag to suppress automatic RPC and reply_to responses: + +```python +@broker.subscriber("tests", no_reply=True) +async def handler(): + .... + +# will fail with timeout, because there is no automatic response +msg = await broker.publish("msg", "test", rpc=True) +``` + +* fix: when headers() returns None in AsyncConfluentParser, replace it with an empty tuple by @andreaimprovised in https://github.com/airtai/faststream/pull/1460 +* Implement Kv/Obj watch. 
by @sheldygg in https://github.com/airtai/faststream/pull/1383 +* feat: add subscriber no-reply option by @Lancetnik in https://github.com/airtai/faststream/pull/1461 + +### New Contributors +* @andreaimprovised made their first contribution in https://github.com/airtai/faststream/pull/1460 + +**Full Changelog**: https://github.com/airtai/faststream/compare/0.5.7...0.5.8 + ## 0.5.7 ### What's Changed From 44de4c9238c29e384765a6773b7ee2f743326f44 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Fri, 24 May 2024 14:43:50 +0530 Subject: [PATCH 21/43] Exclude typing_extensions version 4.12.* (#1467) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 17a90227f0..1f17f89827 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ dependencies = [ "anyio>=3.7.1,<5", "fast-depends>=2.4.0b0,<2.5.0", "typer>=0.9,!=0.12,<1", - "typing-extensions>=4.8.0", + "typing-extensions>=4.8.0,!=4.12.*", ] [project.optional-dependencies] From f3bc25cc21a5fe86451a87c11e4cc5151ec84672 Mon Sep 17 00:00:00 2001 From: Franky Braem Date: Fri, 24 May 2024 12:07:03 +0200 Subject: [PATCH 22/43] fix: add group/consumer to hash to avoid overwriting (#1463) Co-authored-by: zumuta Co-authored-by: Kumaran Rajendhiran --- faststream/redis/schemas/stream_sub.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/faststream/redis/schemas/stream_sub.py b/faststream/redis/schemas/stream_sub.py index 0aa2a34145..7ab768cb28 100644 --- a/faststream/redis/schemas/stream_sub.py +++ b/faststream/redis/schemas/stream_sub.py @@ -57,4 +57,8 @@ def __init__( self.max_records = max_records def __hash__(self) -> int: + if self.group is not None: + return hash( + f"stream:{self.name} group:{self.group} consumer:{self.consumer}" + ) return hash(f"stream:{self.name}") From 9505fad45264924ec6c881762b16353ee3c43bd8 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Fri, 24 May 2024 15:54:52 +0530 Subject: [PATCH 23/43] Bump version to 0.5.9 (#1468) --- faststream/__about__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/faststream/__about__.py b/faststream/__about__.py index 7a4bf92d8e..d0972600b0 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.8" +__version__ = "0.5.9" SERVICE_NAME = f"faststream-{__version__}" From d5dce31a23ac1de6142d04feae4da9af1a083e46 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Fri, 24 May 2024 11:04:01 +0000 Subject: [PATCH 24/43] Update Release Notes for 0.5.9 (#1469) Co-authored-by: kumaranvpl <7011056+kumaranvpl@users.noreply.github.com> --- docs/docs/en/release.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 1a40be40f1..41192308ac 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,19 @@ hide: --- # Release Notes +## 0.5.9 + +### What's Changed +* Update Release Notes for 0.5.8 by @faststream-release-notes-updater in [#1462](https://github.com/airtai/faststream/pull/1462){.external-link target="_blank"} +* Exclude typing_extensions version 4.12.* by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1467](https://github.com/airtai/faststream/pull/1467){.external-link target="_blank"} +* fix: add group/consumer to hash to avoid 
overwriting by [@fbraem](https://github.com/fbraem){.external-link target="_blank"} in [#1463](https://github.com/airtai/faststream/pull/1463){.external-link target="_blank"} +* Bump version to 0.5.9 by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1468](https://github.com/airtai/faststream/pull/1468){.external-link target="_blank"} + +### New Contributors +* [@fbraem](https://github.com/fbraem){.external-link target="_blank"} made their first contribution in [#1463](https://github.com/airtai/faststream/pull/1463){.external-link target="_blank"} + +**Full Changelog**: [#0.5.8...0.5.9](https://github.com/airtai/faststream/compare/0.5.8...0.5.9){.external-link target="_blank"} + ## 0.5.8 ### What's Changed From 1237edb2120700513380021b77f8e8ecef564b9b Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Sat, 25 May 2024 13:43:45 +0530 Subject: [PATCH 25/43] Pass logger to confluent producer and consumer (#1464) * Pass logger to confluent producer and consumer * Add test to check consumer_logger * Update func signature * Update function signature * User LoggerProto and remove Annotated * Use only logger in signature * Use LoggerProto as type --- .codespell-whitelist.txt | 2 +- .secrets.baseline | 6 +-- docs/docs/en/nats/jetstream/key-value.md | 2 +- docs/docs/en/nats/jetstream/object.md | 2 +- docs/docs/en/release.md | 24 +++++------ faststream/confluent/broker/broker.py | 2 + faststream/confluent/client.py | 19 +++++++- tests/brokers/confluent/test_logger.py | 55 ++++++++++++++++++++++++ 8 files changed, 92 insertions(+), 20 deletions(-) create mode 100644 tests/brokers/confluent/test_logger.py diff --git a/.codespell-whitelist.txt b/.codespell-whitelist.txt index 6b1a432b87..dcfed576bf 100644 --- a/.codespell-whitelist.txt +++ b/.codespell-whitelist.txt @@ -1 +1 @@ -dependant \ No newline at end of file +dependant diff --git a/.secrets.baseline b/.secrets.baseline index 4c3829ee62..5ceae71388 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -128,7 +128,7 @@ "filename": "docs/docs/en/release.md", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 1079, + "line_number": 1269, "is_secret": false } ], @@ -138,7 +138,7 @@ "filename": "examples/e10_middlewares.py", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 33, + "line_number": 35, "is_secret": false } ], @@ -163,5 +163,5 @@ } ] }, - "generated_at": "2024-04-23T11:41:19Z" + "generated_at": "2024-05-24T07:31:48Z" } diff --git a/docs/docs/en/nats/jetstream/key-value.md b/docs/docs/en/nats/jetstream/key-value.md index 2ca8d70add..0e579bce5d 100644 --- a/docs/docs/en/nats/jetstream/key-value.md +++ b/docs/docs/en/nats/jetstream/key-value.md @@ -54,4 +54,4 @@ from faststream.nats import NatsBroker, KvWatch ) async def handler(msg: str): ... -``` \ No newline at end of file +``` diff --git a/docs/docs/en/nats/jetstream/object.md b/docs/docs/en/nats/jetstream/object.md index 33aa1055df..9d21914cfc 100644 --- a/docs/docs/en/nats/jetstream/object.md +++ b/docs/docs/en/nats/jetstream/object.md @@ -65,4 +65,4 @@ from faststream.nats import NatsBroker, ObjWatch ) async def handler(filename: str): ... -``` \ No newline at end of file +``` diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 41192308ac..c6546bcd3d 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -32,37 +32,37 @@ hide: This is the time for a new **NATS** features! 
**FastStream** supports **NATS Key-Value** and **Object Storage** subscribption features in a native way now (big thx for @sheldygg)! 1. KeyValue creation and watching API added (you can read updated [documentation section](https://faststream.airt.ai/latest/nats/jetstream/key-value/) for changes): - + ```python from faststream import FastStream, Logger from faststream.nats import NatsBroker - + broker = NatsBroker() app = FastStream(broker) - + @broker.subscriber("some-key", kv_watch="bucket") async def handler(msg: int, logger: Logger): logger.info(msg) - + @app.after_startup async def test(): kv = await broker.key_value("bucket") await kv.put("some-key", b"1") ``` - + 2. ObjectStore API added as well (you can read updated [documentation section](https://faststream.airt.ai/latest/nats/jetstream/object/) for changes): ```python from faststream import FastStream, Logger from faststream.nats import NatsBroker - + broker = NatsBroker() app = FastStream(broker) - + @broker.subscriber("file-bucket", obj_watch=True) async def handler(filename: str, logger: Logger): logger.info(filename) - + @app.after_startup async def test(): object_store = await broker.object_storage("file-bucket") @@ -74,22 +74,22 @@ This is the time for a new **NATS** features! **FastStream** supports **NATS Key ```python from faststream import FastStream, Logger from faststream.nats import NatsBroker - + broker = NatsBroker() app = FastStream(broker) - + @broker.subscriber("test", stream="stream", pull_sub=True) async def handler(msg, logger: Logger): logger.info(msg) ``` - + Finally, we have a new feature, related to all brokers: special flag to suppress automatic RPC and reply_to responses: ```python @broker.subscriber("tests", no_reply=True) async def handler(): .... - + # will fail with timeout, because there is no automatic response msg = await broker.publish("msg", "test", rpc=True) ``` diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index 30c97ae298..960b2606ad 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -448,6 +448,7 @@ async def _connect( # type: ignore[override] producer = AsyncConfluentProducer( **kwargs, client_id=client_id, + logger=self.logger, ) self._producer = AsyncConfluentFastProducer( @@ -457,6 +458,7 @@ async def _connect( # type: ignore[override] return partial( AsyncConfluentConsumer, **filter_by_dict(ConsumerConnectionParams, kwargs), + logger=self.logger, ) async def start(self) -> None: diff --git a/faststream/confluent/client.py b/faststream/confluent/client.py index 4744f9b8d8..f1703c3694 100644 --- a/faststream/confluent/client.py +++ b/faststream/confluent/client.py @@ -2,6 +2,7 @@ from ssl import SSLContext from time import time from typing import ( + TYPE_CHECKING, Any, Callable, Dict, @@ -16,10 +17,14 @@ from confluent_kafka import Consumer, KafkaError, KafkaException, Message, Producer from confluent_kafka.admin import AdminClient, NewTopic from pydantic import BaseModel +from typing_extensions import Annotated, Doc from faststream.log import logger from faststream.utils.functions import call_or_await +if TYPE_CHECKING: + from faststream.types import LoggerProto + _missing = object() @@ -105,7 +110,12 @@ def __init__( sasl_kerberos_service_name: str = "kafka", sasl_kerberos_domain_name: Optional[str] = None, sasl_oauth_token_provider: Optional[str] = None, + logger: Annotated[ + Union["LoggerProto", None, object], + Doc("User specified logger to pass into Context and log service messages."), + ] 
= logger, ) -> None: + self.logger = logger if isinstance(bootstrap_servers, Iterable) and not isinstance( bootstrap_servers, str ): @@ -145,7 +155,7 @@ def __init__( } ) - self.producer = Producer(self.config) + self.producer = Producer(self.config, logger=self.logger) # self.producer.init_transactions() self.producer.list_topics() self.loop = loop or asyncio.get_event_loop() @@ -295,7 +305,12 @@ def __init__( sasl_kerberos_service_name: str = "kafka", sasl_kerberos_domain_name: Optional[str] = None, sasl_oauth_token_provider: Optional[str] = None, + logger: Annotated[ + Union["LoggerProto", None, object], + Doc("User specified logger to pass into Context and log service messages."), + ] = logger, ) -> None: + self.logger = logger if group_id is None: group_id = "confluent-kafka-consumer-group" @@ -352,7 +367,7 @@ def __init__( self.loop = loop or asyncio.get_event_loop() create_topics(topics=self.topics, config=self.config) - self.consumer = Consumer(self.config) + self.consumer = Consumer(self.config, logger=self.logger) async def start(self) -> None: """Starts the Kafka consumer and subscribes to the specified topics.""" diff --git a/tests/brokers/confluent/test_logger.py b/tests/brokers/confluent/test_logger.py new file mode 100644 index 0000000000..ab72676fc5 --- /dev/null +++ b/tests/brokers/confluent/test_logger.py @@ -0,0 +1,55 @@ +import asyncio +import logging +from typing import Any, ClassVar, Dict + +import pytest + +from faststream.broker.core.usecase import BrokerUsecase +from faststream.confluent import KafkaBroker + + +@pytest.mark.confluent() +class TestLogger: + """A class to represent a test Kafka broker.""" + + timeout: int = 10 + subscriber_kwargs: ClassVar[Dict[str, Any]] = {"auto_offset_reset": "earliest"} + + def get_broker(self, apply_types: bool = False): + return KafkaBroker(apply_types=apply_types) + + def patch_broker(self, broker: BrokerUsecase[Any, Any]) -> BrokerUsecase[Any, Any]: + return broker + + @pytest.mark.asyncio() + async def test_custom_logger( + self, + queue: str, + event: asyncio.Event, + ): + test_logger = logging.getLogger("test_logger") + consume_broker = KafkaBroker(logger=test_logger) + + @consume_broker.subscriber(queue, **self.subscriber_kwargs) + def subscriber(m): + event.set() + + async with self.patch_broker(consume_broker) as br: + await br.start() + + for sub in br._subscribers.values(): + consumer_logger = sub.consumer.logger + assert consumer_logger == test_logger + + producer_logger = br._producer._producer.logger + assert producer_logger == test_logger + + await asyncio.wait( + ( + asyncio.create_task(br.publish("hello", queue)), + asyncio.create_task(event.wait()), + ), + timeout=self.timeout, + ) + + assert event.is_set() From 3d5f241b4bc98b178f668df1a78ed17ebf87642a Mon Sep 17 00:00:00 2001 From: Alex Date: Sat, 25 May 2024 20:55:50 +0100 Subject: [PATCH 26/43] Fixes #1412 with `TestKafkaBroker` behaviour where Consumer Groups weren't being respected (#1413) * Commit for #1413 * Fixed bug in returned value * Fixed bug in returned value * Merged in from Main * Merged in from Main * Refectored Code slightly * linting * linting + formatting * tests: remove useless markers * Added batch tests for group_id overlap versions of subscribers * Added batch tests for group_id overlap versions of subscribers --------- Co-authored-by: Nikita Pastukhov --- faststream/confluent/testing.py | 8 ++- faststream/kafka/testing.py | 25 ++++---- scripts/set_variables.sh | 2 +- tests/brokers/confluent/test_test_client.py | 68 +++++++++++++++++++++ 
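The logger plumbing added in PATCH 25 above means that a `logger` passed to the Confluent `KafkaBroker` is now forwarded to the underlying `confluent_kafka` `Producer` and `Consumer` clients, not only used for FastStream's own service messages. A minimal user-facing sketch of that behavior — the bootstrap server, topic name, and logger name are illustrative only:

```python
import logging

from faststream import FastStream
from faststream.confluent import KafkaBroker

# Application logger; after this patch the broker also hands it down to the
# confluent_kafka Producer/Consumer instances it creates.
app_logger = logging.getLogger("my_service")

broker = KafkaBroker("localhost:9092", logger=app_logger)
app = FastStream(broker)


@broker.subscriber("in-topic")  # illustrative topic name
async def handle(msg: str) -> None:
    app_logger.info("received: %s", msg)
```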
tests/brokers/kafka/test_test_client.py | 68 +++++++++++++++++++++ 5 files changed, 156 insertions(+), 15 deletions(-) diff --git a/faststream/confluent/testing.py b/faststream/confluent/testing.py index 9420ff3aa5..1a00e829c2 100644 --- a/faststream/confluent/testing.py +++ b/faststream/confluent/testing.py @@ -98,9 +98,11 @@ async def publish( # type: ignore[override] reply_to=reply_to, ) + return_value = None + for handler in self.broker._subscribers.values(): # pragma: no branch if topic in handler.topics: - return await call_handler( + handle_value = await call_handler( handler=handler, message=[incoming] if isinstance(handler, AsyncAPIBatchSubscriber) @@ -110,7 +112,9 @@ async def publish( # type: ignore[override] raise_timeout=raise_timeout, ) - return None + return_value = return_value or handle_value + + return return_value async def publish_batch( self, diff --git a/faststream/kafka/testing.py b/faststream/kafka/testing.py index fd8b520332..5abe59cf97 100755 --- a/faststream/kafka/testing.py +++ b/faststream/kafka/testing.py @@ -105,18 +105,17 @@ async def publish( # type: ignore[override] reply_to=reply_to, ) - for handler in self.broker._subscribers.values(): # pragma: no branch - call: bool = False - - for p in handler.partitions: - if p.topic == topic and (partition is None or p.partition == partition): - call = True + return_value = None - if not call and topic in handler.topics: - call = True - - if call: - return await call_handler( + for handler in self.broker._subscribers.values(): # pragma: no branch + if ( + any( + p.topic == topic and (partition is None or p.partition == partition) + for p in handler.partitions + ) + or topic in handler.topics + ): + handle_value = await call_handler( handler=handler, message=[incoming] if isinstance(handler, AsyncAPIBatchSubscriber) @@ -126,7 +125,9 @@ async def publish( # type: ignore[override] raise_timeout=raise_timeout, ) - return None + return_value = return_value or handle_value + + return return_value async def publish_batch( self, diff --git a/scripts/set_variables.sh b/scripts/set_variables.sh index e2c0c9531c..ef662336bb 100755 --- a/scripts/set_variables.sh +++ b/scripts/set_variables.sh @@ -9,7 +9,7 @@ echo AIRT_PROJECT variable set to $AIRT_PROJECT export UID=$(id -u) export GID=$(id -g) -export DOCKER_COMPOSE_PROJECT="${USER}-faststream" +export DOCKER_COMPOSE_PROJECT="${USER//./_}-faststream" echo DOCKER_COMPOSE_PROJECT variable set to $DOCKER_COMPOSE_PROJECT export KAFKA_HOSTNAME="${DOCKER_COMPOSE_PROJECT}-kafka-1" echo KAFKA_HOSTNAME variable set to $KAFKA_HOSTNAME diff --git a/tests/brokers/confluent/test_test_client.py b/tests/brokers/confluent/test_test_client.py index b8e232802f..53c10b7d20 100644 --- a/tests/brokers/confluent/test_test_client.py +++ b/tests/brokers/confluent/test_test_client.py @@ -134,3 +134,71 @@ async def h2(): ... await h2.wait_call(10) assert len(routes) == 2 + + async def test_multiple_subscribers_different_groups( + self, + queue: str, + test_broker: KafkaBroker, + ): + @test_broker.subscriber(queue, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, group_id="group2") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 1 + + async def test_multiple_subscribers_same_group( + self, + queue: str, + test_broker: KafkaBroker, + ): + @test_broker.subscriber(queue, group_id="group1") + async def subscriber1(): ... 
+ + @test_broker.subscriber(queue, group_id="group1") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 0 + + async def test_multiple_batch_subscriber_with_different_group( + self, + test_broker: KafkaBroker, + queue: str, + ): + @test_broker.subscriber(queue, batch=True, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, batch=True, group_id="group2") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 1 + + async def test_multiple_batch_subscriber_with_same_group( + self, + test_broker: KafkaBroker, + queue: str, + ): + @test_broker.subscriber(queue, batch=True, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, batch=True, group_id="group1") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 0 diff --git a/tests/brokers/kafka/test_test_client.py b/tests/brokers/kafka/test_test_client.py index a89ecff707..b1c01b8ff0 100644 --- a/tests/brokers/kafka/test_test_client.py +++ b/tests/brokers/kafka/test_test_client.py @@ -182,3 +182,71 @@ async def h2(): ... await h2.wait_call(3) assert len(routes) == 2 + + async def test_multiple_subscribers_different_groups( + self, + queue: str, + test_broker: KafkaBroker, + ): + @test_broker.subscriber(queue, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, group_id="group2") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 1 + + async def test_multiple_subscribers_same_group( + self, + queue: str, + test_broker: KafkaBroker, + ): + @test_broker.subscriber(queue, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, group_id="group1") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 0 + + async def test_multiple_batch_subscriber_with_different_group( + self, + queue: str, + test_broker: KafkaBroker, + ): + @test_broker.subscriber(queue, batch=True, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, batch=True, group_id="group2") + async def subscriber2(): ... + + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 1 + + async def test_multiple_batch_subscriber_with_same_group( + self, + queue: str, + test_broker: KafkaBroker, + ): + @test_broker.subscriber(queue, batch=True, group_id="group1") + async def subscriber1(): ... + + @test_broker.subscriber(queue, batch=True, group_id="group1") + async def subscriber2(): ... 
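The tests added here assert the corrected `TestKafkaBroker` semantics: subscribers with different `group_id`s each receive an in-memory publish, while subscribers sharing a `group_id` receive it only once. A short self-contained sketch of the same behavior from a user's test suite, with illustrative topic and group names:

```python
import pytest

from faststream.kafka import KafkaBroker, TestKafkaBroker

broker = KafkaBroker()


@broker.subscriber("orders", group_id="billing")
async def billing_handler(msg: str) -> None: ...


@broker.subscriber("orders", group_id="analytics")
async def analytics_handler(msg: str) -> None: ...


@pytest.mark.asyncio()
async def test_consumer_groups_are_respected() -> None:
    async with TestKafkaBroker(broker) as br:
        await br.publish("new order", "orders")

        # Different consumer groups -> each subscriber gets its own copy.
        billing_handler.mock.assert_called_once_with("new order")
        analytics_handler.mock.assert_called_once_with("new order")
```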
+ + await test_broker.start() + await test_broker.publish("", queue) + + assert subscriber1.mock.call_count == 1 + assert subscriber2.mock.call_count == 0 From 9320177fae567fc4093f82cd0e0da9f19df659a9 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Mon, 27 May 2024 20:41:53 +0530 Subject: [PATCH 27/43] Chore: update dependency versions (#1478) --- .secrets.baseline | 4 ++-- pyproject.toml | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.secrets.baseline b/.secrets.baseline index 5ceae71388..15213e9215 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -128,7 +128,7 @@ "filename": "docs/docs/en/release.md", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 1269, + "line_number": 1282, "is_secret": false } ], @@ -163,5 +163,5 @@ } ] }, - "generated_at": "2024-05-24T07:31:48Z" + "generated_at": "2024-05-27T11:45:58Z" } diff --git a/pyproject.toml b/pyproject.toml index 1f17f89827..4236bd89e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ otel = ["opentelemetry-sdk>=1.24.0,<2.0.0"] optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel]"] devdocs = [ - "mkdocs-material==9.5.24", + "mkdocs-material==9.5.25", "mkdocs-static-i18n==1.2.3", "mdx-include==1.4.2", "mkdocstrings[python]==0.25.1", @@ -113,12 +113,12 @@ lint = [ "faststream[types]", "ruff==0.4.4", "bandit==1.7.8", - "semgrep==1.73.0", - "codespell==2.2.6", + "semgrep==1.74.0", + "codespell==2.3.0", ] test-core = [ - "coverage[toml]==7.5.1", + "coverage[toml]==7.5.2", "pytest==8.2.1", "pytest-asyncio==0.23.7", "dirty-equals==0.7.1.post0", @@ -137,7 +137,7 @@ testing = [ dev = [ "faststream[optionals,lint,testing,devdocs]", "pre-commit==3.5.0; python_version < '3.9'", - "pre-commit==3.7.0; python_version >= '3.9'", + "pre-commit==3.7.1; python_version >= '3.9'", "detect-secrets==1.5.0", ] From d83c1cc1e4984be14674a75159f85c38bda49f36 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Mon, 27 May 2024 20:42:14 +0530 Subject: [PATCH 28/43] Remove typing-extensions version restriction (#1477) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4236bd89e7..5b22e5c4c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ dependencies = [ "anyio>=3.7.1,<5", "fast-depends>=2.4.0b0,<2.5.0", "typer>=0.9,!=0.12,<1", - "typing-extensions>=4.8.0,!=4.12.*", + "typing-extensions>=4.8.0", ] [project.optional-dependencies] From 031fd3185aebf93b1705d6cc38855a343dd60540 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Wed, 29 May 2024 08:48:06 +0300 Subject: [PATCH 29/43] feat (#1431): add Response class (#1481) * feat (#1431): add Response class * docs: add Response References * docs: add Confluent Response API --- docs/docs/SUMMARY.md | 24 +++++++++ docs/docs/en/api/faststream/Response.md | 11 ++++ .../faststream/broker/response/Response.md | 11 ++++ .../api/faststream/confluent/KafkaResponse.md | 11 ++++ .../confluent/response/KafkaResponse.md | 11 ++++ .../en/api/faststream/kafka/KafkaResponse.md | 11 ++++ .../kafka/response/KafkaResponse.md | 11 ++++ .../en/api/faststream/nats/NatsResponse.md | 11 ++++ .../faststream/nats/response/NatsResponse.md | 11 ++++ .../api/faststream/rabbit/RabbitResponse.md | 11 ++++ .../rabbit/response/RabbitResponse.md | 11 ++++ .../en/api/faststream/redis/RedisResponse.md | 11 ++++ .../redis/response/RedisResponse.md | 11 ++++ faststream/__about__.py | 2 +- faststream/__init__.py | 3 ++ 
faststream/broker/response.py | 52 +++++++++++++++++++ faststream/broker/subscriber/usecase.py | 20 ++++--- faststream/confluent/__init__.py | 2 + faststream/confluent/response.py | 5 ++ faststream/kafka/__init__.py | 2 + faststream/kafka/response.py | 5 ++ faststream/nats/__init__.py | 2 + faststream/nats/response.py | 5 ++ faststream/rabbit/__init__.py | 2 + faststream/rabbit/publisher/asyncapi.py | 1 - faststream/rabbit/response.py | 5 ++ faststream/rabbit/testing.py | 19 ++++--- faststream/redis/__init__.py | 2 + faststream/redis/publisher/usecase.py | 8 ++- faststream/redis/response.py | 5 ++ tests/brokers/base/publish.py | 42 ++++++++++++++- tests/brokers/rabbit/test_test_client.py | 12 +++++ tests/brokers/test_response.py | 25 +++++++++ 33 files changed, 357 insertions(+), 18 deletions(-) create mode 100644 docs/docs/en/api/faststream/Response.md create mode 100644 docs/docs/en/api/faststream/broker/response/Response.md create mode 100644 docs/docs/en/api/faststream/confluent/KafkaResponse.md create mode 100644 docs/docs/en/api/faststream/confluent/response/KafkaResponse.md create mode 100644 docs/docs/en/api/faststream/kafka/KafkaResponse.md create mode 100644 docs/docs/en/api/faststream/kafka/response/KafkaResponse.md create mode 100644 docs/docs/en/api/faststream/nats/NatsResponse.md create mode 100644 docs/docs/en/api/faststream/nats/response/NatsResponse.md create mode 100644 docs/docs/en/api/faststream/rabbit/RabbitResponse.md create mode 100644 docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md create mode 100644 docs/docs/en/api/faststream/redis/RedisResponse.md create mode 100644 docs/docs/en/api/faststream/redis/response/RedisResponse.md create mode 100644 faststream/broker/response.py create mode 100644 faststream/confluent/response.py create mode 100644 faststream/kafka/response.py create mode 100644 faststream/nats/response.py create mode 100644 faststream/rabbit/response.py create mode 100644 faststream/redis/response.py create mode 100644 tests/brokers/test_response.py diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index e09163b08a..150de9cc30 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -116,6 +116,7 @@ search: - [FastStream](public_api/faststream/FastStream.md) - [Header](public_api/faststream/Header.md) - [Path](public_api/faststream/Path.md) + - [Response](public_api/faststream/Response.md) - [TestApp](public_api/faststream/TestApp.md) - [apply_types](public_api/faststream/apply_types.md) - asyncapi @@ -124,6 +125,7 @@ search: - confluent - [KafkaBroker](public_api/faststream/confluent/KafkaBroker.md) - [KafkaPublisher](public_api/faststream/confluent/KafkaPublisher.md) + - [KafkaResponse](public_api/faststream/confluent/KafkaResponse.md) - [KafkaRoute](public_api/faststream/confluent/KafkaRoute.md) - [KafkaRouter](public_api/faststream/confluent/KafkaRouter.md) - [TestApp](public_api/faststream/confluent/TestApp.md) @@ -131,6 +133,7 @@ search: - kafka - [KafkaBroker](public_api/faststream/kafka/KafkaBroker.md) - [KafkaPublisher](public_api/faststream/kafka/KafkaPublisher.md) + - [KafkaResponse](public_api/faststream/kafka/KafkaResponse.md) - [KafkaRoute](public_api/faststream/kafka/KafkaRoute.md) - [KafkaRouter](public_api/faststream/kafka/KafkaRouter.md) - [TestApp](public_api/faststream/kafka/TestApp.md) @@ -146,6 +149,7 @@ search: - [KvWatch](public_api/faststream/nats/KvWatch.md) - [NatsBroker](public_api/faststream/nats/NatsBroker.md) - [NatsPublisher](public_api/faststream/nats/NatsPublisher.md) + - 
[NatsResponse](public_api/faststream/nats/NatsResponse.md) - [NatsRoute](public_api/faststream/nats/NatsRoute.md) - [NatsRouter](public_api/faststream/nats/NatsRouter.md) - [ObjWatch](public_api/faststream/nats/ObjWatch.md) @@ -165,6 +169,7 @@ search: - [RabbitExchange](public_api/faststream/rabbit/RabbitExchange.md) - [RabbitPublisher](public_api/faststream/rabbit/RabbitPublisher.md) - [RabbitQueue](public_api/faststream/rabbit/RabbitQueue.md) + - [RabbitResponse](public_api/faststream/rabbit/RabbitResponse.md) - [RabbitRoute](public_api/faststream/rabbit/RabbitRoute.md) - [RabbitRouter](public_api/faststream/rabbit/RabbitRouter.md) - [ReplyConfig](public_api/faststream/rabbit/ReplyConfig.md) @@ -175,6 +180,7 @@ search: - [PubSub](public_api/faststream/redis/PubSub.md) - [RedisBroker](public_api/faststream/redis/RedisBroker.md) - [RedisPublisher](public_api/faststream/redis/RedisPublisher.md) + - [RedisResponse](public_api/faststream/redis/RedisResponse.md) - [RedisRoute](public_api/faststream/redis/RedisRoute.md) - [RedisRouter](public_api/faststream/redis/RedisRouter.md) - [StreamSub](public_api/faststream/redis/StreamSub.md) @@ -188,6 +194,7 @@ search: - [FastStream](api/faststream/FastStream.md) - [Header](api/faststream/Header.md) - [Path](api/faststream/Path.md) + - [Response](api/faststream/Response.md) - [TestApp](api/faststream/TestApp.md) - [apply_types](api/faststream/apply_types.md) - app @@ -349,6 +356,8 @@ search: - [PublisherProto](api/faststream/broker/publisher/proto/PublisherProto.md) - usecase - [PublisherUsecase](api/faststream/broker/publisher/usecase/PublisherUsecase.md) + - response + - [Response](api/faststream/broker/response/Response.md) - router - [ArgsContainer](api/faststream/broker/router/ArgsContainer.md) - [BrokerRouter](api/faststream/broker/router/BrokerRouter.md) @@ -413,6 +422,7 @@ search: - confluent - [KafkaBroker](api/faststream/confluent/KafkaBroker.md) - [KafkaPublisher](api/faststream/confluent/KafkaPublisher.md) + - [KafkaResponse](api/faststream/confluent/KafkaResponse.md) - [KafkaRoute](api/faststream/confluent/KafkaRoute.md) - [KafkaRouter](api/faststream/confluent/KafkaRouter.md) - [TestApp](api/faststream/confluent/TestApp.md) @@ -464,6 +474,8 @@ search: - [BatchPublisher](api/faststream/confluent/publisher/usecase/BatchPublisher.md) - [DefaultPublisher](api/faststream/confluent/publisher/usecase/DefaultPublisher.md) - [LogicPublisher](api/faststream/confluent/publisher/usecase/LogicPublisher.md) + - response + - [KafkaResponse](api/faststream/confluent/response/KafkaResponse.md) - router - [KafkaPublisher](api/faststream/confluent/router/KafkaPublisher.md) - [KafkaRoute](api/faststream/confluent/router/KafkaRoute.md) @@ -506,6 +518,7 @@ search: - kafka - [KafkaBroker](api/faststream/kafka/KafkaBroker.md) - [KafkaPublisher](api/faststream/kafka/KafkaPublisher.md) + - [KafkaResponse](api/faststream/kafka/KafkaResponse.md) - [KafkaRoute](api/faststream/kafka/KafkaRoute.md) - [KafkaRouter](api/faststream/kafka/KafkaRouter.md) - [TestApp](api/faststream/kafka/TestApp.md) @@ -550,6 +563,8 @@ search: - [BatchPublisher](api/faststream/kafka/publisher/usecase/BatchPublisher.md) - [DefaultPublisher](api/faststream/kafka/publisher/usecase/DefaultPublisher.md) - [LogicPublisher](api/faststream/kafka/publisher/usecase/LogicPublisher.md) + - response + - [KafkaResponse](api/faststream/kafka/response/KafkaResponse.md) - router - [KafkaPublisher](api/faststream/kafka/router/KafkaPublisher.md) - [KafkaRoute](api/faststream/kafka/router/KafkaRoute.md) @@ 
-592,6 +607,7 @@ search: - [KvWatch](api/faststream/nats/KvWatch.md) - [NatsBroker](api/faststream/nats/NatsBroker.md) - [NatsPublisher](api/faststream/nats/NatsPublisher.md) + - [NatsResponse](api/faststream/nats/NatsResponse.md) - [NatsRoute](api/faststream/nats/NatsRoute.md) - [NatsRouter](api/faststream/nats/NatsRouter.md) - [ObjWatch](api/faststream/nats/ObjWatch.md) @@ -657,6 +673,8 @@ search: - [NatsJSFastProducer](api/faststream/nats/publisher/producer/NatsJSFastProducer.md) - usecase - [LogicPublisher](api/faststream/nats/publisher/usecase/LogicPublisher.md) + - response + - [NatsResponse](api/faststream/nats/response/NatsResponse.md) - router - [NatsPublisher](api/faststream/nats/router/NatsPublisher.md) - [NatsRoute](api/faststream/nats/router/NatsRoute.md) @@ -728,6 +746,7 @@ search: - [RabbitExchange](api/faststream/rabbit/RabbitExchange.md) - [RabbitPublisher](api/faststream/rabbit/RabbitPublisher.md) - [RabbitQueue](api/faststream/rabbit/RabbitQueue.md) + - [RabbitResponse](api/faststream/rabbit/RabbitResponse.md) - [RabbitRoute](api/faststream/rabbit/RabbitRoute.md) - [RabbitRouter](api/faststream/rabbit/RabbitRouter.md) - [ReplyConfig](api/faststream/rabbit/ReplyConfig.md) @@ -764,6 +783,8 @@ search: - usecase - [LogicPublisher](api/faststream/rabbit/publisher/usecase/LogicPublisher.md) - [PublishKwargs](api/faststream/rabbit/publisher/usecase/PublishKwargs.md) + - response + - [RabbitResponse](api/faststream/rabbit/response/RabbitResponse.md) - router - [RabbitPublisher](api/faststream/rabbit/router/RabbitPublisher.md) - [RabbitRoute](api/faststream/rabbit/router/RabbitRoute.md) @@ -808,6 +829,7 @@ search: - [PubSub](api/faststream/redis/PubSub.md) - [RedisBroker](api/faststream/redis/RedisBroker.md) - [RedisPublisher](api/faststream/redis/RedisPublisher.md) + - [RedisResponse](api/faststream/redis/RedisResponse.md) - [RedisRoute](api/faststream/redis/RedisRoute.md) - [RedisRouter](api/faststream/redis/RedisRouter.md) - [StreamSub](api/faststream/redis/StreamSub.md) @@ -869,6 +891,8 @@ search: - [ListPublisher](api/faststream/redis/publisher/usecase/ListPublisher.md) - [LogicPublisher](api/faststream/redis/publisher/usecase/LogicPublisher.md) - [StreamPublisher](api/faststream/redis/publisher/usecase/StreamPublisher.md) + - response + - [RedisResponse](api/faststream/redis/response/RedisResponse.md) - router - [RedisPublisher](api/faststream/redis/router/RedisPublisher.md) - [RedisRoute](api/faststream/redis/router/RedisRoute.md) diff --git a/docs/docs/en/api/faststream/Response.md b/docs/docs/en/api/faststream/Response.md new file mode 100644 index 0000000000..3475e3f584 --- /dev/null +++ b/docs/docs/en/api/faststream/Response.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.Response diff --git a/docs/docs/en/api/faststream/broker/response/Response.md b/docs/docs/en/api/faststream/broker/response/Response.md new file mode 100644 index 0000000000..1163381d7b --- /dev/null +++ b/docs/docs/en/api/faststream/broker/response/Response.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.broker.response.Response diff --git a/docs/docs/en/api/faststream/confluent/KafkaResponse.md b/docs/docs/en/api/faststream/confluent/KafkaResponse.md new file mode 100644 index 0000000000..eb0eab479c --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/KafkaResponse.md @@ -0,0 +1,11 @@ 
+--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.KafkaResponse diff --git a/docs/docs/en/api/faststream/confluent/response/KafkaResponse.md b/docs/docs/en/api/faststream/confluent/response/KafkaResponse.md new file mode 100644 index 0000000000..7fa5542613 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/response/KafkaResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.response.KafkaResponse diff --git a/docs/docs/en/api/faststream/kafka/KafkaResponse.md b/docs/docs/en/api/faststream/kafka/KafkaResponse.md new file mode 100644 index 0000000000..4aab0b965d --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/KafkaResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.KafkaResponse diff --git a/docs/docs/en/api/faststream/kafka/response/KafkaResponse.md b/docs/docs/en/api/faststream/kafka/response/KafkaResponse.md new file mode 100644 index 0000000000..05ecd69c2d --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/response/KafkaResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.response.KafkaResponse diff --git a/docs/docs/en/api/faststream/nats/NatsResponse.md b/docs/docs/en/api/faststream/nats/NatsResponse.md new file mode 100644 index 0000000000..6b967b527a --- /dev/null +++ b/docs/docs/en/api/faststream/nats/NatsResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.NatsResponse diff --git a/docs/docs/en/api/faststream/nats/response/NatsResponse.md b/docs/docs/en/api/faststream/nats/response/NatsResponse.md new file mode 100644 index 0000000000..8a7da66982 --- /dev/null +++ b/docs/docs/en/api/faststream/nats/response/NatsResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.nats.response.NatsResponse diff --git a/docs/docs/en/api/faststream/rabbit/RabbitResponse.md b/docs/docs/en/api/faststream/rabbit/RabbitResponse.md new file mode 100644 index 0000000000..4d20d82b0e --- /dev/null +++ b/docs/docs/en/api/faststream/rabbit/RabbitResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.RabbitResponse diff --git a/docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md b/docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md new file mode 100644 index 0000000000..477cfb9861 --- /dev/null +++ b/docs/docs/en/api/faststream/rabbit/response/RabbitResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.rabbit.response.RabbitResponse diff --git a/docs/docs/en/api/faststream/redis/RedisResponse.md b/docs/docs/en/api/faststream/redis/RedisResponse.md new file mode 100644 index 0000000000..eedecf1ea3 --- /dev/null +++ b/docs/docs/en/api/faststream/redis/RedisResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 
0.5 +--- + +::: faststream.redis.RedisResponse diff --git a/docs/docs/en/api/faststream/redis/response/RedisResponse.md b/docs/docs/en/api/faststream/redis/response/RedisResponse.md new file mode 100644 index 0000000000..dd7fbe72eb --- /dev/null +++ b/docs/docs/en/api/faststream/redis/response/RedisResponse.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.redis.response.RedisResponse diff --git a/faststream/__about__.py b/faststream/__about__.py index d0972600b0..33eac3b1c1 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.9" +__version__ = "0.5.10" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/__init__.py b/faststream/__init__.py index 7ef2bad9e4..b0f6456967 100644 --- a/faststream/__init__.py +++ b/faststream/__init__.py @@ -3,6 +3,7 @@ from faststream.annotations import ContextRepo, Logger, NoCast from faststream.app import FastStream from faststream.broker.middlewares import BaseMiddleware +from faststream.broker.response import Response from faststream.testing.app import TestApp from faststream.utils import Context, Depends, Header, Path, apply_types, context @@ -23,4 +24,6 @@ "NoCast", # middlewares "BaseMiddleware", + # basic + "Response", ) diff --git a/faststream/broker/response.py b/faststream/broker/response.py new file mode 100644 index 0000000000..5a811a77e0 --- /dev/null +++ b/faststream/broker/response.py @@ -0,0 +1,52 @@ +from typing import TYPE_CHECKING, Any, Optional, Union + +if TYPE_CHECKING: + from faststream.types import AnyDict, SendableMessage + + +class Response: + def __new__( + cls, + body: Union[ + "SendableMessage", + "Response", + ], + **kwargs: Any, + ) -> "Response": + """Create a new instance of the class.""" + if isinstance(body, cls): + return body + + else: + return super().__new__(cls) + + def __init__( + self, + body: "SendableMessage", + *, + headers: Optional["AnyDict"] = None, + correlation_id: Optional[str] = None, + ) -> None: + """Initialize a handler.""" + if not isinstance(body, Response): + self.body = body + self.headers = headers or {} + self.correlation_id = correlation_id + + def add_headers( + self, + extra_headers: "AnyDict", + *, + override: bool = True, + ) -> None: + if override: + self.headers = {**self.headers, **extra_headers} + else: + self.headers = {**extra_headers, **self.headers} + + def as_publish_kwargs(self) -> "AnyDict": + publish_options = { + "headers": self.headers, + "correlation_id": self.correlation_id, + } + return publish_options diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py index e5ee6fadea..2e5ca52151 100644 --- a/faststream/broker/subscriber/usecase.py +++ b/faststream/broker/subscriber/usecase.py @@ -21,6 +21,7 @@ from faststream.asyncapi.abc import AsyncAPIOperation from faststream.asyncapi.message import parse_handler_params from faststream.asyncapi.utils import to_camelcase +from faststream.broker.response import Response from faststream.broker.subscriber.call_item import HandlerItem from faststream.broker.subscriber.proto import SubscriberProto from faststream.broker.types import ( @@ -333,24 +334,29 @@ async def consume(self, msg: MsgType) -> Any: for m in middlewares: stack.push_async_exit(m.__aexit__) - result_msg = await h.call( - message=message, - # consumer middlewares - 
_extra_middlewares=(m.consume_scope for m in middlewares), + result_msg = Response( + await h.call( + message=message, + # consumer middlewares + _extra_middlewares=(m.consume_scope for m in middlewares), + ) ) + if not result_msg.correlation_id: + result_msg.correlation_id = message.correlation_id + for p in chain( self.__get_reponse_publisher(message), h.handler._publishers, ): await p.publish( - result_msg, - correlation_id=message.correlation_id, + result_msg.body, + **result_msg.as_publish_kwargs(), # publisher middlewares _extra_middlewares=(m.publish_scope for m in middlewares), ) - return result_msg + return result_msg.body # Suitable handler is not founded for m in middlewares: diff --git a/faststream/confluent/__init__.py b/faststream/confluent/__init__.py index 88f8705e0b..9566997b78 100644 --- a/faststream/confluent/__init__.py +++ b/faststream/confluent/__init__.py @@ -1,5 +1,6 @@ from faststream.confluent.annotations import KafkaMessage from faststream.confluent.broker import KafkaBroker +from faststream.confluent.response import KafkaResponse from faststream.confluent.router import KafkaPublisher, KafkaRoute, KafkaRouter from faststream.confluent.testing import TestKafkaBroker from faststream.testing.app import TestApp @@ -10,6 +11,7 @@ "KafkaRouter", "KafkaRoute", "KafkaPublisher", + "KafkaResponse", "TestKafkaBroker", "TestApp", ) diff --git a/faststream/confluent/response.py b/faststream/confluent/response.py new file mode 100644 index 0000000000..dc36bb6932 --- /dev/null +++ b/faststream/confluent/response.py @@ -0,0 +1,5 @@ +from faststream.broker.response import Response + + +class KafkaResponse(Response): + pass diff --git a/faststream/kafka/__init__.py b/faststream/kafka/__init__.py index c81b617033..dc5f5e3cf6 100644 --- a/faststream/kafka/__init__.py +++ b/faststream/kafka/__init__.py @@ -2,6 +2,7 @@ from faststream.kafka.annotations import KafkaMessage from faststream.kafka.broker import KafkaBroker +from faststream.kafka.response import KafkaResponse from faststream.kafka.router import KafkaPublisher, KafkaRoute, KafkaRouter from faststream.kafka.testing import TestKafkaBroker from faststream.testing.app import TestApp @@ -11,6 +12,7 @@ "KafkaMessage", "KafkaRouter", "KafkaRoute", + "KafkaResponse", "KafkaPublisher", "TestKafkaBroker", "TestApp", diff --git a/faststream/kafka/response.py b/faststream/kafka/response.py new file mode 100644 index 0000000000..dc36bb6932 --- /dev/null +++ b/faststream/kafka/response.py @@ -0,0 +1,5 @@ +from faststream.broker.response import Response + + +class KafkaResponse(Response): + pass diff --git a/faststream/nats/__init__.py b/faststream/nats/__init__.py index 72ba1a2876..bae483a17e 100644 --- a/faststream/nats/__init__.py +++ b/faststream/nats/__init__.py @@ -15,6 +15,7 @@ from faststream.nats.annotations import NatsMessage from faststream.nats.broker.broker import NatsBroker +from faststream.nats.response import NatsResponse from faststream.nats.router import NatsPublisher, NatsRoute, NatsRouter from faststream.nats.schemas import JStream, KvWatch, ObjWatch, PullSub from faststream.nats.testing import TestNatsBroker @@ -32,6 +33,7 @@ "NatsPublisher", "TestNatsBroker", "NatsMessage", + "NatsResponse", # Nats imports "ConsumerConfig", "DeliverPolicy", diff --git a/faststream/nats/response.py b/faststream/nats/response.py new file mode 100644 index 0000000000..6b77c7da25 --- /dev/null +++ b/faststream/nats/response.py @@ -0,0 +1,5 @@ +from faststream.broker.response import Response + + +class NatsResponse(Response): + pass diff 
--git a/faststream/rabbit/__init__.py b/faststream/rabbit/__init__.py index 7c05cb70c8..cfc152d4e0 100644 --- a/faststream/rabbit/__init__.py +++ b/faststream/rabbit/__init__.py @@ -1,5 +1,6 @@ from faststream.rabbit.annotations import RabbitMessage from faststream.rabbit.broker import RabbitBroker +from faststream.rabbit.response import RabbitResponse from faststream.rabbit.router import RabbitPublisher, RabbitRoute, RabbitRouter from faststream.rabbit.schemas import ( ExchangeType, @@ -17,6 +18,7 @@ "RabbitRouter", "RabbitRoute", "RabbitPublisher", + "RabbitResponse", "ExchangeType", "ReplyConfig", "RabbitExchange", diff --git a/faststream/rabbit/publisher/asyncapi.py b/faststream/rabbit/publisher/asyncapi.py index b18a968202..d5f785d7c1 100644 --- a/faststream/rabbit/publisher/asyncapi.py +++ b/faststream/rabbit/publisher/asyncapi.py @@ -32,7 +32,6 @@ class AsyncAPIPublisher(LogicPublisher): # or publisher: AsyncAPIPublisher = router.publisher(...) ``` - """ def get_name(self) -> str: diff --git a/faststream/rabbit/response.py b/faststream/rabbit/response.py new file mode 100644 index 0000000000..aea4457337 --- /dev/null +++ b/faststream/rabbit/response.py @@ -0,0 +1,5 @@ +from faststream.broker.response import Response + + +class RabbitResponse(Response): + pass diff --git a/faststream/rabbit/testing.py b/faststream/rabbit/testing.py index e15cbe2cb3..36f79ef60a 100644 --- a/faststream/rabbit/testing.py +++ b/faststream/rabbit/testing.py @@ -49,7 +49,7 @@ def create_publisher_fake_subscriber( publisher: AsyncAPIPublisher, ) -> "HandlerCallWrapper[Any, Any, Any]": sub = broker.subscriber( - queue=publisher.queue, + queue=publisher.routing, exchange=publisher.exchange, ) @@ -70,7 +70,7 @@ def remove_publisher_fake_subscriber( ) -> None: broker._subscribers.pop( AsyncAPISubscriber.get_routing_hash( - queue=publisher.queue, + queue=RabbitQueue.validate(publisher.routing), exchange=publisher.exchange, ), None, @@ -132,7 +132,7 @@ def build_message( priority=priority, correlation_id=correlation_id, expiration=expiration, - message_id=message_id, + message_id=message_id or gen_cor_id(), timestamp=timestamp, message_type=message_type, user_id=user_id, @@ -148,14 +148,21 @@ def build_message( header=ContentHeader( properties=spec.Basic.Properties( content_type=msg.content_type, - message_id=gen_cor_id(), headers=msg.headers, - reply_to=reply_to, + reply_to=msg.reply_to, + content_encoding=msg.content_encoding, + priority=msg.priority, + correlation_id=msg.correlation_id, + message_id=msg.message_id, + timestamp=msg.timestamp, + message_type=message_type, + user_id=msg.user_id, + app_id=msg.app_id, ) ), body=msg.body, channel=AsyncMock(), - ) + ), ) diff --git a/faststream/redis/__init__.py b/faststream/redis/__init__.py index a67f2843ac..7624747bf8 100644 --- a/faststream/redis/__init__.py +++ b/faststream/redis/__init__.py @@ -1,5 +1,6 @@ from faststream.redis.annotations import Redis, RedisMessage from faststream.redis.broker.broker import RedisBroker +from faststream.redis.response import RedisResponse from faststream.redis.router import RedisPublisher, RedisRoute, RedisRouter from faststream.redis.schemas import ListSub, PubSub, StreamSub from faststream.redis.testing import TestRedisBroker @@ -12,6 +13,7 @@ "RedisRoute", "RedisRouter", "RedisPublisher", + "RedisResponse", "TestRedisBroker", "TestApp", "PubSub", diff --git a/faststream/redis/publisher/usecase.py b/faststream/redis/publisher/usecase.py index 24309fcdb4..a887140d84 100644 --- a/faststream/redis/publisher/usecase.py +++ 
b/faststream/redis/publisher/usecase.py @@ -282,7 +282,6 @@ async def publish( # type: ignore[override] list_sub = ListSub.validate(list or self.list) reply_to = reply_to or self.reply_to - headers = headers or self.headers correlation_id = correlation_id or gen_cor_id() call: "AsyncFunc" = self._producer.publish @@ -301,7 +300,7 @@ async def publish( # type: ignore[override] list=list_sub.name, # basic args reply_to=reply_to, - headers=headers, + headers=headers or self.headers, correlation_id=correlation_id, # RPC args rpc=rpc, @@ -327,6 +326,10 @@ async def publish( # type: ignore[override] Optional[str], Doc("Has no real effect. Option to be compatible with original protocol."), ] = None, + headers: Annotated[ + Optional["AnyDict"], + Doc("Message headers to store metainformation."), + ] = None, # publisher specific _extra_middlewares: Annotated[ Iterable["PublisherMiddleware"], @@ -353,6 +356,7 @@ async def publish( # type: ignore[override] *message, list=list_sub.name, correlation_id=correlation_id, + headers=headers or self.headers, ) diff --git a/faststream/redis/response.py b/faststream/redis/response.py new file mode 100644 index 0000000000..d414b0ebe4 --- /dev/null +++ b/faststream/redis/response.py @@ -0,0 +1,5 @@ +from faststream.broker.response import Response + + +class RedisResponse(Response): + pass diff --git a/tests/brokers/base/publish.py b/tests/brokers/base/publish.py index 974b12d8cf..abf5f0022c 100644 --- a/tests/brokers/base/publish.py +++ b/tests/brokers/base/publish.py @@ -9,7 +9,7 @@ import pytest from pydantic import BaseModel -from faststream import BaseMiddleware +from faststream import BaseMiddleware, Context, Response from faststream._compat import dump_json, model_to_json from faststream.broker.core.usecase import BrokerUsecase @@ -175,6 +175,46 @@ async def handler(m: message_type): assert event.is_set() mock.assert_called_with(expected_message) + @pytest.mark.asyncio() + async def test_response( + self, + queue: str, + event: asyncio.Event, + mock: Mock, + ): + pub_broker = self.get_broker(apply_types=True) + + @pub_broker.subscriber(queue, **self.subscriber_kwargs) + @pub_broker.publisher(queue + "1") + async def m(): + return Response(1, headers={"custom": "1"}, correlation_id="1") + + @pub_broker.subscriber(queue + "1", **self.subscriber_kwargs) + async def m_next(msg=Context("message")): + event.set() + mock( + body=msg.body, + headers=msg.headers["custom"], + correlation_id=msg.correlation_id, + ) + + async with self.patch_broker(pub_broker) as br: + await br.start() + await asyncio.wait( + ( + asyncio.create_task(br.publish(None, queue)), + asyncio.create_task(event.wait()), + ), + timeout=self.timeout, + ) + + assert event.is_set() + mock.assert_called_with( + body=b"1", + correlation_id="1", + headers="1", + ) + @pytest.mark.asyncio() async def test_unwrap_dict( self, diff --git a/tests/brokers/rabbit/test_test_client.py b/tests/brokers/rabbit/test_test_client.py index e07cbd88c0..dde1383ff4 100644 --- a/tests/brokers/rabbit/test_test_client.py +++ b/tests/brokers/rabbit/test_test_client.py @@ -61,6 +61,18 @@ def subscriber(m): assert event.is_set() + async def test_respect_routing_key(self): + broker = self.get_broker() + + publisher = broker.publisher( + exchange=RabbitExchange("test", type=ExchangeType.TOPIC), routing_key="up" + ) + + async with TestRabbitBroker(broker): + await publisher.publish("Hi!") + + publisher.mock.assert_called_once_with("Hi!") + async def test_direct( self, queue: str, diff --git a/tests/brokers/test_response.py 
b/tests/brokers/test_response.py new file mode 100644 index 0000000000..3766b28f2b --- /dev/null +++ b/tests/brokers/test_response.py @@ -0,0 +1,25 @@ +from faststream.broker.response import Response + + +def test_raw_data(): + resp = Response(1) + assert resp.body == 1 + assert resp.headers == {} + + +def test_response_with_response_instance(): + resp = Response(Response(1, headers={"some": 1})) + assert resp.body == 1 + assert resp.headers == {"some": 1} + + +def test_headers_override(): + resp = Response(1, headers={"some": 1}) + resp.add_headers({"some": 2}) + assert resp.headers == {"some": 2} + + +def test_headers_with_default(): + resp = Response(1, headers={"some": 1}) + resp.add_headers({"some": 2}, override=False) + assert resp.headers == {"some": 1}
From 0d3291dc514a8fd619c4267032fe5dbac0006659 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Wed, 29 May 2024 13:03:35 +0000 Subject: [PATCH 30/43] Update Release Notes for 0.5.10 (#1482) Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> --- docs/docs/en/release.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index c6546bcd3d..e60c2acb49 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,32 @@ hide: --- # Release Notes +## 0.5.10 + +### What's Changed + +Now you can return Response class to set more specific outgoing message parameters: + +```python +from faststream import Response + +@broker.subscriber("in") +@broker.publisher("out") +async def handler(): + return Response(body=b"", headers={}) +``` + +* Pass logger to confluent producer and consumer by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1464](https://github.com/airtai/faststream/pull/1464){.external-link target="_blank"} +* Fixes #1412 with `TestKafkaBroker` behaviour where Consumer Groups weren't being respected by [@sifex](https://github.com/sifex){.external-link target="_blank"} in [#1413](https://github.com/airtai/faststream/pull/1413){.external-link target="_blank"} +* Chore: update dependency versions by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1478](https://github.com/airtai/faststream/pull/1478){.external-link target="_blank"} +* Remove typing-extensions version restriction by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1477](https://github.com/airtai/faststream/pull/1477){.external-link target="_blank"} +* feat (#1431): add Response class by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1481](https://github.com/airtai/faststream/pull/1481){.external-link target="_blank"} + +### New Contributors +* [@sifex](https://github.com/sifex){.external-link target="_blank"} made their first contribution in [#1413](https://github.com/airtai/faststream/pull/1413){.external-link target="_blank"} + +**Full Changelog**: [#0.5.9...0.5.10](https://github.com/airtai/faststream/compare/0.5.9...0.5.10){.external-link target="_blank"} + ## 0.5.9 ### What's Changed
From 45d9cf6e7a0513b30b34c6f5ed7a8b725ad62108 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Sat, 1 Jun 2024 06:58:21 +0300 Subject: [PATCH 31/43] feat: provide with an ability to create default RMQ Exchange (#1485) * feat: provide with an ability to create default RMQ Exchange * refactor: optimize RMQ declarer * fix: correct import
in Tests * fix: correct AsyncAPI RMQ Schema * fix: correct exchange propagation * docs: remove useless API file --- docs/docs/SUMMARY.md | 4 +- .../declarer}/RabbitDeclarer.md | 2 +- faststream/rabbit/broker/broker.py | 8 +- faststream/rabbit/helpers/__init__.py | 0 faststream/rabbit/helpers/declarer.py | 79 +++++++++++++++++++ faststream/rabbit/publisher/asyncapi.py | 7 +- faststream/rabbit/publisher/producer.py | 19 ++--- faststream/rabbit/publisher/usecase.py | 4 +- faststream/rabbit/schemas/exchange.py | 23 +++++- faststream/rabbit/subscriber/asyncapi.py | 4 +- faststream/rabbit/subscriber/factory.py | 6 +- faststream/rabbit/subscriber/usecase.py | 26 +++--- faststream/rabbit/testing.py | 2 +- faststream/rabbit/utils.py | 72 +---------------- tests/brokers/rabbit/specific/test_declare.py | 2 +- tests/docs/rabbit/test_declare.py | 4 +- 16 files changed, 147 insertions(+), 115 deletions(-) rename docs/docs/en/api/faststream/rabbit/{utils => helpers/declarer}/RabbitDeclarer.md (67%) create mode 100644 faststream/rabbit/helpers/__init__.py create mode 100644 faststream/rabbit/helpers/declarer.py diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 150de9cc30..f10774cc8c 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -765,6 +765,9 @@ search: - [RabbitRouter](api/faststream/rabbit/fastapi/RabbitRouter.md) - router - [RabbitRouter](api/faststream/rabbit/fastapi/router/RabbitRouter.md) + - helpers + - declarer + - [RabbitDeclarer](api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md) - message - [RabbitMessage](api/faststream/rabbit/message/RabbitMessage.md) - opentelemetry @@ -821,7 +824,6 @@ search: - [apply_pattern](api/faststream/rabbit/testing/apply_pattern.md) - [build_message](api/faststream/rabbit/testing/build_message.md) - utils - - [RabbitDeclarer](api/faststream/rabbit/utils/RabbitDeclarer.md) - [build_url](api/faststream/rabbit/utils/build_url.md) - [is_routing_exchange](api/faststream/rabbit/utils/is_routing_exchange.md) - redis diff --git a/docs/docs/en/api/faststream/rabbit/utils/RabbitDeclarer.md b/docs/docs/en/api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md similarity index 67% rename from docs/docs/en/api/faststream/rabbit/utils/RabbitDeclarer.md rename to docs/docs/en/api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md index 28a5a6b7c5..b8fc8a0ebd 100644 --- a/docs/docs/en/api/faststream/rabbit/utils/RabbitDeclarer.md +++ b/docs/docs/en/api/faststream/rabbit/helpers/declarer/RabbitDeclarer.md @@ -8,4 +8,4 @@ search: boost: 0.5 --- -::: faststream.rabbit.utils.RabbitDeclarer +::: faststream.rabbit.helpers.declarer.RabbitDeclarer diff --git a/faststream/rabbit/broker/broker.py b/faststream/rabbit/broker/broker.py index f7ec134f86..6cb357fef7 100644 --- a/faststream/rabbit/broker/broker.py +++ b/faststream/rabbit/broker/broker.py @@ -20,6 +20,7 @@ from faststream.exceptions import NOT_CONNECTED_YET from faststream.rabbit.broker.logging import RabbitLoggingBroker from faststream.rabbit.broker.registrator import RabbitRegistrator +from faststream.rabbit.helpers.declarer import RabbitDeclarer from faststream.rabbit.publisher.producer import AioPikaFastProducer from faststream.rabbit.schemas import ( RABBIT_REPLY, @@ -28,7 +29,7 @@ ) from faststream.rabbit.security import parse_security from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber -from faststream.rabbit.utils import RabbitDeclarer, build_url +from faststream.rabbit.utils import build_url if TYPE_CHECKING: from ssl import SSLContext @@ -429,7 +430,6 @@ async def 
_connect( # type: ignore[override] await declarer.declare_queue(RABBIT_REPLY) self._producer = AioPikaFastProducer( - channel=channel, declarer=declarer, decoder=self._decoder, parser=self._parser, @@ -437,7 +437,9 @@ async def _connect( # type: ignore[override] if max_consumers: c = AsyncAPISubscriber.build_log_context( - None, RabbitQueue(""), RabbitExchange("") + None, + RabbitQueue(""), + RabbitExchange(""), ) self._log(f"Set max consumers to {max_consumers}", extra=c) await channel.set_qos(prefetch_count=int(max_consumers)) diff --git a/faststream/rabbit/helpers/__init__.py b/faststream/rabbit/helpers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/faststream/rabbit/helpers/declarer.py b/faststream/rabbit/helpers/declarer.py new file mode 100644 index 0000000000..57c21a3a78 --- /dev/null +++ b/faststream/rabbit/helpers/declarer.py @@ -0,0 +1,79 @@ +from typing import TYPE_CHECKING, Dict, cast + +if TYPE_CHECKING: + import aio_pika + + from faststream.rabbit.schemas import RabbitExchange, RabbitQueue + + +class RabbitDeclarer: + """An utility class to declare RabbitMQ queues and exchanges.""" + + __channel: "aio_pika.RobustChannel" + __queues: Dict["RabbitQueue", "aio_pika.RobustQueue"] + __exchanges: Dict["RabbitExchange", "aio_pika.RobustExchange"] + + def __init__(self, channel: "aio_pika.RobustChannel") -> None: + self.__channel = channel + self.__queues = {} + self.__exchanges = {} + + async def declare_queue( + self, + queue: "RabbitQueue", + passive: bool = False, + ) -> "aio_pika.RobustQueue": + """Declare a queue.""" + if (q := self.__queues.get(queue)) is None: + self.__queues[queue] = q = cast( + "aio_pika.RobustQueue", + await self.__channel.declare_queue( + name=queue.name, + durable=queue.durable, + exclusive=queue.exclusive, + passive=passive or queue.passive, + auto_delete=queue.auto_delete, + arguments=queue.arguments, + timeout=queue.timeout, + robust=queue.robust, + ), + ) + + return q + + async def declare_exchange( + self, + exchange: "RabbitExchange", + passive: bool = False, + ) -> "aio_pika.RobustExchange": + """Declare an exchange, parent exchanges and bind them each other.""" + if not exchange.name: + return self.__channel.default_exchange + + if (exch := self.__exchanges.get(exchange)) is None: + self.__exchanges[exchange] = exch = cast( + "aio_pika.RobustExchange", + await self.__channel.declare_exchange( + name=exchange.name, + type=exchange.type.value, + durable=exchange.durable, + auto_delete=exchange.auto_delete, + passive=passive or exchange.passive, + arguments=exchange.arguments, + timeout=exchange.timeout, + robust=exchange.robust, + internal=False, # deprecated RMQ option + ), + ) + + if exchange.bind_to is not None: + parent = await self.declare_exchange(exchange.bind_to) + await exch.bind( + exchange=parent, + routing_key=exchange.routing, + arguments=exchange.bind_arguments, + timeout=exchange.timeout, + robust=exchange.robust, + ) + + return exch diff --git a/faststream/rabbit/publisher/asyncapi.py b/faststream/rabbit/publisher/asyncapi.py index d5f785d7c1..7e0c580cdd 100644 --- a/faststream/rabbit/publisher/asyncapi.py +++ b/faststream/rabbit/publisher/asyncapi.py @@ -40,7 +40,8 @@ def get_name(self) -> str: or (self.queue.routing if is_routing_exchange(self.exchange) else None) or "_" ) - return f"{routing}:{getattr(self.exchange, 'name', '_')}:Publisher" + + return f"{routing}:{getattr(self.exchange, 'name', None) or '_'}:Publisher" def get_schema(self) -> Dict[str, Channel]: payloads = self.get_payloads() @@ -87,7 
+88,7 @@ def get_schema(self) -> Dict[str, Channel]: else None, "exchange": ( amqp.Exchange(type="default", vhost=self.virtual_host) - if self.exchange is None + if not self.exchange.name else amqp.Exchange( type=self.exchange.type.value, # type: ignore name=self.exchange.name, @@ -109,7 +110,7 @@ def create( # type: ignore[override] *, routing_key: str, queue: "RabbitQueue", - exchange: Optional["RabbitExchange"], + exchange: "RabbitExchange", message_kwargs: "PublishKwargs", # Publisher args broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], diff --git a/faststream/rabbit/publisher/producer.py b/faststream/rabbit/publisher/producer.py index db21217f50..09b4ffbb3e 100644 --- a/faststream/rabbit/publisher/producer.py +++ b/faststream/rabbit/publisher/producer.py @@ -23,7 +23,7 @@ from types import TracebackType import aiormq - from aio_pika import IncomingMessage, RobustChannel, RobustQueue + from aio_pika import IncomingMessage, RobustQueue from aio_pika.abc import DateType, HeadersType, TimeoutType from anyio.streams.memory import MemoryObjectReceiveStream @@ -31,8 +31,8 @@ AsyncCallable, CustomCallable, ) + from faststream.rabbit.helpers.declarer import RabbitDeclarer from faststream.rabbit.types import AioPikaSendableMessage - from faststream.rabbit.utils import RabbitDeclarer from faststream.types import SendableMessage @@ -45,12 +45,10 @@ class AioPikaFastProducer(ProducerProto): def __init__( self, *, - channel: "RobustChannel", declarer: "RabbitDeclarer", parser: Optional["CustomCallable"], decoder: Optional["CustomCallable"], ) -> None: - self._channel = channel self.declarer = declarer self._rpc_lock = anyio.Lock() @@ -161,14 +159,6 @@ async def _publish( app_id: Optional[str], ) -> Union["aiormq.abc.ConfirmationFrameType", "SendableMessage"]: """Publish a message to a RabbitMQ exchange.""" - p_exchange = RabbitExchange.validate(exchange) - - if p_exchange is None: - exchange_obj = self._channel.default_exchange - else: - p_exchange.passive = True - exchange_obj = await self.declarer.declare_exchange(p_exchange) - message = AioPikaParser.encode_message( message=message, persist=persist, @@ -186,6 +176,11 @@ async def _publish( app_id=app_id, ) + exchange_obj = await self.declarer.declare_exchange( + exchange=RabbitExchange.validate(exchange), + passive=True, + ) + return await exchange_obj.publish( message=message, routing_key=routing_key, diff --git a/faststream/rabbit/publisher/usecase.py b/faststream/rabbit/publisher/usecase.py index 7ac5dc6389..0472bbc127 100644 --- a/faststream/rabbit/publisher/usecase.py +++ b/faststream/rabbit/publisher/usecase.py @@ -106,7 +106,7 @@ def __init__( *, routing_key: str, queue: "RabbitQueue", - exchange: Optional["RabbitExchange"], + exchange: "RabbitExchange", message_kwargs: "PublishKwargs", # Publisher args broker_middlewares: Iterable["BrokerMiddleware[IncomingMessage]"], @@ -225,7 +225,7 @@ async def publish( "routing_key": routing_key or self.routing_key or RabbitQueue.validate(queue or self.queue).routing, - "exchange": exchange or self.exchange, + "exchange": exchange or self.exchange.name, "app_id": self.app_id, "correlation_id": correlation_id or gen_cor_id(), "message_id": message_id, diff --git a/faststream/rabbit/schemas/exchange.py b/faststream/rabbit/schemas/exchange.py index be8af31c3a..a9dfae79a1 100644 --- a/faststream/rabbit/schemas/exchange.py +++ b/faststream/rabbit/schemas/exchange.py @@ -1,7 +1,7 @@ import warnings -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING, Any, 
Optional, Union -from typing_extensions import Annotated, Doc +from typing_extensions import Annotated, Doc, override from faststream.broker.schemas import NameRequired from faststream.rabbit.schemas.constants import ExchangeType @@ -39,12 +39,17 @@ def __hash__(self) -> int: ) ) + @property + def routing(self) -> str: + """Return real routing_key of object.""" + return self.routing_key or self.name + def __init__( self, name: Annotated[ str, Doc("RabbitMQ exchange name."), - ], + ] = "", type: Annotated[ ExchangeType, Doc( @@ -125,3 +130,15 @@ def __init__( self.bind_to = bind_to self.bind_arguments = bind_arguments self.routing_key = routing_key + + @override + @classmethod + def validate( # type: ignore[override] + cls, + value: Union[str, "RabbitExchange", None], + **kwargs: Any, + ) -> "RabbitExchange": + exch = super().validate(value, **kwargs) + if exch is None: + exch = RabbitExchange() + return exch diff --git a/faststream/rabbit/subscriber/asyncapi.py b/faststream/rabbit/subscriber/asyncapi.py index 158d343dd1..2b0cb4cd5b 100644 --- a/faststream/rabbit/subscriber/asyncapi.py +++ b/faststream/rabbit/subscriber/asyncapi.py @@ -19,7 +19,7 @@ class AsyncAPISubscriber(LogicSubscriber): def get_name(self) -> str: return ( - f"{self.queue.name}:{getattr(self.exchange, 'name', '_')}:{self.call_name}" + f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}" ) def get_schema(self) -> Dict[str, Channel]: @@ -59,7 +59,7 @@ def get_schema(self) -> Dict[str, Channel]: else None, "exchange": ( amqp.Exchange(type="default", vhost=self.virtual_host) - if self.exchange is None + if not self.exchange.name else amqp.Exchange( type=self.exchange.type.value, # type: ignore name=self.exchange.name, diff --git a/faststream/rabbit/subscriber/factory.py b/faststream/rabbit/subscriber/factory.py index 0683d2d62f..1a185cafe6 100644 --- a/faststream/rabbit/subscriber/factory.py +++ b/faststream/rabbit/subscriber/factory.py @@ -1,6 +1,5 @@ from typing import TYPE_CHECKING, Iterable, Optional, Union -from faststream.rabbit.schemas import RabbitExchange, RabbitQueue, ReplyConfig from faststream.rabbit.subscriber.asyncapi import AsyncAPISubscriber if TYPE_CHECKING: @@ -8,13 +7,14 @@ from fast_depends.dependencies import Depends from faststream.broker.types import BrokerMiddleware + from faststream.rabbit.schemas import RabbitExchange, RabbitQueue, ReplyConfig from faststream.types import AnyDict def create_subscriber( *, - queue: RabbitQueue, - exchange: Optional["RabbitExchange"], + queue: "RabbitQueue", + exchange: "RabbitExchange", consume_args: Optional["AnyDict"], reply_config: Optional["ReplyConfig"], # Subscriber args diff --git a/faststream/rabbit/subscriber/usecase.py b/faststream/rabbit/subscriber/usecase.py index e518d3ca37..67421df2da 100644 --- a/faststream/rabbit/subscriber/usecase.py +++ b/faststream/rabbit/subscriber/usecase.py @@ -23,13 +23,13 @@ from faststream.broker.message import StreamMessage from faststream.broker.types import BrokerMiddleware, CustomCallable + from faststream.rabbit.helpers.declarer import RabbitDeclarer from faststream.rabbit.publisher.producer import AioPikaFastProducer from faststream.rabbit.schemas import ( RabbitExchange, RabbitQueue, ReplyConfig, ) - from faststream.rabbit.utils import RabbitDeclarer from faststream.types import AnyDict, Decorator, LoggerProto @@ -50,7 +50,7 @@ def __init__( self, *, queue: "RabbitQueue", - exchange: Optional["RabbitExchange"], + exchange: "RabbitExchange", consume_args: Optional["AnyDict"], 
reply_config: Optional["ReplyConfig"], # Subscriber args @@ -141,16 +141,20 @@ async def start(self) -> None: self._queue_obj = queue = await self.declarer.declare_queue(self.queue) - if self.exchange is not None: + if ( + self.exchange is not None + and not queue.passive # queue just getted from RMQ + and self.exchange.name # check Exchange is not default + ): exchange = await self.declarer.declare_exchange(self.exchange) - if not queue.passive: - await queue.bind( - exchange, - routing_key=self.queue.routing, - arguments=self.queue.bind_arguments, - timeout=self.queue.timeout, - robust=self.queue.robust, - ) + + await queue.bind( + exchange, + routing_key=self.queue.routing, + arguments=self.queue.bind_arguments, + timeout=self.queue.timeout, + robust=self.queue.robust, + ) self._consumer_tag = await queue.consume( # NOTE: aio-pika expects AbstractIncomingMessage, not IncomingMessage diff --git a/faststream/rabbit/testing.py b/faststream/rabbit/testing.py index 36f79ef60a..3d3a274418 100644 --- a/faststream/rabbit/testing.py +++ b/faststream/rabbit/testing.py @@ -71,7 +71,7 @@ def remove_publisher_fake_subscriber( broker._subscribers.pop( AsyncAPISubscriber.get_routing_hash( queue=RabbitQueue.validate(publisher.routing), - exchange=publisher.exchange, + exchange=RabbitExchange.validate(publisher.exchange), ), None, ) diff --git a/faststream/rabbit/utils.py b/faststream/rabbit/utils.py index 4da30f7a50..1af8a30f7b 100644 --- a/faststream/rabbit/utils.py +++ b/faststream/rabbit/utils.py @@ -1,83 +1,15 @@ -from typing import TYPE_CHECKING, Any, Dict, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union from aio_pika.connection import make_url from faststream.rabbit.schemas.constants import ExchangeType if TYPE_CHECKING: - import aio_pika from aio_pika.abc import SSLOptions from pamqp.common import FieldTable from yarl import URL - from faststream.rabbit.schemas import RabbitExchange, RabbitQueue - - -class RabbitDeclarer: - """An utility class to declare RabbitMQ queues and exchanges.""" - - channel: "aio_pika.RobustChannel" - queues: Dict["RabbitQueue", "aio_pika.RobustQueue"] - exchanges: Dict["RabbitExchange", "aio_pika.RobustExchange"] - - def __init__(self, channel: "aio_pika.RobustChannel") -> None: - self.channel = channel - self.queues = {} - self.exchanges = {} - - async def declare_queue( - self, - queue: "RabbitQueue", - ) -> "aio_pika.RobustQueue": - """Declare a queue.""" - if (q := self.queues.get(queue)) is None: - self.queues[queue] = q = cast( - "aio_pika.RobustQueue", - await self.channel.declare_queue( - name=queue.name, - durable=queue.durable, - exclusive=queue.exclusive, - passive=queue.passive, - auto_delete=queue.auto_delete, - arguments=queue.arguments, - timeout=queue.timeout, - robust=queue.robust, - ), - ) - return q - - async def declare_exchange( - self, - exchange: "RabbitExchange", - ) -> "aio_pika.RobustExchange": - """Declare an exchange, parent exchanges and bind them each other.""" - if (exch := self.exchanges.get(exchange)) is None: - self.exchanges[exchange] = exch = cast( - "aio_pika.RobustExchange", - await self.channel.declare_exchange( - name=exchange.name, - type=exchange.type.value, - durable=exchange.durable, - auto_delete=exchange.auto_delete, - passive=exchange.passive, - arguments=exchange.arguments, - timeout=exchange.timeout, - robust=exchange.robust, - internal=False, # deprecated RMQ option - ), - ) - - if exchange.bind_to is not None: - parent = await self.declare_exchange(exchange.bind_to) - await exch.bind( - 
exchange=parent, - routing_key=exchange.routing_key, - arguments=exchange.bind_arguments, - timeout=exchange.timeout, - robust=exchange.robust, - ) - - return exch + from faststream.rabbit.schemas import RabbitExchange def build_url( diff --git a/tests/brokers/rabbit/specific/test_declare.py b/tests/brokers/rabbit/specific/test_declare.py index 2977572d16..aed6824f3e 100644 --- a/tests/brokers/rabbit/specific/test_declare.py +++ b/tests/brokers/rabbit/specific/test_declare.py @@ -1,7 +1,7 @@ import pytest from faststream.rabbit import RabbitBroker, RabbitExchange, RabbitQueue -from faststream.rabbit.utils import RabbitDeclarer +from faststream.rabbit.helpers.declarer import RabbitDeclarer @pytest.mark.asyncio() diff --git a/tests/docs/rabbit/test_declare.py b/tests/docs/rabbit/test_declare.py index 2157e2a257..e6f3891b12 100644 --- a/tests/docs/rabbit/test_declare.py +++ b/tests/docs/rabbit/test_declare.py @@ -9,5 +9,5 @@ async def test_declare(): from docs.docs_src.rabbit.declare import app, broker async with TestApp(app): - assert len(broker.declarer.exchanges) == 1 - assert len(broker.declarer.queues) == 2 # with `reply-to` + assert len(broker.declarer._RabbitDeclarer__exchanges) == 1 + assert len(broker.declarer._RabbitDeclarer__queues) == 2 # with `reply-to` From 82c9acd278e19bb2afb9828b031347c0aba72b86 Mon Sep 17 00:00:00 2001 From: Alexey Date: Sun, 2 Jun 2024 22:06:42 +0700 Subject: [PATCH 32/43] Fix typos (#1489) --- docs/docs/en/getting-started/lifespan/context.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/en/getting-started/lifespan/context.md b/docs/docs/en/getting-started/lifespan/context.md index 3dcee88b9f..5f32476ff7 100644 --- a/docs/docs/en/getting-started/lifespan/context.md +++ b/docs/docs/en/getting-started/lifespan/context.md @@ -10,7 +10,7 @@ search: # Lifespan Context Manager -Also, you can define *startup* and *shutdown* logic using the `lifespan` parameter of the **FastSTream** app, and a "context manager" (I'll show you what that is in a second). +Also, you can define *startup* and *shutdown* logic using the `lifespan` parameter of the **FastStream** app, and a "context manager" (I'll show you what that is in a second). Let's start with an example from [hooks page](./hooks.md#another-example){.internal-link} and refactor it using "context manager". @@ -21,7 +21,7 @@ We create an async function `lifespan()` with `#!python yield` like this: As you can see, `lifespan` parameter is much suitable for case (than `#!python @app.on_startup` and `#!python @app.after_shutdown` separated calls) if you have object needs to process at application startup and shutdown both. !!! tip - `lifespan` starts **BEFORE** your broken started (`#!python @app.on_startup` hook) and **AFTER** broker was shutdown (`#!python @app.after_shutdown`), so you can't publish any messages here. + `lifespan` starts **BEFORE** your broker started (`#!python @app.on_startup` hook) and **AFTER** broker was shutdown (`#!python @app.after_shutdown`), so you can't publish any messages here. If you want to make some actions will *already/still running broker*, please use `#!python @app.after_startup` and `#!python @app.on_shutdown` hooks. 
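The lifespan page patched above is easier to follow next to a concrete sketch. The snippet below is illustrative only and not part of the diff — the `RabbitBroker` connection URL and the `settings` dictionary are assumptions, and the wiring simply mirrors the pattern that page documents:

```python
from contextlib import asynccontextmanager

from faststream import ContextRepo, FastStream
from faststream.rabbit import RabbitBroker

broker = RabbitBroker("amqp://guest:guest@localhost:5672/")  # assumed connection URL


@asynccontextmanager
async def lifespan(context: ContextRepo):
    # Entered BEFORE the broker is started (same phase as @app.on_startup),
    # so nothing can be published from here.
    settings = {"env": "local"}  # hypothetical settings object
    context.set_global("settings", settings)

    yield

    # Exited AFTER the broker has been shut down (same phase as @app.after_shutdown).


app = FastStream(broker, lifespan=lifespan)
```

Anything that has to talk to an *already/still running broker* still belongs in the `#!python @app.after_startup` and `#!python @app.on_shutdown` hooks rather than in the `lifespan` function.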
From b8b003df0d171a891d12403e7bc06d77446e316b Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Mon, 3 Jun 2024 10:16:00 +0300 Subject: [PATCH 33/43] chore: update CI triggers to minify useless runs (#1483) * chore: run CQ CI only with PR and by cron * chore: run DR CI only if pyproject changed * format: correct deploy docs CI * chore: run coverage publish CI only after PR merged * chore: run tests only in PR * chore: fix test CI * chore: add CI workflows names conversation * chore: add docs update references CI * chore: make update references CI writeable * chore: finalize CI * chore: correct name in check links CI * chore: run tests only if required * chore: run CQ CI if required * chore: fix broken links CI * chore: make References autocommit * Apply automatic changes * chore: change GHA commit message * chore: make extra trigger for tests * chore: update tests triggers * chore: run tests anyway * chore: add merge queue trigger to CI * chore: make real testsuites fail-fast * chore: remove useless conditions in CI * chore: specify MG trigger --------- Co-authored-by: Lancetnik Co-authored-by: Kumaran Rajendhiran --- ...docs.yaml => docs_check-broken-links.yaml} | 6 +- .../{deploy-docs.yaml => docs_deploy.yaml} | 4 +- .github/workflows/docs_update-references.yaml | 34 +++++ ...es.yaml => docs_update-release-notes.yaml} | 8 +- .../workflows/{codeql.yml => pr_codeql.yml} | 11 +- ...-review.yaml => pr_dependency-review.yaml} | 11 +- .../workflows/{test.yaml => pr_tests.yaml} | 121 ++++++++---------- .github/workflows/publish_coverage.yml | 20 ++- .../{publish_pypi.yml => release_pypi.yml} | 6 +- faststream/broker/middlewares/base.py | 3 +- 10 files changed, 129 insertions(+), 95 deletions(-) rename .github/workflows/{check-broken-links-in-docs.yaml => docs_check-broken-links.yaml} (88%) rename .github/workflows/{deploy-docs.yaml => docs_deploy.yaml} (97%) create mode 100644 .github/workflows/docs_update-references.yaml rename .github/workflows/{update_release_notes.yaml => docs_update-release-notes.yaml} (95%) rename .github/workflows/{codeql.yml => pr_codeql.yml} (96%) rename .github/workflows/{dependency-review.yaml => pr_dependency-review.yaml} (87%) rename .github/workflows/{test.yaml => pr_tests.yaml} (94%) rename .github/workflows/{publish_pypi.yml => release_pypi.yml} (96%) diff --git a/.github/workflows/check-broken-links-in-docs.yaml b/.github/workflows/docs_check-broken-links.yaml similarity index 88% rename from .github/workflows/check-broken-links-in-docs.yaml rename to .github/workflows/docs_check-broken-links.yaml index 98ce376519..e09f0704fd 100644 --- a/.github/workflows/check-broken-links-in-docs.yaml +++ b/.github/workflows/docs_check-broken-links.yaml @@ -2,8 +2,10 @@ name: Check docs for broken links on: workflow_run: - workflows: ["pages-build-deployment"] - types: [completed] + types: + - completed + workflows: + - Deploy Docs to GitHub Pages jobs: check-broken-link: diff --git a/.github/workflows/deploy-docs.yaml b/.github/workflows/docs_deploy.yaml similarity index 97% rename from .github/workflows/deploy-docs.yaml rename to .github/workflows/docs_deploy.yaml index c9da5942b6..d3cd1ce1d2 100644 --- a/.github/workflows/deploy-docs.yaml +++ b/.github/workflows/docs_deploy.yaml @@ -1,4 +1,5 @@ -name: Deploy Docs +name: Deploy Docs to GitHub Pages + on: push: branches: @@ -10,6 +11,7 @@ on: permissions: contents: write + jobs: deploy_docs: runs-on: ubuntu-latest diff --git a/.github/workflows/docs_update-references.yaml b/.github/workflows/docs_update-references.yaml 
new file mode 100644 index 0000000000..92e306784e --- /dev/null +++ b/.github/workflows/docs_update-references.yaml @@ -0,0 +1,34 @@ +name: Generate API References documentation + +on: + pull_request: + types: + - opened + - synchronize + paths: + - faststream/** + +permissions: + contents: write + +jobs: + check-docs-changes: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: pip install -e ".[dev]" + - name: Run build docs + run: bash scripts/build-docs.sh + - name: Commit + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: "docs: generate API References" diff --git a/.github/workflows/update_release_notes.yaml b/.github/workflows/docs_update-release-notes.yaml similarity index 95% rename from .github/workflows/update_release_notes.yaml rename to .github/workflows/docs_update-release-notes.yaml index b03ea6977f..1c3b3c0e1e 100644 --- a/.github/workflows/update_release_notes.yaml +++ b/.github/workflows/docs_update-release-notes.yaml @@ -1,10 +1,10 @@ -name: Update Release Notes +name: Create PR with updated Release Notes on: workflow_dispatch: null - push: - tags: - - '*' + release: + types: + - published jobs: update-release-notes: diff --git a/.github/workflows/codeql.yml b/.github/workflows/pr_codeql.yml similarity index 96% rename from .github/workflows/codeql.yml rename to .github/workflows/pr_codeql.yml index e0c92f423e..f1fb50d463 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/pr_codeql.yml @@ -12,11 +12,14 @@ name: "CodeQL" on: - push: - branches: [ "main"] pull_request: - # The branches below must be a subset of the branches above - branches: [ "main" ] + types: + - opened + - synchronize + branches: + - main + paths: + - faststream/** schedule: - cron: '39 20 * * 0' diff --git a/.github/workflows/dependency-review.yaml b/.github/workflows/pr_dependency-review.yaml similarity index 87% rename from .github/workflows/dependency-review.yaml rename to .github/workflows/pr_dependency-review.yaml index 0bc48198f8..11ad023407 100644 --- a/.github/workflows/dependency-review.yaml +++ b/.github/workflows/pr_dependency-review.yaml @@ -5,7 +5,16 @@ # Source repository: https://github.com/actions/dependency-review-action # Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement name: 'Dependency Review' -on: [pull_request] + +on: + pull_request: + types: + - opened + - synchronize + branches: + - main + paths: + - pyproject.toml permissions: contents: read diff --git a/.github/workflows/test.yaml b/.github/workflows/pr_tests.yaml similarity index 94% rename from .github/workflows/test.yaml rename to .github/workflows/pr_tests.yaml index ddf783ded9..a68eacda8d 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/pr_tests.yaml @@ -1,12 +1,14 @@ -name: Test +name: Run all tests on: - push: - branches: - - main pull_request: - types: [opened, synchronize] + types: + - opened + - synchronize + # https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-a-merge-queue#triggering-merge-group-checks-with-github-actions merge_group: + types: + - checks_requested jobs: 
static_analysis: @@ -40,27 +42,7 @@ jobs: shell: bash run: semgrep scan --config auto --error - check-docs-changes: - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.12" - cache: "pip" - cache-dependency-path: pyproject.toml - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: pip install -e ".[dev]" - - name: Run build docs - run: bash scripts/build-docs.sh - - name: Check for changes - id: git-diff - run: git diff --exit-code - - test: + test-basic: if: github.event.pull_request.draft == false runs-on: ubuntu-latest strategy: @@ -165,8 +147,28 @@ jobs: - name: Test run: bash scripts/test.sh -m "(slow and (not nats and not kafka and not confluent and not rabbit and not redis)) or (not nats and not kafka and not confluent and not rabbit and not redis)" + test-kafka-smoke: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: pip install .[kafka,test-core] + - name: Test + run: bash scripts/test.sh -m "not kafka" tests/brokers/kafka/test_test_client.py + test-kafka-real: if: github.event.pull_request.draft == false + needs: + - test-basic + - test-kafka-smoke runs-on: ubuntu-latest services: kafka: @@ -208,7 +210,7 @@ jobs: path: coverage if-no-files-found: error - test-kafka-smoke: + test-confluent-smoke: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: @@ -221,12 +223,15 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[kafka,test-core] + run: pip install .[confluent,test-core] - name: Test - run: bash scripts/test.sh -m "not kafka" tests/brokers/kafka/test_test_client.py + run: bash scripts/test.sh -m "not confluent" tests/brokers/confluent/test_test_client.py test-confluent-real: if: github.event.pull_request.draft == false + needs: + - test-basic + - test-confluent-smoke runs-on: ubuntu-latest services: kafka: @@ -268,7 +273,7 @@ jobs: path: coverage if-no-files-found: error - test-confluent-smoke: + test-rabbit-smoke: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: @@ -281,12 +286,15 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[confluent,test-core] + run: pip install .[rabbit,test-core] - name: Test - run: bash scripts/test.sh -m "not confluent" tests/brokers/confluent/test_test_client.py + run: bash scripts/test.sh -m "not rabbit" tests/brokers/rabbit/test_test_client.py test-rabbit-real: if: github.event.pull_request.draft == false + needs: + - test-basic + - test-rabbit-smoke runs-on: ubuntu-latest services: rabbitmq: @@ -317,7 +325,7 @@ jobs: path: coverage if-no-files-found: error - test-rabbit-smoke: + test-nats-smoke: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: @@ -330,12 +338,15 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[rabbit,test-core] + run: pip install .[nats,test-core] - name: Test - run: bash scripts/test.sh -m "not rabbit" 
tests/brokers/rabbit/test_test_client.py + run: bash scripts/test.sh -m "not nats" tests/brokers/nats/test_test_client.py test-nats-real: if: github.event.pull_request.draft == false + needs: + - test-basic + - test-nats-smoke runs-on: ubuntu-latest services: nats: @@ -366,7 +377,7 @@ jobs: path: coverage if-no-files-found: error - test-nats-smoke: + test-redis-smoke: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: @@ -379,13 +390,16 @@ jobs: cache-dependency-path: pyproject.toml - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[nats,test-core] + run: pip install .[redis,test-core] - name: Test - run: bash scripts/test.sh -m "not nats" tests/brokers/nats/test_test_client.py + run: bash scripts/test.sh -m "not redis" tests/brokers/redis/test_test_client.py test-redis-real: if: github.event.pull_request.draft == false runs-on: ubuntu-latest + needs: + - test-basic + - test-redis-smoke services: nats: image: redis:alpine @@ -415,27 +429,10 @@ jobs: path: coverage if-no-files-found: error - test-redis-smoke: - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.12" - cache: "pip" - cache-dependency-path: pyproject.toml - - name: Install Dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: pip install .[redis,test-core] - - name: Test - run: bash scripts/test.sh -m "not redis" tests/brokers/redis/test_test_client.py - coverage-combine: if: github.event.pull_request.draft == false needs: - - test + - test-basic - test-kafka-real - test-confluent-real - test-rabbit-real @@ -478,20 +475,10 @@ jobs: needs: - static_analysis - - check-docs-changes - coverage-combine - test-macos-latest - test-windows-latest - - test-kafka-real - - test-kafka-smoke - - test-confluent-real - - test-confluent-smoke - - test-rabbit-real - - test-rabbit-smoke - - test-nats-real - - test-nats-smoke - - test-redis-real - - test-redis-smoke + - test-orjson runs-on: ubuntu-latest diff --git a/.github/workflows/publish_coverage.yml b/.github/workflows/publish_coverage.yml index f263ee33d4..04983682f8 100644 --- a/.github/workflows/publish_coverage.yml +++ b/.github/workflows/publish_coverage.yml @@ -1,18 +1,15 @@ -name: Smokeshow +name: Publish tests coverage to Smokeshow and set coverage % status to commit on: - workflow_run: - workflows: [Test] - types: [completed] - + push: + branches: + - main permissions: statuses: write - jobs: smokeshow: - if: ${{ github.event.workflow_run.conclusion == 'success' }} runs-on: ubuntu-latest steps: @@ -22,15 +19,16 @@ jobs: - run: pip install smokeshow - - uses: dawidd6/action-download-artifact@v3.1.4 # nosemgrep + - uses: dawidd6/action-download-artifact@v3.1.4 # nosemgrep with: - workflow: test.yaml - commit: ${{ github.event.workflow_run.head_sha }} + workflow: pr_tests.yaml + workflow_conclusion: success + if_no_artifact_found: fail - run: smokeshow upload coverage-html env: SMOKESHOW_GITHUB_STATUS_DESCRIPTION: Coverage {coverage-percentage} - SMOKESHOW_GITHUB_COVERAGE_THRESHOLD: 70 + SMOKESHOW_GITHUB_COVERAGE_THRESHOLD: 90 SMOKESHOW_GITHUB_CONTEXT: coverage SMOKESHOW_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SMOKESHOW_GITHUB_PR_HEAD_SHA: ${{ github.event.workflow_run.head_sha }} diff --git a/.github/workflows/publish_pypi.yml b/.github/workflows/release_pypi.yml similarity index 96% rename from .github/workflows/publish_pypi.yml rename to 
.github/workflows/release_pypi.yml index 1f03e52ae0..f1ed153dde 100644 --- a/.github/workflows/publish_pypi.yml +++ b/.github/workflows/release_pypi.yml @@ -2,9 +2,9 @@ name: Publish to PyPi on: workflow_dispatch: null - push: - tags: - - "*" + release: + types: + - published jobs: publish: diff --git a/faststream/broker/middlewares/base.py b/faststream/broker/middlewares/base.py index ab8ccb7c4d..5710c8ec1c 100644 --- a/faststream/broker/middlewares/base.py +++ b/faststream/broker/middlewares/base.py @@ -97,7 +97,7 @@ async def publish_scope( **kwargs: Any, ) -> Any: """Publish a message and return an async iterator.""" - err: Optional[Exception] + err: Optional[Exception] = None try: result = await call_next( await self.on_publish(msg, *args, **kwargs), @@ -109,7 +109,6 @@ async def publish_scope( err = e else: - err = None return result finally: From fcb7297ad0bd6a9ae8c97ded059955693a21af80 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Mon, 3 Jun 2024 13:40:17 +0530 Subject: [PATCH 34/43] Update link to badges (#1496) --- README.md | 12 ++++++------ docs/docs/en/faststream.md | 12 ++++++------ 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 9e1d2329d4..058f3c4890 100644 --- a/README.md +++ b/README.md @@ -5,8 +5,8 @@ ---

- - Test Passing + + Test Passing @@ -27,12 +27,12 @@
-
- CodeQL + + CodeQL - - Dependency Review + + Dependency Review diff --git a/docs/docs/en/faststream.md b/docs/docs/en/faststream.md index d02c7cbf69..62e211745d 100644 --- a/docs/docs/en/faststream.md +++ b/docs/docs/en/faststream.md @@ -12,8 +12,8 @@ search: ---

- - Test Passing + + Test Passing @@ -34,12 +34,12 @@ search:
-
- CodeQL + + CodeQL - - Dependency Review + + Dependency Review From edc0f304cdbeb306edd7ea2657506abe44de74b2 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Mon, 3 Jun 2024 14:21:06 +0530 Subject: [PATCH 35/43] Run tests every day at 12:00 AM (#1497) --- .github/workflows/pr_tests.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr_tests.yaml b/.github/workflows/pr_tests.yaml index a68eacda8d..59c2de5fd4 100644 --- a/.github/workflows/pr_tests.yaml +++ b/.github/workflows/pr_tests.yaml @@ -1,8 +1,10 @@ name: Run all tests on: + schedule: + - cron: "0 0 * * *" pull_request: - types: + types: - opened - synchronize # https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-a-merge-queue#triggering-merge-group-checks-with-github-actions From 4ebaa658fbf29c509a8dc71d19b6a5cd25d5ac3d Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Thu, 6 Jun 2024 15:18:57 +0530 Subject: [PATCH 36/43] Chore: update deps (#1503) * Update dependency versions * Run pre-commit * Limit typing-extensions version for python 3.8 --- .github/workflows/docs_update-references.yaml | 2 +- .github/workflows/pr_dependency-review.yaml | 2 +- .secrets.baseline | 4 ++-- faststream/asyncapi/utils.py | 2 +- faststream/broker/fastapi/route.py | 2 +- faststream/broker/fastapi/router.py | 4 ++-- faststream/broker/message.py | 2 +- faststream/broker/publisher/fake.py | 2 +- faststream/broker/publisher/usecase.py | 2 +- faststream/broker/subscriber/call_item.py | 2 +- faststream/broker/subscriber/usecase.py | 4 ++-- faststream/broker/utils.py | 2 +- faststream/broker/wrapper/call.py | 2 +- faststream/cli/supervisors/multiprocess.py | 2 +- faststream/cli/utils/logs.py | 2 +- faststream/cli/utils/parser.py | 4 ++-- faststream/confluent/broker/broker.py | 2 +- faststream/confluent/parser.py | 4 ++-- faststream/confluent/publisher/usecase.py | 10 +++++----- faststream/kafka/broker/broker.py | 4 ++-- faststream/kafka/parser.py | 4 ++-- faststream/kafka/publisher/usecase.py | 6 +++--- faststream/nats/broker/broker.py | 6 +++--- faststream/nats/parser.py | 6 +++--- faststream/nats/publisher/usecase.py | 4 ++-- faststream/nats/subscriber/usecase.py | 2 +- faststream/rabbit/publisher/producer.py | 4 ++-- faststream/rabbit/publisher/usecase.py | 4 ++-- faststream/rabbit/subscriber/asyncapi.py | 4 +--- faststream/redis/broker/broker.py | 6 +++--- faststream/redis/parser.py | 6 +++--- faststream/redis/publisher/usecase.py | 8 ++++---- faststream/redis/subscriber/usecase.py | 2 +- faststream/testing/broker.py | 2 +- pyproject.toml | 11 ++++++----- 35 files changed, 67 insertions(+), 68 deletions(-) diff --git a/.github/workflows/docs_update-references.yaml b/.github/workflows/docs_update-references.yaml index 92e306784e..30f9d91bea 100644 --- a/.github/workflows/docs_update-references.yaml +++ b/.github/workflows/docs_update-references.yaml @@ -2,7 +2,7 @@ name: Generate API References documentation on: pull_request: - types: + types: - opened - synchronize paths: diff --git a/.github/workflows/pr_dependency-review.yaml b/.github/workflows/pr_dependency-review.yaml index 11ad023407..a241701673 100644 --- a/.github/workflows/pr_dependency-review.yaml +++ b/.github/workflows/pr_dependency-review.yaml @@ -8,7 +8,7 @@ name: 'Dependency Review' on: pull_request: - types: + types: - opened - synchronize branches: diff --git a/.secrets.baseline b/.secrets.baseline index 15213e9215..6cfe352e2a 100644 --- 
a/.secrets.baseline +++ b/.secrets.baseline @@ -128,7 +128,7 @@ "filename": "docs/docs/en/release.md", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 1282, + "line_number": 1308, "is_secret": false } ], @@ -163,5 +163,5 @@ } ] }, - "generated_at": "2024-05-27T11:45:58Z" + "generated_at": "2024-06-06T04:30:54Z" } diff --git a/faststream/asyncapi/utils.py b/faststream/asyncapi/utils.py index f4ed6b99a1..4edddae6ad 100644 --- a/faststream/asyncapi/utils.py +++ b/faststream/asyncapi/utils.py @@ -14,7 +14,7 @@ def resolve_payloads( served_words: int = 1, ) -> "AnyDict": ln = len(payloads) - payload: "AnyDict" + payload: AnyDict if ln > 1: one_of_payloads = {} diff --git a/faststream/broker/fastapi/route.py b/faststream/broker/fastapi/route.py index aa2c3debcb..1ee27caefc 100644 --- a/faststream/broker/fastapi/route.py +++ b/faststream/broker/fastapi/route.py @@ -189,7 +189,7 @@ async def real_consumer(message: "NativeMessage[Any]") -> Any: """An asynchronous function that processes an incoming message and returns a sendable message.""" body = message.decoded_body - fastapi_body: Union["AnyDict", List[Any]] + fastapi_body: Union[AnyDict, List[Any]] if first_arg is not None: if isinstance(body, dict): path = fastapi_body = body or {} diff --git a/faststream/broker/fastapi/router.py b/faststream/broker/fastapi/router.py index 2a7cb33d3c..2d4153f4f0 100644 --- a/faststream/broker/fastapi/router.py +++ b/faststream/broker/fastapi/router.py @@ -181,7 +181,7 @@ def __init__( on_shutdown=on_shutdown, ) - self.weak_dependencies_provider: "WeakSet[Any]" = WeakSet() + self.weak_dependencies_provider: WeakSet[Any] = WeakSet() if dependency_overrides_provider is not None: self.weak_dependencies_provider.add(dependency_overrides_provider) @@ -306,7 +306,7 @@ async def start_broker_lifespan( async with lifespan_context(app) as maybe_context: if maybe_context is None: - context: "AnyDict" = {} + context: AnyDict = {} else: context = dict(maybe_context) diff --git a/faststream/broker/message.py b/faststream/broker/message.py index beec9fe555..dbe89b089d 100644 --- a/faststream/broker/message.py +++ b/faststream/broker/message.py @@ -66,7 +66,7 @@ async def reject(self) -> None: def decode_message(message: "StreamMessage[Any]") -> "DecodedMessage": """Decodes a message.""" body: Any = getattr(message, "body", message) - m: "DecodedMessage" = body + m: DecodedMessage = body if ( content_type := getattr(message, "content_type", Parameter.empty) diff --git a/faststream/broker/publisher/fake.py b/faststream/broker/publisher/fake.py index 492677abdb..d77c43406b 100644 --- a/faststream/broker/publisher/fake.py +++ b/faststream/broker/publisher/fake.py @@ -39,7 +39,7 @@ async def publish( **kwargs, } - call: "AsyncFunc" = self.method + call: AsyncFunc = self.method for m in chain(_extra_middlewares, self.middlewares): call = partial(m, call) diff --git a/faststream/broker/publisher/usecase.py b/faststream/broker/publisher/usecase.py index 1bdbc74513..c401760a81 100644 --- a/faststream/broker/publisher/usecase.py +++ b/faststream/broker/publisher/usecase.py @@ -138,7 +138,7 @@ def __call__( return handler_call def get_payloads(self) -> List[Tuple["AnyDict", str]]: - payloads: List[Tuple["AnyDict", str]] = [] + payloads: List[Tuple[AnyDict, str]] = [] if self.schema_: params = {"response__": (self.schema_, ...)} diff --git a/faststream/broker/subscriber/call_item.py b/faststream/broker/subscriber/call_item.py index cb6e750353..77bdb70c9a 100644 --- 
a/faststream/broker/subscriber/call_item.py +++ b/faststream/broker/subscriber/call_item.py @@ -155,7 +155,7 @@ async def call( _extra_middlewares: Iterable["SubscriberMiddleware[Any]"], ) -> Any: """Execute wrapped handler with consume middlewares.""" - call: "AsyncFuncAny" = self.handler.call_wrapped + call: AsyncFuncAny = self.handler.call_wrapped for middleware in chain(self.item_middlewares, _extra_middlewares): call = partial(middleware, call) diff --git a/faststream/broker/subscriber/usecase.py b/faststream/broker/subscriber/usecase.py index 2e5ca52151..2b46a4bf1a 100644 --- a/faststream/broker/subscriber/usecase.py +++ b/faststream/broker/subscriber/usecase.py @@ -308,7 +308,7 @@ async def consume(self, msg: MsgType) -> Any: await stack.enter_async_context(self._stop_scope()) # enter all middlewares - middlewares: List["BaseMiddleware"] = [] + middlewares: List[BaseMiddleware] = [] for base_m in self._broker_middlewares: middleware = base_m(msg) middlewares.append(middleware) @@ -412,7 +412,7 @@ def get_description(self) -> Optional[str]: def get_payloads(self) -> List[Tuple["AnyDict", str]]: """Get the payloads of the handler.""" - payloads: List[Tuple["AnyDict", str]] = [] + payloads: List[Tuple[AnyDict, str]] = [] for h in self.calls: if h.dependant is None: diff --git a/faststream/broker/utils.py b/faststream/broker/utils.py index 6903f4c94d..568a1217f8 100644 --- a/faststream/broker/utils.py +++ b/faststream/broker/utils.py @@ -60,7 +60,7 @@ class MultiLock: def __init__(self) -> None: """Initialize a new instance of the class.""" - self.queue: "asyncio.Queue[None]" = asyncio.Queue() + self.queue: asyncio.Queue[None] = asyncio.Queue() def __enter__(self) -> Self: """Enter the context.""" diff --git a/faststream/broker/wrapper/call.py b/faststream/broker/wrapper/call.py index 2dda3bf1ea..0c997eb5b2 100644 --- a/faststream/broker/wrapper/call.py +++ b/faststream/broker/wrapper/call.py @@ -161,7 +161,7 @@ def set_wrapped( f: Callable[..., Awaitable[Any]] = to_async(call) - dependent: Optional["CallModel[..., Any]"] = None + dependent: Optional[CallModel[..., Any]] = None if _get_dependant is None: dependent = build_call_model( f, diff --git a/faststream/cli/supervisors/multiprocess.py b/faststream/cli/supervisors/multiprocess.py index a44f5c27c9..e7ab9dd413 100644 --- a/faststream/cli/supervisors/multiprocess.py +++ b/faststream/cli/supervisors/multiprocess.py @@ -21,7 +21,7 @@ def __init__( super().__init__(target, args, None) self.workers = workers - self.processes: List["SpawnProcess"] = [] + self.processes: List[SpawnProcess] = [] def startup(self) -> None: logger.info(f"Started parent process [{self.pid}]") diff --git a/faststream/cli/utils/logs.py b/faststream/cli/utils/logs.py index 35ca288efb..2f223455f6 100644 --- a/faststream/cli/utils/logs.py +++ b/faststream/cli/utils/logs.py @@ -63,6 +63,6 @@ def set_log_level(level: int, app: "FastStream") -> None: if app.logger and isinstance(app.logger, logging.Logger): app.logger.setLevel(level) - broker_logger: Optional["LoggerProto"] = getattr(app.broker, "logger", None) + broker_logger: Optional[LoggerProto] = getattr(app.broker, "logger", None) if broker_logger is not None and isinstance(broker_logger, logging.Logger): broker_logger.setLevel(level) diff --git a/faststream/cli/utils/parser.py b/faststream/cli/utils/parser.py index 1c58ce65a4..00c904d774 100644 --- a/faststream/cli/utils/parser.py +++ b/faststream/cli/utils/parser.py @@ -7,10 +7,10 @@ def parse_cli_args(*args: str) -> Tuple[str, Dict[str, "SettingField"]]: 
"""Parses command line arguments.""" - extra_kwargs: Dict[str, "SettingField"] = {} + extra_kwargs: Dict[str, SettingField] = {} k: str = "" - v: "SettingField" + v: SettingField field_args: List[str] = [] app = "" diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index 960b2606ad..95ceb2bfa2 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -523,7 +523,7 @@ async def publish_batch( correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish_batch + call: AsyncFunc = self._producer.publish_batch for m in self._middlewares: call = partial(m(None).publish_scope, call) diff --git a/faststream/confluent/parser.py b/faststream/confluent/parser.py index a093a16da7..3480aee092 100644 --- a/faststream/confluent/parser.py +++ b/faststream/confluent/parser.py @@ -26,7 +26,7 @@ async def parse_message( offset = message.offset() _, timestamp = message.timestamp() - handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") return KafkaMessage( body=body, @@ -59,7 +59,7 @@ async def parse_message_batch( _, first_timestamp = first.timestamp() - handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") return KafkaMessage( body=body, diff --git a/faststream/confluent/publisher/usecase.py b/faststream/confluent/publisher/usecase.py index ae0a9da319..b73b0de78c 100644 --- a/faststream/confluent/publisher/usecase.py +++ b/faststream/confluent/publisher/usecase.py @@ -113,7 +113,7 @@ async def publish( ) -> Optional[Any]: assert self._producer, NOT_CONNECTED_YET # nosec B101 - kwargs: "AnyDict" = { + kwargs: AnyDict = { "key": key or self.key, # basic args "topic": topic or self.topic, @@ -124,7 +124,7 @@ async def publish( "correlation_id": correlation_id or gen_cor_id(), } - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( @@ -155,13 +155,13 @@ async def publish( # type: ignore[override] ) -> None: assert self._producer, NOT_CONNECTED_YET # nosec B101 - msgs: Iterable["SendableMessage"] + msgs: Iterable[SendableMessage] if extra_messages: msgs = (cast("SendableMessage", message), *extra_messages) else: msgs = cast(Iterable["SendableMessage"], message) - kwargs: "AnyDict" = { + kwargs: AnyDict = { "topic": topic or self.topic, "partition": partition or self.partition, "timestamp_ms": timestamp_ms, @@ -170,7 +170,7 @@ async def publish( # type: ignore[override] "correlation_id": correlation_id or gen_cor_id(), } - call: "AsyncFunc" = self._producer.publish_batch + call: AsyncFunc = self._producer.publish_batch for m in chain( ( diff --git a/faststream/kafka/broker/broker.py b/faststream/kafka/broker/broker.py index 42cc4f281b..0827e09060 100644 --- a/faststream/kafka/broker/broker.py +++ b/faststream/kafka/broker/broker.py @@ -607,7 +607,7 @@ async def connect( # type: ignore[override] To startup subscribers too you should use `broker.start()` after/instead this method. 
""" if bootstrap_servers is not Parameter.empty: - connect_kwargs: "AnyDict" = { + connect_kwargs: AnyDict = { **kwargs, "bootstrap_servers": bootstrap_servers, } @@ -792,7 +792,7 @@ async def publish_batch( correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish_batch + call: AsyncFunc = self._producer.publish_batch for m in self._middlewares: call = partial(m(None).publish_scope, call) diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py index 8487eb3d0b..49924c9e97 100644 --- a/faststream/kafka/parser.py +++ b/faststream/kafka/parser.py @@ -21,7 +21,7 @@ async def parse_message( ) -> "StreamMessage[ConsumerRecord]": """Parses a Kafka message.""" headers = {i: j.decode() for i, j in message.headers} - handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") return KafkaMessage( body=message.value, headers=headers, @@ -51,7 +51,7 @@ async def parse_message_batch( headers = next(iter(batch_headers), {}) - handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") return KafkaMessage( body=body, diff --git a/faststream/kafka/publisher/usecase.py b/faststream/kafka/publisher/usecase.py index b254334a61..8fec375bba 100644 --- a/faststream/kafka/publisher/usecase.py +++ b/faststream/kafka/publisher/usecase.py @@ -170,7 +170,7 @@ async def publish( reply_to = reply_to or self.reply_to correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( @@ -250,7 +250,7 @@ async def publish( # type: ignore[override] ) -> None: assert self._producer, NOT_CONNECTED_YET # nosec B101 - msgs: Iterable["SendableMessage"] + msgs: Iterable[SendableMessage] if extra_messages: msgs = (cast("SendableMessage", message), *extra_messages) else: @@ -262,7 +262,7 @@ async def publish( # type: ignore[override] reply_to = reply_to or self.reply_to correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish_batch + call: AsyncFunc = self._producer.publish_batch for m in chain( ( diff --git a/faststream/nats/broker/broker.py b/faststream/nats/broker/broker.py index e6806172c3..35e35086c8 100644 --- a/faststream/nats/broker/broker.py +++ b/faststream/nats/broker/broker.py @@ -564,7 +564,7 @@ async def connect( # type: ignore[override] To startup subscribers too you should use `broker.start()` after/instead this method. 
""" if servers is not Parameter.empty: - connect_kwargs: "AnyDict" = { + connect_kwargs: AnyDict = { **kwargs, "servers": servers, } @@ -768,8 +768,8 @@ def setup_subscriber( # type: ignore[override] subscriber: "AsyncAPISubscriber", ) -> None: connection: Union[ - "Client", - "JetStreamContext", + Client, + JetStreamContext, KVBucketDeclarer, OSBucketDeclarer, None, diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py index ef18834eca..206e851999 100644 --- a/faststream/nats/parser.py +++ b/faststream/nats/parser.py @@ -32,7 +32,7 @@ def get_path( self, subject: str, ) -> Optional["AnyDict"]: - path: Optional["AnyDict"] = None + path: Optional[AnyDict] = None if (path_re := self.__path_re) is not None and ( match := path_re.match(subject) @@ -136,9 +136,9 @@ async def decode_batch( self, msg: "StreamMessage[List[Msg]]", ) -> List["DecodedMessage"]: - data: List["DecodedMessage"] = [] + data: List[DecodedMessage] = [] - path: Optional["AnyDict"] = None + path: Optional[AnyDict] = None for m in msg.raw_message: one_msg = await self.parse_message(m, path=path) path = one_msg.path diff --git a/faststream/nats/publisher/usecase.py b/faststream/nats/publisher/usecase.py index c55c254b65..6f52bd2d96 100644 --- a/faststream/nats/publisher/usecase.py +++ b/faststream/nats/publisher/usecase.py @@ -125,7 +125,7 @@ async def publish( ) -> Optional[Any]: assert self._producer, NOT_CONNECTED_YET # nosec B101 - kwargs: "AnyDict" = { + kwargs: AnyDict = { "subject": subject or self.subject, "headers": headers or self.headers, "reply_to": reply_to or self.reply_to, @@ -139,7 +139,7 @@ async def publish( if stream := stream or getattr(self.stream, "name", None): kwargs.update({"stream": stream, "timeout": timeout or self.timeout}) - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index 76ae509052..322ef41aa3 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -295,7 +295,7 @@ def get_log_context( class _TasksMixin(LogicSubscriber[Any]): def __init__(self, **kwargs: Any) -> None: - self.tasks: List["asyncio.Task[Any]"] = [] + self.tasks: List[asyncio.Task[Any]] = [] super().__init__(**kwargs) diff --git a/faststream/rabbit/publisher/producer.py b/faststream/rabbit/publisher/producer.py index 09b4ffbb3e..f7a4013bab 100644 --- a/faststream/rabbit/publisher/producer.py +++ b/faststream/rabbit/publisher/producer.py @@ -86,7 +86,7 @@ async def publish( # type: ignore[override] ) -> Optional[Any]: """Publish a message to a RabbitMQ queue.""" context: AsyncContextManager[ - Optional["MemoryObjectReceiveStream[IncomingMessage]"] + Optional[MemoryObjectReceiveStream[IncomingMessage]] ] if rpc: if reply_to is not None: @@ -126,7 +126,7 @@ async def publish( # type: ignore[override] return r else: - msg: Optional["IncomingMessage"] = None + msg: Optional[IncomingMessage] = None with timeout_scope(rpc_timeout, raise_timeout): msg = await response_queue.receive() diff --git a/faststream/rabbit/publisher/usecase.py b/faststream/rabbit/publisher/usecase.py index 0472bbc127..6df3b1078a 100644 --- a/faststream/rabbit/publisher/usecase.py +++ b/faststream/rabbit/publisher/usecase.py @@ -221,7 +221,7 @@ async def publish( ) -> Optional[Any]: assert self._producer, NOT_CONNECTED_YET # nosec B101 - kwargs: "AnyDict" = { + kwargs: AnyDict = { "routing_key": routing_key or self.routing_key or RabbitQueue.validate(queue or 
self.queue).routing, @@ -238,7 +238,7 @@ async def publish( **publish_kwargs, } - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( diff --git a/faststream/rabbit/subscriber/asyncapi.py b/faststream/rabbit/subscriber/asyncapi.py index 2b0cb4cd5b..05313a6247 100644 --- a/faststream/rabbit/subscriber/asyncapi.py +++ b/faststream/rabbit/subscriber/asyncapi.py @@ -18,9 +18,7 @@ class AsyncAPISubscriber(LogicSubscriber): """AsyncAPI-compatible Rabbit Subscriber class.""" def get_name(self) -> str: - return ( - f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}" - ) + return f"{self.queue.name}:{getattr(self.exchange, 'name', None) or '_'}:{self.call_name}" def get_schema(self) -> Dict[str, Channel]: payloads = self.get_payloads() diff --git a/faststream/redis/broker/broker.py b/faststream/redis/broker/broker.py index 93bea0a7f4..4f30e8adfb 100644 --- a/faststream/redis/broker/broker.py +++ b/faststream/redis/broker/broker.py @@ -257,7 +257,7 @@ async def connect( # type: ignore[override] ) -> "Redis[bytes]": """Connect to the Redis server.""" if url is not Parameter.empty: - connect_kwargs: "AnyDict" = { + connect_kwargs: AnyDict = { "url": url, **kwargs, } @@ -291,7 +291,7 @@ async def _connect( # type: ignore[override] parser_class: Type["BaseParser"], encoder_class: Type["Encoder"], ) -> "Redis[bytes]": - url_options: "AnyDict" = { + url_options: AnyDict = { **dict(parse_url(url)), **parse_security(self.security), "client_name": client_name, @@ -467,7 +467,7 @@ async def publish_batch( correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish_batch + call: AsyncFunc = self._producer.publish_batch for m in self._middlewares: call = partial(m(None).publish_scope, call) diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py index 52806b7fbd..bad91875ef 100644 --- a/faststream/redis/parser.py +++ b/faststream/redis/parser.py @@ -107,7 +107,7 @@ def encode( @staticmethod def parse(data: bytes) -> Tuple[bytes, "AnyDict"]: - headers: "AnyDict" + headers: AnyDict try: # FastStream message format @@ -192,7 +192,7 @@ def _parse_data( message: Mapping[str, Any], ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: body: List[Any] = [] - batch_headers: List["AnyDict"] = [] + batch_headers: List[AnyDict] = [] for x in message["data"]: msg_data, msg_headers = _decode_batch_body_item(x) @@ -230,7 +230,7 @@ def _parse_data( message: Mapping[str, Any], ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: body: List[Any] = [] - batch_headers: List["AnyDict"] = [] + batch_headers: List[AnyDict] = [] for x in message["data"]: msg_data, msg_headers = _decode_batch_body_item(x.get(bDATA_KEY, x)) diff --git a/faststream/redis/publisher/usecase.py b/faststream/redis/publisher/usecase.py index a887140d84..726506985d 100644 --- a/faststream/redis/publisher/usecase.py +++ b/faststream/redis/publisher/usecase.py @@ -159,7 +159,7 @@ async def publish( # type: ignore[override] headers = headers or self.headers correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( @@ -284,7 +284,7 @@ async def publish( # type: ignore[override] reply_to = reply_to or self.reply_to correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( @@ -341,7 +341,7 @@ async def publish( # type: ignore[override] list_sub = 
ListSub.validate(list or self.list) correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish_batch + call: AsyncFunc = self._producer.publish_batch for m in chain( ( @@ -465,7 +465,7 @@ async def publish( # type: ignore[override] headers = headers or self.headers correlation_id = correlation_id or gen_cor_id() - call: "AsyncFunc" = self._producer.publish + call: AsyncFunc = self._producer.publish for m in chain( ( diff --git a/faststream/redis/subscriber/usecase.py b/faststream/redis/subscriber/usecase.py index 5dee39ef52..9e60138b9f 100644 --- a/faststream/redis/subscriber/usecase.py +++ b/faststream/redis/subscriber/usecase.py @@ -94,7 +94,7 @@ def __init__( ) self._client = None - self.task: Optional["asyncio.Task[None]"] = None + self.task: Optional[asyncio.Task[None]] = None @override def setup( # type: ignore[override] diff --git a/faststream/testing/broker.py b/faststream/testing/broker.py index 249e5c6846..df0fdbf083 100644 --- a/faststream/testing/broker.py +++ b/faststream/testing/broker.py @@ -93,7 +93,7 @@ async def __aexit__(self, *args: Any) -> None: await self._ctx.__aexit__(*args) # TODO: remove useless middlewares filter - middlewares: Tuple["BrokerMiddleware[Any]", ...] = ( + middlewares: Tuple[BrokerMiddleware[Any], ...] = ( CriticalLogMiddleware( # type: ignore[arg-type] logger=self.broker.logger, log_level=self.broker._msg_log_level, diff --git a/pyproject.toml b/pyproject.toml index 5b22e5c4c8..fce5418594 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,8 @@ dependencies = [ "anyio>=3.7.1,<5", "fast-depends>=2.4.0b0,<2.5.0", "typer>=0.9,!=0.12,<1", - "typing-extensions>=4.8.0", + "typing-extensions>=4.8.0,<4.12.1; python_version < '3.9'", + "typing-extensions>=4.8.0; python_version >= '3.9'", ] [project.optional-dependencies] @@ -84,7 +85,7 @@ devdocs = [ "mdx-include==1.4.2", "mkdocstrings[python]==0.25.1", "mkdocs-literate-nav==0.6.1", - "mkdocs-git-revision-date-localized-plugin==1.2.5", + "mkdocs-git-revision-date-localized-plugin==1.2.6", "mike==2.1.1", # versioning "mkdocs-minify-plugin==0.8.0", "mkdocs-macros-plugin==1.0.5", # includes with variables @@ -111,14 +112,14 @@ types = [ lint = [ "faststream[types]", - "ruff==0.4.4", + "ruff==0.4.7", "bandit==1.7.8", "semgrep==1.74.0", "codespell==2.3.0", ] test-core = [ - "coverage[toml]==7.5.2", + "coverage[toml]==7.5.3", "pytest==8.2.1", "pytest-asyncio==0.23.7", "dirty-equals==0.7.1.post0", @@ -130,7 +131,7 @@ testing = [ "pydantic-settings>=2.0.0,<3.0.0", "httpx==0.27.0", "PyYAML==6.0.1", - "watchfiles==0.21.0", + "watchfiles==0.22.0", "email-validator==2.1.1", ] From a3c353fc9c61e4c965d0f74c07b1eb7678773b0a Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Fri, 7 Jun 2024 18:59:19 +0300 Subject: [PATCH 37/43] fix: include NatsRouter streams to original broker (#1509) * fix: includer NatsRouter streams to original broker * chore: limit typing-extensions version only for tests * docs: generate API References * fix: remove debug message * chore: update ruff * chore: use GHA concurency to cancel previous run at push * chore: test GHA cancelation * chore: add GHA concurency to generating API CI --------- Co-authored-by: Lancetnik --- .github/workflows/docs_update-references.yaml | 4 ++ .github/workflows/pr_codeql.yml | 4 ++ .github/workflows/pr_dependency-review.yaml | 4 ++ .github/workflows/pr_tests.yaml | 4 ++ docs/docs/SUMMARY.md | 2 + .../kafka/message/KafkaAckableMessage.md | 11 +++++ .../kafka/parser/AioKafkaBatchParser.md | 11 +++++ faststream/__about__.py 
| 2 +- .../confluent/opentelemetry/provider.py | 6 +-- faststream/kafka/annotations.py | 2 + faststream/kafka/message.py | 6 +-- faststream/kafka/opentelemetry/provider.py | 6 +-- faststream/kafka/parser.py | 43 +++++++++++-------- faststream/kafka/subscriber/usecase.py | 23 ++++++---- faststream/nats/broker/registrator.py | 23 +++++++++- faststream/nats/opentelemetry/provider.py | 6 +-- faststream/nats/parser.py | 2 +- faststream/rabbit/opentelemetry/provider.py | 4 +- faststream/rabbit/parser.py | 2 +- faststream/redis/parser.py | 8 ++-- faststream/utils/context/types.py | 4 -- pyproject.toml | 6 +-- tests/brokers/nats/test_router.py | 14 +++++- 23 files changed, 140 insertions(+), 57 deletions(-) create mode 100644 docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md create mode 100644 docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md diff --git a/.github/workflows/docs_update-references.yaml b/.github/workflows/docs_update-references.yaml index 30f9d91bea..56369ec56f 100644 --- a/.github/workflows/docs_update-references.yaml +++ b/.github/workflows/docs_update-references.yaml @@ -8,6 +8,10 @@ on: paths: - faststream/** +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + permissions: contents: write diff --git a/.github/workflows/pr_codeql.yml b/.github/workflows/pr_codeql.yml index f1fb50d463..a316b38777 100644 --- a/.github/workflows/pr_codeql.yml +++ b/.github/workflows/pr_codeql.yml @@ -23,6 +23,10 @@ on: schedule: - cron: '39 20 * * 0' +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: analyze: if: github.event.pull_request.draft == false diff --git a/.github/workflows/pr_dependency-review.yaml b/.github/workflows/pr_dependency-review.yaml index a241701673..f32635858b 100644 --- a/.github/workflows/pr_dependency-review.yaml +++ b/.github/workflows/pr_dependency-review.yaml @@ -16,6 +16,10 @@ on: paths: - pyproject.toml +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + permissions: contents: read diff --git a/.github/workflows/pr_tests.yaml b/.github/workflows/pr_tests.yaml index 59c2de5fd4..a2b545c759 100644 --- a/.github/workflows/pr_tests.yaml +++ b/.github/workflows/pr_tests.yaml @@ -12,6 +12,10 @@ on: types: - checks_requested +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: static_analysis: if: github.event.pull_request.draft == false diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index f10774cc8c..7f53b19fc5 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -540,6 +540,7 @@ search: - message - [ConsumerProtocol](api/faststream/kafka/message/ConsumerProtocol.md) - [FakeConsumer](api/faststream/kafka/message/FakeConsumer.md) + - [KafkaAckableMessage](api/faststream/kafka/message/KafkaAckableMessage.md) - [KafkaMessage](api/faststream/kafka/message/KafkaMessage.md) - opentelemetry - [KafkaTelemetryMiddleware](api/faststream/kafka/opentelemetry/KafkaTelemetryMiddleware.md) @@ -551,6 +552,7 @@ search: - [KafkaTelemetrySettingsProvider](api/faststream/kafka/opentelemetry/provider/KafkaTelemetrySettingsProvider.md) - [telemetry_attributes_provider_factory](api/faststream/kafka/opentelemetry/provider/telemetry_attributes_provider_factory.md) - parser + - 
[AioKafkaBatchParser](api/faststream/kafka/parser/AioKafkaBatchParser.md) - [AioKafkaParser](api/faststream/kafka/parser/AioKafkaParser.md) - publisher - asyncapi diff --git a/docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md b/docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md new file mode 100644 index 0000000000..16461be675 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/message/KafkaAckableMessage.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.message.KafkaAckableMessage diff --git a/docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md b/docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md new file mode 100644 index 0000000000..25df2532c6 --- /dev/null +++ b/docs/docs/en/api/faststream/kafka/parser/AioKafkaBatchParser.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.kafka.parser.AioKafkaBatchParser diff --git a/faststream/__about__.py b/faststream/__about__.py index 33eac3b1c1..6e014d02ac 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.10" +__version__ = "0.5.11" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/confluent/opentelemetry/provider.py b/faststream/confluent/opentelemetry/provider.py index 6add7330ca..3c157851d9 100644 --- a/faststream/confluent/opentelemetry/provider.py +++ b/faststream/confluent/opentelemetry/provider.py @@ -37,8 +37,8 @@ def get_publish_attrs_from_kwargs( return attrs - @staticmethod def get_publish_destination_name( + self, kwargs: "AnyDict", ) -> str: return cast(str, kwargs["topic"]) @@ -66,8 +66,8 @@ def get_consume_attrs_from_message( return attrs - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[Message]", ) -> str: return cast(str, msg.raw_message.topic()) @@ -95,8 +95,8 @@ def get_consume_attrs_from_message( return attrs - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[Tuple[Message, ...]]", ) -> str: return cast(str, msg.raw_message[0].topic()) diff --git a/faststream/kafka/annotations.py b/faststream/kafka/annotations.py index 5ea36b7c4e..efca62b227 100644 --- a/faststream/kafka/annotations.py +++ b/faststream/kafka/annotations.py @@ -1,3 +1,4 @@ +from aiokafka import AIOKafkaConsumer from typing_extensions import Annotated from faststream.annotations import ContextRepo, Logger, NoCast @@ -15,6 +16,7 @@ "KafkaProducer", ) +Consumer = Annotated[AIOKafkaConsumer, Context("handler_.consumer")] KafkaMessage = Annotated[KM, Context("message")] KafkaBroker = Annotated[KB, Context("broker")] KafkaProducer = Annotated[AioKafkaFastProducer, Context("broker._producer")] diff --git a/faststream/kafka/message.py b/faststream/kafka/message.py index c051e8f5ba..52f243a6ab 100644 --- a/faststream/kafka/message.py +++ b/faststream/kafka/message.py @@ -39,16 +39,16 @@ def __init__( self, *args: Any, consumer: ConsumerProtocol, - is_manual: bool = False, **kwargs: Any, ) -> None: super().__init__(*args, **kwargs) - self.is_manual = is_manual self.consumer = consumer + +class KafkaAckableMessage(KafkaMessage): async def ack(self) -> None: """Acknowledge the Kafka message.""" - if self.is_manual and not self.committed: + if not self.committed: await self.consumer.commit() await 
super().ack() diff --git a/faststream/kafka/opentelemetry/provider.py b/faststream/kafka/opentelemetry/provider.py index b1702b6022..b90d82c9fd 100644 --- a/faststream/kafka/opentelemetry/provider.py +++ b/faststream/kafka/opentelemetry/provider.py @@ -37,8 +37,8 @@ def get_publish_attrs_from_kwargs( return attrs - @staticmethod def get_publish_destination_name( + self, kwargs: "AnyDict", ) -> str: return cast(str, kwargs["topic"]) @@ -66,8 +66,8 @@ def get_consume_attrs_from_message( return attrs - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[ConsumerRecord]", ) -> str: return cast(str, msg.raw_message.topic) @@ -96,8 +96,8 @@ def get_consume_attrs_from_message( return attrs - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[Tuple[ConsumerRecord, ...]]", ) -> str: return cast(str, msg.raw_message[0].topic) diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py index 49924c9e97..f6c9964584 100644 --- a/faststream/kafka/parser.py +++ b/faststream/kafka/parser.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type from faststream.broker.message import decode_message, gen_cor_id from faststream.kafka.message import FAKE_CONSUMER, KafkaMessage @@ -15,14 +15,17 @@ class AioKafkaParser: """A class to parse Kafka messages.""" - @staticmethod + def __init__(self, msg_class: Type[KafkaMessage]) -> None: + self.msg_class = msg_class + async def parse_message( + self, message: "ConsumerRecord", ) -> "StreamMessage[ConsumerRecord]": """Parses a Kafka message.""" headers = {i: j.decode() for i, j in message.headers} - handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") - return KafkaMessage( + handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + return self.msg_class( body=message.value, headers=headers, reply_to=headers.get("reply_to", ""), @@ -31,11 +34,19 @@ async def parse_message( correlation_id=headers.get("correlation_id", gen_cor_id()), raw_message=message, consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER, - is_manual=getattr(handler, "is_manual", True), ) - @staticmethod - async def parse_message_batch( + async def decode_message( + self, + msg: "StreamMessage[ConsumerRecord]", + ) -> "DecodedMessage": + """Decodes a message.""" + return decode_message(msg) + + +class AioKafkaBatchParser(AioKafkaParser): + async def parse_message( + self, message: Tuple["ConsumerRecord", ...], ) -> "StreamMessage[Tuple[ConsumerRecord, ...]]": """Parses a batch of messages from a Kafka consumer.""" @@ -53,7 +64,7 @@ async def parse_message_batch( handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") - return KafkaMessage( + return self.msg_class( body=body, headers=headers, batch_headers=batch_headers, @@ -63,18 +74,14 @@ async def parse_message_batch( correlation_id=headers.get("correlation_id", gen_cor_id()), raw_message=message, consumer=getattr(handler, "consumer", None) or FAKE_CONSUMER, - is_manual=getattr(handler, "is_manual", True), ) - @staticmethod - async def decode_message(msg: "StreamMessage[ConsumerRecord]") -> "DecodedMessage": - """Decodes a message.""" - return decode_message(msg) - - @classmethod - async def decode_message_batch( - cls, + async def decode_message( + self, msg: "StreamMessage[Tuple[ConsumerRecord, ...]]", ) -> "DecodedMessage": """Decode a batch of messages.""" - return [decode_message(await cls.parse_message(m)) for m in 
msg.raw_message] + return [ + decode_message(await super(AioKafkaBatchParser, self).parse_message(m)) + for m in msg.raw_message + ] diff --git a/faststream/kafka/subscriber/usecase.py b/faststream/kafka/subscriber/usecase.py index fa01a11fcb..5b077faf73 100644 --- a/faststream/kafka/subscriber/usecase.py +++ b/faststream/kafka/subscriber/usecase.py @@ -26,7 +26,8 @@ CustomCallable, MsgType, ) -from faststream.kafka.parser import AioKafkaParser +from faststream.kafka.message import KafkaAckableMessage, KafkaMessage +from faststream.kafka.parser import AioKafkaBatchParser, AioKafkaParser if TYPE_CHECKING: from aiokafka import AIOKafkaConsumer, ConsumerRecord @@ -60,7 +61,6 @@ def __init__( listener: Optional["ConsumerRebalanceListener"], pattern: Optional[str], partitions: Iterable["TopicPartition"], - is_manual: bool, # Subscriber args default_parser: "AsyncCallable", default_decoder: "AsyncCallable", @@ -93,7 +93,6 @@ def __init__( self.partitions = partitions self.group_id = group_id - self.is_manual = is_manual self.builder = None self.consumer = None self.task = None @@ -306,6 +305,10 @@ def __init__( description_: Optional[str], include_in_schema: bool, ) -> None: + parser = AioKafkaParser( + msg_class=KafkaAckableMessage if is_manual else KafkaMessage + ) + super().__init__( *topics, group_id=group_id, @@ -313,10 +316,9 @@ def __init__( pattern=pattern, connection_args=connection_args, partitions=partitions, - is_manual=is_manual, # subscriber args - default_parser=AioKafkaParser.parse_message, - default_decoder=AioKafkaParser.decode_message, + default_parser=parser.parse_message, + default_decoder=parser.decode_message, # Propagated args no_ack=no_ack, no_reply=no_reply, @@ -363,6 +365,10 @@ def __init__( self.batch_timeout_ms = batch_timeout_ms self.max_records = max_records + parser = AioKafkaBatchParser( + msg_class=KafkaAckableMessage if is_manual else KafkaMessage + ) + super().__init__( *topics, group_id=group_id, @@ -370,10 +376,9 @@ def __init__( pattern=pattern, connection_args=connection_args, partitions=partitions, - is_manual=is_manual, # subscriber args - default_parser=AioKafkaParser.parse_message_batch, - default_decoder=AioKafkaParser.decode_message_batch, + default_parser=parser.parse_message, + default_decoder=parser.decode_message, # Propagated args no_ack=no_ack, no_reply=no_reply, diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py index ca6b84d4d4..a77b439b98 100644 --- a/faststream/nats/broker/registrator.py +++ b/faststream/nats/broker/registrator.py @@ -13,9 +13,10 @@ if TYPE_CHECKING: from fast_depends.dependencies import Depends - from nats.aio.msg import Msg # noqa: F401 + from nats.aio.msg import Msg from faststream.broker.types import ( + BrokerMiddleware, CustomCallable, Filter, PublisherMiddleware, @@ -348,3 +349,23 @@ def publisher( # type: ignore[override] ), ) return publisher + + @override + def include_router( # type: ignore[override] + self, + router: "NatsRegistrator", + *, + prefix: str = "", + dependencies: Iterable["Depends"] = (), + middlewares: Iterable["BrokerMiddleware[Msg]"] = (), + include_in_schema: Optional[bool] = None, + ) -> None: + self._stream_builder.objects.update(router._stream_builder.objects) + + return super().include_router( + router, + prefix=prefix, + dependencies=dependencies, + middlewares=middlewares, + include_in_schema=include_in_schema, + ) diff --git a/faststream/nats/opentelemetry/provider.py b/faststream/nats/opentelemetry/provider.py index 7c33a7d76b..a77ff0a2b3 100644 
--- a/faststream/nats/opentelemetry/provider.py +++ b/faststream/nats/opentelemetry/provider.py @@ -29,8 +29,8 @@ def get_publish_attrs_from_kwargs( SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], } - @staticmethod def get_publish_destination_name( + self, kwargs: "AnyDict", ) -> str: subject: str = kwargs.get("subject", SERVICE_NAME) @@ -50,8 +50,8 @@ def get_consume_attrs_from_message( MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message.subject, } - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[Msg]", ) -> str: return msg.raw_message.subject @@ -73,8 +73,8 @@ def get_consume_attrs_from_message( MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message[0].subject, } - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[List[Msg]]", ) -> str: return msg.raw_message[0].subject diff --git a/faststream/nats/parser.py b/faststream/nats/parser.py index 206e851999..25d61d4901 100644 --- a/faststream/nats/parser.py +++ b/faststream/nats/parser.py @@ -41,8 +41,8 @@ def get_path( return path - @staticmethod async def decode_message( + self, msg: "StreamMessage[Any]", ) -> "DecodedMessage": return decode_message(msg) diff --git a/faststream/rabbit/opentelemetry/provider.py b/faststream/rabbit/opentelemetry/provider.py index da62338e70..7ba8c1900e 100644 --- a/faststream/rabbit/opentelemetry/provider.py +++ b/faststream/rabbit/opentelemetry/provider.py @@ -32,8 +32,8 @@ def get_consume_attrs_from_message( MESSAGING_DESTINATION_PUBLISH_NAME: msg.raw_message.exchange, } - @staticmethod def get_consume_destination_name( + self, msg: "StreamMessage[IncomingMessage]", ) -> str: exchange = msg.raw_message.exchange or "default" @@ -53,8 +53,8 @@ def get_publish_attrs_from_kwargs( SpanAttributes.MESSAGING_MESSAGE_CONVERSATION_ID: kwargs["correlation_id"], } - @staticmethod def get_publish_destination_name( + self, kwargs: "AnyDict", ) -> str: exchange: str = kwargs.get("exchange") or "default" diff --git a/faststream/rabbit/parser.py b/faststream/rabbit/parser.py index 66fed6ed71..8fe02dc4b3 100644 --- a/faststream/rabbit/parser.py +++ b/faststream/rabbit/parser.py @@ -50,8 +50,8 @@ async def parse_message( raw_message=message, ) - @staticmethod async def decode_message( + self, msg: StreamMessage["IncomingMessage"], ) -> "DecodedMessage": """Decode a message.""" diff --git a/faststream/redis/parser.py b/faststream/redis/parser.py index bad91875ef..d42297af77 100644 --- a/faststream/redis/parser.py +++ b/faststream/redis/parser.py @@ -152,8 +152,8 @@ async def parse_message( correlation_id=headers.get("correlation_id", id_), ) - @staticmethod def _parse_data( + self, message: Mapping[str, Any], ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: return (*RawMessage.parse(message["data"]), []) @@ -169,8 +169,8 @@ def get_path(self, message: Mapping[str, Any]) -> "AnyDict": else: return {} - @staticmethod async def decode_message( + self, msg: "StreamMessage[MsgType]", ) -> DecodedMessage: return decode_message(msg) @@ -187,8 +187,8 @@ class RedisListParser(SimpleParser): class RedisBatchListParser(SimpleParser): msg_class = RedisBatchListMessage - @staticmethod def _parse_data( + self, message: Mapping[str, Any], ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: body: List[Any] = [] @@ -225,8 +225,8 @@ def _parse_data( class RedisBatchStreamParser(SimpleParser): msg_class = RedisBatchStreamMessage - @staticmethod def _parse_data( + self, message: Mapping[str, Any], ) -> Tuple[bytes, "AnyDict", List["AnyDict"]]: body: List[Any] = [] diff --git 
a/faststream/utils/context/types.py b/faststream/utils/context/types.py index ee7ce1b5bc..f27d6fe77c 100644 --- a/faststream/utils/context/types.py +++ b/faststream/utils/context/types.py @@ -58,10 +58,6 @@ def use(self, /, **kwargs: Any) -> AnyDict: Returns: A dictionary containing the updated keyword arguments - - Raises: - KeyError: If the parameter name is not found in the keyword arguments - AttributeError: If the parameter name is not a valid attribute """ name = f"{self.prefix}{self.name or self.param_name}" diff --git a/pyproject.toml b/pyproject.toml index fce5418594..1442a88388 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,8 +58,7 @@ dependencies = [ "anyio>=3.7.1,<5", "fast-depends>=2.4.0b0,<2.5.0", "typer>=0.9,!=0.12,<1", - "typing-extensions>=4.8.0,<4.12.1; python_version < '3.9'", - "typing-extensions>=4.8.0; python_version >= '3.9'", + "typing-extensions>=4.8.0", ] [project.optional-dependencies] @@ -112,7 +111,7 @@ types = [ lint = [ "faststream[types]", - "ruff==0.4.7", + "ruff==0.4.8", "bandit==1.7.8", "semgrep==1.74.0", "codespell==2.3.0", @@ -123,6 +122,7 @@ test-core = [ "pytest==8.2.1", "pytest-asyncio==0.23.7", "dirty-equals==0.7.1.post0", + "typing-extensions>=4.8.0,<4.12.1; python_version < '3.9'", # to fix dirty-equals ] testing = [ diff --git a/tests/brokers/nats/test_router.py b/tests/brokers/nats/test_router.py index a0951a06d1..c6c2b60bae 100644 --- a/tests/brokers/nats/test_router.py +++ b/tests/brokers/nats/test_router.py @@ -3,7 +3,7 @@ import pytest from faststream import Path -from faststream.nats import NatsPublisher, NatsRoute, NatsRouter +from faststream.nats import NatsBroker, NatsPublisher, NatsRoute, NatsRouter from tests.brokers.base.router import RouterLocalTestcase, RouterTestcase @@ -136,3 +136,15 @@ class TestRouterLocal(RouterLocalTestcase): broker_class = NatsRouter route_class = NatsRoute publisher_class = NatsPublisher + + def test_include_stream( + self, + router: NatsRouter, + pub_broker: NatsBroker, + ): + @router.subscriber("test", stream="stream") + async def handler(): ... 
+ + pub_broker.include_router(router) + + assert next(iter(pub_broker._stream_builder.objects.keys())) == "stream" From 4405abe2f69465ad89216bba87e28791d7fbeb3b Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Fri, 7 Jun 2024 19:56:29 +0300 Subject: [PATCH 38/43] Update Release Notes for 0.5.11 (#1511) Co-authored-by: Lancetnik <44573917+Lancetnik@users.noreply.github.com> --- docs/docs/en/release.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index e60c2acb49..8db02164fd 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,23 @@ hide: --- # Release Notes +## 0.5.11 + +### What's Changed +* Update Release Notes for 0.5.10 by @faststream-release-notes-updater in [#1482](https://github.com/airtai/faststream/pull/1482){.external-link target="_blank"} +* feat: provide with an ability to create default RMQ Exchange by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1485](https://github.com/airtai/faststream/pull/1485){.external-link target="_blank"} +* docs: fix typos by [@crazymidnight](https://github.com/crazymidnight){.external-link target="_blank"} in [#1489](https://github.com/airtai/faststream/pull/1489){.external-link target="_blank"} +* chore: update CI triggers to minify useless runs by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1483](https://github.com/airtai/faststream/pull/1483){.external-link target="_blank"} +* Update link to badges by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1496](https://github.com/airtai/faststream/pull/1496){.external-link target="_blank"} +* Run tests every day at 12:00 AM by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1497](https://github.com/airtai/faststream/pull/1497){.external-link target="_blank"} +* Chore: update deps by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1503](https://github.com/airtai/faststream/pull/1503){.external-link target="_blank"} +* fix: include NatsRouter streams to original broker by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1509](https://github.com/airtai/faststream/pull/1509){.external-link target="_blank"} + +### New Contributors +* [@crazymidnight](https://github.com/crazymidnight){.external-link target="_blank"} made their first contribution in [#1489](https://github.com/airtai/faststream/pull/1489){.external-link target="_blank"} + +**Full Changelog**: [#0.5.10...0.5.11](https://github.com/airtai/faststream/compare/0.5.10...0.5.11){.external-link target="_blank"} + ## 0.5.10 ### What's Changed From c20629c6fe0948f5eb35f13c7afc2b7dfe9584ed Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Mon, 10 Jun 2024 17:23:48 +0300 Subject: [PATCH 39/43] docs: update filters example (#1516) * docs: update filters examples * chore: update dependencies --- .github/workflows/publish_coverage.yml | 2 +- .../subscription/confluent/filter.py | 8 ++++---- .../subscription/kafka/filter.py | 8 ++++---- .../subscription/nats/filter.py | 8 ++++---- .../subscription/rabbit/filter.py | 8 ++++---- .../subscription/redis/filter.py | 8 ++++---- .../subscription/filtering/1.md | 20 +++++++++---------- pyproject.toml | 6 +++--- 8 files changed, 34 insertions(+), 34 deletions(-) diff --git a/.github/workflows/publish_coverage.yml 
b/.github/workflows/publish_coverage.yml index 04983682f8..99486e096b 100644 --- a/.github/workflows/publish_coverage.yml +++ b/.github/workflows/publish_coverage.yml @@ -19,7 +19,7 @@ jobs: - run: pip install smokeshow - - uses: dawidd6/action-download-artifact@v3.1.4 # nosemgrep + - uses: dawidd6/action-download-artifact@v5 # nosemgrep with: workflow: pr_tests.yaml workflow_conclusion: success diff --git a/docs/docs_src/getting_started/subscription/confluent/filter.py b/docs/docs_src/getting_started/subscription/confluent/filter.py index bb9deabbc3..f9cbfccf43 100644 --- a/docs/docs_src/getting_started/subscription/confluent/filter.py +++ b/docs/docs_src/getting_started/subscription/confluent/filter.py @@ -4,21 +4,21 @@ broker = KafkaBroker("localhost:9092") app = FastStream(broker) +subscriber = broker.subscriber("test-topic") -@broker.subscriber( - "test-topic", +@subscriber( filter=lambda msg: msg.content_type == "application/json", ) async def handle(name: str, user_id: int): assert name == "John" assert user_id == 1 - -@broker.subscriber("test-topic") +@subscriber async def default_handler(msg: str): assert msg == "Hello, FastStream!" + @app.after_startup async def test(): await broker.publish( diff --git a/docs/docs_src/getting_started/subscription/kafka/filter.py b/docs/docs_src/getting_started/subscription/kafka/filter.py index e0ae82f2f0..10737827a3 100644 --- a/docs/docs_src/getting_started/subscription/kafka/filter.py +++ b/docs/docs_src/getting_started/subscription/kafka/filter.py @@ -4,21 +4,21 @@ broker = KafkaBroker("localhost:9092") app = FastStream(broker) +subscriber = broker.subscriber("test-topic") -@broker.subscriber( - "test-topic", +@subscriber( filter=lambda msg: msg.content_type == "application/json", ) async def handle(name: str, user_id: int): assert name == "John" assert user_id == 1 - -@broker.subscriber("test-topic") +@subscriber async def default_handler(msg: str): assert msg == "Hello, FastStream!" + @app.after_startup async def test(): await broker.publish( diff --git a/docs/docs_src/getting_started/subscription/nats/filter.py b/docs/docs_src/getting_started/subscription/nats/filter.py index 6ab5913933..26c4419db1 100644 --- a/docs/docs_src/getting_started/subscription/nats/filter.py +++ b/docs/docs_src/getting_started/subscription/nats/filter.py @@ -4,21 +4,21 @@ broker = NatsBroker("nats://localhost:4222") app = FastStream(broker) +subscriber = broker.subscriber("test-subject") -@broker.subscriber( - "test-subject", +@subscriber( filter=lambda msg: msg.content_type == "application/json", ) async def handle(name: str, user_id: int): assert name == "John" assert user_id == 1 - -@broker.subscriber("test-subject") +@subscriber async def default_handler(msg: str): assert msg == "Hello, FastStream!" 
+ @app.after_startup async def test(): await broker.publish( diff --git a/docs/docs_src/getting_started/subscription/rabbit/filter.py b/docs/docs_src/getting_started/subscription/rabbit/filter.py index 9c50aa548b..73c6d3339b 100644 --- a/docs/docs_src/getting_started/subscription/rabbit/filter.py +++ b/docs/docs_src/getting_started/subscription/rabbit/filter.py @@ -4,21 +4,21 @@ broker = RabbitBroker("amqp://guest:guest@localhost:5672/") app = FastStream(broker) +subscriber = broker.subscriber("test-queue") -@broker.subscriber( - "test-queue", +@subscriber( filter=lambda msg: msg.content_type == "application/json", ) async def handle(name: str, user_id: int): assert name == "John" assert user_id == 1 - -@broker.subscriber("test-queue") +@subscriber async def default_handler(msg: str): assert msg == "Hello, FastStream!" + @app.after_startup async def test(): await broker.publish( diff --git a/docs/docs_src/getting_started/subscription/redis/filter.py b/docs/docs_src/getting_started/subscription/redis/filter.py index 02a017c8a9..ac116f8cf0 100644 --- a/docs/docs_src/getting_started/subscription/redis/filter.py +++ b/docs/docs_src/getting_started/subscription/redis/filter.py @@ -4,21 +4,21 @@ broker = RedisBroker("redis://localhost:6379") app = FastStream(broker) +subscriber = broker.subscriber("test-channel") -@broker.subscriber( - "test-channel", +@subscriber( filter=lambda msg: msg.content_type == "application/json", ) async def handle(name: str, user_id: int): assert name == "John" assert user_id == 1 - -@broker.subscriber("test-channel") +@subscriber async def default_handler(msg: str): assert msg == "Hello, FastStream!" + @app.after_startup async def test(): await broker.publish( diff --git a/docs/includes/getting_started/subscription/filtering/1.md b/docs/includes/getting_started/subscription/filtering/1.md index 4c45e97160..807a7a5e6e 100644 --- a/docs/includes/getting_started/subscription/filtering/1.md +++ b/docs/includes/getting_started/subscription/filtering/1.md @@ -1,24 +1,24 @@ === "AIOKafka" - ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/kafka/filter.py [ln:1-19] !} + ```python linenums="1" hl_lines="7 9-11 16" + {!> docs_src/getting_started/subscription/kafka/filter.py [ln:1-18] !} ``` === "Confluent" - ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/confluent/filter.py [ln:1-19] !} + ```python linenums="1" hl_lines="7 9-11 16" + {!> docs_src/getting_started/subscription/confluent/filter.py [ln:1-18] !} ``` === "RabbitMQ" - ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/rabbit/filter.py [ln:1-19] !} + ```python linenums="1" hl_lines="7 9-11 16" + {!> docs_src/getting_started/subscription/rabbit/filter.py [ln:1-18] !} ``` === "NATS" - ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/nats/filter.py [ln:1-19] !} + ```python linenums="1" hl_lines="7 9-11 16" + {!> docs_src/getting_started/subscription/nats/filter.py [ln:1-18] !} ``` === "Redis" - ```python linenums="1" hl_lines="10 17" - {!> docs_src/getting_started/subscription/redis/filter.py [ln:1-19] !} + ```python linenums="1" hl_lines="7 9-11 16" + {!> docs_src/getting_started/subscription/redis/filter.py [ln:1-18] !} ``` diff --git a/pyproject.toml b/pyproject.toml index 1442a88388..fb22a76a80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ otel = ["opentelemetry-sdk>=1.24.0,<2.0.0"] optionals = ["faststream[rabbit,kafka,confluent,nats,redis,otel]"] devdocs = 
[ - "mkdocs-material==9.5.25", + "mkdocs-material==9.5.26", "mkdocs-static-i18n==1.2.3", "mdx-include==1.4.2", "mkdocstrings[python]==0.25.1", @@ -113,13 +113,13 @@ lint = [ "faststream[types]", "ruff==0.4.8", "bandit==1.7.8", - "semgrep==1.74.0", + "semgrep==1.75.0", "codespell==2.3.0", ] test-core = [ "coverage[toml]==7.5.3", - "pytest==8.2.1", + "pytest==8.2.2", "pytest-asyncio==0.23.7", "dirty-equals==0.7.1.post0", "typing-extensions>=4.8.0,<4.12.1; python_version < '3.9'", # to fix dirty-equals From df2a0a138ad1784a76ba6689a6da1b661c279951 Mon Sep 17 00:00:00 2001 From: Kumaran Rajendhiran Date: Tue, 11 Jun 2024 10:44:29 +0530 Subject: [PATCH 40/43] Add config param to pass additional parameters to confluent-kafka-python (#1505) * Add config param to pass additional parameters to confluent-kafka-python * Raise error if ssl_context is passed * Add ConfluentConfig typed dict * Add docs for passing config * Fix mypy failure * Remove unused parameters * Bump version --- .codespell-whitelist.txt | 1 + .secrets.baseline | 4 +- docs/docs/SUMMARY.md | 14 + .../confluent/config/BrokerAddressFamily.md | 11 + .../confluent/config/BuiltinFeatures.md | 11 + .../confluent/config/ClientDNSLookup.md | 11 + .../confluent/config/CompressionCodec.md | 11 + .../confluent/config/CompressionType.md | 11 + .../confluent/config/ConfluentConfig.md | 11 + .../api/faststream/confluent/config/Debug.md | 11 + .../confluent/config/GroupProtocol.md | 11 + .../confluent/config/IsolationLevel.md | 11 + .../confluent/config/OffsetStoreMethod.md | 11 + .../confluent/config/SASLOAUTHBearerMethod.md | 11 + .../confluent/config/SecurityProtocol.md | 11 + .../en/confluent/additional-configuration.md | 27 ++ docs/docs/en/confluent/security.md | 28 +- docs/docs/navigation_template.txt | 1 + .../confluent/additional_config/__init__.py | 0 .../confluent/additional_config/app.py | 22 ++ docs/docs_src/confluent/security/basic.py | 5 +- docs/docs_src/confluent/security/plaintext.py | 5 +- .../confluent/security/sasl_scram256.py | 5 +- .../confluent/security/sasl_scram512.py | 5 +- faststream/__about__.py | 2 +- faststream/confluent/broker/broker.py | 42 +-- faststream/confluent/broker/registrator.py | 239 ---------------- faststream/confluent/client.py | 42 +-- faststream/confluent/config.py | 260 ++++++++++++++++++ faststream/confluent/fastapi/fastapi.py | 238 ---------------- faststream/confluent/router.py | 64 ----- faststream/confluent/schemas/params.py | 7 - faststream/confluent/security.py | 13 +- faststream/kafka/parser.py | 2 +- tests/brokers/confluent/test_security.py | 37 ++- .../confluent/additional_config/__init__.py | 0 .../confluent/additional_config/test_app.py | 15 + 37 files changed, 562 insertions(+), 648 deletions(-) create mode 100644 docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md create mode 100644 docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md create mode 100644 docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md create mode 100644 docs/docs/en/api/faststream/confluent/config/CompressionCodec.md create mode 100644 docs/docs/en/api/faststream/confluent/config/CompressionType.md create mode 100644 docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md create mode 100644 docs/docs/en/api/faststream/confluent/config/Debug.md create mode 100644 docs/docs/en/api/faststream/confluent/config/GroupProtocol.md create mode 100644 docs/docs/en/api/faststream/confluent/config/IsolationLevel.md create mode 100644 
docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md create mode 100644 docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md create mode 100644 docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md create mode 100644 docs/docs/en/confluent/additional-configuration.md create mode 100644 docs/docs_src/confluent/additional_config/__init__.py create mode 100644 docs/docs_src/confluent/additional_config/app.py create mode 100644 faststream/confluent/config.py create mode 100644 tests/docs/confluent/additional_config/__init__.py create mode 100644 tests/docs/confluent/additional_config/test_app.py diff --git a/.codespell-whitelist.txt b/.codespell-whitelist.txt index dcfed576bf..cd9d103e1e 100644 --- a/.codespell-whitelist.txt +++ b/.codespell-whitelist.txt @@ -1 +1,2 @@ dependant +unsecure diff --git a/.secrets.baseline b/.secrets.baseline index 6cfe352e2a..3fd4156bb0 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -128,7 +128,7 @@ "filename": "docs/docs/en/release.md", "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450", "is_verified": false, - "line_number": 1308, + "line_number": 1325, "is_secret": false } ], @@ -163,5 +163,5 @@ } ] }, - "generated_at": "2024-06-06T04:30:54Z" + "generated_at": "2024-06-10T09:56:52Z" } diff --git a/docs/docs/SUMMARY.md b/docs/docs/SUMMARY.md index 7f53b19fc5..e71b34d952 100644 --- a/docs/docs/SUMMARY.md +++ b/docs/docs/SUMMARY.md @@ -65,6 +65,7 @@ search: - [Acknowledgement](confluent/ack.md) - [Message Information](confluent/message.md) - [Security Configuration](confluent/security.md) + - [Additional Configuration](confluent/additional-configuration.md) - [RabbitMQ](rabbit/index.md) - [Subscription](rabbit/examples/index.md) - [Direct](rabbit/examples/direct.md) @@ -443,6 +444,19 @@ search: - [TopicPartition](api/faststream/confluent/client/TopicPartition.md) - [check_msg_error](api/faststream/confluent/client/check_msg_error.md) - [create_topics](api/faststream/confluent/client/create_topics.md) + - config + - [BrokerAddressFamily](api/faststream/confluent/config/BrokerAddressFamily.md) + - [BuiltinFeatures](api/faststream/confluent/config/BuiltinFeatures.md) + - [ClientDNSLookup](api/faststream/confluent/config/ClientDNSLookup.md) + - [CompressionCodec](api/faststream/confluent/config/CompressionCodec.md) + - [CompressionType](api/faststream/confluent/config/CompressionType.md) + - [ConfluentConfig](api/faststream/confluent/config/ConfluentConfig.md) + - [Debug](api/faststream/confluent/config/Debug.md) + - [GroupProtocol](api/faststream/confluent/config/GroupProtocol.md) + - [IsolationLevel](api/faststream/confluent/config/IsolationLevel.md) + - [OffsetStoreMethod](api/faststream/confluent/config/OffsetStoreMethod.md) + - [SASLOAUTHBearerMethod](api/faststream/confluent/config/SASLOAUTHBearerMethod.md) + - [SecurityProtocol](api/faststream/confluent/config/SecurityProtocol.md) - fastapi - [Context](api/faststream/confluent/fastapi/Context.md) - [KafkaRouter](api/faststream/confluent/fastapi/KafkaRouter.md) diff --git a/docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md b/docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md new file mode 100644 index 0000000000..bf5cfbaca7 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/BrokerAddressFamily.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.BrokerAddressFamily diff --git 
a/docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md b/docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md new file mode 100644 index 0000000000..41e324305d --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/BuiltinFeatures.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.BuiltinFeatures diff --git a/docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md b/docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md new file mode 100644 index 0000000000..15f67688f1 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/ClientDNSLookup.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.ClientDNSLookup diff --git a/docs/docs/en/api/faststream/confluent/config/CompressionCodec.md b/docs/docs/en/api/faststream/confluent/config/CompressionCodec.md new file mode 100644 index 0000000000..dd9640afd4 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/CompressionCodec.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.CompressionCodec diff --git a/docs/docs/en/api/faststream/confluent/config/CompressionType.md b/docs/docs/en/api/faststream/confluent/config/CompressionType.md new file mode 100644 index 0000000000..8139bfcdda --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/CompressionType.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.CompressionType diff --git a/docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md b/docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md new file mode 100644 index 0000000000..9ebd97c1ff --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/ConfluentConfig.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.ConfluentConfig diff --git a/docs/docs/en/api/faststream/confluent/config/Debug.md b/docs/docs/en/api/faststream/confluent/config/Debug.md new file mode 100644 index 0000000000..2036046f5d --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/Debug.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.Debug diff --git a/docs/docs/en/api/faststream/confluent/config/GroupProtocol.md b/docs/docs/en/api/faststream/confluent/config/GroupProtocol.md new file mode 100644 index 0000000000..a5cab4b1d9 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/GroupProtocol.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.GroupProtocol diff --git a/docs/docs/en/api/faststream/confluent/config/IsolationLevel.md b/docs/docs/en/api/faststream/confluent/config/IsolationLevel.md new file mode 100644 index 0000000000..d122261f0f --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/IsolationLevel.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - 
Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.IsolationLevel diff --git a/docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md b/docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md new file mode 100644 index 0000000000..4b203e65e9 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/OffsetStoreMethod.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.OffsetStoreMethod diff --git a/docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md b/docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md new file mode 100644 index 0000000000..2cb635c6b0 --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/SASLOAUTHBearerMethod.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.SASLOAUTHBearerMethod diff --git a/docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md b/docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md new file mode 100644 index 0000000000..8415d3214e --- /dev/null +++ b/docs/docs/en/api/faststream/confluent/config/SecurityProtocol.md @@ -0,0 +1,11 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 0.5 +--- + +::: faststream.confluent.config.SecurityProtocol diff --git a/docs/docs/en/confluent/additional-configuration.md b/docs/docs/en/confluent/additional-configuration.md new file mode 100644 index 0000000000..b2aa7276bc --- /dev/null +++ b/docs/docs/en/confluent/additional-configuration.md @@ -0,0 +1,27 @@ +--- +# 0.5 - API +# 2 - Release +# 3 - Contributing +# 5 - Template Page +# 10 - Default +search: + boost: 10 +--- + +# Passing Additional Configuration to confluent-kafka-python + +The `confluent-kafka-python` package is a Python wrapper around [librdkakfa](https://github.com/confluentinc/librdkafka), which is a C/C++ client library for Apache Kafka. + +`confluent-kafka-python` accepts a `config` dictionary that is then passed on to `librdkafka`. `librdkafka` provides plenty of [configuration properties](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md) to configure the Kafka client. + +**FastStream** also provides users with the ability to pass the config dictionary to `librdkafka` to provide greater customizability. + +## Example + +In the following example, we are setting the parameter `topic.metadata.refresh.fast.interval.ms`'s value to `300` instead of the default value `100` via the `config` parameter. + +```python linenums="1" hl_lines="15 16" +{! docs_src/confluent/additional_config/app.py !} +``` + +Similarly, you could use the `config` parameter to pass any [configuration properties](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md) to `librdkafka`. diff --git a/docs/docs/en/confluent/security.md b/docs/docs/en/confluent/security.md index bb9d960eca..7d505852fa 100644 --- a/docs/docs/en/confluent/security.md +++ b/docs/docs/en/confluent/security.md @@ -22,7 +22,7 @@ This chapter discusses the security options available in **FastStream** and how **Usage:** -```python linenums="1" hl_lines="4 7 9" +```python linenums="1" hl_lines="2 4 6" {! 
docs_src/confluent/security/basic.py !} ``` @@ -33,7 +33,7 @@ This chapter discusses the security options available in **FastStream** and how **Usage:** ```python linenums="1" -{! docs_src/confluent/security/plaintext.py [ln:1-10.25,11-] !} +{! docs_src/confluent/security/plaintext.py !} ``` **Using any SASL authentication without SSL:** @@ -58,10 +58,30 @@ If the user does not want to use SSL encryption without the warning getting logg === "SCRAM256" ```python linenums="1" - {!> docs_src/confluent/security/sasl_scram256.py [ln:1-10.25,11-] !} + {!> docs_src/confluent/security/sasl_scram256.py [ln:1-6.25,7-] !} ``` === "SCRAM512" ```python linenums="1" - {!> docs_src/confluent/security/sasl_scram512.py [ln:1-10.25,11-] !} + {!> docs_src/confluent/security/sasl_scram512.py [ln:1-6.25,7-] !} ``` + +### 4. Other security related usecases + +**Purpose**: If you want to pass additional values to `confluent-kafka-python`, you can pass a dictionary called `config` to `KafkaBroker`. For example, to pass your own certificate file: + +**Usage:** + +```python +from faststream.confluent import KafkaBroker +from faststream.security import SASLPlaintext + +security = SASLPlaintext( + username="admin", + password="password", # pragma: allowlist secret +) + +config = {"ssl.ca.location": "~/my_certs/CRT_cacerts.pem"} + +broker = KafkaBroker("localhost:9092", security=security, config=config) +``` diff --git a/docs/docs/navigation_template.txt b/docs/docs/navigation_template.txt index fa23f9c3c5..431bb4eb59 100644 --- a/docs/docs/navigation_template.txt +++ b/docs/docs/navigation_template.txt @@ -65,6 +65,7 @@ search: - [Acknowledgement](confluent/ack.md) - [Message Information](confluent/message.md) - [Security Configuration](confluent/security.md) + - [Additional Configuration](confluent/additional-configuration.md) - [RabbitMQ](rabbit/index.md) - [Subscription](rabbit/examples/index.md) - [Direct](rabbit/examples/direct.md) diff --git a/docs/docs_src/confluent/additional_config/__init__.py b/docs/docs_src/confluent/additional_config/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/docs/docs_src/confluent/additional_config/app.py b/docs/docs_src/confluent/additional_config/app.py new file mode 100644 index 0000000000..21338f7b97 --- /dev/null +++ b/docs/docs_src/confluent/additional_config/app.py @@ -0,0 +1,22 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.confluent import KafkaBroker + + +class HelloWorld(BaseModel): + msg: str = Field( + ..., + examples=["Hello"], + description="Demo hello world message", + ) + + +config = {"topic.metadata.refresh.fast.interval.ms": 300} +broker = KafkaBroker("localhost:9092", config=config) +app = FastStream(broker) + + +@broker.subscriber("hello_world") +async def on_hello_world(msg: HelloWorld, logger: Logger): + logger.info(msg) diff --git a/docs/docs_src/confluent/security/basic.py b/docs/docs_src/confluent/security/basic.py index afeb0c9672..8b25e167ef 100644 --- a/docs/docs_src/confluent/security/basic.py +++ b/docs/docs_src/confluent/security/basic.py @@ -1,9 +1,6 @@ -import ssl - from faststream.confluent import KafkaBroker from faststream.security import BaseSecurity -ssl_context = ssl.create_default_context() -security = BaseSecurity(ssl_context=ssl_context) +security = BaseSecurity(use_ssl=True) broker = KafkaBroker("localhost:9092", security=security) diff --git a/docs/docs_src/confluent/security/plaintext.py b/docs/docs_src/confluent/security/plaintext.py index 
26d8e2b9cf..2ad3d2d884 100644 --- a/docs/docs_src/confluent/security/plaintext.py +++ b/docs/docs_src/confluent/security/plaintext.py @@ -1,13 +1,10 @@ -import ssl - from faststream.confluent import KafkaBroker from faststream.security import SASLPlaintext -ssl_context = ssl.create_default_context() security = SASLPlaintext( - ssl_context=ssl_context, username="admin", password="password", # pragma: allowlist secret + use_ssl=True, ) broker = KafkaBroker("localhost:9092", security=security) diff --git a/docs/docs_src/confluent/security/sasl_scram256.py b/docs/docs_src/confluent/security/sasl_scram256.py index 1d853052ec..a805befef1 100644 --- a/docs/docs_src/confluent/security/sasl_scram256.py +++ b/docs/docs_src/confluent/security/sasl_scram256.py @@ -1,13 +1,10 @@ -import ssl - from faststream.confluent import KafkaBroker from faststream.security import SASLScram256 -ssl_context = ssl.create_default_context() security = SASLScram256( - ssl_context=ssl_context, username="admin", password="password", # pragma: allowlist secret + use_ssl=True, ) broker = KafkaBroker("localhost:9092", security=security) diff --git a/docs/docs_src/confluent/security/sasl_scram512.py b/docs/docs_src/confluent/security/sasl_scram512.py index feca46b3cd..d4737ff0b9 100644 --- a/docs/docs_src/confluent/security/sasl_scram512.py +++ b/docs/docs_src/confluent/security/sasl_scram512.py @@ -1,13 +1,10 @@ -import ssl - from faststream.confluent import KafkaBroker from faststream.security import SASLScram512 -ssl_context = ssl.create_default_context() security = SASLScram512( - ssl_context=ssl_context, username="admin", password="password", # pragma: allowlist secret + use_ssl=True, ) broker = KafkaBroker("localhost:9092", security=security) diff --git a/faststream/__about__.py b/faststream/__about__.py index 6e014d02ac..bec58210ad 100644 --- a/faststream/__about__.py +++ b/faststream/__about__.py @@ -1,6 +1,6 @@ """Simple and fast framework to create message brokers based microservices.""" -__version__ = "0.5.11" +__version__ = "0.5.12" SERVICE_NAME = f"faststream-{__version__}" diff --git a/faststream/confluent/broker/broker.py b/faststream/confluent/broker/broker.py index 95ceb2bfa2..3d62e1e1d3 100644 --- a/faststream/confluent/broker/broker.py +++ b/faststream/confluent/broker/broker.py @@ -28,6 +28,7 @@ AsyncConfluentProducer, _missing, ) +from faststream.confluent.config import ConfluentConfig from faststream.confluent.publisher.producer import AsyncConfluentFastProducer from faststream.confluent.schemas.params import ConsumerConnectionParams from faststream.confluent.security import parse_security @@ -110,11 +111,6 @@ def __init__( """ ), ] = 9 * 60 * 1000, - sasl_kerberos_service_name: str = "kafka", - sasl_kerberos_domain_name: Optional[str] = None, - sasl_oauth_token_provider: Annotated[ - Optional[str], Doc("OAuthBearer token provider.") - ] = None, loop: Optional[AbstractEventLoop] = None, client_id: Annotated[ Optional[str], @@ -128,6 +124,13 @@ def __init__( """ ), ] = SERVICE_NAME, + config: Annotated[ + Optional[ConfluentConfig], + Doc(""" + Extra configuration for the confluent-kafka-python + producer/consumer. See `confluent_kafka.Config `_. 
+ """), + ] = None, # publisher args acks: Annotated[ Union[Literal[0, 1, -1, "all"], object], @@ -160,14 +163,6 @@ def __init__( """ ), ] = _missing, - key_serializer: Annotated[ - Optional[Callable[[Any], bytes]], - Doc("Used to convert user-supplied keys to bytes."), - ] = None, - value_serializer: Annotated[ - Optional[Callable[[Any], bytes]], - Doc("used to convert user-supplied message values to bytes."), - ] = None, compression_type: Annotated[ Optional[Literal["gzip", "snappy", "lz4", "zstd"]], Doc( @@ -179,15 +174,6 @@ def __init__( """ ), ] = None, - max_batch_size: Annotated[ - int, - Doc( - """ - Maximum size of buffered data per partition. - After this amount `send` coroutine will block until batch is drained. - """ - ), - ] = 16 * 1024, partitioner: Annotated[ Union[ str, @@ -236,7 +222,6 @@ def __init__( """ ), ] = 0, - send_backoff_ms: int = 100, enable_idempotence: Annotated[ bool, Doc( @@ -362,25 +347,17 @@ def __init__( bootstrap_servers=servers, # both args client_id=client_id, - api_version=protocol_version, request_timeout_ms=request_timeout_ms, retry_backoff_ms=retry_backoff_ms, metadata_max_age_ms=metadata_max_age_ms, connections_max_idle_ms=connections_max_idle_ms, - sasl_kerberos_service_name=sasl_kerberos_service_name, - sasl_kerberos_domain_name=sasl_kerberos_domain_name, - sasl_oauth_token_provider=sasl_oauth_token_provider, loop=loop, # publisher args acks=acks, - key_serializer=key_serializer, - value_serializer=value_serializer, compression_type=compression_type, - max_batch_size=max_batch_size, partitioner=partitioner, max_request_size=max_request_size, linger_ms=linger_ms, - send_backoff_ms=send_backoff_ms, enable_idempotence=enable_idempotence, transactional_id=transactional_id, transaction_timeout_ms=transaction_timeout_ms, @@ -409,6 +386,7 @@ def __init__( ) self.client_id = client_id self._producer = None + self.config = config async def _close( self, @@ -449,6 +427,7 @@ async def _connect( # type: ignore[override] **kwargs, client_id=client_id, logger=self.logger, + config=self.config, ) self._producer = AsyncConfluentFastProducer( @@ -459,6 +438,7 @@ async def _connect( # type: ignore[override] AsyncConfluentConsumer, **filter_by_dict(ConsumerConnectionParams, kwargs), logger=self.logger, + config=self.config, ) async def start(self) -> None: diff --git a/faststream/confluent/broker/registrator.py b/faststream/confluent/broker/registrator.py index 277a77ef69..10c2584111 100644 --- a/faststream/confluent/broker/registrator.py +++ b/faststream/confluent/broker/registrator.py @@ -1,7 +1,6 @@ from typing import ( TYPE_CHECKING, Any, - Callable, Dict, Iterable, Literal, @@ -75,20 +74,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -209,20 +194,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. 
In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -254,36 +225,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. - """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -411,20 +352,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -545,20 +472,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -590,36 +503,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. - """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -747,20 +630,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." 
- ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -881,20 +750,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -926,36 +781,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. - """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -1086,20 +911,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -1220,20 +1031,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -1265,36 +1062,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. 
- """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -1417,8 +1184,6 @@ def subscriber( max_records=max_records, group_id=group_id, connection_data={ - "key_deserializer": key_deserializer, - "value_deserializer": value_deserializer, "fetch_max_wait_ms": fetch_max_wait_ms, "fetch_max_bytes": fetch_max_bytes, "fetch_min_bytes": fetch_min_bytes, @@ -1429,12 +1194,8 @@ def subscriber( "check_crcs": check_crcs, "partition_assignment_strategy": partition_assignment_strategy, "max_poll_interval_ms": max_poll_interval_ms, - "rebalance_timeout_ms": rebalance_timeout_ms, "session_timeout_ms": session_timeout_ms, "heartbeat_interval_ms": heartbeat_interval_ms, - "consumer_timeout_ms": consumer_timeout_ms, - "max_poll_records": max_poll_records, - "exclude_internal_topics": exclude_internal_topics, "isolation_level": isolation_level, }, is_manual=not auto_commit, diff --git a/faststream/confluent/client.py b/faststream/confluent/client.py index f1703c3694..763c218bf8 100644 --- a/faststream/confluent/client.py +++ b/faststream/confluent/client.py @@ -1,10 +1,8 @@ import asyncio -from ssl import SSLContext from time import time from typing import ( TYPE_CHECKING, Any, - Callable, Dict, Iterable, List, @@ -19,6 +17,7 @@ from pydantic import BaseModel from typing_extensions import Annotated, Doc +from faststream.confluent.config import ConfluentConfig from faststream.log import logger from faststream.utils.functions import call_or_await @@ -87,19 +86,13 @@ def __init__( client_id: Optional[str] = None, metadata_max_age_ms: int = 300000, request_timeout_ms: int = 40000, - api_version: str = "auto", acks: Any = _missing, - key_serializer: Optional[Callable[[bytes], bytes]] = None, - value_serializer: Optional[Callable[[bytes], bytes]] = None, compression_type: Optional[str] = None, - max_batch_size: int = 16384, partitioner: str = "consistent_random", max_request_size: int = 1048576, linger_ms: int = 0, - send_backoff_ms: int = 100, retry_backoff_ms: int = 100, security_protocol: str = "PLAINTEXT", - ssl_context: Optional[SSLContext] = None, connections_max_idle_ms: int = 540000, enable_idempotence: bool = False, transactional_id: Optional[Union[str, int]] = None, @@ -107,15 +100,16 @@ def __init__( sasl_mechanism: Optional[str] = None, sasl_plain_password: Optional[str] = None, sasl_plain_username: Optional[str] = None, - sasl_kerberos_service_name: str = "kafka", - sasl_kerberos_domain_name: Optional[str] = None, - sasl_oauth_token_provider: Optional[str] = None, + config: Optional[ConfluentConfig] = None, logger: Annotated[ Union["LoggerProto", None, object], Doc("User specified logger to pass into Context and log service messages."), ] = logger, ) -> None: self.logger = logger + + self.config: Dict[str, Any] = {} if config is None else dict(config) + if isinstance(bootstrap_servers, Iterable) and not isinstance( bootstrap_servers, str ): @@ -127,7 +121,7 @@ def __init__( if acks is _missing or acks == "all": acks = -1 - self.config = { + config_from_params = { # "topic.metadata.refresh.interval.ms": 1000, "bootstrap.servers": bootstrap_servers, "client.id": client_id, @@ -144,8 +138,9 @@ def __init__( "retry.backoff.ms": retry_backoff_ms, "security.protocol": security_protocol.lower(), "connections.max.idle.ms": connections_max_idle_ms, - "sasl.kerberos.service.name": sasl_kerberos_service_name, } + self.config = {**self.config, **config_from_params} + if sasl_mechanism: self.config.update( { @@ -273,13 +268,10 @@ def __init__( client_id: 
Optional[str] = "confluent-kafka-consumer", group_id: Optional[str] = None, group_instance_id: Optional[str] = None, - key_deserializer: Optional[Callable[[bytes], bytes]] = None, - value_deserializer: Optional[Callable[[bytes], bytes]] = None, fetch_max_wait_ms: int = 500, fetch_max_bytes: int = 52428800, fetch_min_bytes: int = 1, max_partition_fetch_bytes: int = 1 * 1024 * 1024, - request_timeout_ms: int = 40 * 1000, retry_backoff_ms: int = 100, auto_offset_reset: str = "latest", enable_auto_commit: bool = True, @@ -288,29 +280,24 @@ def __init__( metadata_max_age_ms: int = 5 * 60 * 1000, partition_assignment_strategy: Union[str, List[Any]] = "roundrobin", max_poll_interval_ms: int = 300000, - rebalance_timeout_ms: Optional[int] = None, session_timeout_ms: int = 10000, heartbeat_interval_ms: int = 3000, - consumer_timeout_ms: int = 200, - max_poll_records: Optional[int] = None, - ssl_context: Optional[SSLContext] = None, security_protocol: str = "PLAINTEXT", - api_version: str = "auto", - exclude_internal_topics: bool = True, connections_max_idle_ms: int = 540000, isolation_level: str = "read_uncommitted", sasl_mechanism: Optional[str] = None, sasl_plain_password: Optional[str] = None, sasl_plain_username: Optional[str] = None, - sasl_kerberos_service_name: str = "kafka", - sasl_kerberos_domain_name: Optional[str] = None, - sasl_oauth_token_provider: Optional[str] = None, + config: Optional[ConfluentConfig] = None, logger: Annotated[ Union["LoggerProto", None, object], Doc("User specified logger to pass into Context and log service messages."), ] = logger, ) -> None: self.logger = logger + + self.config: Dict[str, Any] = {} if config is None else dict(config) + if group_id is None: group_id = "confluent-kafka-consumer-group" @@ -328,7 +315,7 @@ def __init__( for x in partition_assignment_strategy ] ) - self.config = { + config_from_params = { "allow.auto.create.topics": True, # "topic.metadata.refresh.interval.ms": 1000, "bootstrap.servers": bootstrap_servers, @@ -353,8 +340,9 @@ def __init__( "security.protocol": security_protocol.lower(), "connections.max.idle.ms": connections_max_idle_ms, "isolation.level": isolation_level, - "sasl.kerberos.service.name": sasl_kerberos_service_name, } + self.config = {**self.config, **config_from_params} + if sasl_mechanism: self.config.update( { diff --git a/faststream/confluent/config.py b/faststream/confluent/config.py new file mode 100644 index 0000000000..9ec3518471 --- /dev/null +++ b/faststream/confluent/config.py @@ -0,0 +1,260 @@ +from enum import Enum +from typing import Any, Callable, TypedDict + + +class BuiltinFeatures(Enum): + gzip = "gzip" + snappy = "snappy" + ssl = "ssl" + sasl = "sasl" + regex = "regex" + lz4 = "lz4" + sasl_gssapi = "sasl_gssapi" + sasl_plain = "sasl_plain" + sasl_scram = "sasl_scram" + plugins = "plugins" + zstd = "zstd" + sasl_oauthbearer = "sasl_oauthbearer" + http = "http" + oidc = "oidc" + + +class Debug(Enum): + generic = "generic" + broker = "broker" + topic = "topic" + metadata = "metadata" + feature = "feature" + queue = "queue" + msg = "msg" + protocol = "protocol" + cgrp = "cgrp" + security = "security" + fetch = "fetch" + interceptor = "interceptor" + plugin = "plugin" + consumer = "consumer" + admin = "admin" + eos = "eos" + mock = "mock" + assignor = "assignor" + conf = "conf" + all = "all" + + +class BrokerAddressFamily(Enum): + any = "any" + v4 = "v4" + v6 = "v6" + + +class SecurityProtocol(Enum): + plaintext = "plaintext" + ssl = "ssl" + sasl_plaintext = "sasl_plaintext" + sasl_ssl = "sasl_ssl" + + 
+class SASLOAUTHBearerMethod(Enum): + default = "default" + oidc = "oidc" + + +class GroupProtocol(Enum): + classic = "classic" + consumer = "consumer" + + +class OffsetStoreMethod(Enum): + none = "none" + file = "file" + broker = "broker" + + +class IsolationLevel(Enum): + read_uncommitted = "read_uncommitted" + read_committed = "read_committed" + + +class CompressionCodec(Enum): + none = "none" + gzip = "gzip" + snappy = "snappy" + lz4 = "lz4" + zstd = "zstd" + + +class CompressionType(Enum): + none = "none" + gzip = "gzip" + snappy = "snappy" + lz4 = "lz4" + zstd = "zstd" + + +class ClientDNSLookup(Enum): + use_all_dns_ips = "use_all_dns_ips" + resolve_canonical_bootstrap_servers_only = ( + "resolve_canonical_bootstrap_servers_only" + ) + + +ConfluentConfig = TypedDict( + "ConfluentConfig", + { + "builtin.features": BuiltinFeatures, + "client.id": str, + "metadata.broker.list": str, + "bootstrap.servers": str, + "message.max.bytes": int, + "message.copy.max.bytes": int, + "receive.message.max.bytes": int, + "max.in.flight.requests.per.connection": int, + "max.in.flight": int, + "topic.metadata.refresh.interval.ms": int, + "metadata.max.age.ms": int, + "topic.metadata.refresh.fast.interval.ms": int, + "topic.metadata.refresh.fast.cnt": int, + "topic.metadata.refresh.sparse": bool, + "topic.metadata.propagation.max.ms": int, + "topic.blacklist": str, + "debug": Debug, + "socket.timeout.ms": int, + "socket.blocking.max.ms": int, + "socket.send.buffer.bytes": int, + "socket.receive.buffer.bytes": int, + "socket.keepalive.enable": bool, + "socket.nagle.disable": bool, + "socket.max.fails": int, + "broker.address.ttl": int, + "broker.address.family": BrokerAddressFamily, + "socket.connection.setup.timeout.ms": int, + "connections.max.idle.ms": int, + "reconnect.backoff.jitter.ms": int, + "reconnect.backoff.ms": int, + "reconnect.backoff.max.ms": int, + "statistics.interval.ms": int, + "enabled_events": int, + "error_cb": Callable[..., Any], + "throttle_cb": Callable[..., Any], + "stats_cb": Callable[..., Any], + "log_cb": Callable[..., Any], + "log_level": int, + "log.queue": bool, + "log.thread.name": bool, + "enable.random.seed": bool, + "log.connection.close": bool, + "background_event_cb": Callable[..., Any], + "socket_cb": Callable[..., Any], + "connect_cb": Callable[..., Any], + "closesocket_cb": Callable[..., Any], + "open_cb": Callable[..., Any], + "resolve_cb": Callable[..., Any], + "opaque": str, + "default_topic_conf": str, + "internal.termination.signal": int, + "api.version.request": bool, + "api.version.request.timeout.ms": int, + "api.version.fallback.ms": int, + "broker.version.fallback": str, + "allow.auto.create.topics": bool, + "security.protocol": SecurityProtocol, + "ssl.cipher.suites": str, + "ssl.curves.list": str, + "ssl.sigalgs.list": str, + "ssl.key.location": str, + "ssl.key.password": str, + "ssl.key.pem": str, + "ssl_key": str, + "ssl.certificate.location": str, + "ssl.certificate.pem": str, + "ssl_certificate": str, + "ssl.ca.location": str, + "ssl.ca.pem": str, + "ssl_ca": str, + "ssl.ca.certificate.stores": str, + "ssl.crl.location": str, + "ssl.keystore.location": str, + "ssl.keystore.password": str, + "ssl.providers": str, + "ssl.engine.location": str, + "ssl.engine.id": str, + "ssl_engine_callback_data": str, + "enable.ssl.certificate.verification": bool, + "ssl.endpoint.identification.algorithm": str, + "ssl.certificate.verify_cb": Callable[..., Any], + "sasl.mechanisms": str, + "sasl.mechanism": str, + "sasl.kerberos.service.name": str, + 
"sasl.kerberos.principal": str, + "sasl.kerberos.kinit.cmd": str, + "sasl.kerberos.keytab": str, + "sasl.kerberos.min.time.before.relogin": int, + "sasl.username": str, + "sasl.password": str, + "sasl.oauthbearer.config": str, + "enable.sasl.oauthbearer.unsecure.jwt": bool, + "oauthbearer_token_refresh_cb": Callable[..., Any], + "sasl.oauthbearer.method": SASLOAUTHBearerMethod, + "sasl.oauthbearer.client.id": str, + "sasl.oauthbearer.client.secret": str, + "sasl.oauthbearer.scope": str, + "sasl.oauthbearer.extensions": str, + "sasl.oauthbearer.token.endpoint.url": str, + "plugin.library.paths": str, + "interceptors": str, + "group.id": str, + "group.instance.id": str, + "partition.assignment.strategy": str, + "session.timeout.ms": str, + "heartbeat.interval.ms": str, + "group.protocol.type": str, + "group.protocol": GroupProtocol, + "group.remote.assignor": str, + "coordinator.query.interval.ms": int, + "max.poll.interval.ms": int, + "enable.auto.commit": bool, + "auto.commit.interval.ms": int, + "enable.auto.offset.store": bool, + "queued.min.messages": int, + "queued.max.messages.kbytes": int, + "fetch.wait.max.ms": int, + "fetch.queue.backoff.ms": int, + "fetch.message.max.bytes": int, + "max.partition.fetch.bytes": int, + "fetch.max.bytes": int, + "fetch.min.bytes": int, + "fetch.error.backoff.ms": int, + "offset.store.method": OffsetStoreMethod, + "isolation.level": IsolationLevel, + "consume_cb": Callable[..., Any], + "rebalance_cb": Callable[..., Any], + "offset_commit_cb": Callable[..., Any], + "enable.partition.eof": bool, + "check.crcs": bool, + "client.rack": str, + "transactional.id": str, + "transaction.timeout.ms": int, + "enable.idempotence": bool, + "enable.gapless.guarantee": bool, + "queue.buffering.max.messages": int, + "queue.buffering.max.kbytes": int, + "queue.buffering.max.ms": float, + "linger.ms": float, + "message.send.max.retries": int, + "retries": int, + "retry.backoff.ms": int, + "retry.backoff.max.ms": int, + "queue.buffering.backpressure.threshold": int, + "compression.codec": CompressionCodec, + "compression.type": CompressionType, + "batch.num.messages": int, + "batch.size": int, + "delivery.report.only.error": bool, + "dr_cb": Callable[..., Any], + "dr_msg_cb": Callable[..., Any], + "sticky.partitioning.linger.ms": int, + "client.dns.lookup": ClientDNSLookup, + }, + total=False, +) diff --git a/faststream/confluent/fastapi/fastapi.py b/faststream/confluent/fastapi/fastapi.py index 1897243d6c..b1f5c83590 100644 --- a/faststream/confluent/fastapi/fastapi.py +++ b/faststream/confluent/fastapi/fastapi.py @@ -423,20 +423,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -557,20 +543,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. 
Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -602,36 +574,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. - """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -882,20 +824,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -1016,20 +944,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -1061,36 +975,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. - """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -1327,20 +1211,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -1461,20 +1331,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. 
In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -1506,36 +1362,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. - """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -1789,20 +1615,6 @@ def subscriber( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -1923,20 +1735,6 @@ def subscriber( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -1968,36 +1766,6 @@ def subscriber( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. 
- """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -2236,8 +2004,6 @@ def subscriber( topics[0], # path *topics, group_id=group_id, - key_deserializer=key_deserializer, - value_deserializer=value_deserializer, fetch_max_wait_ms=fetch_max_wait_ms, fetch_max_bytes=fetch_max_bytes, fetch_min_bytes=fetch_min_bytes, @@ -2248,12 +2014,8 @@ def subscriber( check_crcs=check_crcs, partition_assignment_strategy=partition_assignment_strategy, max_poll_interval_ms=max_poll_interval_ms, - rebalance_timeout_ms=rebalance_timeout_ms, session_timeout_ms=session_timeout_ms, heartbeat_interval_ms=heartbeat_interval_ms, - consumer_timeout_ms=consumer_timeout_ms, - max_poll_records=max_poll_records, - exclude_internal_topics=exclude_internal_topics, isolation_level=isolation_level, batch=batch, max_records=max_records, diff --git a/faststream/confluent/router.py b/faststream/confluent/router.py index f24a40e263..6cff87009c 100644 --- a/faststream/confluent/router.py +++ b/faststream/confluent/router.py @@ -159,20 +159,6 @@ def __init__( """ ), ] = None, - key_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "key and returns a deserialized one." - ), - ] = None, - value_deserializer: Annotated[ - Optional[Callable[[bytes], Any]], - Doc( - "Any callable that takes a raw message `bytes` " - "value and returns a deserialized value." - ), - ] = None, fetch_max_wait_ms: Annotated[ int, Doc( @@ -293,20 +279,6 @@ def __init__( """ ), ] = 5 * 60 * 1000, - rebalance_timeout_ms: Annotated[ - Optional[int], - Doc( - """ - The maximum time server will wait for this - consumer to rejoin the group in a case of rebalance. In Java client - this behaviour is bound to `max.poll.interval.ms` configuration, - but as ``aiokafka`` will rejoin the group in the background, we - decouple this setting to allow finer tuning by users that use - `ConsumerRebalanceListener` to delay rebalacing. Defaults - to ``session_timeout_ms`` - """ - ), - ] = None, session_timeout_ms: Annotated[ int, Doc( @@ -338,36 +310,6 @@ def __init__( """ ), ] = 3 * 1000, - consumer_timeout_ms: Annotated[ - int, - Doc( - """ - Maximum wait timeout for background fetching - routine. Mostly defines how fast the system will see rebalance and - request new data for new partitions. - """ - ), - ] = 200, - max_poll_records: Annotated[ - Optional[int], - Doc( - """ - The maximum number of records returned in a - single call by batch consumer. Has no limit by default. - """ - ), - ] = None, - exclude_internal_topics: Annotated[ - bool, - Doc( - """ - Whether records from internal topics - (such as offsets) should be exposed to the consumer. If set to True - the only way to receive records from an internal topic is - subscribing to it. 
- """ - ), - ] = True, isolation_level: Annotated[ Literal["read_uncommitted", "read_committed"], Doc( @@ -481,8 +423,6 @@ def __init__( *topics, publishers=publishers, group_id=group_id, - key_deserializer=key_deserializer, - value_deserializer=value_deserializer, fetch_max_wait_ms=fetch_max_wait_ms, fetch_max_bytes=fetch_max_bytes, fetch_min_bytes=fetch_min_bytes, @@ -493,12 +433,8 @@ def __init__( check_crcs=check_crcs, partition_assignment_strategy=partition_assignment_strategy, max_poll_interval_ms=max_poll_interval_ms, - rebalance_timeout_ms=rebalance_timeout_ms, session_timeout_ms=session_timeout_ms, heartbeat_interval_ms=heartbeat_interval_ms, - consumer_timeout_ms=consumer_timeout_ms, - max_poll_records=max_poll_records, - exclude_internal_topics=exclude_internal_topics, isolation_level=isolation_level, max_records=max_records, batch_timeout_ms=batch_timeout_ms, diff --git a/faststream/confluent/schemas/params.py b/faststream/confluent/schemas/params.py index eb2d724b23..7df2ef67eb 100644 --- a/faststream/confluent/schemas/params.py +++ b/faststream/confluent/schemas/params.py @@ -1,4 +1,3 @@ -import ssl from asyncio import AbstractEventLoop from typing import List, Literal, Optional, Union @@ -11,14 +10,12 @@ class ConsumerConnectionParams(TypedDict, total=False): bootstrap_servers: Union[str, List[str]] loop: Optional[AbstractEventLoop] client_id: str - request_timeout_ms: int retry_backoff_ms: int metadata_max_age_ms: int security_protocol: Literal[ "SSL", "PLAINTEXT", ] - api_version: str connections_max_idle_ms: int sasl_mechanism: Literal[ "PLAIN", @@ -29,7 +26,3 @@ class ConsumerConnectionParams(TypedDict, total=False): ] sasl_plain_password: str sasl_plain_username: str - sasl_kerberos_service_name: str - sasl_kerberos_domain_name: str - ssl_context: ssl.SSLContext - sasl_oauth_token_provider: str diff --git a/faststream/confluent/security.py b/faststream/confluent/security.py index dea4a0bc98..e244ec34da 100644 --- a/faststream/confluent/security.py +++ b/faststream/confluent/security.py @@ -1,6 +1,8 @@ +import ssl import warnings from typing import TYPE_CHECKING, Optional +from faststream.exceptions import SetupError from faststream.security import ( BaseSecurity, SASLPlaintext, @@ -14,6 +16,11 @@ def parse_security(security: Optional[BaseSecurity]) -> "AnyDict": + if security and isinstance(security.ssl_context, ssl.SSLContext): + raise SetupError( + "ssl_context in not supported by confluent-kafka-python, please use config instead." 
+ ) + if security is None: return {} elif type(security) == BaseSecurity: @@ -31,12 +38,11 @@ def parse_security(security: Optional[BaseSecurity]) -> "AnyDict": def _parse_base_security(security: BaseSecurity) -> "AnyDict": return { "security_protocol": "SSL" if security.use_ssl else "PLAINTEXT", - "ssl_context": security.ssl_context, } def _parse_sasl_plaintext(security: SASLPlaintext) -> "AnyDict": - if security.ssl_context is None: + if not security.use_ssl: warnings.warn( message=ssl_not_set_error_msg, category=RuntimeWarning, @@ -45,7 +51,6 @@ def _parse_sasl_plaintext(security: SASLPlaintext) -> "AnyDict": return { "security_protocol": "SASL_SSL" if security.use_ssl else "SASL_PLAINTEXT", - "ssl_context": security.ssl_context, "sasl_mechanism": "PLAIN", "sasl_plain_username": security.username, "sasl_plain_password": security.password, @@ -55,7 +60,6 @@ def _parse_sasl_plaintext(security: SASLPlaintext) -> "AnyDict": def _parse_sasl_scram256(security: SASLScram256) -> "AnyDict": return { "security_protocol": "SASL_SSL" if security.use_ssl else "SASL_PLAINTEXT", - "ssl_context": security.ssl_context, "sasl_mechanism": "SCRAM-SHA-256", "sasl_plain_username": security.username, "sasl_plain_password": security.password, @@ -65,7 +69,6 @@ def _parse_sasl_scram256(security: SASLScram256) -> "AnyDict": def _parse_sasl_scram512(security: SASLScram512) -> "AnyDict": return { "security_protocol": "SASL_SSL" if security.use_ssl else "SASL_PLAINTEXT", - "ssl_context": security.ssl_context, "sasl_mechanism": "SCRAM-SHA-512", "sasl_plain_username": security.username, "sasl_plain_password": security.password, diff --git a/faststream/kafka/parser.py b/faststream/kafka/parser.py index f6c9964584..44886c6028 100644 --- a/faststream/kafka/parser.py +++ b/faststream/kafka/parser.py @@ -24,7 +24,7 @@ async def parse_message( ) -> "StreamMessage[ConsumerRecord]": """Parses a Kafka message.""" headers = {i: j.decode() for i, j in message.headers} - handler: Optional["LogicSubscriber[Any]"] = context.get_local("handler_") + handler: Optional[LogicSubscriber[Any]] = context.get_local("handler_") return self.msg_class( body=message.value, headers=headers, diff --git a/tests/brokers/confluent/test_security.py b/tests/brokers/confluent/test_security.py index 7a11bb7119..108c8d5f65 100644 --- a/tests/brokers/confluent/test_security.py +++ b/tests/brokers/confluent/test_security.py @@ -1,4 +1,3 @@ -import ssl from contextlib import contextmanager from typing import Tuple from unittest.mock import AsyncMock, MagicMock, patch @@ -6,6 +5,7 @@ import pytest from docs.docs_src.confluent.security.ssl_warning import test_without_ssl_warning +from faststream.exceptions import SetupError __all__ = ["test_without_ssl_warning"] @@ -34,16 +34,33 @@ async def test_base_security(): producer_call_kwargs = producer.call_args.kwargs call_kwargs = {} - call_kwargs["security_protocol"] = "SSL" assert call_kwargs.items() <= producer_call_kwargs.items() - assert ( - producer_call_kwargs["security_protocol"] - == call_kwargs["security_protocol"] - ) - assert type(producer_call_kwargs["ssl_context"]) == ssl.SSLContext +@pytest.mark.asyncio() +@pytest.mark.confluent() +async def test_base_security_pass_ssl_context(): + import ssl + + from faststream.confluent import KafkaBroker + from faststream.security import BaseSecurity + + ssl_context = ssl.create_default_context() + security = BaseSecurity(ssl_context=ssl_context) + + basic_broker = KafkaBroker("localhost:9092", security=security) + + with patch_aio_consumer_and_producer(), 
pytest.raises( + SetupError, match="not supported" + ) as e: + async with basic_broker: + pass + + assert ( + str(e.value) + == "ssl_context in not supported by confluent-kafka-python, please use config instead." + ) @pytest.mark.asyncio() @@ -70,8 +87,6 @@ async def test_scram256(): == call_kwargs["security_protocol"] ) - assert type(producer_call_kwargs["ssl_context"]) == ssl.SSLContext - @pytest.mark.asyncio() @pytest.mark.confluent() @@ -97,8 +112,6 @@ async def test_scram512(): == call_kwargs["security_protocol"] ) - assert type(producer_call_kwargs["ssl_context"]) == ssl.SSLContext - @pytest.mark.asyncio() @pytest.mark.confluent() @@ -123,5 +136,3 @@ async def test_plaintext(): producer_call_kwargs["security_protocol"] == call_kwargs["security_protocol"] ) - - assert type(producer_call_kwargs["ssl_context"]) == ssl.SSLContext diff --git a/tests/docs/confluent/additional_config/__init__.py b/tests/docs/confluent/additional_config/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/docs/confluent/additional_config/test_app.py b/tests/docs/confluent/additional_config/test_app.py new file mode 100644 index 0000000000..6a8ec32945 --- /dev/null +++ b/tests/docs/confluent/additional_config/test_app.py @@ -0,0 +1,15 @@ +import pytest + +from docs.docs_src.confluent.additional_config.app import ( + HelloWorld, + broker, + on_hello_world, +) +from faststream.confluent import TestKafkaBroker + + +@pytest.mark.asyncio() +async def test_base_app(): + async with TestKafkaBroker(broker): + await broker.publish(HelloWorld(msg="First Hello"), "hello_world") + on_hello_world.mock.assert_called_with(dict(HelloWorld(msg="First Hello"))) From f054192156f5a9d577ac5356e1ad8b5c75a574e3 Mon Sep 17 00:00:00 2001 From: "faststream-release-notes-updater[bot]" <153718812+faststream-release-notes-updater[bot]@users.noreply.github.com> Date: Tue, 11 Jun 2024 13:09:07 +0530 Subject: [PATCH 41/43] Update Release Notes for 0.5.12 (#1517) Co-authored-by: kumaranvpl <7011056+kumaranvpl@users.noreply.github.com> --- docs/docs/en/release.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/docs/docs/en/release.md b/docs/docs/en/release.md index 8db02164fd..feadf44839 100644 --- a/docs/docs/en/release.md +++ b/docs/docs/en/release.md @@ -12,6 +12,28 @@ hide: --- # Release Notes +## 0.5.12 + +### What's Changed + +Now, `FastStream` provides users with the ability to pass the `config` dictionary to `confluent-kafka-python` for greater customizability. The following example sets the parameter `topic.metadata.refresh.fast.interval.ms`'s value to `300` instead of the default value `100` via the `config` parameter. 
+
+```python
+from faststream import FastStream
+from faststream.confluent import KafkaBroker
+
+config = {"topic.metadata.refresh.fast.interval.ms": 300}
+broker = KafkaBroker("localhost:9092", config=config)
+app = FastStream(broker)
+```
+
+* Update Release Notes for 0.5.11 by @faststream-release-notes-updater in [#1511](https://github.com/airtai/faststream/pull/1511){.external-link target="_blank"}
+* docs: update filters example by [@Lancetnik](https://github.com/Lancetnik){.external-link target="_blank"} in [#1516](https://github.com/airtai/faststream/pull/1516){.external-link target="_blank"}
+* Add config param to pass additional parameters to confluent-kafka-python by [@kumaranvpl](https://github.com/kumaranvpl){.external-link target="_blank"} in [#1505](https://github.com/airtai/faststream/pull/1505){.external-link target="_blank"}
+
+
+**Full Changelog**: [#0.5.11...0.5.12](https://github.com/airtai/faststream/compare/0.5.11...0.5.12){.external-link target="_blank"}
+
 ## 0.5.11
 
 ### What's Changed

From f7a5c194717f5d71e9aafc43d6b321f4746c16e9 Mon Sep 17 00:00:00 2001
From: Pastukhov Nikita
Date: Wed, 12 Jun 2024 09:08:47 +0300
Subject: [PATCH 42/43] feat: nats filter JS subscription support (#1519)

* feat: support NATS multiple subjects JS subscription
* feat: NATS test client supports filter subscription
* refactor: add cache for NATS log subject calculation
---
 faststream/nats/broker/registrator.py  |  6 ++--
 faststream/nats/subscriber/factory.py  | 16 +++++++++-
 faststream/nats/subscriber/usecase.py  | 42 +++++++++++++++++++++++---
 faststream/nats/testing.py             |  5 ++-
 tests/brokers/nats/test_consume.py     | 33 ++++++++++++++++++--
 tests/brokers/nats/test_test_client.py | 25 ++++++++++++---
 6 files changed, 111 insertions(+), 16 deletions(-)

diff --git a/faststream/nats/broker/registrator.py b/faststream/nats/broker/registrator.py
index a77b439b98..bcd0bab0a2 100644
--- a/faststream/nats/broker/registrator.py
+++ b/faststream/nats/broker/registrator.py
@@ -42,7 +42,7 @@ def subscriber(  # type: ignore[override]
         subject: Annotated[
             str,
             Doc("NATS subject to subscribe."),
-        ],
+        ] = "",
         queue: Annotated[
             str,
             Doc(
@@ -209,7 +209,7 @@ def subscriber(  # type: ignore[override]
 
         You can use it as a handler decorator `@broker.subscriber(...)`.
         """
-        if stream := self._stream_builder.create(stream):
+        if (stream := self._stream_builder.create(stream)) and subject:
             stream.add_subject(subject)
 
         subscriber = cast(
@@ -323,7 +323,7 @@ def publisher(  # type: ignore[override]
 
         Or you can create a publisher object to call it lately - `broker.publisher(...).publish(...)`.
""" - if stream := self._stream_builder.create(stream): + if (stream := self._stream_builder.create(stream)) and subject: stream.add_subject(subject) publisher = cast( diff --git a/faststream/nats/subscriber/factory.py b/faststream/nats/subscriber/factory.py index 2ae7c9b820..1161c66550 100644 --- a/faststream/nats/subscriber/factory.py +++ b/faststream/nats/subscriber/factory.py @@ -4,6 +4,7 @@ DEFAULT_SUB_PENDING_BYTES_LIMIT, DEFAULT_SUB_PENDING_MSGS_LIMIT, ) +from nats.js.api import ConsumerConfig from nats.js.client import ( DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, DEFAULT_JS_SUB_PENDING_MSGS_LIMIT, @@ -80,6 +81,11 @@ def create_subscriber( if pull_sub is not None and stream is None: raise SetupError("Pull subscriber can be used only with a stream") + if not subject and not config: + raise SetupError("You must provide either `subject` or `config` option.") + + config = config or ConsumerConfig(filter_subjects=[]) + if stream: # TODO: pull & queue warning # TODO: push & durable warning @@ -91,7 +97,6 @@ def create_subscriber( or DEFAULT_JS_SUB_PENDING_BYTES_LIMIT, "durable": durable, "stream": stream.name, - "config": config, } if pull_sub is not None: @@ -120,6 +125,7 @@ def create_subscriber( if obj_watch is not None: return AsyncAPIObjStoreWatchSubscriber( subject=subject, + config=config, obj_watch=obj_watch, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -131,6 +137,7 @@ def create_subscriber( if kv_watch is not None: return AsyncAPIKeyValueWatchSubscriber( subject=subject, + config=config, kv_watch=kv_watch, broker_dependencies=broker_dependencies, broker_middlewares=broker_middlewares, @@ -144,6 +151,7 @@ def create_subscriber( return AsyncAPIConcurrentCoreSubscriber( max_workers=max_workers, subject=subject, + config=config, queue=queue, # basic args extra_options=extra_options, @@ -162,6 +170,7 @@ def create_subscriber( else: return AsyncAPICoreSubscriber( subject=subject, + config=config, queue=queue, # basic args extra_options=extra_options, @@ -185,6 +194,7 @@ def create_subscriber( pull_sub=pull_sub, stream=stream, subject=subject, + config=config, # basic args extra_options=extra_options, # Subscriber args @@ -204,6 +214,7 @@ def create_subscriber( max_workers=max_workers, stream=stream, subject=subject, + config=config, queue=queue, # basic args extra_options=extra_options, @@ -226,6 +237,7 @@ def create_subscriber( pull_sub=pull_sub, stream=stream, subject=subject, + config=config, # basic args extra_options=extra_options, # Subscriber args @@ -245,6 +257,7 @@ def create_subscriber( pull_sub=pull_sub, stream=stream, subject=subject, + config=config, # basic args extra_options=extra_options, # Subscriber args @@ -264,6 +277,7 @@ def create_subscriber( stream=stream, subject=subject, queue=queue, + config=config, # basic args extra_options=extra_options, # Subscriber args diff --git a/faststream/nats/subscriber/usecase.py b/faststream/nats/subscriber/usecase.py index 322ef41aa3..d64cc2cf2d 100644 --- a/faststream/nats/subscriber/usecase.py +++ b/faststream/nats/subscriber/usecase.py @@ -20,7 +20,7 @@ import anyio from fast_depends.dependencies import Depends from nats.errors import ConnectionClosedError, TimeoutError -from nats.js.api import ObjectInfo +from nats.js.api import ConsumerConfig, ObjectInfo from nats.js.kv import KeyValue from typing_extensions import Annotated, Doc, override @@ -73,6 +73,7 @@ def __init__( self, *, subject: str, + config: "ConsumerConfig", extra_options: Optional[AnyDict], # Subscriber args default_parser: 
"AsyncCallable", @@ -88,6 +89,7 @@ def __init__( include_in_schema: bool, ) -> None: self.subject = subject + self.config = config self.extra_options = extra_options or {} @@ -205,10 +207,20 @@ def build_log_context( def add_prefix(self, prefix: str) -> None: """Include Subscriber in router.""" - self.subject = "".join((prefix, self.subject)) + if self.subject: + self.subject = "".join((prefix, self.subject)) + else: + self.config.filter_subjects = [ + "".join((prefix, subject)) + for subject in (self.config.filter_subjects or ()) + ] + + @cached_property + def _resolved_subject_string(self) -> str: + return self.subject or ", ".join(self.config.filter_subjects or ()) def __hash__(self) -> int: - return self.get_routing_hash(self.subject) + return self.get_routing_hash(self._resolved_subject_string) @staticmethod def get_routing_hash( @@ -229,6 +241,7 @@ def __init__( self, *, subject: str, + config: "ConsumerConfig", # default args extra_options: Optional[AnyDict], # Subscriber args @@ -246,6 +259,7 @@ def __init__( ) -> None: super().__init__( subject=subject, + config=config, extra_options=extra_options, # subscriber args default_parser=default_parser, @@ -368,6 +382,7 @@ def __init__( *, # default args subject: str, + config: "ConsumerConfig", queue: str, extra_options: Optional[AnyDict], # Subscriber args @@ -387,6 +402,7 @@ def __init__( super().__init__( subject=subject, + config=config, extra_options=extra_options, # subscriber args default_parser=parser_.parse_message, @@ -439,6 +455,7 @@ def __init__( max_workers: int, # default args subject: str, + config: "ConsumerConfig", queue: str, extra_options: Optional[AnyDict], # Subscriber args @@ -456,6 +473,7 @@ def __init__( max_workers=max_workers, # basic args subject=subject, + config=config, queue=queue, extra_options=extra_options, # Propagated args @@ -494,6 +512,7 @@ def __init__( stream: "JStream", # default args subject: str, + config: "ConsumerConfig", queue: str, extra_options: Optional[AnyDict], # Subscriber args @@ -514,6 +533,7 @@ def __init__( super().__init__( subject=subject, + config=config, extra_options=extra_options, # subscriber args default_parser=parser_.parse_message, @@ -540,7 +560,7 @@ def get_log_context( """Log context factory using in `self.consume` scope.""" return self.build_log_context( message=message, - subject=self.subject, + subject=self._resolved_subject_string, queue=self.queue, stream=self.stream.name, ) @@ -560,6 +580,7 @@ async def _create_subscription( # type: ignore[override] subject=self.clear_subject, queue=self.queue, cb=self.consume, + config=self.config, **self.extra_options, ) @@ -574,6 +595,7 @@ def __init__( stream: "JStream", # default args subject: str, + config: "ConsumerConfig", queue: str, extra_options: Optional[AnyDict], # Subscriber args @@ -592,6 +614,7 @@ def __init__( # basic args stream=stream, subject=subject, + config=config, queue=queue, extra_options=extra_options, # Propagated args @@ -619,6 +642,7 @@ async def _create_subscription( # type: ignore[override] subject=self.clear_subject, queue=self.queue, cb=self._put_msg, + config=self.config, **self.extra_options, ) @@ -633,6 +657,7 @@ def __init__( stream: "JStream", # default args subject: str, + config: "ConsumerConfig", extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, @@ -651,6 +676,7 @@ def __init__( # basic args stream=stream, subject=subject, + config=config, extra_options=extra_options, queue="", # Propagated args @@ -708,6 +734,7 @@ def __init__( pull_sub: "PullSub", stream: "JStream", 
subject: str, + config: "ConsumerConfig", extra_options: Optional[AnyDict], # Subscriber args no_ack: bool, @@ -726,6 +753,7 @@ def __init__( pull_sub=pull_sub, stream=stream, subject=subject, + config=config, extra_options=extra_options, # Propagated args no_ack=no_ack, @@ -765,6 +793,7 @@ def __init__( *, # default args subject: str, + config: "ConsumerConfig", stream: "JStream", pull_sub: "PullSub", extra_options: Optional[AnyDict], @@ -786,6 +815,7 @@ def __init__( super().__init__( subject=subject, + config=config, extra_options=extra_options, # subscriber args default_parser=parser.parse_batch, @@ -837,6 +867,7 @@ def __init__( self, *, subject: str, + config: "ConsumerConfig", kv_watch: "KvWatch", broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[KeyValue.Entry]"], @@ -850,6 +881,7 @@ def __init__( super().__init__( subject=subject, + config=config, extra_options=None, no_ack=True, no_reply=True, @@ -941,6 +973,7 @@ def __init__( self, *, subject: str, + config: "ConsumerConfig", obj_watch: "ObjWatch", broker_dependencies: Iterable[Depends], broker_middlewares: Iterable["BrokerMiddleware[List[Msg]]"], @@ -955,6 +988,7 @@ def __init__( super().__init__( subject=subject, + config=config, extra_options=None, no_ack=True, no_reply=True, diff --git a/faststream/nats/testing.py b/faststream/nats/testing.py index 34230cb788..4d13333c5f 100644 --- a/faststream/nats/testing.py +++ b/faststream/nats/testing.py @@ -97,7 +97,10 @@ async def publish( # type: ignore[override] ): continue - if is_subject_match_wildcard(subject, handler.clear_subject): + if is_subject_match_wildcard(subject, handler.clear_subject) or any( + is_subject_match_wildcard(subject, filter_subject) + for filter_subject in (handler.config.filter_subjects or ()) + ): msg: Union[List[PatchedMessage], PatchedMessage] if (pull := getattr(handler, "pull_sub", None)) and pull.batch: msg = [incoming] diff --git a/tests/brokers/nats/test_consume.py b/tests/brokers/nats/test_consume.py index 60ac90a7f3..96e40f447b 100644 --- a/tests/brokers/nats/test_consume.py +++ b/tests/brokers/nats/test_consume.py @@ -1,11 +1,11 @@ import asyncio -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest from nats.aio.msg import Msg from faststream.exceptions import AckMessage -from faststream.nats import JStream, NatsBroker, PullSub +from faststream.nats import ConsumerConfig, JStream, NatsBroker, PullSub from faststream.nats.annotations import NatsMessage from tests.brokers.base.consume import BrokerRealConsumeTestcase from tests.tools import spy_decorator @@ -40,6 +40,35 @@ def subscriber(m): assert event.is_set() + async def test_consume_with_filter( + self, + queue, + mock: Mock, + event: asyncio.Event, + ): + consume_broker = self.get_broker() + + @consume_broker.subscriber( + config=ConsumerConfig(filter_subjects=[f"{queue}.a"]), + stream=JStream(queue, subjects=[f"{queue}.*"]), + ) + def subscriber(m): + mock(m) + event.set() + + async with self.patch_broker(consume_broker) as br: + await br.start() + await asyncio.wait( + ( + asyncio.create_task(br.publish(2, f"{queue}.a")), + asyncio.create_task(event.wait()), + ), + timeout=3, + ) + + assert event.is_set() + mock.assert_called_once_with(2) + async def test_consume_pull( self, queue: str, diff --git a/tests/brokers/nats/test_test_client.py b/tests/brokers/nats/test_test_client.py index ebbd1c7887..9718b558b6 100644 --- a/tests/brokers/nats/test_test_client.py +++ b/tests/brokers/nats/test_test_client.py @@ -4,7 +4,7 @@ 
from faststream import BaseMiddleware from faststream.exceptions import SetupError -from faststream.nats import JStream, NatsBroker, PullSub, TestNatsBroker +from faststream.nats import ConsumerConfig, JStream, NatsBroker, PullSub, TestNatsBroker from tests.brokers.base.testclient import BrokerTestclientTestcase @@ -208,8 +208,6 @@ async def test_consume_batch( self, queue: str, stream: JStream, - event: asyncio.Event, - mock, ): broker = self.get_broker() @@ -219,9 +217,26 @@ async def test_consume_batch( pull_sub=PullSub(1, batch=True), ) def subscriber(m): - mock(m) - event.set() + pass async with TestNatsBroker(broker) as br: await br.publish("hello", queue) subscriber.mock.assert_called_once_with(["hello"]) + + async def test_consume_with_filter( + self, + queue, + ): + broker = self.get_broker() + + @broker.subscriber( + config=ConsumerConfig(filter_subjects=[f"{queue}.a"]), + stream=JStream(queue, subjects=[f"{queue}.*"]), + ) + def subscriber(m): + pass + + async with TestNatsBroker(broker) as br: + await br.publish(1, f"{queue}.b") + await br.publish(2, f"{queue}.a") + subscriber.mock.assert_called_once_with(2) From bb6997fc8a23e00eb8c0f1b582f40314eaf61cf2 Mon Sep 17 00:00:00 2001 From: Pastukhov Nikita Date: Wed, 12 Jun 2024 11:36:59 +0300 Subject: [PATCH 43/43] chore: downgrade upload-artifact action (#1520) --- .github/workflows/publish_coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_coverage.yml b/.github/workflows/publish_coverage.yml index 99486e096b..3624ff6601 100644 --- a/.github/workflows/publish_coverage.yml +++ b/.github/workflows/publish_coverage.yml @@ -19,7 +19,7 @@ jobs: - run: pip install smokeshow - - uses: dawidd6/action-download-artifact@v5 # nosemgrep + - uses: dawidd6/action-download-artifact@v4 # nosemgrep with: workflow: pr_tests.yaml workflow_conclusion: success
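To illustrate the JetStream filter-subject subscription introduced in PATCH 42, here is a minimal sketch that mirrors the usage exercised by the new `tests/brokers/nats/test_consume.py` and `tests/brokers/nats/test_test_client.py` cases. The stream and subject names (`orders`, `orders.*`, `orders.created`) are illustrative placeholders, not taken from the patch.

```python
from faststream import FastStream
from faststream.nats import ConsumerConfig, JStream, NatsBroker

broker = NatsBroker()
app = FastStream(broker)


# No plain `subject` argument: the consumer is narrowed to "orders.created"
# via ConsumerConfig.filter_subjects, while the JStream declares the wider
# "orders.*" subject space.
@broker.subscriber(
    config=ConsumerConfig(filter_subjects=["orders.created"]),
    stream=JStream("orders", subjects=["orders.*"]),
)
async def handle_created(msg: str) -> None:
    print(msg)
```

As the updated `TestNatsBroker` test suggests, messages published to a non-matching subject such as `orders.deleted` are not delivered to this handler, while messages on `orders.created` are.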