diff --git a/README.md b/README.md
index da1b79a..748615a 100644
--- a/README.md
+++ b/README.md
@@ -212,8 +212,9 @@ Development

 1. Python >= 3.8
 2. Install dependencies `requirements/dev.txt`
-3. We use `isort` library to order and format our imports, and we check it using `flake8-isort` library (automatically on `flake8` run).
-For convenience you may run `isort .` to order imports.
+3. We use the `isort` library to order and format our imports, and `black` to format the code.
+Both are checked with the `flake8-isort` and `flake8-black` plugins (automatically on every `flake8` run).
+For convenience, you may run `isort . && black .` to format the code.

 Testing
diff --git a/dj_cqrs/_validation.py b/dj_cqrs/_validation.py
index 8b2404a..0fba123 100644
--- a/dj_cqrs/_validation.py
+++ b/dj_cqrs/_validation.py
@@ -76,9 +76,10 @@ def _validate_master(cqrs_settings):

 def _validate_master_auto_update_fields(master_settings):
     if 'CQRS_AUTO_UPDATE_FIELDS' in master_settings:
-        assert isinstance(master_settings['CQRS_AUTO_UPDATE_FIELDS'], bool), (
-            'CQRS master CQRS_AUTO_UPDATE_FIELDS must be bool.'
-        )
+        assert isinstance(
+            master_settings['CQRS_AUTO_UPDATE_FIELDS'],
+            bool,
+        ), 'CQRS master CQRS_AUTO_UPDATE_FIELDS must be bool.'
     else:
         master_settings['CQRS_AUTO_UPDATE_FIELDS'] = DEFAULT_MASTER_AUTO_UPDATE_FIELDS
@@ -94,7 +95,8 @@ def _validate_master_message_ttl(master_settings):
         # TODO: raise error in 2.0.0
         logger.warning(
             'Settings CQRS_MESSAGE_TTL=%s is invalid, using default %s.',
-            message_ttl, DEFAULT_MASTER_MESSAGE_TTL,
+            message_ttl,
+            DEFAULT_MASTER_MESSAGE_TTL,
         )
         master_settings['CQRS_MESSAGE_TTL'] = DEFAULT_MASTER_MESSAGE_TTL
     else:
@@ -167,7 +169,8 @@ def _validate_replica_max_retries(replica_settings):
         # TODO: raise error in 2.0.0
         logger.warning(
             'Replica setting CQRS_MAX_RETRIES=%s is invalid, using default %s.',
-            max_retries, DEFAULT_REPLICA_MAX_RETRIES,
+            max_retries,
+            DEFAULT_REPLICA_MAX_RETRIES,
         )
         replica_settings['CQRS_MAX_RETRIES'] = DEFAULT_REPLICA_MAX_RETRIES
     else:
@@ -184,7 +187,8 @@ def _validate_replica_retry_delay(replica_settings):
         # TODO: raise error in 2.0.0
         logger.warning(
             'Replica setting CQRS_RETRY_DELAY=%s is invalid, using default %s.',
-            retry_delay, DEFAULT_REPLICA_RETRY_DELAY,
+            retry_delay,
+            DEFAULT_REPLICA_RETRY_DELAY,
         )
         replica_settings['CQRS_RETRY_DELAY'] = DEFAULT_REPLICA_RETRY_DELAY
@@ -199,7 +203,8 @@ def _validate_replica_delay_queue_max_size(replica_settings):
         # TODO: raise error in 2.0.0
         logger.warning(
             'Settings delay_queue_max_size=%s is invalid, using default %s.',
-            max_qsize, DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE,
+            max_qsize,
+            DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE,
         )
         max_qsize = DEFAULT_REPLICA_DELAY_QUEUE_MAX_SIZE
diff --git a/dj_cqrs/controller/consumer.py b/dj_cqrs/controller/consumer.py
index 76f7a3e..c3428bc 100644
--- a/dj_cqrs/controller/consumer.py
+++ b/dj_cqrs/controller/consumer.py
@@ -15,7 +15,7 @@

 def consume(payload):
-    """ Consumer controller.
+    """Consumer controller.

     :param dj_cqrs.dataclasses.TransportPayload payload: Consumed payload from master service.
     """
@@ -31,9 +31,14 @@ def consume(payload):

 def route_signal_to_replica_model(
-    signal_type, cqrs_id, instance_data, previous_data=None, meta=None, queue=None,
+    signal_type,
+    cqrs_id,
+    instance_data,
+    previous_data=None,
+    meta=None,
+    queue=None,
 ):
-    """ Routes signal to model method to create/update/delete replica instance.
+    """Routes signal to model method to create/update/delete replica instance.

     :param dj_cqrs.constants.SignalType signal_type: Consumed signal type.
:param str cqrs_id: Replica model CQRS unique identifier. @@ -85,6 +90,10 @@ def route_signal_to_replica_model( logger.error( '{0}\nCQRS {1} error: pk = {2}, cqrs_revision = {3} ({4}).'.format( - str(e), signal_type, pk_value, cqrs_revision, model_cls.CQRS_ID, + str(e), + signal_type, + pk_value, + cqrs_revision, + model_cls.CQRS_ID, ), ) diff --git a/dj_cqrs/controller/producer.py b/dj_cqrs/controller/producer.py index c3ac2d3..c5a4051 100644 --- a/dj_cqrs/controller/producer.py +++ b/dj_cqrs/controller/producer.py @@ -4,7 +4,7 @@ def produce(payload): - """ Producer controller. + """Producer controller. :param dj_cqrs.dataclasses.TransportPayload payload: TransportPayload. """ diff --git a/dj_cqrs/dataclasses.py b/dj_cqrs/dataclasses.py index 0e68e45..0164ec1 100644 --- a/dj_cqrs/dataclasses.py +++ b/dj_cqrs/dataclasses.py @@ -126,7 +126,7 @@ def retries(self): @retries.setter def retries(self, value): - assert value >= 0, "Payload retries field should be 0 or positive integer." + assert value >= 0, 'Payload retries field should be 0 or positive integer.' self.__retries = value def to_dict(self) -> dict: @@ -157,7 +157,4 @@ def is_expired(self): Returns: (bool): True if payload is expired, False otherwise. """ - return ( - self.__expires is not None - and self.__expires <= timezone.now() - ) + return self.__expires is not None and self.__expires <= timezone.now() diff --git a/dj_cqrs/delay.py b/dj_cqrs/delay.py index f4fc163..71e1300 100644 --- a/dj_cqrs/delay.py +++ b/dj_cqrs/delay.py @@ -27,7 +27,7 @@ class DelayQueue: def __init__(self, max_size=None): if max_size is not None: - assert max_size > 0, "Delay queue max_size should be positive integer." + assert max_size > 0, 'Delay queue max_size should be positive integer.' self._max_size = max_size self._queue = PriorityQueue() @@ -63,19 +63,18 @@ def put(self, delay_message): """ assert isinstance(delay_message, DelayMessage) if self.full(): - raise Full("Delay queue is full") - - self._queue.put(( - delay_message.eta.timestamp(), - delay_message.delivery_tag, - delay_message, - )) + raise Full('Delay queue is full') + + self._queue.put( + ( + delay_message.eta.timestamp(), + delay_message.delivery_tag, + delay_message, + ), + ) def qsize(self): return self._queue.qsize() def full(self): - return ( - self._max_size is not None - and self.qsize() >= self._max_size - ) + return self._max_size is not None and self.qsize() >= self._max_size diff --git a/dj_cqrs/management/commands/cqrs_bulk_dump.py b/dj_cqrs/management/commands/cqrs_bulk_dump.py index 118a372..fb6dad9 100644 --- a/dj_cqrs/management/commands/cqrs_bulk_dump.py +++ b/dj_cqrs/management/commands/cqrs_bulk_dump.py @@ -17,30 +17,35 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - '--cqrs-id', '-c', + '--cqrs-id', + '-c', help='CQRS_ID of the master model', type=str, required=True, ) parser.add_argument( - '--output', '-o', + '--output', + '-o', help='Output file for dumping (- for writing to stdout)', type=str, default=None, ) parser.add_argument( - '--batch', '-b', + '--batch', + '-b', help='Batch size', type=int, default=10000, ) parser.add_argument( - '--progress', '-p', + '--progress', + '-p', help='Display progress', action='store_true', ) parser.add_argument( - '--force', '-f', + '--force', + '-f', help='Override output file', action='store_true', ) @@ -63,8 +68,8 @@ def handle(self, *args, **options): file=sys.stderr, ) for qs in batch_qs( - model.relate_cqrs_serialization(model._default_manager.order_by().all()), - 
batch_size=batch_size, + model.relate_cqrs_serialization(model._default_manager.order_by().all()), + batch_size=batch_size, ): ts = time.time() cs = counter @@ -76,9 +81,14 @@ def handle(self, *args, **options): ) success_counter += 1 except Exception as e: - print('\nDump record failed for pk={0}: {1}: {2}'.format( - instance.pk, type(e).__name__, str(e), - ), file=sys.stderr) + print( + '\nDump record failed for pk={0}: {1}: {2}'.format( + instance.pk, + type(e).__name__, + str(e), + ), + file=sys.stderr, + ) if progress: rate = (counter - cs) / (time.time() - ts) percent = 100 * counter / db_count @@ -86,13 +96,23 @@ def handle(self, *args, **options): sys.stderr.write( '\r{0} of {1} processed - {2}% with ' 'rate {3:.1f} rps, to go {4} ...{5:20}'.format( - counter, db_count, int(percent), rate, str(eta), ' ', - )) + counter, + db_count, + int(percent), + rate, + str(eta), + ' ', + ), + ) sys.stderr.flush() - print('Done!\n{0} instance(s) saved.\n{1} instance(s) processed.'.format( - success_counter, counter, - ), file=sys.stderr) + print( + 'Done!\n{0} instance(s) saved.\n{1} instance(s) processed.'.format( + success_counter, + counter, + ), + file=sys.stderr, + ) @staticmethod def _get_model(options): diff --git a/dj_cqrs/management/commands/cqrs_bulk_load.py b/dj_cqrs/management/commands/cqrs_bulk_load.py index d1afa9f..5aceb47 100644 --- a/dj_cqrs/management/commands/cqrs_bulk_load.py +++ b/dj_cqrs/management/commands/cqrs_bulk_load.py @@ -15,19 +15,23 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - '--input', '-i', + '--input', + '-i', help='Input file for loading (- for reading from stdin)', - type=str, required=True, + type=str, + required=True, ) parser.add_argument( - '--clear', '-c', + '--clear', + '-c', help='Delete existing models', type=bool, required=False, default=False, ) parser.add_argument( - '--batch', '-b', + '--batch', + '-b', help='Batch size', type=int, default=10000, @@ -58,7 +62,7 @@ def handle(self, *args, **options): try: model._default_manager.all().delete() except DatabaseError: - raise CommandError("Delete operation fails!") + raise CommandError('Delete operation fails!') self._process(f, model, batch_size) diff --git a/dj_cqrs/management/commands/cqrs_consume.py b/dj_cqrs/management/commands/cqrs_consume.py index 2d2d94c..fe8cc3a 100644 --- a/dj_cqrs/management/commands/cqrs_consume.py +++ b/dj_cqrs/management/commands/cqrs_consume.py @@ -17,9 +17,11 @@ def consume(**kwargs): import django + django.setup() from dj_cqrs.transport import current_transport + try: current_transport.consume(**kwargs) except KeyboardInterrupt: @@ -34,15 +36,14 @@ def _display_path(path): class WorkersManager: - def __init__( - self, - consume_kwargs, - workers=1, - reload=False, - ignore_paths=None, - sigint_timeout=5, - sigkill_timeout=1, + self, + consume_kwargs, + workers=1, + reload=False, + ignore_paths=None, + sigint_timeout=5, + sigkill_timeout=1, ): self.pool = [] self.workers = workers @@ -137,10 +138,7 @@ def add_arguments(self, parser): parser.add_argument( '--reload', '-r', - help=( - 'Enable reload signal SIGHUP and autoreload ' - 'on file changes' - ), + help=('Enable reload signal SIGHUP and autoreload ' 'on file changes'), action='store_true', default=False, ) @@ -170,17 +168,16 @@ def add_arguments(self, parser): ) def handle( - self, - *args, - workers=1, - cqrs_id=None, - reload=False, - ignore_paths=None, - sigint_timeout=5, - sigkill_timeout=1, - **options, + self, + *args, + workers=1, + cqrs_id=None, + reload=False, + 
ignore_paths=None, + sigint_timeout=5, + sigkill_timeout=1, + **options, ): - paths_to_ignore = None if ignore_paths: paths_to_ignore = [Path(p).resolve() for p in ignore_paths.split(',')] diff --git a/dj_cqrs/management/commands/cqrs_dead_letters.py b/dj_cqrs/management/commands/cqrs_dead_letters.py index cfd6f8b..5b52bf8 100644 --- a/dj_cqrs/management/commands/cqrs_dead_letters.py +++ b/dj_cqrs/management/commands/cqrs_dead_letters.py @@ -12,7 +12,6 @@ class RabbitMQTransportService(RabbitMQTransport): - @classmethod def get_consumer_settings(cls): return cls._get_consumer_settings() @@ -50,7 +49,8 @@ def handle(self, *args, **options): queue_name, dead_letter_queue_name, *_ = RabbitMQTransportService.get_consumer_settings() dead_letters_queue = RabbitMQTransportService.declare_queue( - channel, dead_letter_queue_name, + channel, + dead_letter_queue_name, ) dead_letters_count = dead_letters_queue.method.message_count consumer_generator = channel.consume( @@ -72,12 +72,15 @@ def handle(self, *args, **options): def check_transport(self): if not issubclass(current_transport, RabbitMQTransport): - raise CommandError("Dead letters commands available only for RabbitMQTransport.") + raise CommandError('Dead letters commands available only for RabbitMQTransport.') def init_broker(self): host, port, creds, exchange = RabbitMQTransportService.get_common_settings() connection, channel = RabbitMQTransportService.create_connection( - host, port, creds, exchange, + host, + port, + creds, + exchange, ) queue_name, dead_letter_queue_name, *_ = RabbitMQTransportService.get_consumer_settings() @@ -96,9 +99,9 @@ def init_broker(self): return channel, connection def handle_retry(self, channel, consumer_generator, dead_letters_count): - self.stdout.write("Total dead letters: {0}".format(dead_letters_count)) + self.stdout.write('Total dead letters: {0}'.format(dead_letters_count)) for i in range(1, dead_letters_count + 1): - self.stdout.write("Retrying: {0}/{1}".format(i, dead_letters_count)) + self.stdout.write('Retrying: {0}/{1}'.format(i, dead_letters_count)) method_frame, properties, body = next(consumer_generator) dct = ujson.loads(body) @@ -122,7 +125,7 @@ def handle_dump(self, consumer_generator, dead_letters_count): self.stdout.write(body.decode('utf-8')) def handle_purge(self, channel, dead_letter_queue_name, dead_letter_count): - self.stdout.write("Total dead letters: {0}".format(dead_letter_count)) + self.stdout.write('Total dead letters: {0}'.format(dead_letter_count)) if dead_letter_count > 0: channel.queue_purge(dead_letter_queue_name) - self.stdout.write("Purged") + self.stdout.write('Purged') diff --git a/dj_cqrs/management/commands/cqrs_deleted_diff_master.py b/dj_cqrs/management/commands/cqrs_deleted_diff_master.py index 9e594c4..f1a616d 100644 --- a/dj_cqrs/management/commands/cqrs_deleted_diff_master.py +++ b/dj_cqrs/management/commands/cqrs_deleted_diff_master.py @@ -32,7 +32,8 @@ def handle(self, *args, **options): model.objects.filter( pk__in=master_data, ).values_list( - 'pk', flat=True, + 'pk', + flat=True, ), ) diff_ids = list(master_data - exist_pks) diff --git a/dj_cqrs/management/commands/cqrs_deleted_diff_replica.py b/dj_cqrs/management/commands/cqrs_deleted_diff_replica.py index 233fc31..3552e45 100644 --- a/dj_cqrs/management/commands/cqrs_deleted_diff_replica.py +++ b/dj_cqrs/management/commands/cqrs_deleted_diff_replica.py @@ -18,19 +18,22 @@ def serialize_package(cls, package): def add_arguments(self, parser): parser.add_argument( - '--cqrs-id', '-cid', + '--cqrs-id', + 
'-cid', help='CQRS_ID of the replica model', type=str, required=True, ) parser.add_argument( - '--filter', '-f', + '--filter', + '-f', help='Filter kwargs', type=str, default=None, ) parser.add_argument( - '--batch', '-b', + '--batch', + '-b', help='Batch size', type=int, default=10000, diff --git a/dj_cqrs/management/commands/cqrs_diff_master.py b/dj_cqrs/management/commands/cqrs_diff_master.py index 2fabf2f..825c1ab 100644 --- a/dj_cqrs/management/commands/cqrs_diff_master.py +++ b/dj_cqrs/management/commands/cqrs_diff_master.py @@ -18,19 +18,22 @@ def serialize_package(cls, package): def add_arguments(self, parser): parser.add_argument( - '--cqrs-id', '-cid', + '--cqrs-id', + '-cid', help='CQRS_ID of the master model', type=str, required=True, ) parser.add_argument( - '--filter', '-f', + '--filter', + '-f', help='Filter kwargs', type=str, default=None, ) parser.add_argument( - '--batch', '-b', + '--batch', + '-b', help='Batch size', type=int, default=10000, diff --git a/dj_cqrs/management/commands/cqrs_diff_replica.py b/dj_cqrs/management/commands/cqrs_diff_replica.py index 03dd17c..10b31dc 100644 --- a/dj_cqrs/management/commands/cqrs_diff_replica.py +++ b/dj_cqrs/management/commands/cqrs_diff_replica.py @@ -29,9 +29,13 @@ def handle(self, *args, **options): for package_line in f: master_data = self.deserialize_in(package_line) - qs = model._default_manager.filter( - pk__in=master_data.keys(), - ).order_by().only('pk', 'cqrs_revision') + qs = ( + model._default_manager.filter( + pk__in=master_data.keys(), + ) + .order_by() + .only('pk', 'cqrs_revision') + ) replica_data = {instance.pk: instance.cqrs_revision for instance in qs} diff_ids = set() diff --git a/dj_cqrs/management/commands/cqrs_diff_sync.py b/dj_cqrs/management/commands/cqrs_diff_sync.py index 9abf9bb..c875434 100644 --- a/dj_cqrs/management/commands/cqrs_diff_sync.py +++ b/dj_cqrs/management/commands/cqrs_diff_sync.py @@ -18,13 +18,15 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - '--batch', '-b', + '--batch', + '-b', help='Batch size', type=int, default=DEFAULT_BATCH, ) parser.add_argument( - '--progress', '-p', + '--progress', + '-p', help='Display progress', action='store_true', ) diff --git a/dj_cqrs/management/commands/cqrs_sync.py b/dj_cqrs/management/commands/cqrs_sync.py index 4aa73f7..640c4b8 100644 --- a/dj_cqrs/management/commands/cqrs_sync.py +++ b/dj_cqrs/management/commands/cqrs_sync.py @@ -22,31 +22,36 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - '--cqrs-id', '-cid', + '--cqrs-id', + '-cid', help='CQRS_ID of the master model', type=str, required=True, ) parser.add_argument( - '--filter', '-f', + '--filter', + '-f', help='Filter kwargs', type=str, default=None, ) parser.add_argument( - '--queue', '-q', + '--queue', + '-q', help='Name of the specific replica queue', type=str, default=None, ) parser.add_argument( - '--batch', '-b', + '--batch', + '-b', help='Batch size', type=int, default=DEFAULT_BATCH, ) parser.add_argument( - '--progress', '-p', + '--progress', + '-p', help='Display progress', action='store_true', ) @@ -80,9 +85,13 @@ def handle(self, *args, **options): instance.cqrs_sync(queue=options['queue']) success_counter += 1 except Exception as e: - print('\nSync record failed for pk={0}: {1}: {2}'.format( - instance.pk, type(e).__name__, str(e), - )) + print( + '\nSync record failed for pk={0}: {1}: {2}'.format( + instance.pk, + type(e).__name__, + str(e), + ), + ) close_old_connections() if progress: @@ -92,13 +101,22 
@@ def handle(self, *args, **options): sys.stdout.write( '\r{0} of {1} processed - {2}% with ' 'rate {3:.1f} rps, to go {4} ...{5:20}'.format( - counter, db_count, int(percent), rate, str(eta), ' ', - )) + counter, + db_count, + int(percent), + rate, + str(eta), + ' ', + ), + ) sys.stdout.flush() - print('Done!\n{0} instance(s) synced.\n{1} instance(s) processed.'.format( - success_counter, counter, - )) + print( + 'Done!\n{0} instance(s) synced.\n{1} instance(s) processed.'.format( + success_counter, + counter, + ), + ) @staticmethod def _prepare_qs(model, options): diff --git a/dj_cqrs/managers.py b/dj_cqrs/managers.py index 9f2242f..04fab7e 100644 --- a/dj_cqrs/managers.py +++ b/dj_cqrs/managers.py @@ -33,7 +33,7 @@ def bulk_create(self, objs, **kwargs): return objs def bulk_update(self, queryset, **kwargs): - """ Custom update method to support sending of update signals. + """Custom update method to support sending of update signals. Args: queryset (django.db.models.QuerySet): Django Queryset (f.e. filter). @@ -62,7 +62,9 @@ def list_all(): current_dt = timezone.now() result = queryset.update( - cqrs_revision=F('cqrs_revision') + 1, cqrs_updated=current_dt, **kwargs, + cqrs_revision=F('cqrs_revision') + 1, + cqrs_updated=current_dt, + **kwargs, ) objs = list_all() @@ -83,7 +85,7 @@ def save_instance( sync: bool = False, meta: dict = None, ): - """ This method saves (creates or updates) model instance from CQRS master instance data. + """This method saves (creates or updates) model instance from CQRS master instance data. Args: master_data (dict): CQRS master instance data. @@ -130,7 +132,7 @@ def create_instance( sync: bool = False, meta: dict = None, ): - """ This method creates model instance from mapped CQRS master instance data. + """This method creates model instance from mapped CQRS master instance data. Args: mapped_data (dict): Mapped CQRS master instance data. @@ -152,7 +154,9 @@ def create_instance( logger.error( '{0}\nCQRS create error: pk = {1} ({2}).'.format( - str(e), pk_value, self.model.CQRS_ID, + str(e), + pk_value, + self.model.CQRS_ID, ), ) @@ -164,7 +168,7 @@ def update_instance( sync: bool = False, meta: dict = None, ): - """ This method updates model instance from mapped CQRS master instance data. + """This method updates model instance from mapped CQRS master instance data. Args: instance (django.db.models.Model): ReplicaMixin model instance. @@ -186,9 +190,14 @@ def update_instance( 'CQRS revision downgrade on sync: pk = {0}, ' 'cqrs_revision = new {1} / existing {2} ({3}).' ) - logger.warning(w_tpl.format( - pk_value, current_cqrs_revision, existing_cqrs_revision, self.model.CQRS_ID, - )) + logger.warning( + w_tpl.format( + pk_value, + current_cqrs_revision, + existing_cqrs_revision, + self.model.CQRS_ID, + ), + ) else: if existing_cqrs_revision > current_cqrs_revision: @@ -196,21 +205,29 @@ def update_instance( 'Wrong CQRS sync order: pk = {0}, ' 'cqrs_revision = new {1} / existing {2} ({3}).' 
) - logger.error(e_tpl.format( - pk_value, current_cqrs_revision, existing_cqrs_revision, self.model.CQRS_ID, - )) + logger.error( + e_tpl.format( + pk_value, + current_cqrs_revision, + existing_cqrs_revision, + self.model.CQRS_ID, + ), + ) return instance if existing_cqrs_revision == current_cqrs_revision: logger.error( 'Received duplicate CQRS data: pk = {0}, cqrs_revision = {1} ({2}).'.format( - pk_value, current_cqrs_revision, self.model.CQRS_ID, + pk_value, + current_cqrs_revision, + self.model.CQRS_ID, ), ) if current_cqrs_revision == 0: logger.warning( 'CQRS potential creation race condition: pk = {0} ({1}).'.format( - pk_value, self.model.CQRS_ID, + pk_value, + self.model.CQRS_ID, ), ) @@ -220,10 +237,14 @@ def update_instance( w_tpl = ( 'Lost or filtered out {0} CQRS packages: pk = {1}, cqrs_revision = {2} ({3})' ) - logger.warning(w_tpl.format( - current_cqrs_revision - instance.cqrs_revision - 1, - pk_value, current_cqrs_revision, self.model.CQRS_ID, - )) + logger.warning( + w_tpl.format( + current_cqrs_revision - instance.cqrs_revision - 1, + pk_value, + current_cqrs_revision, + self.model.CQRS_ID, + ), + ) f_kw = {'previous_data': previous_data} if self.model.CQRS_META: @@ -234,12 +255,15 @@ def update_instance( except (Error, ValidationError) as e: logger.error( '{0}\nCQRS update error: pk = {1}, cqrs_revision = {2} ({3}).'.format( - str(e), pk_value, current_cqrs_revision, self.model.CQRS_ID, + str(e), + pk_value, + current_cqrs_revision, + self.model.CQRS_ID, ), ) def delete_instance(self, master_data: dict) -> bool: - """ This method deletes model instance from mapped CQRS master instance data. + """This method deletes model instance from mapped CQRS master instance data. Args: master_data (dict): CQRS master instance data. @@ -258,7 +282,9 @@ def delete_instance(self, master_data: dict) -> bool: except Error as e: logger.error( '{0}\nCQRS delete error: pk = {1} ({2}).'.format( - str(e), pk_value, self.model.CQRS_ID, + str(e), + pk_value, + self.model.CQRS_ID, ), ) @@ -308,9 +334,12 @@ def _make_initial_mapping(self, master_data): } for master_name, replica_name in self.model.CQRS_MAPPING.items(): if master_name not in master_data: - logger.error('Bad master-replica mapping for {0} ({1}).'.format( - master_name, self.model.CQRS_ID, - )) + logger.error( + 'Bad master-replica mapping for {0} ({1}).'.format( + master_name, + self.model.CQRS_ID, + ), + ) return mapped_data[replica_name] = master_data[master_name] @@ -318,17 +347,13 @@ def _make_initial_mapping(self, master_data): def _remove_excessive_data(self, data): opts = self.model._meta - possible_field_names = { - f.name for f in opts.fields - } + possible_field_names = {f.name for f in opts.fields} return {k: v for k, v in data.items() if k in possible_field_names} def _all_required_fields_are_filled(self, mapped_data): opts = self.model._meta - required_field_names = { - f.name for f in opts.fields if not f.null - } + required_field_names = {f.name for f in opts.fields if not f.null} if not (required_field_names - set(mapped_data.keys())): return True diff --git a/dj_cqrs/metas.py b/dj_cqrs/metas.py index c898dc0..ecbfa7b 100644 --- a/dj_cqrs/metas.py +++ b/dj_cqrs/metas.py @@ -50,12 +50,12 @@ def _check_cqrs_tracked_fields(model_cls): _MetaUtils._check_unexisting_names(model_cls, tracked_fields, 'CQRS_TRACKED_FIELDS') return - e = "Model {0}: Invalid configuration for CQRS_TRACKED_FIELDS".format(model_cls.__name__) + e = 'Model {0}: Invalid configuration for CQRS_TRACKED_FIELDS'.format(model_cls.__name__) assert 
isinstance(tracked_fields, str) and tracked_fields == ALL_BASIC_FIELDS, e @staticmethod def _check_correct_configuration(model_cls): - """ Check that model has correct CQRS configuration. + """Check that model has correct CQRS configuration. :param dj_cqrs.mixins.MasterMixin model_cls: CQRS Master Model. :raises: AssertionError @@ -68,7 +68,7 @@ def _check_correct_configuration(model_cls): @staticmethod def _check_cqrs_fields(model_cls): - """ Check that model has correct CQRS fields configuration. + """Check that model has correct CQRS fields configuration. :param dj_cqrs.mixins.MasterMixin model_cls: CQRS Master Model. :raises: AssertionError @@ -95,7 +95,7 @@ def register(model_cls): @staticmethod def _check_cqrs_mapping(model_cls): - """ Check that model has correct CQRS mapping configuration. + """Check that model has correct CQRS mapping configuration. :param dj_cqrs.mixins.ReplicaMixin model_cls: CQRS Replica Model. :raises: AssertionError @@ -115,7 +115,7 @@ def check_cqrs_field_setting(cls, model_cls, cqrs_field_names, cqrs_attr): @staticmethod def check_cqrs_id(model_cls): - """ Check that CQRS Model has CQRS_ID set up. """ + """Check that CQRS Model has CQRS_ID set up.""" assert model_cls.CQRS_ID, 'CQRS_ID must be set for every model, that uses CQRS.' @staticmethod diff --git a/dj_cqrs/mixins.py b/dj_cqrs/mixins.py index bc2900f..2bd23a4 100644 --- a/dj_cqrs/mixins.py +++ b/dj_cqrs/mixins.py @@ -64,12 +64,14 @@ class directly.**""" """Manager that adds needed CQRS queryset methods.""" cqrs_revision = IntegerField( - default=0, help_text="This field must be incremented on any model update. " - "It's used to for CQRS sync.", + default=0, + help_text='This field must be incremented on any model update. ' + "It's used to for CQRS sync.", ) cqrs_updated = DateTimeField( - auto_now=True, help_text="This field must be incremented on every model update. " - "It's used to for CQRS sync.", + auto_now=True, + help_text='This field must be incremented on every model update. ' + "It's used to for CQRS sync.", ) class Meta: @@ -182,7 +184,11 @@ def cqrs_sync(self, using: str = None, queue: str = None) -> bool: return False MasterSignals.post_save( - self._meta.model, instance=self, using=using, queue=queue, sync=True, + self._meta.model, + instance=self, + using=using, + queue=queue, + sync=True, ) return True @@ -230,12 +236,12 @@ def relate_cqrs_serialization(cls, queryset): return queryset def get_custom_cqrs_delete_data(self): - """ This method should be overridden when additional data is needed in DELETE payload. """ + """This method should be overridden when additional data is needed in DELETE payload.""" pass @classmethod def call_post_bulk_create(cls, instances: list, using=None): - """ Post bulk create signal caller (django doesn't support it by default). + """Post bulk create signal caller (django doesn't support it by default). ``` py3 @@ -247,7 +253,7 @@ def call_post_bulk_create(cls, instances: list, using=None): @classmethod def call_post_update(cls, instances, using=None): - """ Post bulk update signal caller (django doesn't support it by default). + """Post bulk update signal caller (django doesn't support it by default). ``` py3 @@ -322,7 +328,7 @@ def _refresh_f_expr_values(self, using): @property def _cqrs_serializer_cls(self): - """ Serialization class loader. 
""" + """Serialization class loader.""" if hasattr(self.__class__, '_cqrs_serializer_class'): return self.__class__._cqrs_serializer_class @@ -340,6 +346,7 @@ class MasterMixin(RawMasterMixin, metaclass=MasterMeta): """ Mixin for the master CQRS model, that will send data updates to it's replicas. """ + class Meta: abstract = True @@ -393,6 +400,7 @@ class ReplicaMixin(RawReplicaMixin, Model, metaclass=ReplicaMeta): Mixin for the replica CQRS model, that will receive data updates from master. Models, using this mixin should be readonly, but this is not enforced (f.e. for admin). """ + CQRS_ID = None """Unique CQRS identifier for all microservices.""" @@ -432,7 +440,7 @@ def cqrs_save( sync: bool = False, meta: dict = None, ): - """ This method saves (creates or updates) model instance from CQRS master instance data. + """This method saves (creates or updates) model instance from CQRS master instance data. This method must not be overridden. Otherwise, sync checks need to be implemented manually. Args: @@ -457,7 +465,7 @@ def cqrs_create( previous_data: dict = None, meta: dict = None, ): - """ This method creates model instance from CQRS mapped instance data. It must be overridden + """This method creates model instance from CQRS mapped instance data. It must be overridden by replicas of master models with custom serialization. Args: @@ -478,7 +486,7 @@ def cqrs_update( previous_data: dict = None, meta: dict = None, ): - """ This method updates model instance from CQRS mapped instance data. It must be overridden + """This method updates model instance from CQRS mapped instance data. It must be overridden by replicas of master models with custom serialization. Args: @@ -498,7 +506,7 @@ def cqrs_update( @classmethod def cqrs_delete(cls, master_data: dict, meta: dict = None) -> bool: - """ This method deletes model instance from mapped CQRS master instance data. + """This method deletes model instance from mapped CQRS master instance data. Args: master_data (dict): CQRS master instance data. diff --git a/dj_cqrs/registries.py b/dj_cqrs/registries.py index 0a0400c..ade5150 100644 --- a/dj_cqrs/registries.py +++ b/dj_cqrs/registries.py @@ -11,7 +11,7 @@ class RegistryMixin: @classmethod def register_model(cls, model_cls): - """ Registration of CQRS model identifiers. """ + """Registration of CQRS model identifiers.""" e = "Two models can't have the same CQRS_ID: {0}.".format(model_cls.CQRS_ID) assert model_cls.CQRS_ID not in cls.models, e diff --git a/dj_cqrs/signals.py b/dj_cqrs/signals.py index 99a33be..af436f4 100644 --- a/dj_cqrs/signals.py +++ b/dj_cqrs/signals.py @@ -28,7 +28,8 @@ class MasterSignals: - """ Signals registry and handlers for CQRS master models. 
""" + """Signals registry and handlers for CQRS master models.""" + @classmethod def register_model(cls, model_cls): """ diff --git a/dj_cqrs/tracker.py b/dj_cqrs/tracker.py index 338ec3e..2dcf40d 100644 --- a/dj_cqrs/tracker.py +++ b/dj_cqrs/tracker.py @@ -8,20 +8,15 @@ class _CQRSTrackerInstance(FieldInstanceTracker): - def __init__(self, instance, fields, field_map): super().__init__(instance, fields, field_map) self._attr_to_field_map = { - f.attname: f.name - for f in instance._meta.concrete_fields if f.is_relation + f.attname: f.name for f in instance._meta.concrete_fields if f.is_relation } def changed(self): changed_fields = super().changed() - return { - self._attr_to_field_map.get(k, k): v - for k, v in changed_fields.items() - } + return {self._attr_to_field_map.get(k, k): v for k, v in changed_fields.items()} def changed_initial(self): return {field: None for field in self.fields if self.get_field_value(field) is not None} @@ -33,7 +28,6 @@ def get_field_value(self, field): class CQRSTracker(FieldTracker): - tracker_class = _CQRSTrackerInstance @classmethod diff --git a/dj_cqrs/transport/kombu.py b/dj_cqrs/transport/kombu.py index 895d745..415e428 100644 --- a/dj_cqrs/transport/kombu.py +++ b/dj_cqrs/transport/kombu.py @@ -25,7 +25,6 @@ class _KombuConsumer(ConsumerMixin): - def __init__(self, url, exchange_name, queue_name, prefetch_count, callback, cqrs_ids=None): self.connection = Connection(url) self.exchange = Exchange( @@ -76,6 +75,7 @@ def get_consumers(self, Consumer, channel): class KombuTransport(LoggingMixin, BaseTransport): """Transport class for Kombu.""" + CONSUMER_RETRY_TIMEOUT = 5 @classmethod @@ -121,9 +121,12 @@ def produce(cls, payload): cls._produce_message(channel, exchange, payload) cls.log_produced(payload) except KombuError: - logger.error("CQRS couldn't be published: pk = {0} ({1}).".format( - payload.pk, payload.cqrs_id, - )) + logger.error( + "CQRS couldn't be published: pk = {0} ({1}).".format( + payload.pk, + payload.cqrs_id, + ), + ) finally: if connection: connection.close() diff --git a/dj_cqrs/transport/mixins.py b/dj_cqrs/transport/mixins.py index 7d4aee1..3ec4974 100644 --- a/dj_cqrs/transport/mixins.py +++ b/dj_cqrs/transport/mixins.py @@ -7,7 +7,7 @@ class LoggingMixin: - _BASE_PAYLOAD_LOG_TEMPLATE = "CQRS is %s: pk = %s (%s), correlation_id = %s." + _BASE_PAYLOAD_LOG_TEMPLATE = 'CQRS is %s: pk = %s (%s), correlation_id = %s.' @staticmethod def log_consumed(payload): @@ -15,7 +15,7 @@ def log_consumed(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = "CQRS is received: pk = %s (%s), correlation_id = %s." + msg = 'CQRS is received: pk = %s (%s), correlation_id = %s.' logger.info(msg, payload.pk, payload.cqrs_id, payload.correlation_id) @staticmethod @@ -24,7 +24,7 @@ def log_consumed_accepted(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = "CQRS is applied: pk = %s (%s), correlation_id = %s." + msg = 'CQRS is applied: pk = %s (%s), correlation_id = %s.' logger.info(msg, payload.pk, payload.cqrs_id, payload.correlation_id) @staticmethod @@ -33,7 +33,7 @@ def log_consumed_denied(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = "CQRS is denied: pk = %s (%s), correlation_id = %s." + msg = 'CQRS is denied: pk = %s (%s), correlation_id = %s.' 
logger.warning(msg, payload.pk, payload.cqrs_id, payload.correlation_id) @staticmethod @@ -42,11 +42,13 @@ def log_consumed_failed(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = ( - "CQRS is failed: pk = %s (%s), correlation_id = %s, retries = %s.", - ) + msg = ('CQRS is failed: pk = %s (%s), correlation_id = %s, retries = %s.',) logger.warning( - msg, payload.pk, payload.cqrs_id, payload.correlation_id, payload.retries, + msg, + payload.pk, + payload.cqrs_id, + payload.correlation_id, + payload.retries, ) @staticmethod @@ -55,7 +57,7 @@ def log_dead_letter(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = "CQRS is added to dead letter queue: pk = %s (%s), correlation_id = %s." + msg = 'CQRS is added to dead letter queue: pk = %s (%s), correlation_id = %s.' logger.warning(msg, payload.pk, payload.cqrs_id, payload.correlation_id) @staticmethod @@ -66,11 +68,14 @@ def log_delayed(payload, delay, eta): delay (int): Seconds to wait before requeuing message. eta (datetime): Requeuing datetime. """ - msg = ( - "CQRS is delayed: pk = %s (%s), correlation_id = %s, delay = %s sec, eta = %s.", - ) + msg = ('CQRS is delayed: pk = %s (%s), correlation_id = %s, delay = %s sec, eta = %s.',) logger.warning( - msg, payload.pk, payload.cqrs_id, payload.correlation_id, delay, eta, + msg, + payload.pk, + payload.cqrs_id, + payload.correlation_id, + delay, + eta, ) @staticmethod @@ -79,9 +84,7 @@ def log_requeued(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = ( - "CQRS is requeued: pk = %s (%s), correlation_id = %s.", - ) + msg = ('CQRS is requeued: pk = %s (%s), correlation_id = %s.',) logger.warning(msg, payload.pk, payload.cqrs_id, payload.correlation_id) @staticmethod @@ -90,5 +93,5 @@ def log_produced(payload): Args: payload (dj_cqrs.dataclasses.TransportPayload): Transport payload from master model. """ - msg = "CQRS is published: pk = %s (%s), correlation_id = %s." + msg = 'CQRS is published: pk = %s (%s), correlation_id = %s.' logger.info(msg, payload.pk, payload.cqrs_id, payload.correlation_id) diff --git a/dj_cqrs/transport/rabbit_mq.py b/dj_cqrs/transport/rabbit_mq.py index fbe3349..b4ea68e 100644 --- a/dj_cqrs/transport/rabbit_mq.py +++ b/dj_cqrs/transport/rabbit_mq.py @@ -33,6 +33,7 @@ class RabbitMQTransport(LoggingMixin, BaseTransport): """Transport class for RabbitMQ.""" + CONSUMER_RETRY_TIMEOUT = 5 PRODUCER_RETRIES = 1 @@ -47,7 +48,7 @@ def clean_connection(cls): try: connection.close() except (exceptions.StreamLostError, exceptions.ConnectionClosed, ConnectionError): - logger.warning("Connection was closed or is closing. Skip it...") + logger.warning('Connection was closed or is closing. 
Skip it...') cls._producer_connection = None cls._producer_channel = None @@ -67,19 +68,26 @@ def consume(cls, cqrs_ids=None): try: delay_queue = DelayQueue(max_size=get_delay_queue_max_size()) connection, channel, consumer_generator = cls._get_consumer_rmq_objects( - *(common_rabbit_settings + consumer_rabbit_settings), cqrs_ids=cqrs_ids, + *(common_rabbit_settings + consumer_rabbit_settings), + cqrs_ids=cqrs_ids, ) for method_frame, properties, body in consumer_generator: if method_frame is not None: cls._consume_message( - channel, method_frame, properties, body, delay_queue, + channel, + method_frame, + properties, + body, + delay_queue, ) cls._process_delay_messages(channel, delay_queue) - except (exceptions.AMQPError, - exceptions.ChannelError, - exceptions.ReentrancyError, - gaierror): + except ( + exceptions.AMQPError, + exceptions.ChannelError, + exceptions.ReentrancyError, + gaierror, + ): logger.warning('AMQP connection error. Reconnecting...', exc_info=True) time.sleep(cls.CONSUMER_RETRY_TIMEOUT) finally: @@ -103,7 +111,8 @@ def _produce_with_retries(cls, payload, retries): exchange = rmq_settings[-1] # Decided not to create context-manager to stay within the class _, channel = cls._get_producer_rmq_objects( - *rmq_settings, signal_type=payload.signal_type, + *rmq_settings, + signal_type=payload.signal_type, ) cls._produce_message(channel, exchange, payload) @@ -119,15 +128,19 @@ def _produce_with_retries(cls, payload, retries): cls.clean_connection() base_log_message = "CQRS couldn't be published: pk = {0} ({1}).".format( - payload.pk, payload.cqrs_id, + payload.pk, + payload.cqrs_id, ) if not retries: logger.exception(base_log_message) return - logger.warning('{0} Error: {1}. Reconnect...'.format( - base_log_message, e.__class__.__name__, - )) + logger.warning( + '{0} Error: {1}. Reconnect...'.format( + base_log_message, + e.__class__.__name__, + ), + ) cls._produce_with_retries(payload, retries - 1) @@ -162,13 +175,17 @@ def _consume_message(cls, ch, method, properties, body, delay_queue): instance = consumer.consume(payload) except Exception as e: exception = e - logger.error("CQRS service exception", exc_info=True) + logger.error('CQRS service exception', exc_info=True) if instance and exception is None: cls._ack(ch, delivery_tag, payload) else: cls._fail_message( - ch, delivery_tag, payload, exception, delay_queue, + ch, + delivery_tag, + payload, + exception, + delay_queue, ) @classmethod @@ -176,7 +193,7 @@ def _fail_message(cls, channel, delivery_tag, payload, exception, delay_queue): cls.log_consumed_failed(payload) model_cls = ReplicaRegistry.get_model_by_cqrs_id(payload.cqrs_id) if model_cls is None: - logger.error("Model for cqrs_id {0} is not found.".format(payload.cqrs_id)) + logger.error('Model for cqrs_id {0} is not found.'.format(payload.cqrs_id)) cls._nack(channel, delivery_tag) return @@ -361,7 +378,7 @@ def _parse_url(url): scheme = urlparse(url).scheme assert scheme == 'amqp', 'Scheme must be "amqp" for RabbitMQTransport.' 
- schemeless = url[len(scheme) + 3:] + schemeless = url[len(scheme) + 3 :] parts = urlparse('http://' + schemeless) return ( diff --git a/examples/demo_project/master_service/app/migrations/0001_initial.py b/examples/demo_project/master_service/app/migrations/0001_initial.py index 3fabcc9..d44735e 100644 --- a/examples/demo_project/master_service/app/migrations/0001_initial.py +++ b/examples/demo_project/master_service/app/migrations/0001_initial.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -18,21 +17,110 @@ class Migration(migrations.Migration): migrations.CreateModel( name='User', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), ('password', models.CharField(max_length=128, verbose_name='password')), - ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), - ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), - ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), - ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), - ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), - ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')), - ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), - ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), - ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), - ('cqrs_revision', models.IntegerField(default=0, help_text="This field must be incremented on any model update. It's used to for CQRS sync.")), - ('cqrs_updated', models.DateTimeField(auto_now=True, help_text="This field must be incremented on every model update. It's used to for CQRS sync.")), - ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')), - ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')), + ( + 'last_login', + models.DateTimeField(blank=True, null=True, verbose_name='last login'), + ), + ( + 'is_superuser', + models.BooleanField( + default=False, + help_text='Designates that this user has all permissions without explicitly assigning them.', + verbose_name='superuser status', + ), + ), + ( + 'username', + models.CharField( + error_messages={'unique': 'A user with that username already exists.'}, + help_text='Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.', + max_length=150, + unique=True, + validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], + verbose_name='username', + ), + ), + ( + 'first_name', + models.CharField(blank=True, max_length=150, verbose_name='first name'), + ), + ( + 'last_name', + models.CharField(blank=True, max_length=150, verbose_name='last name'), + ), + ( + 'email', + models.EmailField(blank=True, max_length=254, verbose_name='email address'), + ), + ( + 'is_staff', + models.BooleanField( + default=False, + help_text='Designates whether the user can log into this admin site.', + verbose_name='staff status', + ), + ), + ( + 'is_active', + models.BooleanField( + default=True, + help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', + verbose_name='active', + ), + ), + ( + 'date_joined', + models.DateTimeField( + default=django.utils.timezone.now, + verbose_name='date joined', + ), + ), + ( + 'cqrs_revision', + models.IntegerField( + default=0, + help_text="This field must be incremented on any model update. It's used to for CQRS sync.", + ), + ), + ( + 'cqrs_updated', + models.DateTimeField( + auto_now=True, + help_text="This field must be incremented on every model update. It's used to for CQRS sync.", + ), + ), + ( + 'groups', + models.ManyToManyField( + blank=True, + help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', + related_name='user_set', + related_query_name='user', + to='auth.Group', + verbose_name='groups', + ), + ), + ( + 'user_permissions', + models.ManyToManyField( + blank=True, + help_text='Specific permissions for this user.', + related_name='user_set', + related_query_name='user', + to='auth.Permission', + verbose_name='user permissions', + ), + ), ], options={ 'abstract': False, @@ -41,9 +129,29 @@ class Migration(migrations.Migration): migrations.CreateModel( name='Product', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('cqrs_revision', models.IntegerField(default=0, help_text="This field must be incremented on any model update. It's used to for CQRS sync.")), - ('cqrs_updated', models.DateTimeField(auto_now=True, help_text="This field must be incremented on every model update. It's used to for CQRS sync.")), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), + ( + 'cqrs_revision', + models.IntegerField( + default=0, + help_text="This field must be incremented on any model update. It's used to for CQRS sync.", + ), + ), + ( + 'cqrs_updated', + models.DateTimeField( + auto_now=True, + help_text="This field must be incremented on every model update. 
It's used to for CQRS sync.", + ), + ), ('name', models.CharField(max_length=50)), ], options={ @@ -53,19 +161,59 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ProductType', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), ('name', models.CharField(max_length=50)), ], ), migrations.CreateModel( name='Purchase', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('cqrs_revision', models.IntegerField(default=0, help_text="This field must be incremented on any model update. It's used to for CQRS sync.")), - ('cqrs_updated', models.DateTimeField(auto_now=True, help_text="This field must be incremented on every model update. It's used to for CQRS sync.")), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), + ( + 'cqrs_revision', + models.IntegerField( + default=0, + help_text="This field must be incremented on any model update. It's used to for CQRS sync.", + ), + ), + ( + 'cqrs_updated', + models.DateTimeField( + auto_now=True, + help_text="This field must be incremented on every model update. It's used to for CQRS sync.", + ), + ), ('action_time', models.DateTimeField(auto_now_add=True)), - ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.product')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + 'product', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='app.product', + ), + ), + ( + 'user', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], options={ 'abstract': False, @@ -74,6 +222,9 @@ class Migration(migrations.Migration): migrations.AddField( model_name='product', name='product_type', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.producttype'), + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='app.producttype', + ), ), ] diff --git a/examples/demo_project/master_service/app/migrations/0002_fixtures.py b/examples/demo_project/master_service/app/migrations/0002_fixtures.py index d85c7a6..c49f001 100644 --- a/examples/demo_project/master_service/app/migrations/0002_fixtures.py +++ b/examples/demo_project/master_service/app/migrations/0002_fixtures.py @@ -29,7 +29,6 @@ def create_products(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ('app', '0001_initial'), ] diff --git a/examples/demo_project/master_service/app/serializers.py b/examples/demo_project/master_service/app/serializers.py index 5b69199..0561125 100644 --- a/examples/demo_project/master_service/app/serializers.py +++ b/examples/demo_project/master_service/app/serializers.py @@ -6,8 +6,9 @@ class ProductSerializer: """ - Simple serializer + Simple serializer """ + def __init__(self, instance): self.instance = instance @@ -25,8 +26,9 @@ def data(self): class PurchaseSerializer(serializers.ModelSerializer): """ - Django REST Framework serializers are compatible + Django REST Framework serializers are compatible """ + product_name = serializers.CharField(source='product.name') class Meta: diff --git a/examples/demo_project/master_service/app/views.py b/examples/demo_project/master_service/app/views.py index 
0a4a833..521f85b 100644 --- a/examples/demo_project/master_service/app/views.py +++ b/examples/demo_project/master_service/app/views.py @@ -12,13 +12,17 @@ def _render_page(request, **kwargs): - return render(request, 'main.html', { - 'users': User.objects.order_by('pk'), - 'product_types': ProductType.objects.order_by('pk'), - 'products': Product.objects.order_by('pk'), - 'purchases': Purchase.objects.order_by('pk'), - **kwargs, - }) + return render( + request, + 'main.html', + { + 'users': User.objects.order_by('pk'), + 'product_types': ProductType.objects.order_by('pk'), + 'products': Product.objects.order_by('pk'), + 'purchases': Purchase.objects.order_by('pk'), + **kwargs, + }, + ) def render_main_page_if_get(f): @@ -28,6 +32,7 @@ def wrap(request, *args, **kwargs): if request.method != 'POST': return HttpResponseNotAllowed(['GET', 'POST']) return f(request, *args, **kwargs) + return wrap diff --git a/examples/demo_project/master_service/manage.py b/examples/demo_project/master_service/manage.py index a5a4145..138d369 100755 --- a/examples/demo_project/master_service/manage.py +++ b/examples/demo_project/master_service/manage.py @@ -12,8 +12,8 @@ def main(): except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?", + 'available on your PYTHONPATH environment variable? Did you ' + 'forget to activate a virtual environment?', ) from exc execute_from_command_line(sys.argv) diff --git a/examples/demo_project/replica_service/app/migrations/0001_initial.py b/examples/demo_project/replica_service/app/migrations/0001_initial.py index 36f25f3..e17ee85 100644 --- a/examples/demo_project/replica_service/app/migrations/0001_initial.py +++ b/examples/demo_project/replica_service/app/migrations/0001_initial.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -17,18 +16,40 @@ class Migration(migrations.Migration): migrations.CreateModel( name='ProductType', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), ('name', models.CharField(max_length=50)), ], ), migrations.CreateModel( name='Product', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), ('cqrs_revision', models.IntegerField()), ('cqrs_updated', models.DateTimeField()), ('name', models.CharField(max_length=100)), - ('product_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.producttype')), + ( + 'product_type', + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to='app.producttype', + ), + ), ], options={ 'abstract': False, @@ -37,21 +58,98 @@ class Migration(migrations.Migration): migrations.CreateModel( name='User', fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ( + 'id', + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name='ID', + ), + ), ('password', models.CharField(max_length=128, verbose_name='password')), - ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), - ('is_superuser', 
models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
-                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
-                ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')),
-                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
-                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
-                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
-                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
-                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
+                (
+                    'last_login',
+                    models.DateTimeField(blank=True, null=True, verbose_name='last login'),
+                ),
+                (
+                    'is_superuser',
+                    models.BooleanField(
+                        default=False,
+                        help_text='Designates that this user has all permissions without explicitly assigning them.',
+                        verbose_name='superuser status',
+                    ),
+                ),
+                (
+                    'username',
+                    models.CharField(
+                        error_messages={'unique': 'A user with that username already exists.'},
+                        help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
+                        max_length=150,
+                        unique=True,
+                        validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
+                        verbose_name='username',
+                    ),
+                ),
+                (
+                    'first_name',
+                    models.CharField(blank=True, max_length=150, verbose_name='first name'),
+                ),
+                (
+                    'last_name',
+                    models.CharField(blank=True, max_length=150, verbose_name='last name'),
+                ),
+                (
+                    'email',
+                    models.EmailField(blank=True, max_length=254, verbose_name='email address'),
+                ),
+                (
+                    'is_staff',
+                    models.BooleanField(
+                        default=False,
+                        help_text='Designates whether the user can log into this admin site.',
+                        verbose_name='staff status',
+                    ),
+                ),
+                (
+                    'is_active',
+                    models.BooleanField(
+                        default=True,
+                        help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.',
+                        verbose_name='active',
+                    ),
+                ),
+                (
+                    'date_joined',
+                    models.DateTimeField(
+                        default=django.utils.timezone.now,
+                        verbose_name='date joined',
+                    ),
+                ),
                 ('cqrs_revision', models.IntegerField()),
                 ('cqrs_updated', models.DateTimeField()),
-                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
-                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
+                (
+                    'groups',
+                    models.ManyToManyField(
+                        blank=True,
+                        help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.',
+                        related_name='user_set',
+                        related_query_name='user',
+                        to='auth.Group',
+                        verbose_name='groups',
+                    ),
+                ),
+                (
+                    'user_permissions',
+                    models.ManyToManyField(
+                        blank=True,
+                        help_text='Specific permissions for this user.',
+                        related_name='user_set',
+                        related_query_name='user',
+                        to='auth.Permission',
+                        verbose_name='user permissions',
+                    ),
+                ),
             ],
             options={
                 'abstract': False,
diff --git a/examples/demo_project/replica_service/app/models.py b/examples/demo_project/replica_service/app/models.py
index 16c7b74..1fd34b8 100644
--- a/examples/demo_project/replica_service/app/models.py
+++ b/examples/demo_project/replica_service/app/models.py
@@ -8,8 +8,9 @@
 
 class User(ReplicaMixin, AbstractUser):
     """
-    Simple replica which sync all fields
+    Simple replica which sync all fields
     """
+
     CQRS_ID = 'user'
@@ -19,8 +20,9 @@ class ProductType(models.Model):
 
 class Product(ReplicaMixin, models.Model):
     """
-    Replica with custom serialization and relation control
+    Replica with custom serialization and relation control
     """
+
     CQRS_ID = 'product'
     CQRS_CUSTOM_SERIALIZATION = True
@@ -56,11 +58,12 @@ def cqrs_update(self, sync, mapped_data, previous_data=None, meta=None):
 
 class Purchase(ReplicaMixin):
     """
-    Replica model with custom storage mechanism.
+    Replica model with custom storage mechanism.
 
-    To simplify we use redis cache storage for this demo, but any SQL and NoSQL storage can
-    be used.
+    To simplify we use redis cache storage for this demo, but any SQL and NoSQL storage can
+    be used.
     """
+
     CQRS_ID = 'purchase'
     CQRS_CUSTOM_SERIALIZATION = True
diff --git a/examples/demo_project/replica_service/app/settings.py b/examples/demo_project/replica_service/app/settings.py
index 741ce19..35e042b 100644
--- a/examples/demo_project/replica_service/app/settings.py
+++ b/examples/demo_project/replica_service/app/settings.py
@@ -84,7 +84,7 @@
         'BACKEND': 'django_redis.cache.RedisCache',
         'LOCATION': 'redis://redis:6379/0',
         'OPTIONS': {
-            "CLIENT_CLASS": 'django_redis.client.DefaultClient',
+            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
         },
     },
 }
diff --git a/examples/demo_project/replica_service/app/views.py b/examples/demo_project/replica_service/app/views.py
index bf71507..c56d6de 100644
--- a/examples/demo_project/replica_service/app/views.py
+++ b/examples/demo_project/replica_service/app/views.py
@@ -6,8 +6,12 @@
 
 
 def main_page_view(request):
-    return render(request, 'main.html', {
-        'users': User.objects.order_by('pk'),
-        'products': Product.objects.select_related('product_type').order_by('pk'),
-        'purchases': [cache.get(key) for key in cache.keys('purchase_*')],
-    })
+    return render(
+        request,
+        'main.html',
+        {
+            'users': User.objects.order_by('pk'),
+            'products': Product.objects.select_related('product_type').order_by('pk'),
+            'purchases': [cache.get(key) for key in cache.keys('purchase_*')],
+        },
+    )
diff --git a/examples/demo_project/replica_service/manage.py b/examples/demo_project/replica_service/manage.py
index a5a4145..138d369 100755
--- a/examples/demo_project/replica_service/manage.py
+++ b/examples/demo_project/replica_service/manage.py
@@ -12,8 +12,8 @@ def main():
     except ImportError as exc:
         raise ImportError(
             "Couldn't import Django. Are you sure it's installed and "
-            "available on your PYTHONPATH environment variable? Did you "
-            "forget to activate a virtual environment?",
+            'available on your PYTHONPATH environment variable? Did you '
+            'forget to activate a virtual environment?',
         ) from exc
     execute_from_command_line(sys.argv)
diff --git a/integration_tests/manage.py b/integration_tests/manage.py
index 0d8f90d..584d93e 100755
--- a/integration_tests/manage.py
+++ b/integration_tests/manage.py
@@ -6,8 +6,9 @@
 import sys
 
 
-if __name__ == "__main__":
-    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "replica_settings")
+if __name__ == '__main__':
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'replica_settings')
     from django.core.management import execute_from_command_line
+
     execute_from_command_line(sys.argv)
diff --git a/integration_tests/master_settings.py b/integration_tests/master_settings.py
index c8f1ba3..a59908c 100644
--- a/integration_tests/master_settings.py
+++ b/integration_tests/master_settings.py
@@ -19,7 +19,6 @@
     'django.contrib.contenttypes',
     'django.contrib.sessions',
     'django.contrib.messages',
-    'dj_cqrs',
     'tests.dj_master',
 ]
diff --git a/integration_tests/replica_settings.py b/integration_tests/replica_settings.py
index 3fb19c9..b4d6194 100644
--- a/integration_tests/replica_settings.py
+++ b/integration_tests/replica_settings.py
@@ -19,7 +19,6 @@
     'django.contrib.contenttypes',
     'django.contrib.sessions',
     'django.contrib.messages',
-    'dj_cqrs',
     'tests.dj_replica',
 ]
diff --git a/integration_tests/tests/conftest.py b/integration_tests/tests/conftest.py
index 3944b28..7db7195 100644
--- a/integration_tests/tests/conftest.py
+++ b/integration_tests/tests/conftest.py
@@ -36,7 +36,7 @@ def clean_rabbit_transport_connection():
 @pytest.fixture
 def replica_channel(settings):
     if current_transport is not RabbitMQTransport:
-        pytest.skip("Replica channel is implemented only for RabbitMQTransport.")
+        pytest.skip('Replica channel is implemented only for RabbitMQTransport.')
 
     connection = BlockingConnection(
         parameters=URLParameters(settings.CQRS['url']),
diff --git a/integration_tests/tests/test_asynchronous_consuming.py b/integration_tests/tests/test_asynchronous_consuming.py
index 273a0b3..c7e6fee 100644
--- a/integration_tests/tests/test_asynchronous_consuming.py
+++ b/integration_tests/tests/test_asynchronous_consuming.py
@@ -17,13 +17,15 @@ def test_both_consumers_consume(replica_cursor, clean_rabbit_transport_connectio
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 0
     assert count_replica_rows(replica_cursor, REPLICA_EVENT_TABLE) == 0
 
-    BasicFieldsModel.cqrs.bulk_create([
-        BasicFieldsModel(
-            int_field=index,
-            char_field='text',
-        )
-        for index in range(1, 10)
-    ])
+    BasicFieldsModel.cqrs.bulk_create(
+        [
+            BasicFieldsModel(
+                int_field=index,
+                char_field='text',
+            )
+            for index in range(1, 10)
+        ],
+    )
 
     transport_delay(3)
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 9
diff --git a/integration_tests/tests/test_bulk_operations.py b/integration_tests/tests/test_bulk_operations.py
index f6eb407..abe8347 100644
--- a/integration_tests/tests/test_bulk_operations.py
+++ b/integration_tests/tests/test_bulk_operations.py
@@ -17,13 +17,15 @@ def test_flow(replica_cursor, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 0
 
     # Create
-    BasicFieldsModel.cqrs.bulk_create([
-        BasicFieldsModel(
-            int_field=index,
-            char_field='text',
-        )
-        for index in range(1, 4)
-    ])
+    BasicFieldsModel.cqrs.bulk_create(
+        [
+            BasicFieldsModel(
+                int_field=index,
+                char_field='text',
+            )
+            for index in range(1, 4)
+        ],
+    )
 
     transport_delay()
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 3
@@ -42,8 +44,12 @@ def test_flow(replica_cursor, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 3
 
     assert ['new_text', 'new_text', 'text'] == [
-        t[0] for t in get_replica_all(
-            replica_cursor, REPLICA_BASIC_TABLE, ('char_field',), order_asc_by='int_field',
+        t[0]
+        for t in get_replica_all(
+            replica_cursor,
+            REPLICA_BASIC_TABLE,
+            ('char_field',),
+            order_asc_by='int_field',
         )
     ]
@@ -54,5 +60,7 @@ def test_flow(replica_cursor, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     assert (2, 'new_text', 1) == get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE, ('int_field', 'char_field', 'cqrs_revision'),
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
+        ('int_field', 'char_field', 'cqrs_revision'),
     )
diff --git a/integration_tests/tests/test_dead_letter.py b/integration_tests/tests/test_dead_letter.py
index 594dc1d..b6cb42a 100644
--- a/integration_tests/tests/test_dead_letter.py
+++ b/integration_tests/tests/test_dead_letter.py
@@ -17,7 +17,9 @@ def test_add_to_dead_letter(settings, replica_cursor, replica_channel):
     assert queue.method.message_count == 0
 
     dead_queue = replica_channel.queue_declare(
-        'dead_letter_replica', durable=True, exclusive=False,
+        'dead_letter_replica',
+        durable=True,
+        exclusive=False,
     )
     assert dead_queue.method.message_count == 1
@@ -39,13 +41,17 @@ def test_dead_letter_expire(settings, replica_cursor, replica_channel):
     transport_delay(5)
 
     dead_queue = replica_channel.queue_declare(
-        'dead_letter_replica', durable=True, exclusive=False,
+        'dead_letter_replica',
+        durable=True,
+        exclusive=False,
     )
     assert dead_queue.method.message_count == 1
 
     transport_delay(5)
 
     dead_queue = replica_channel.queue_declare(
-        'dead_letter_replica', durable=True, exclusive=False,
+        'dead_letter_replica',
+        durable=True,
+        exclusive=False,
     )
     assert dead_queue.method.message_count == 0
diff --git a/integration_tests/tests/test_single_basic_instance.py b/integration_tests/tests/test_single_basic_instance.py
index 0c6b88d..0c621da 100644
--- a/integration_tests/tests/test_single_basic_instance.py
+++ b/integration_tests/tests/test_single_basic_instance.py
@@ -29,7 +29,8 @@ def test_flow(replica_cursor, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     replica_tuple = get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE,
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
         ('int_field', 'char_field', 'date_field', 'cqrs_revision', 'cqrs_updated', 'bool_field'),
     )
     assert (
@@ -57,7 +58,8 @@ def test_flow(replica_cursor, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     updated_replica_tuple = get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE,
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
         ('int_field', 'cqrs_revision', 'cqrs_updated', 'bool_field'),
     )
     assert (
diff --git a/integration_tests/tests/test_sync_to_a_certain_service.py b/integration_tests/tests/test_sync_to_a_certain_service.py
index 332e3f5..10bab7d 100644
--- a/integration_tests/tests/test_sync_to_a_certain_service.py
+++ b/integration_tests/tests/test_sync_to_a_certain_service.py
@@ -26,7 +26,8 @@ def test_flow(replica_cursor, mocker, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     replica_tuple = get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE,
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
         ('int_field', 'char_field', 'cqrs_revision', 'cqrs_updated'),
     )
     assert (
@@ -52,7 +53,8 @@ def test_flow(replica_cursor, mocker, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     replica_tuple = get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE,
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
         ('int_field', 'char_field', 'cqrs_revision', 'cqrs_updated'),
     )
     assert replica_tuple[0] == 1
@@ -66,7 +68,8 @@ def test_flow(replica_cursor, mocker, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     replica_tuple = get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE,
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
         ('int_field', 'char_field', 'cqrs_revision', 'cqrs_updated'),
     )
     assert replica_tuple[0] == 1
@@ -85,7 +88,8 @@ def test_flow(replica_cursor, mocker, clean_rabbit_transport_connection):
     assert count_replica_rows(replica_cursor, REPLICA_BASIC_TABLE) == 1
 
     replica_tuple = get_replica_first(
-        replica_cursor, REPLICA_BASIC_TABLE,
+        replica_cursor,
+        REPLICA_BASIC_TABLE,
         ('int_field', 'char_field', 'cqrs_revision', 'cqrs_updated'),
     )
     assert replica_tuple[0] == 1
diff --git a/poetry.lock b/poetry.lock
index 77b064e..a733dc3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -95,9 +95,61 @@ files = [
     {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"},
 ]
 
+[package.dependencies]
+tzdata = {version = "*", optional = true, markers = "extra == \"tzdata\""}
+
 [package.extras]
 tzdata = ["tzdata"]
 
+[[package]]
+name = "black"
+version = "23.3.0"
+description = "The uncompromising code formatter."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
+    {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
+    {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
+    {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
+    {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
+    {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
+    {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
+    {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
+    {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
+    {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
+    {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
+    {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
+    {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
+    {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
+    {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
+    {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
+    {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
+    {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
+    {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
+    {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
+    {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
+    {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
+    {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
+    {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
+    {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
+]
+
+[package.dependencies]
+click = ">=8.0.0"
+mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+colorama = ["colorama (>=0.4.3)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
+uvloop = ["uvloop (>=0.15.2)"]
+
 [[package]]
 name = "certifi"
 version = "2023.5.7"
@@ -298,13 +350,13 @@ toml = ["tomli"]
 
 [[package]]
 name = "django"
-version = "4.2.1"
+version = "4.2.2"
 description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "Django-4.2.1-py3-none-any.whl", hash = "sha256:066b6debb5ac335458d2a713ed995570536c8b59a580005acb0732378d5eb1ee"},
-    {file = "Django-4.2.1.tar.gz", hash = "sha256:7efa6b1f781a6119a10ac94b4794ded90db8accbe7802281cd26f8664ffed59c"},
+    {file = "Django-4.2.2-py3-none-any.whl", hash = "sha256:672b3fa81e1f853bb58be1b51754108ab4ffa12a77c06db86aa8df9ed0c46fe5"},
+    {file = "Django-4.2.2.tar.gz", hash = "sha256:2a6b6fbff5b59dd07bef10bcb019bee2ea97a30b2a656d51346596724324badf"},
 ]
 
 [package.dependencies]
@@ -421,6 +473,25 @@ mccabe = ">=0.7.0,<0.8.0"
 pycodestyle = ">=2.9.0,<2.10.0"
 pyflakes = ">=2.5.0,<2.6.0"
 
+[[package]]
+name = "flake8-black"
+version = "0.3.6"
+description = "flake8 plugin to call black as a code style validator"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "flake8-black-0.3.6.tar.gz", hash = "sha256:0dfbca3274777792a5bcb2af887a4cad72c72d0e86c94e08e3a3de151bb41c34"},
+    {file = "flake8_black-0.3.6-py3-none-any.whl", hash = "sha256:fe8ea2eca98d8a504f22040d9117347f6b367458366952862ac3586e7d4eeaca"},
+]
+
+[package.dependencies]
+black = ">=22.1.0"
+flake8 = ">=3"
+tomli = {version = "*", markers = "python_version < \"3.11\""}
+
+[package.extras]
+develop = ["build", "twine"]
+
 [[package]]
 name = "flake8-broken-line"
 version = "0.6.0"
@@ -512,19 +583,19 @@ pycodestyle = "*"
 
 [[package]]
 name = "flake8-eradicate"
-version = "1.4.0"
+version = "1.5.0"
 description = "Flake8 plugin to find commented out code"
 optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.8,<4.0"
 files = [
-    {file = "flake8-eradicate-1.4.0.tar.gz", hash = "sha256:3088cfd6717d1c9c6c3ac45ef2e5f5b6c7267f7504d5a74b781500e95cb9c7e1"},
-    {file = "flake8_eradicate-1.4.0-py3-none-any.whl", hash = "sha256:e3bbd0871be358e908053c1ab728903c114f062ba596b4d40c852fd18f473d56"},
+    {file = "flake8_eradicate-1.5.0-py3-none-any.whl", hash = "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22"},
+    {file = "flake8_eradicate-1.5.0.tar.gz", hash = "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6"},
 ]
 
 [package.dependencies]
 attrs = "*"
 eradicate = ">=2.0,<3.0"
-flake8 = ">=3.5,<6"
+flake8 = ">5"
 
 [[package]]
 name = "flake8-future-import"
@@ -697,34 +768,37 @@ i18n = ["Babel (>=2.7)"]
 
 [[package]]
 name = "kombu"
-version = "5.2.4"
+version = "5.3.0"
 description = "Messaging library for Python."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"},
-    {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"},
+    {file = "kombu-5.3.0-py3-none-any.whl", hash = "sha256:fa9be55281bb351ba9da582b2a74e3dd5015b8d075b287e4d16f0b2f25fefcc2"},
+    {file = "kombu-5.3.0.tar.gz", hash = "sha256:d084ec1f96f7a7c37ba9e816823bdbc08f0fc7ddb3a5be555805e692102297d8"},
 ]
 
 [package.dependencies]
-amqp = ">=5.0.9,<6.0.0"
+amqp = ">=5.1.1,<6.0.0"
+"backports.zoneinfo" = {version = ">=0.2.1", extras = ["tzdata"], markers = "python_version < \"3.9\""}
+typing-extensions = {version = "*", markers = "python_version < \"3.10\""}
 vine = "*"
 
 [package.extras]
-azureservicebus = ["azure-servicebus (>=7.0.0)"]
-azurestoragequeues = ["azure-storage-queue"]
-consul = ["python-consul (>=0.6.0)"]
+azureservicebus = ["azure-servicebus (>=7.10.0)"]
+azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"]
+confluentkafka = ["confluent-kafka (==2.1.1)"]
+consul = ["python-consul2"]
 librabbitmq = ["librabbitmq (>=2.0.0)"]
-mongodb = ["pymongo (>=3.3.0,<3.12.1)"]
+mongodb = ["pymongo (>=4.1.1)"]
 msgpack = ["msgpack"]
 pyro = ["pyro4"]
 qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"]
-redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"]
+redis = ["redis (>=4.5.2)"]
 slmq = ["softlayer-messaging (>=1.0.3)"]
-sqlalchemy = ["sqlalchemy"]
-sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"]
+sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
+sqs = ["boto3 (>=1.26.143)", "pycurl (==7.43.0.5)", "urllib3 (>=1.26.16)"]
 yaml = ["PyYAML (>=3.10)"]
-zookeeper = ["kazoo (>=1.3.1)"]
+zookeeper = ["kazoo (>=2.8.0)"]
 
 [[package]]
 name = "markdown"
@@ -745,61 +819,61 @@ testing = ["coverage", "pyyaml"]
 
 [[package]]
 name = "markupsafe"
-version = "2.1.2"
+version = "2.1.3"
 description = "Safely add untrusted strings to HTML/XML markup."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
-    {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
-    {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
-    {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
-    {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
-    {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
-    {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
+    {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
+    {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"},
+    {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
+    {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
+    {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
+    {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
 ]
 
 [[package]]
@@ -973,6 +1047,17 @@ files = [
 griffe = ">=0.24"
 mkdocstrings = ">=0.19"
 
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
 [[package]]
 name = "packaging"
 version = "23.1"
@@ -984,6 +1069,17 @@ files = [
     {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
 ]
 
+[[package]]
+name = "pathspec"
+version = "0.11.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
+    {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
+]
+
 [[package]]
 name = "pika"
 version = "1.3.2"
@@ -1000,6 +1096,21 @@ gevent = ["gevent"]
 tornado = ["tornado"]
 twisted = ["twisted"]
 
+[[package]]
+name = "platformdirs"
+version = "3.5.1"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"},
+    {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+
 [[package]]
 name = "pluggy"
 version = "1.0.0"
@@ -1260,99 +1371,99 @@ pyyaml = "*"
 
 [[package]]
 name = "regex"
-version = "2023.5.5"
+version = "2023.6.3"
 description = "Alternative regular expression module, to replace re."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "regex-2023.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309"},
-    {file = "regex-2023.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff"},
-    {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d"},
-    {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94"},
-    {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f"},
-    {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86"},
-    {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96"},
-    {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479"},
-    {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833"},
-    {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636"},
-    {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6"},
-    {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2"},
-    {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9"},
-    {file = "regex-2023.5.5-cp310-cp310-win32.whl", hash = "sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66"},
-    {file = "regex-2023.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810"},
-    {file = "regex-2023.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd"},
-    {file = "regex-2023.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc"},
-    {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b"},
-    {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8"},
-    {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe"},
-    {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e"},
-    {file = "regex-2023.5.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d"},
-    {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35"},
-    {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17"},
-    {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b"},
-    {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a"},
-    {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a"},
-    {file = "regex-2023.5.5-cp311-cp311-win32.whl", hash = "sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22"},
-    {file = "regex-2023.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80"},
-    {file = "regex-2023.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008"},
-    {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81"},
-    {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8"},
-    {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd"},
-    {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa"},
-    {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135"},
-    {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2"},
-    {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2"},
-    {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3"},
-    {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d"},
- {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3"}, - {file = "regex-2023.5.5-cp36-cp36m-win32.whl", hash = "sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46"}, - {file = "regex-2023.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926"}, - {file = "regex-2023.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2"}, - {file = "regex-2023.5.5-cp37-cp37m-win32.whl", hash = "sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c"}, - {file = "regex-2023.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb"}, - {file = "regex-2023.5.5-cp38-cp38-win32.whl", hash = "sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91"}, - {file = "regex-2023.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e"}, - {file = "regex-2023.5.5-cp39-cp39-win32.whl", hash = "sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac"}, - {file = "regex-2023.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764"}, - {file = "regex-2023.5.5.tar.gz", hash = "sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = 
"regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = 
"sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, ] [[package]] @@ -1457,13 +1568,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.6.2" +version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, - {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, + {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, + {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, ] [[package]] @@ -1553,13 +1664,13 @@ files = [ [[package]] name = "urllib3" -version = "2.0.2" +version = "2.0.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, - {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, + {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, + {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, ] [package.extras] @@ -1666,4 +1777,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "44f16b24c26b2232a9a9c3b45982784d45b82eb0a87c1c511b66c89602841eff" +content-hash = "0f398fb8a10d41395fcff13718c475702f8be95e3178985a0b72167a0be01738" diff --git a/pyproject.toml b/pyproject.toml index 4ebfb32..550ee91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ python-dateutil = ">=2.4" watchfiles = "^0.18.1" [tool.poetry.group.test.dependencies] +black = ">=23.3" pytest = ">=7.2.0,<8" pytest-cov = ">=2.10.1,<5" pytest-mock = "^3.10" @@ -53,6 +54,7 @@ pytest-randomly = ">=3.12" pytest-deadfixtures = "^2.2.1" coverage = {extras = ["toml"], version = ">=5.3,<7"} flake8 = ">=3.8,<6" +flake8-black = ">=0.3" flake8-bugbear = ">=20,<23" flake8-cognitive-complexity = "^0.1" flake8-commas = "~2.1" @@ -121,4 +123,9 @@ exclude = [ show-source = true max-line-length = 100 max-cognitive-complexity = 20 +select = "B" ignore = ["FI1", "W503", "W605"] + +[tool.black] +line_length = 100 +skip-string-normalization = true diff --git a/requirements/test.txt b/requirements/test.txt index f1033af..8459391 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,3 +1,4 @@ +black coverage flake8<5 pytest @@ -8,6 +9,7 @@ pytest-deadfixtures pytest-randomly djangorestframework django-mptt +flake8-black flake8-bugbear flake8-broken-line flake8-commas diff --git a/tests/dj/settings.py b/tests/dj/settings.py index 3fc843d..c13c85b 100644 --- a/tests/dj/settings.py +++ b/tests/dj/settings.py @@ -19,7 +19,6 @@ 'django.contrib.contenttypes', 'django.contrib.sessions', 
'django.contrib.messages', - 'dj_cqrs', 'tests.dj_master', 'tests.dj_replica', diff --git a/tests/dj/transport.py b/tests/dj/transport.py index 6e1eb88..ed0146b 100644 --- a/tests/dj/transport.py +++ b/tests/dj/transport.py @@ -23,6 +23,7 @@ class RabbitMQTransportWithEvents(RabbitMQTransport): @staticmethod def log_consumed(payload): from tests.dj_replica.models import Event + Event.objects.create( pid=os.getpid(), cqrs_id=payload.cqrs_id, @@ -34,6 +35,7 @@ class KombuTransportWithEvents(KombuTransport): @staticmethod def log_consumed(payload): from tests.dj_replica.models import Event + Event.objects.create( pid=os.getpid(), cqrs_id=payload.cqrs_id, diff --git a/tests/dj_master/management/commands/bulk_demo.py b/tests/dj_master/management/commands/bulk_demo.py index 075b5d9..b862bf2 100644 --- a/tests/dj_master/management/commands/bulk_demo.py +++ b/tests/dj_master/management/commands/bulk_demo.py @@ -13,7 +13,11 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - '--count', '-c', help='Simulation of N signals.', type=int, default=3000, + '--count', + '-c', + help='Simulation of N signals.', + type=int, + default=3000, ) @staticmethod diff --git a/tests/dj_master/models.py b/tests/dj_master/models.py index 97d923c..d8f5a38 100644 --- a/tests/dj_master/models.py +++ b/tests/dj_master/models.py @@ -86,7 +86,10 @@ class Author(MasterMixin, models.Model): name = models.CharField(max_length=20) publisher = models.ForeignKey( - Publisher, related_name='authors', on_delete=models.SET_NULL, null=True, + Publisher, + related_name='authors', + on_delete=models.SET_NULL, + null=True, ) @classmethod @@ -120,7 +123,11 @@ class NonMetaClassModel(MPTTModel, RawMasterMixin): name = models.CharField(max_length=50, unique=True) parent = TreeForeignKey( - 'self', on_delete=models.CASCADE, null=True, blank=True, related_name='children', + 'self', + on_delete=models.CASCADE, + null=True, + blank=True, + related_name='children', ) @@ -162,7 +169,11 @@ class MPTTWithTrackingModel(MPTTModel, RawMasterMixin): name = models.CharField(max_length=50, unique=True) parent = TreeForeignKey( - 'self', on_delete=models.CASCADE, null=True, blank=True, related_name='children', + 'self', + on_delete=models.CASCADE, + null=True, + blank=True, + related_name='children', ) diff --git a/tests/dj_replica/models.py b/tests/dj_replica/models.py index bd73b48..4a7b6c9 100644 --- a/tests/dj_replica/models.py +++ b/tests/dj_replica/models.py @@ -192,6 +192,7 @@ def cqrs_delete(cls, master_data, meta=None): for cqrs_id in ('document1', 'document2'): + class DocCls(RawReplicaMixin): CQRS_ID = cqrs_id CQRS_META = True diff --git a/tests/test_commands/test_bulk_load.py b/tests/test_commands/test_bulk_load.py index b370ca3..c8b3be6 100644 --- a/tests/test_commands/test_bulk_load.py +++ b/tests/test_commands/test_bulk_load.py @@ -29,14 +29,14 @@ def test_empty_file(): with pytest.raises(CommandError) as e: call_command(COMMAND_NAME, '-i={0}empty_file.dump'.format(DUMPS_PATH)) - assert "empty_file.dump is empty!" in str(e) + assert 'empty_file.dump is empty!' in str(e) def test_no_cqrs_id(): with pytest.raises(CommandError) as e: call_command(COMMAND_NAME, '-i={0}bad_cqrs_id.dump'.format(DUMPS_PATH)) - assert "Wrong CQRS ID: publisher!" in str(e) + assert 'Wrong CQRS ID: publisher!' 
in str(e) @pytest.mark.django_db @@ -98,7 +98,7 @@ def test_delete_operation_fails(mocker): with pytest.raises(CommandError) as e: call_command(COMMAND_NAME, '--input={0}no_rows.dump'.format(DUMPS_PATH), '--clear=true') - assert "Delete operation fails!" in str(e) + assert 'Delete operation fails!' in str(e) @pytest.mark.django_db diff --git a/tests/test_commands/test_consume.py b/tests/test_commands/test_consume.py index 42d13c4..489385e 100644 --- a/tests/test_commands/test_consume.py +++ b/tests/test_commands/test_consume.py @@ -73,7 +73,7 @@ def test_wrong_cqrs_id(reload_transport): with pytest.raises(CommandError) as e: call_command(COMMAND_NAME, cqrs_id=['author', 'random', 'no_db']) - assert "Wrong CQRS ID: random!" in str(e) + assert 'Wrong CQRS ID: random!' in str(e) def test_worker_manager_constructor_with_reload(mocker): diff --git a/tests/test_commands/test_dead_letters.py b/tests/test_commands/test_dead_letters.py index 56fc1b4..3dcfc87 100644 --- a/tests/test_commands/test_dead_letters.py +++ b/tests/test_commands/test_dead_letters.py @@ -130,4 +130,4 @@ def test_check_transport(settings): with pytest.raises(CommandError) as e: command.check_transport() - assert "Dead letters commands available only for RabbitMQTransport." in str(e) + assert 'Dead letters commands available only for RabbitMQTransport.' in str(e) diff --git a/tests/test_commands/test_diff_sync.py b/tests/test_commands/test_diff_sync.py index 5baf86d..eb8db98 100644 --- a/tests/test_commands/test_diff_sync.py +++ b/tests/test_commands/test_diff_sync.py @@ -112,9 +112,11 @@ def test_sync_no_queue(mocker): mocker.patch.object(sys, 'stdin', StringIO('author,dt,{0}\n[1]\n'.format(NO_QUEUE))) call_command(COMMAND_NAME, '--progress') - sync_mock.assert_called_once_with(**{ - 'cqrs_id': 'author', - 'filter': '{"id__in": [1]}', - 'batch': 10000, - 'progress': True, - }) + sync_mock.assert_called_once_with( + **{ + 'cqrs_id': 'author', + 'filter': '{"id__in": [1]}', + 'batch': 10000, + 'progress': True, + } + ) diff --git a/tests/test_controller.py b/tests/test_controller.py index 45cfdae..95053cc 100644 --- a/tests/test_controller.py +++ b/tests/test_controller.py @@ -34,7 +34,12 @@ def test_consumer(mocker): consume(TransportPayload('a', 'b', {}, 'c', previous_data={'e': 'f'}, queue='xyz')) factory_mock.assert_called_once_with( - 'a', 'b', {}, previous_data={'e': 'f'}, meta=None, queue='xyz', + 'a', + 'b', + {}, + previous_data={'e': 'f'}, + meta=None, + queue='xyz', ) @@ -94,21 +99,27 @@ def test_route_signal_to_replica_model_without_db(): @pytest.mark.parametrize('queue', ('abc', None)) def test_route_signal_to_replica_with_only_direct_syncs(queue): - assert route_signal_to_replica_model( - signal_type=SignalType.SYNC, - cqrs_id=OnlyDirectSyncModel.CQRS_ID, - instance_data={}, - queue=queue, - ) is True + assert ( + route_signal_to_replica_model( + signal_type=SignalType.SYNC, + cqrs_id=OnlyDirectSyncModel.CQRS_ID, + instance_data={}, + queue=queue, + ) + is True + ) @pytest.mark.django_db @pytest.mark.parametrize('data, pk_repr', (({}, 'None'), ({'id': '123'}, '123'))) def test_route_signal_to_replica_exception(data, pk_repr, caplog): - assert route_signal_to_replica_model( - signal_type=SignalType.SAVE, - cqrs_id=AbstractModel.CQRS_ID, - instance_data=data, - ) is None + assert ( + route_signal_to_replica_model( + signal_type=SignalType.SAVE, + cqrs_id=AbstractModel.CQRS_ID, + instance_data=data, + ) + is None + ) assert 'pk = {pk}'.format(pk=pk_repr) in caplog.text diff --git a/tests/test_dataclasses.py 
b/tests/test_dataclasses.py index 1892ced..11ac227 100644 --- a/tests/test_dataclasses.py +++ b/tests/test_dataclasses.py @@ -7,13 +7,15 @@ def test_transport_payload_infinite_expires(): - payload = TransportPayload.from_message({ - 'signal_type': SignalType.SYNC, - 'cqrs_id': 'cqrs_id', - 'instance_data': {}, - 'instance_pk': 'id', - 'expires': None, - }) + payload = TransportPayload.from_message( + { + 'signal_type': SignalType.SYNC, + 'cqrs_id': 'cqrs_id', + 'instance_data': {}, + 'instance_pk': 'id', + 'expires': None, + }, + ) assert payload.expires is None @@ -25,11 +27,13 @@ def test_transport_payload_without_expires(mocker, settings): settings.CQRS['master']['CQRS_MESSAGE_TTL'] = 10 expected_expires = datetime(2020, 1, 1, second=10, tzinfo=timezone.utc) - payload = TransportPayload.from_message({ - 'signal_type': SignalType.SYNC, - 'cqrs_id': 'cqrs_id', - 'instance_data': {}, - 'instance_pk': 'id', - }) + payload = TransportPayload.from_message( + { + 'signal_type': SignalType.SYNC, + 'cqrs_id': 'cqrs_id', + 'instance_data': {}, + 'instance_pk': 'id', + }, + ) assert payload.expires == expected_expires diff --git a/tests/test_delay.py b/tests/test_delay.py index 94979df..b6ca556 100644 --- a/tests/test_delay.py +++ b/tests/test_delay.py @@ -97,4 +97,4 @@ def test_delay_queue_invalid_max_size(): with pytest.raises(AssertionError) as e: DelayQueue(max_size=0) - assert e.value.args[0] == "Delay queue max_size should be positive integer." + assert e.value.args[0] == 'Delay queue max_size should be positive integer.' diff --git a/tests/test_master/test_mixin.py b/tests/test_master/test_mixin.py index 31eaaa3..726a7fa 100644 --- a/tests/test_master/test_mixin.py +++ b/tests/test_master/test_mixin.py @@ -133,16 +133,19 @@ def test_to_cqrs_dict_basic_types(): url_field='http://example.com', uuid_field=uid, ) - assert_is_sub_dict({ - 'int_field': 1, - 'bool_field': False, - 'char_field': 'str', - 'date_field': None, - 'datetime_field': str(dt), - 'float_field': 1.23, - 'url_field': 'http://example.com', - 'uuid_field': str(uid), - }, m.to_cqrs_dict()) + assert_is_sub_dict( + { + 'int_field': 1, + 'bool_field': False, + 'char_field': 'str', + 'date_field': None, + 'datetime_field': str(dt), + 'float_field': 1.23, + 'url_field': 'http://example.com', + 'uuid_field': str(uid), + }, + m.to_cqrs_dict(), + ) def test_to_cqrs_dict_all_fields(): @@ -189,7 +192,10 @@ def test_cqrs_sync_not_saved(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SYNC, models.ChosenFieldsModel.CQRS_ID, {'char_field': 'old', 'id': m.pk}, m.pk, + SignalType.SYNC, + models.ChosenFieldsModel.CQRS_ID, + {'char_field': 'old', 'id': m.pk}, + m.pk, ) @@ -205,7 +211,10 @@ def test_cqrs_sync(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SYNC, models.ChosenFieldsModel.CQRS_ID, {'char_field': 'new', 'id': m.pk}, m.pk, + SignalType.SYNC, + models.ChosenFieldsModel.CQRS_ID, + {'char_field': 'new', 'id': m.pk}, + m.pk, ) @@ -342,7 +351,10 @@ def test_transaction_commited(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SAVE, models.BasicFieldsModel.CQRS_ID, {'char_field': 'str', 'int_field': 1}, 1, + SignalType.SAVE, + models.BasicFieldsModel.CQRS_ID, + {'char_field': 'str', 'int_field': 1}, + 1, ) @@ -364,7 +376,10 @@ def test_transaction_rollbacked_to_savepoint(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SAVE, models.BasicFieldsModel.CQRS_ID, {'char_field': 'str', 'int_field': 1}, 1, + SignalType.SAVE, + 
models.BasicFieldsModel.CQRS_ID, + {'char_field': 'str', 'int_field': 1}, + 1, ) @@ -375,14 +390,16 @@ def test_serialization_no_related_instance(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SAVE, models.Author.CQRS_ID, + SignalType.SAVE, + models.Author.CQRS_ID, { 'id': 1, 'name': 'author', 'publisher': None, 'books': [], 'cqrs_revision': 0, - }, 1, + }, + 1, ) @@ -409,7 +426,8 @@ def test_save_serialization(mocker, django_assert_num_queries, django_v_trans_q_ assert_publisher_once_called_with_args( publisher_mock, - SignalType.SAVE, models.Author.CQRS_ID, + SignalType.SAVE, + models.Author.CQRS_ID, { 'id': 1, 'name': 'author', @@ -417,15 +435,19 @@ def test_save_serialization(mocker, django_assert_num_queries, django_v_trans_q_ 'id': 1, 'name': 'publisher', }, - 'books': [{ - 'id': 1, - 'name': '1', - }, { - 'id': 2, - 'name': '2', - }], + 'books': [ + { + 'id': 1, + 'name': '1', + }, + { + 'id': 2, + 'name': '2', + }, + ], 'cqrs_revision': 0, - }, 1, + }, + 1, ) @@ -455,10 +477,12 @@ def test_create_from_related_table(mocker): 'id': 1, 'name': 'author', 'publisher': None, - 'books': [{ - 'id': 1, - 'name': 'title', - }], + 'books': [ + { + 'id': 1, + 'name': 'title', + }, + ], 'cqrs_revision': 1, }, publisher_mock.call_args[0][0].instance_data, @@ -661,9 +685,15 @@ def test_cqrs_tracked_fields_date_and_datetime_tracking(mocker): instance.save() tracked_data = instance.get_tracked_fields_data() - assert publisher_mock.call_args[0][0].previous_data == tracked_data == { - 'cqrs_revision': 0, 'datetime_field': str(old_dt), 'date_field': str(old_d), - } + assert ( + publisher_mock.call_args[0][0].previous_data + == tracked_data + == { + 'cqrs_revision': 0, + 'datetime_field': str(old_dt), + 'date_field': str(old_d), + } + ) def test_mptt_cqrs_tracked_fields_model_has_tracker(): diff --git a/tests/test_master/test_signals.py b/tests/test_master/test_signals.py index 0093617..05d1c90 100644 --- a/tests/test_master/test_signals.py +++ b/tests/test_master/test_signals.py @@ -25,7 +25,10 @@ def test_post_save_create(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SAVE, models.SimplestModel.CQRS_ID, {'id': 1, 'name': None}, 1, + SignalType.SAVE, + models.SimplestModel.CQRS_ID, + {'id': 1, 'name': None}, + 1, ) @@ -57,7 +60,10 @@ def test_post_save_update(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.SAVE, models.SimplestModel.CQRS_ID, {'id': 1, 'name': 'new'}, 1, + SignalType.SAVE, + models.SimplestModel.CQRS_ID, + {'id': 1, 'name': 'new'}, + 1, ) @@ -70,7 +76,10 @@ def test_post_save_delete(mocker): assert_publisher_once_called_with_args( publisher_mock, - SignalType.DELETE, models.SimplestModel.CQRS_ID, {'id': 1, 'cqrs_revision': 1}, 1, + SignalType.DELETE, + models.SimplestModel.CQRS_ID, + {'id': 1, 'cqrs_revision': 1}, + 1, ) cqrs_updated = publisher_mock.call_args[0][0].to_dict()['instance_data']['cqrs_updated'] @@ -83,8 +92,7 @@ def test_post_save_instance_doesnt_exist(caplog): models.Author.objects.create(id=1, name='The author') models.Author.objects.get(id=1).delete() assert ( - "Can't produce message from master model 'Author': " - "The instance doesn't exist (pk=1)" + "Can't produce message from master model 'Author': " "The instance doesn't exist (pk=1)" ) in caplog.text @@ -123,9 +131,9 @@ def test_manual_post_bulk_create(mocker): def test_automatic_post_bulk_create(mocker): publisher_mock = mocker.patch('dj_cqrs.controller.producer.produce') - instances = 
models.SimplestTrackedModel.cqrs.bulk_create([ - models.SimplestTrackedModel(id=i, status='new') for i in range(1, 4) - ]) + instances = models.SimplestTrackedModel.cqrs.bulk_create( + [models.SimplestTrackedModel(id=i, status='new') for i in range(1, 4)], + ) assert len(instances) == 3 for index in range(3): @@ -201,7 +209,6 @@ def test_post_bulk_update_wout_prev_data(mocker, filter_kwargs): ( (0, {'description': 'old', 'status': None}, 1), (1, {'description': 'old', 'status': 'x'}, 2), - ), ), ), diff --git a/tests/test_registries.py b/tests/test_registries.py index 30a72c8..0c10499 100644 --- a/tests/test_registries.py +++ b/tests/test_registries.py @@ -18,12 +18,15 @@ class Cls(object): assert str(e.value) == "Two models can't have the same CQRS_ID: basic." -@pytest.mark.parametrize('model,registry', ( - (master_models.SimplestModel, MasterRegistry), - (master_models.AutoFieldsModel, MasterRegistry), - (replica_models.BasicFieldsModelRef, ReplicaRegistry), - (replica_models.BadTypeModelRef, ReplicaRegistry), -)) +@pytest.mark.parametrize( + 'model,registry', + ( + (master_models.SimplestModel, MasterRegistry), + (master_models.AutoFieldsModel, MasterRegistry), + (replica_models.BasicFieldsModelRef, ReplicaRegistry), + (replica_models.BadTypeModelRef, ReplicaRegistry), + ), +) def test_models_are_registered(model, registry): assert registry.models[model.CQRS_ID] == model assert registry.get_model_by_cqrs_id(model.CQRS_ID) == model diff --git a/tests/test_replica/test_mixin.py b/tests/test_replica/test_mixin.py index fbb23b9..cdb8cc8 100644 --- a/tests/test_replica/test_mixin.py +++ b/tests/test_replica/test_mixin.py @@ -84,16 +84,18 @@ class Cls(object): @pytest.mark.django_db def test_create_simple(): - instance = models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - 'bool_field': False, - 'date_field': None, - 'datetime_field': now(), - 'float_field': 1.25, - }) + instance = models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + 'bool_field': False, + 'date_field': None, + 'datetime_field': now(), + 'float_field': 1.25, + }, + ) assert isinstance(instance, models.BasicFieldsModelRef) instance.refresh_from_db() @@ -103,35 +105,41 @@ def test_create_simple(): @pytest.mark.django_db def test_create_simple_excessive_data(): - instance = models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - 'unexpected_field': 'value', - }) + instance = models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + 'unexpected_field': 'value', + }, + ) assert isinstance(instance, models.BasicFieldsModelRef) @pytest.mark.django_db def test_create_simple_insufficient_data(caplog): - models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - }) + models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + }, + ) assert 'Not all required CQRS fields are provided in data (basic).' 
in caplog.text @pytest.mark.django_db def test_create_mapped(caplog): - instance = models.MappedFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + instance = models.MappedFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + }, + ) assert isinstance(instance, models.MappedFieldsModelRef) instance.refresh_from_db() @@ -141,12 +149,14 @@ def test_create_mapped(caplog): @pytest.mark.django_db def test_create_mapped_bad_mapping(caplog): - models.BadMappingModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BadMappingModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + }, + ) assert 'Bad master-replica mapping for invalid_field (basic_3).' in caplog.text @@ -155,31 +165,37 @@ def test_create_mapped_bad_mapping(caplog): def test_create_db_error(mocker, caplog): mocker.patch.object(models.BasicFieldsModelRef.objects, 'create', side_effect=db_error) - models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + }, + ) assert 'CQRS create error: pk = 1 (basic).' in caplog.text @pytest.mark.django_db def test_update_ok(): - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + } + ) - instance = models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 1, - 'cqrs_updated': now(), - 'char_field': 'new_text', - 'float_field': 1.30, - }) + instance = models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 1, + 'cqrs_updated': now(), + 'char_field': 'new_text', + 'float_field': 1.30, + }, + ) assert isinstance(instance, models.BasicFieldsModelRef) @@ -191,39 +207,47 @@ def test_update_ok(): @pytest.mark.django_db def test_update_db_error(mocker, caplog): - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + } + ) mocker.patch.object(models.BasicFieldsModelRef, 'save', side_effect=db_error) - models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 1, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 1, + 'cqrs_updated': now(), + 'char_field': 'text', + }, + ) assert 'CQRS update error: pk = 1, cqrs_revision = 1 (basic).' 
in caplog.text @pytest.mark.django_db def test_delete_ok(): dt = now() - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': dt, - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': dt, + 'char_field': 'text', + } + ) - is_deleted = models.BasicFieldsModelRef.cqrs_delete({ - 'id': 1, - 'cqrs_revision': 0, - 'cqrs_updated': dt, - }) + is_deleted = models.BasicFieldsModelRef.cqrs_delete( + { + 'id': 1, + 'cqrs_revision': 0, + 'cqrs_updated': dt, + }, + ) assert is_deleted assert models.BasicFieldsModelRef.objects.count() == 0 @@ -231,11 +255,13 @@ def test_delete_ok(): @pytest.mark.django_db def test_delete_non_existing_id(): - is_deleted = models.BasicFieldsModelRef.cqrs_delete({ - 'id': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - }) + is_deleted = models.BasicFieldsModelRef.cqrs_delete( + { + 'id': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + }, + ) assert is_deleted assert models.BasicFieldsModelRef.objects.count() == 0 @@ -245,11 +271,13 @@ def test_delete_non_existing_id(): def test_delete_db_error(mocker, caplog): mocker.patch.object(models.BasicFieldsModelRef.objects, 'filter', side_effect=db_error) - is_deleted = models.BasicFieldsModelRef.cqrs_delete({ - 'id': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - }) + is_deleted = models.BasicFieldsModelRef.cqrs_delete( + { + 'id': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + }, + ) assert not is_deleted assert 'CQRS delete error: pk = 1' in caplog.text @@ -257,44 +285,52 @@ def test_delete_db_error(mocker, caplog): @pytest.mark.django_db def test_save_bad_master_data_field_type(caplog): - models.BadTypeModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'datetime_field': now(), - }) + models.BadTypeModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'datetime_field': now(), + }, + ) assert 'CQRS create error: pk = 1 (basic_1).' in caplog.text @pytest.mark.django_db def test_save_no_pk_in_master_data(caplog): - models.BasicFieldsModelRef.cqrs_save({ - 'id': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.cqrs_save( + { + 'id': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + }, + ) assert 'CQRS PK is not provided in data (basic).' in caplog.text @pytest.mark.django_db def test_save_no_cqrs_fields_in_master_data(caplog): - models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 0, - 'char_field': 'text', - }) + models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 0, + 'char_field': 'text', + }, + ) assert 'CQRS sync fields are not provided in data (basic).' in caplog.text @pytest.mark.django_db def test_delete_no_id_in_master_data(caplog): - is_deleted = models.BasicFieldsModelRef.cqrs_delete({ - 'cqrs_revision': 0, - 'cqrs_updated': now(), - }) + is_deleted = models.BasicFieldsModelRef.cqrs_delete( + { + 'cqrs_revision': 0, + 'cqrs_updated': now(), + }, + ) assert not is_deleted assert 'CQRS PK is not provided in data (basic).' 
in caplog.text @@ -302,10 +338,12 @@ def test_delete_no_id_in_master_data(caplog): @pytest.mark.django_db def test_delete_no_cqrs_fields_in_master_data(caplog): - is_deleted = models.BasicFieldsModelRef.cqrs_delete({ - 'id': 1, - 'cqrs_revision': 0, - }) + is_deleted = models.BasicFieldsModelRef.cqrs_delete( + { + 'id': 1, + 'cqrs_revision': 0, + }, + ) assert not is_deleted assert 'CQRS sync fields are not provided in data (basic).' in caplog.text @@ -356,12 +394,14 @@ def test_update_before_create_is_over(caplog): @pytest.mark.django_db(transaction=True) def test_wrong_update_order(caplog): - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + } + ) update_data_1 = { 'int_field': 1, @@ -391,12 +431,14 @@ def test_wrong_update_order(caplog): @pytest.mark.django_db(transaction=True) def test_de_duplication(caplog): - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + } + ) update_data = { 'int_field': 1, @@ -421,12 +463,14 @@ def test_create_before_delete_is_over(caplog): # and are not unique in the infinite timeline. # This will lead to expected inconsistency. - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + } + ) delete_data = { 'id': 1, @@ -450,19 +494,23 @@ def test_create_before_delete_is_over(caplog): @pytest.mark.django_db def test_updates_were_lost(caplog): - models.BasicFieldsModelRef.objects.create(**{ - 'int_field': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - 'char_field': 'text', - }) + models.BasicFieldsModelRef.objects.create( + **{ + 'int_field': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + 'char_field': 'text', + } + ) - models.BasicFieldsModelRef.cqrs_save({ - 'int_field': 1, - 'cqrs_revision': 5, - 'cqrs_updated': now(), - 'char_field': 'text1', - }) + models.BasicFieldsModelRef.cqrs_save( + { + 'int_field': 1, + 'cqrs_revision': 5, + 'cqrs_updated': now(), + 'char_field': 'text1', + }, + ) assert 'Lost or filtered out 4 CQRS packages: pk = 1, cqrs_revision = 5 (basic)' in caplog.text @@ -495,14 +543,18 @@ def test_tracked_fields_mapped(mocker): @pytest.mark.django_db def test_select_for_update_lock(mocker): m = mocker.patch.object( - QuerySet, 'select_for_update', return_value=models.LockModelRef.objects.all(), + QuerySet, + 'select_for_update', + return_value=models.LockModelRef.objects.all(), ) - instance = models.LockModelRef.cqrs_save({ - 'id': 1, - 'cqrs_revision': 0, - 'cqrs_updated': now(), - }) + instance = models.LockModelRef.cqrs_save( + { + 'id': 1, + 'cqrs_revision': 0, + 'cqrs_updated': now(), + }, + ) assert instance.id == 1 m.assert_called_once() @@ -518,7 +570,8 @@ def test_nodb(mocker): @pytest.mark.parametrize( - 'cqrs_max_retries, current_retry, expected_result', [ + 'cqrs_max_retries, current_retry, expected_result', + [ (5, 0, True), (5, 5, False), (-1, 0, False), @@ -583,7 +636,10 @@ def test_support_for_meta_update(): ) assert t == ( - True, {'id': 2, 
'cqrs_revision': 1, 'cqrs_updated': cqrs_updated}, None, [1, 2, 3], + True, + {'id': 2, 'cqrs_revision': 1, 'cqrs_updated': cqrs_updated}, + None, + [1, 2, 3], ) diff --git a/tests/test_transport/test_kombu.py b/tests/test_transport/test_kombu.py index 60eeafb..0f86d64 100644 --- a/tests/test_transport/test_kombu.py +++ b/tests/test_transport/test_kombu.py @@ -100,7 +100,10 @@ def test_produce_connection_error(kombu_transport, mocker, caplog): kombu_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) assert "CQRS couldn't be published: pk = 1 (CQRS_ID)." in caplog.text @@ -108,13 +111,18 @@ def test_produce_connection_error(kombu_transport, mocker, caplog): def test_produce_publish_error(kombu_transport, mocker, caplog): mocker.patch.object( - KombuTransport, '_get_producer_kombu_objects', return_value=(mocker.MagicMock(), None), + KombuTransport, + '_get_producer_kombu_objects', + return_value=(mocker.MagicMock(), None), ) mocker.patch.object(KombuTransport, '_produce_message', side_effect=kombu_error) kombu_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) assert "CQRS couldn't be published: pk = 1 (CQRS_ID)." in caplog.text @@ -123,13 +131,18 @@ def test_produce_publish_error(kombu_transport, mocker, caplog): def test_produce_ok(kombu_transport, mocker, caplog): caplog.set_level(logging.INFO) mocker.patch.object( - KombuTransport, '_get_producer_kombu_objects', return_value=(mocker.MagicMock(), None), + KombuTransport, + '_get_producer_kombu_objects', + return_value=(mocker.MagicMock(), None), ) mocker.patch.object(KombuTransport, '_produce_message', return_value=True) kombu_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) assert 'CQRS is published: pk = 1 (CQRS_ID)' in caplog.text @@ -138,7 +151,11 @@ def test_produce_ok(kombu_transport, mocker, caplog): def test_produce_message_ok(mocker): channel = mocker.MagicMock() payload = TransportPayload( - SignalType.SAVE, 'cqrs_id', {}, 'id', previous_data={'e': 'f'}, + SignalType.SAVE, + 'cqrs_id', + {}, + 'id', + previous_data={'e': 'f'}, ) exchange = PublicKombuTransport.create_exchange('exchange') @@ -251,8 +268,7 @@ def test_consume_message_ack_deprecated_structure(mocker, caplog): consumer_mock = mocker.patch('dj_cqrs.controller.consumer.consume') PublicKombuTransport.consume_message( - '{"signal_type":"signal","cqrs_id":"cqrs_id",' - '"instance_data":{},"previous_data":null}', + '{"signal_type":"signal","cqrs_id":"cqrs_id",' '"instance_data":{},"previous_data":null}', mocker.MagicMock(), ) @@ -295,7 +311,7 @@ def test_consume_message_json_parsing_error(mocker, caplog): mocker.MagicMock(), ) - assert ": {bad_payload:." in caplog.text + assert ': {bad_payload:.' 
in caplog.text def test_consume_message_package_structure_error(mocker, caplog): diff --git a/tests/test_transport/test_rabbit_mq.py b/tests/test_transport/test_rabbit_mq.py index fcff07d..b94283b 100644 --- a/tests/test_transport/test_rabbit_mq.py +++ b/tests/test_transport/test_rabbit_mq.py @@ -173,14 +173,18 @@ def rabbit_transport(settings): @pytest.mark.parametrize( - 'exception', (AMQPError, ChannelError, ReentrancyError, AMQPConnectorException, AssertionError), + 'exception', + (AMQPError, ChannelError, ReentrancyError, AMQPConnectorException, AssertionError), ) def test_produce_connection_error(exception, rabbit_transport, mocker, caplog): mocker.patch.object(RabbitMQTransport, '_get_producer_rmq_objects', side_effect=exception) rabbit_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) assert "CQRS couldn't be published: pk = 1 (CQRS_ID)." in caplog.text @@ -188,13 +192,18 @@ def test_produce_connection_error(exception, rabbit_transport, mocker, caplog): def test_produce_publish_error(rabbit_transport, mocker, caplog): mocker.patch.object( - RabbitMQTransport, '_get_producer_rmq_objects', return_value=(mocker.MagicMock(), None), + RabbitMQTransport, + '_get_producer_rmq_objects', + return_value=(mocker.MagicMock(), None), ) mocker.patch.object(RabbitMQTransport, '_produce_message', side_effect=AMQPError) rabbit_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) assert "CQRS couldn't be published: pk = 1 (CQRS_ID)." in caplog.text @@ -203,13 +212,18 @@ def test_produce_publish_error(rabbit_transport, mocker, caplog): def test_produce_ok(rabbit_transport, mocker, caplog): caplog.set_level(logging.INFO) mocker.patch.object( - RabbitMQTransport, '_get_producer_rmq_objects', return_value=(mocker.MagicMock(), None), + RabbitMQTransport, + '_get_producer_rmq_objects', + return_value=(mocker.MagicMock(), None), ) mocker.patch.object(RabbitMQTransport, '_produce_message', return_value=True) rabbit_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) assert 'CQRS is published: pk = 1 (CQRS_ID)' in caplog.text @@ -217,15 +231,22 @@ def test_produce_ok(rabbit_transport, mocker, caplog): def test_produce_retry_on_error(rabbit_transport, mocker, caplog): caplog.set_level(logging.INFO) - mocker.patch.object(RabbitMQTransport, '_get_producer_rmq_objects', side_effect=[ - AMQPConnectorException, - (mocker.MagicMock(), None), - ]) + mocker.patch.object( + RabbitMQTransport, + '_get_producer_rmq_objects', + side_effect=[ + AMQPConnectorException, + (mocker.MagicMock(), None), + ], + ) mocker.patch.object(RabbitMQTransport, '_produce_message', return_value=True) rabbit_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) @@ -234,7 +255,7 @@ def test_produce_retry_on_error(rabbit_transport, mocker, caplog): 'django-cqrs', logging.WARNING, "CQRS couldn't be published: pk = 1 (CQRS_ID)." - " Error: AMQPConnectorException. Reconnect...", + ' Error: AMQPConnectorException. 
Reconnect...', ), ( 'django-cqrs', @@ -245,15 +266,22 @@ def test_produce_retry_on_error(rabbit_transport, mocker, caplog): def test_produce_retry_on_error_1(rabbit_transport, mocker, caplog): - mocker.patch.object(RabbitMQTransport, '_get_producer_rmq_objects', side_effect=[ - StreamLostError, - StreamLostError, - ]) + mocker.patch.object( + RabbitMQTransport, + '_get_producer_rmq_objects', + side_effect=[ + StreamLostError, + StreamLostError, + ], + ) mocker.patch.object(RabbitMQTransport, '_produce_message', return_value=True) rabbit_transport.produce( TransportPayload( - SignalType.SAVE, 'CQRS_ID', {'id': 1}, 1, + SignalType.SAVE, + 'CQRS_ID', + {'id': 1}, + 1, ), ) @@ -353,7 +381,9 @@ def test_produce_sync_message_queue(mocker): def test_consume_connection_error(rabbit_transport, mocker, caplog): mocker.patch.object( - RabbitMQTransport, '_get_consumer_rmq_objects', side_effect=AMQPError, + RabbitMQTransport, + '_get_consumer_rmq_objects', + side_effect=AMQPError, ) mocker.patch('time.sleep', side_effect=db_error) @@ -371,7 +401,9 @@ def test_consume_ok(rabbit_transport, mocker): return_value=(None, None, consumer_generator), ) mocker.patch.object( - RabbitMQTransport, '_consume_message', db_error, + RabbitMQTransport, + '_consume_message', + db_error, ) with pytest.raises(DatabaseError): @@ -463,15 +495,23 @@ def test_consume_message_expired(mocker, caplog): def test_consume_message_json_parsing_error(mocker, caplog): PublicRabbitMQTransport.consume_message( - mocker.MagicMock(), mocker.MagicMock(), None, '{bad_payload:', mocker.MagicMock(), + mocker.MagicMock(), + mocker.MagicMock(), + None, + '{bad_payload:', + mocker.MagicMock(), ) - assert ": {bad_payload:." in caplog.text + assert ': {bad_payload:.' in caplog.text def test_consume_message_package_structure_error(mocker, caplog): PublicRabbitMQTransport.consume_message( - mocker.MagicMock(), mocker.MagicMock(), None, 'inv{"pk":"1"}', mocker.MagicMock(), + mocker.MagicMock(), + mocker.MagicMock(), + None, + 'inv{"pk":"1"}', + mocker.MagicMock(), ) assert """CQRS couldn't be parsed: inv{"pk":"1"}""" in caplog.text @@ -498,7 +538,12 @@ def test_message_without_retry_dead_letter(settings, mocker, caplog): channel = mocker.MagicMock() payload = TransportPayload( - SignalType.SAVE, 'basic', {'id': 1}, 1, correlation_id='abc', retries=2, + SignalType.SAVE, + 'basic', + {'id': 1}, + 1, + correlation_id='abc', + retries=2, ) delay_queue = DelayQueue() @@ -514,9 +559,7 @@ def test_message_without_retry_dead_letter(settings, mocker, caplog): assert getattr(produce_message, 'is_dead_letter', False) assert 'CQRS is failed: pk = 1 (basic), correlation_id = abc, retries = 2.' 
in caplog.text - assert ( - 'CQRS is added to dead letter queue: pk = 1 (basic), correlation_id = abc' in caplog.text - ) + assert 'CQRS is added to dead letter queue: pk = 1 (basic), correlation_id = abc' in caplog.text def test_fail_message_invalid_model(mocker, caplog): @@ -528,7 +571,11 @@ def test_fail_message_invalid_model(mocker, caplog): delivery_tag = 101 PublicRabbitMQTransport.fail_message( - mocker.MagicMock(), delivery_tag, payload, None, delay_queue, + mocker.MagicMock(), + delivery_tag, + payload, + None, + delay_queue, ) assert delay_queue.qsize() == 0 @@ -602,14 +649,16 @@ def test_delay_message_with_requeue(mocker, caplog): exceeding_delay = 0 exceeding_payload = TransportPayload(SignalType.SAVE, 'CQRS_ID', {'id': 4}, 4) PublicRabbitMQTransport.delay_message( - channel, 4, exceeding_payload, exceeding_delay, delay_queue, + channel, + 4, + exceeding_payload, + exceeding_delay, + delay_queue, ) assert delay_queue.qsize() == 3 assert delay_queue.get().payload is exceeding_payload - assert ( - 'CQRS is delayed: pk = 4 (CQRS_ID), correlation_id = None, delay = 0 sec' in caplog.text - ) + assert 'CQRS is delayed: pk = 4 (CQRS_ID), correlation_id = None, delay = 0 sec' in caplog.text assert requeue_message.call_count == 1 diff --git a/tests/test_utils.py b/tests/test_utils.py index 79cc948..8025317 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -73,13 +73,16 @@ def test_get_messaged_prefetch_count_per_worker_with_delay_queue(settings): assert get_messages_prefetch_count_per_worker() == 5 -@pytest.mark.parametrize('value,result', ( - (None, None), - (1, 1), - (datetime(2022, 1, 1, second=0, tzinfo=timezone.utc), '2022-01-01 00:00:00+00:00'), - (date(2022, 2, 1), '2022-02-01'), - (UUID('0419d87b-d477-44e4-82c4-310f56faa3c7'), '0419d87b-d477-44e4-82c4-310f56faa3c7'), - ('abc', 'abc'), -)) +@pytest.mark.parametrize( + 'value,result', + ( + (None, None), + (1, 1), + (datetime(2022, 1, 1, second=0, tzinfo=timezone.utc), '2022-01-01 00:00:00+00:00'), + (date(2022, 2, 1), '2022-02-01'), + (UUID('0419d87b-d477-44e4-82c4-310f56faa3c7'), '0419d87b-d477-44e4-82c4-310f56faa3c7'), + ('abc', 'abc'), + ), +) def test_get_json_valid_value(value, result): assert get_json_valid_value(value) == result diff --git a/tests/test_validation.py b/tests/test_validation.py index 5550c6a..de47a66 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -11,28 +11,27 @@ def test_full_configuration(): def f(*a, **kw): pass - settings = MagicMock(CQRS={ - 'queue': 'start', - - 'transport': 'dj_cqrs.transport.rabbit_mq.RabbitMQTransport', - 'host': 'host', - 'port': 1234, - 'user': 'user', - 'password': 'pswd', - - 'master': { - 'CQRS_AUTO_UPDATE_FIELDS': True, - 'CQRS_MESSAGE_TTL': 10, - 'correlation_function': f, - 'meta_function': f, + settings = MagicMock( + CQRS={ + 'queue': 'start', + 'transport': 'dj_cqrs.transport.rabbit_mq.RabbitMQTransport', + 'host': 'host', + 'port': 1234, + 'user': 'user', + 'password': 'pswd', + 'master': { + 'CQRS_AUTO_UPDATE_FIELDS': True, + 'CQRS_MESSAGE_TTL': 10, + 'correlation_function': f, + 'meta_function': f, + }, + 'replica': { + 'CQRS_MAX_RETRIES': 5, + 'CQRS_RETRY_DELAY': 4, + 'delay_queue_max_size': 2, + }, }, - - 'replica': { - 'CQRS_MAX_RETRIES': 5, - 'CQRS_RETRY_DELAY': 4, - 'delay_queue_max_size': 2, - }, - }) + ) validate_settings(settings) @@ -75,10 +74,12 @@ def test_transport_has_wrong_inheritance(): @pytest.fixture def cqrs_settings(): - return MagicMock(CQRS={ - 'transport': 'dj_cqrs.transport.mock.TransportMock', - 'queue': 
'replica', - }) + return MagicMock( + CQRS={ + 'transport': 'dj_cqrs.transport.mock.TransportMock', + 'queue': 'replica', + }, + ) def test_master_configuration_not_set(cqrs_settings): diff --git a/tests/utils.py b/tests/utils.py index 2f2b657..d45f8f5 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -27,8 +27,7 @@ def db_error(*args, **kwargs): def assert_tracked_fields(model_cls, fields): if model_cls.CQRS_TRACKED_FIELDS == '__all__': fields_to_track = { - f.attname if f.is_relation else f.name - for f in model_cls._meta.concrete_fields + f.attname if f.is_relation else f.name for f in model_cls._meta.concrete_fields } else: fields_to_track = set()