From 0d58385db2ce20ceca50f3904af982e712ef00fa Mon Sep 17 00:00:00 2001 From: Jamie MacDonald Date: Wed, 12 Jul 2023 17:06:49 +0100 Subject: [PATCH 01/33] Remove Snyk jobs from CI scanning --- .github/workflows/security-scans.yml | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/.github/workflows/security-scans.yml b/.github/workflows/security-scans.yml index 7348db48311f..a421295f4ab5 100644 --- a/.github/workflows/security-scans.yml +++ b/.github/workflows/security-scans.yml @@ -128,25 +128,4 @@ jobs: - name: Run Bandit 🔪 if: needs.changes.outputs.backend == 'true' - run: make lint-security - - snyk: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - name: Run Snyk Open Source to check for Python vulnerabilities - uses: snyk/actions/python-3.8@master - continue-on-error: true - env: - SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} - with: - command: monitor - args: --all-projects --org=rasa --skip-unresolved - - name: Run Snyk Open Source to check for JS vulnerabilities - uses: snyk/actions/node@master - continue-on-error: true - env: - SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} - with: - command: monitor - args: --org=rasa --yarn-workspaces --strict-out-of-sync=false --prune-repeated-subdependencies + run: make lint-security \ No newline at end of file From dc6f00de718dbcf244e800fe90912f0dfd6c45f8 Mon Sep 17 00:00:00 2001 From: Jamie MacDonald Date: Wed, 12 Jul 2023 17:10:28 +0100 Subject: [PATCH 02/33] Fix line formatting --- .github/workflows/security-scans.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/security-scans.yml b/.github/workflows/security-scans.yml index a421295f4ab5..b4cbf69cc92f 100644 --- a/.github/workflows/security-scans.yml +++ b/.github/workflows/security-scans.yml @@ -128,4 +128,4 @@ jobs: - name: Run Bandit 🔪 if: needs.changes.outputs.backend == 'true' - run: make lint-security \ No newline at end of file + run: make lint-security From 56446c2b4d8c1cfec2ffbbcdd4363de6b389818f Mon Sep 17 00:00:00 2001 From: Tawakalt Date: Mon, 23 Oct 2023 10:21:51 +0200 Subject: [PATCH 03/33] prepared release of version 3.6.13 --- CHANGELOG.mdx | 7 +++++++ changelog/12927.bugfix.md | 1 - pyproject.toml | 2 +- rasa/version.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) delete mode 100644 changelog/12927.bugfix.md diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index 14c7da88d3b8..9a6177e3632c 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> +## [3.6.13] - 2023-10-23 + +Rasa 3.6.13 (2023-10-23) +### Bugfixes +- [#12927](https://github.com/rasahq/rasa/issues/12927): Fix wrong conflicts that occur when rasa validate stories is run with slots that have active_loop set to null in mapping conditions. + + ## [3.6.12] - 2023-10-10 Rasa 3.6.12 (2023-10-10) diff --git a/changelog/12927.bugfix.md b/changelog/12927.bugfix.md deleted file mode 100644 index 7b9ff9d69410..000000000000 --- a/changelog/12927.bugfix.md +++ /dev/null @@ -1 +0,0 @@ -Fix wrong conflicts that occur when rasa validate stories is run with slots that have active_loop set to null in mapping conditions. 
\ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 904736791eeb..c3809092b5c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.12" +version = "3.6.13" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index 98e1d7a3ca12..7f4d3334548b 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.6.12" +__version__ = "3.6.13" From 58e122fe459fe3a21441fc44f9a1082d490eb9f2 Mon Sep 17 00:00:00 2001 From: Thomas Werkmeister Date: Thu, 16 Nov 2023 10:06:17 +0100 Subject: [PATCH 04/33] Made use of drop-small-last-batch logic only possible in DIET and configurable (#12948) * Made use of drop-small-last-batch logic only possible in DIET and configurable --- changelog/12948.bugfix.md | 1 + rasa/nlu/classifiers/diet_classifier.py | 5 +++ rasa/utils/tensorflow/constants.py | 1 + rasa/utils/tensorflow/data_generator.py | 19 ++++++--- rasa/utils/train_utils.py | 5 +++ tests/nlu/classifiers/test_diet_classifier.py | 41 +++++++++++++------ 6 files changed, 54 insertions(+), 18 deletions(-) create mode 100644 changelog/12948.bugfix.md diff --git a/changelog/12948.bugfix.md b/changelog/12948.bugfix.md new file mode 100644 index 000000000000..7479161831be --- /dev/null +++ b/changelog/12948.bugfix.md @@ -0,0 +1 @@ +Fixed UnexpecTEDIntentlessPolicy training errors that resulted from a change to batching behavior. Changed the batching behavior back to the original for all components. Made the changed batching behavior accessible in DietClassifier using `drop_small_last_batch: True`. diff --git a/rasa/nlu/classifiers/diet_classifier.py b/rasa/nlu/classifiers/diet_classifier.py index 1cc65c89b3c9..bea4735da6fe 100644 --- a/rasa/nlu/classifiers/diet_classifier.py +++ b/rasa/nlu/classifiers/diet_classifier.py @@ -50,6 +50,7 @@ from rasa.shared.nlu.training_data.training_data import TrainingData from rasa.shared.nlu.training_data.message import Message from rasa.utils.tensorflow.constants import ( + DROP_SMALL_LAST_BATCH, LABEL, IDS, HIDDEN_LAYERS_SIZES, @@ -288,6 +289,9 @@ def get_default_config() -> Dict[Text, Any]: # a few steps, as the compilation of the graph tends to take more time than # running it. It is recommended to not adjust the optimization parameter. 
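            # A minimal, hypothetical `config.yml` sketch for turning on the flag added
            # by this patch; the `DIETClassifier` name and `drop_small_last_batch` key
            # come from this diff, the remaining values are placeholders:
            #
            #   pipeline:
            #     - name: DIETClassifier
            #       epochs: 100
            #       drop_small_last_batch: true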
RUN_EAGERLY: False, + # Determines whether the last batch should be dropped if it contains fewer + # than half a batch size of examples + DROP_SMALL_LAST_BATCH: False, } def __init__( @@ -931,6 +935,7 @@ def train(self, training_data: TrainingData) -> Resource: self.component_config[BATCH_STRATEGY], self.component_config[EVAL_NUM_EXAMPLES], self.component_config[RANDOM_SEED], + drop_small_last_batch=self.component_config[DROP_SMALL_LAST_BATCH], ) callbacks = train_utils.create_common_callbacks( self.component_config[EPOCHS], diff --git a/rasa/utils/tensorflow/constants.py b/rasa/utils/tensorflow/constants.py index 047db9878c67..39d5ea6d0560 100644 --- a/rasa/utils/tensorflow/constants.py +++ b/rasa/utils/tensorflow/constants.py @@ -113,3 +113,4 @@ USE_GPU = "use_gpu" RUN_EAGERLY = "run_eagerly" +DROP_SMALL_LAST_BATCH = "drop_small_last_batch" diff --git a/rasa/utils/tensorflow/data_generator.py b/rasa/utils/tensorflow/data_generator.py index a696f607c026..e54b95dad335 100644 --- a/rasa/utils/tensorflow/data_generator.py +++ b/rasa/utils/tensorflow/data_generator.py @@ -344,6 +344,7 @@ def __init__( epochs: int = 1, batch_strategy: Text = SEQUENCE, shuffle: bool = True, + drop_small_last_batch: bool = False, ): """Initializes the increasing batch size data generator. @@ -353,6 +354,8 @@ def __init__( epochs: The total number of epochs. batch_strategy: The batch strategy. shuffle: If 'True', data will be shuffled. + drop_small_last_batch: if 'True', the last batch in an epoch will be dropped + if it has less examples than half the batch size """ super().__init__(model_data, batch_size, batch_strategy, shuffle) @@ -370,6 +373,7 @@ def __init__( self._current_batch_size = 0 # create separate data variable that will store modified data for each batch self._data: Data = {} + self.drop_small_last_batch = drop_small_last_batch self.on_epoch_end() def __len__(self) -> int: @@ -381,11 +385,16 @@ def __len__(self) -> int: # data was rebalanced, so need to recalculate number of examples num_examples = self.model_data.number_of_examples(self._data) batch_size = self._current_batch_size - # keep last batch only if it has at least half a batch size of examples - last_batch_half_full = num_examples % batch_size >= math.ceil(batch_size / 2) - num_batches = num_examples // batch_size + int(last_batch_half_full) - # Return at least 1 if there is an example - return max(num_batches, int(num_examples > 0)) + if self.drop_small_last_batch: + # keep last batch only if it has at least half a batch size of examples + last_batch_half_full = num_examples % batch_size >= math.ceil( + batch_size / 2 + ) + num_batches = num_examples // batch_size + int(last_batch_half_full) + # Return at least 1 if there is an example + return max(num_batches, int(num_examples > 0)) + else: + return num_examples // batch_size + int(num_examples % batch_size > 0) def __getitem__(self, index: int) -> Tuple[Any, Any]: """Gets batch at position `index`. diff --git a/rasa/utils/train_utils.py b/rasa/utils/train_utils.py index 36de0370d210..764507d7e39d 100644 --- a/rasa/utils/train_utils.py +++ b/rasa/utils/train_utils.py @@ -302,6 +302,7 @@ def create_data_generators( eval_num_examples: int = 0, random_seed: Optional[int] = None, shuffle: bool = True, + drop_small_last_batch: bool = False, ) -> Tuple[RasaBatchDataGenerator, Optional[RasaBatchDataGenerator]]: """Create data generators for train and optional validation data. @@ -313,6 +314,8 @@ def create_data_generators( eval_num_examples: Number of examples to use for validation data. 
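        # A worked example of the batch-count logic above, using the 48-example NLU
        # dataset referenced in the tests of this patch: with batch_size=20 and
        # drop_small_last_batch=True the trailing batch of 8 examples is smaller than
        # math.ceil(20 / 2) == 10 and is dropped, giving 2 batches; with
        # drop_small_last_batch=False it is kept, giving 3 batches.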
random_seed: The random seed. shuffle: Whether to shuffle data inside the data generator. + drop_small_last_batch: whether to drop the last batch if it has fewer than half + a batch size of examples Returns: The training data generator and optional validation data generator. @@ -328,6 +331,7 @@ def create_data_generators( epochs=epochs, batch_strategy=batch_strategy, shuffle=shuffle, + drop_small_last_batch=drop_small_last_batch, ) data_generator = RasaBatchDataGenerator( @@ -336,6 +340,7 @@ def create_data_generators( epochs=epochs, batch_strategy=batch_strategy, shuffle=shuffle, + drop_small_last_batch=drop_small_last_batch, ) return data_generator, validation_data_generator diff --git a/tests/nlu/classifiers/test_diet_classifier.py b/tests/nlu/classifiers/test_diet_classifier.py index 1f0c37a85faa..1fd84fdac47d 100644 --- a/tests/nlu/classifiers/test_diet_classifier.py +++ b/tests/nlu/classifiers/test_diet_classifier.py @@ -971,24 +971,35 @@ async def test_no_bilou_when_entity_recognition_off( @pytest.mark.timeout(120, func_only=True) @pytest.mark.parametrize( - "batch_size, expected_num_batches", + "batch_size, expected_num_batches, drop_small_last_batch", # the training dataset has 48 NLU examples [ - (1, 48), - (8, 6), - (15, 3), - (16, 3), - (18, 3), - (20, 2), - (32, 2), - (64, 1), - (128, 1), - (256, 1), + (1, 48, True), + (8, 6, True), + (15, 3, True), + (16, 3, True), + (18, 3, True), + (20, 2, True), + (32, 2, True), + (64, 1, True), + (128, 1, True), + (256, 1, True), + (1, 48, False), + (8, 6, False), + (15, 4, False), + (16, 3, False), + (18, 3, False), + (20, 3, False), + (32, 2, False), + (64, 1, False), + (128, 1, False), + (256, 1, False), ], ) async def test_dropping_of_last_partial_batch( batch_size: int, expected_num_batches: int, + drop_small_last_batch: bool, create_diet: Callable[..., DIETClassifier], train_and_preprocess: Callable[..., Tuple[TrainingData, List[GraphComponent]]], ): @@ -1012,7 +1023,9 @@ async def test_dropping_of_last_partial_batch( ) model_data = diet.preprocess_train_data(training_data) - data_generator, _ = train_utils.create_data_generators(model_data, batch_size, 1) + data_generator, _ = train_utils.create_data_generators( + model_data, batch_size, 1, drop_small_last_batch=drop_small_last_batch + ) assert len(data_generator) == expected_num_batches @@ -1041,6 +1054,8 @@ async def test_dropping_of_last_partial_batch_empty_data( ) model_data = diet.preprocess_train_data(training_data) - data_generator, _ = train_utils.create_data_generators(model_data, 64, 1) + data_generator, _ = train_utils.create_data_generators( + model_data, 64, 1, drop_small_last_batch=True + ) assert len(data_generator) == 0 From 048dc805b1781535a0b10705e4b104faec066abb Mon Sep 17 00:00:00 2001 From: Thomas Werkmeister Date: Fri, 17 Nov 2023 15:11:59 +0100 Subject: [PATCH 05/33] adjusted release script to work with public remote (#12953) --- scripts/release.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/release.py b/scripts/release.py index d1ac98325f80..068bd14e5fd2 100644 --- a/scripts/release.py +++ b/scripts/release.py @@ -30,6 +30,10 @@ RELEASE_BRANCH_PATTERN = re.compile(r"^\d+\.\d+\.x$") +PUBLIC_REMOTE = "public" +DEFAULT_REMOTE = "origin" +FIRST_CALM_VERSION = "3.7.0" + def create_argument_parser() -> argparse.ArgumentParser: """Parse all the command line arguments for the release script.""" @@ -247,9 +251,9 @@ def create_commit(version: Version) -> None: check_call(["git", "commit", "-m", f"prepared release of 
version {version}"]) -def push_changes() -> None: - """Pushes the current branch to origin.""" - check_call(["git", "push", "origin", "HEAD"]) +def push_changes(remote: str = DEFAULT_REMOTE) -> None: + """Pushes the current branch to the specified remote.""" + check_call(["git", "push", remote, "HEAD"]) def ensure_clean_git() -> None: @@ -337,10 +341,11 @@ def main(args: argparse.Namespace) -> None: # never update changelog on a prerelease version generate_changelog(version) + remote = PUBLIC_REMOTE if str(version) < FIRST_CALM_VERSION else DEFAULT_REMOTE # alpha workflow on feature branch when a version bump is required if version.is_alpha and not git_current_branch_is_main_or_release(): create_commit(version) - push_changes() + push_changes(remote) print_done_message_same_branch(version) else: @@ -348,7 +353,7 @@ def main(args: argparse.Namespace) -> None: branch = create_release_branch(version) create_commit(version) - push_changes() + push_changes(remote) print_done_message(branch, base, version) From 241bf28342cb8b56e3d395d6136587286debbe9c Mon Sep 17 00:00:00 2001 From: Thomas Werkmeister Date: Mon, 20 Nov 2023 10:10:11 +0100 Subject: [PATCH 06/33] Prepare release 3.6.14 (#12956) * prepared release of version 3.6.14 --- CHANGELOG.mdx | 7 +++++++ changelog/12948.bugfix.md | 1 - pyproject.toml | 2 +- rasa/version.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) delete mode 100644 changelog/12948.bugfix.md diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index 9a6177e3632c..b574a76b1b96 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> +## [3.6.14] - 2023-11-17 + +Rasa 3.6.14 (2023-11-17) +### Bugfixes +- [#12948](https://github.com/rasahq/rasa/issues/12948): Fixed UnexpecTEDIntentlessPolicy training errors that resulted from a change to batching behavior. Changed the batching behavior back to the original for all components. Made the changed batching behavior accessible in DietClassifier using `drop_small_last_batch: True`. + + ## [3.6.13] - 2023-10-23 Rasa 3.6.13 (2023-10-23) diff --git a/changelog/12948.bugfix.md b/changelog/12948.bugfix.md deleted file mode 100644 index 7479161831be..000000000000 --- a/changelog/12948.bugfix.md +++ /dev/null @@ -1 +0,0 @@ -Fixed UnexpecTEDIntentlessPolicy training errors that resulted from a change to batching behavior. Changed the batching behavior back to the original for all components. Made the changed batching behavior accessible in DietClassifier using `drop_small_last_batch: True`. diff --git a/pyproject.toml b/pyproject.toml index c3809092b5c2..4bf33f2a32d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.13" +version = "3.6.14" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index 7f4d3334548b..e4512ce41043 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! 
-__version__ = "3.6.13" +__version__ = "3.6.14" From 609287ec3cdaa3151f867753878e4ffeb9344da9 Mon Sep 17 00:00:00 2001 From: Varun Shankar S Date: Thu, 30 Nov 2023 13:26:52 +0100 Subject: [PATCH 07/33] Port "connection timeout to action server" changes to 3.6.x - [ENG 689] (#12965) * Merge pull request #106 from RasaHQ/ENG-680-DEFAULT_KEEP_ALIVE_TIMEOUT Fix connection to action server - [ENG 680] --------- Co-authored-by: Tom Bocklisch --- rasa/core/agent.py | 96 +++++++++++++++++++---------------- rasa/core/constants.py | 2 + rasa/core/run.py | 82 ++++++++++++++---------------- rasa/utils/endpoints.py | 68 ++++++++++++++----------- tests/core/test_run.py | 3 -- tests/utils/test_endpoints.py | 42 +++------------ 6 files changed, 136 insertions(+), 157 deletions(-) diff --git a/rasa/core/agent.py b/rasa/core/agent.py index bf3d42236e70..47e3360f6a5b 100644 --- a/rasa/core/agent.py +++ b/rasa/core/agent.py @@ -112,53 +112,59 @@ async def _pull_model_and_fingerprint( logger.debug(f"Requesting model from server {model_server.url}...") - try: - params = model_server.combine_parameters() - async with model_server.session.request( - "GET", - model_server.url, - timeout=DEFAULT_REQUEST_TIMEOUT, - headers=headers, - params=params, - ) as resp: - if resp.status in [204, 304]: - logger.debug( - "Model server returned {} status code, " - "indicating that no new model is available. " - "Current fingerprint: {}" - "".format(resp.status, fingerprint) - ) - return None - elif resp.status == 404: - logger.debug( - "Model server could not find a model at the requested " - "endpoint '{}'. It's possible that no model has been " - "trained, or that the requested tag hasn't been " - "assigned.".format(model_server.url) - ) - return None - elif resp.status != 200: - logger.debug( - "Tried to fetch model from server, but server response " - "status code is {}. We'll retry later..." - "".format(resp.status) + async with model_server.session() as session: + try: + params = model_server.combine_parameters() + async with session.request( + "GET", + model_server.url, + timeout=DEFAULT_REQUEST_TIMEOUT, + headers=headers, + params=params, + ) as resp: + + if resp.status in [204, 304]: + logger.debug( + "Model server returned {} status code, " + "indicating that no new model is available. " + "Current fingerprint: {}" + "".format(resp.status, fingerprint) + ) + return None + elif resp.status == 404: + logger.debug( + "Model server could not find a model at the requested " + "endpoint '{}'. It's possible that no model has been " + "trained, or that the requested tag hasn't been " + "assigned.".format(model_server.url) + ) + return None + elif resp.status != 200: + logger.debug( + "Tried to fetch model from server, but server response " + "status code is {}. We'll retry later..." + "".format(resp.status) + ) + return None + + model_path = Path(model_directory) / resp.headers.get( + "filename", "model.tar.gz" ) - return None - model_path = Path(model_directory) / resp.headers.get( - "filename", "model.tar.gz" + with open(model_path, "wb") as file: + file.write(await resp.read()) + + logger.debug("Saved model to '{}'".format(os.path.abspath(model_path))) + + # return the new fingerprint + return resp.headers.get("ETag") + + except aiohttp.ClientError as e: + logger.debug( + "Tried to fetch model from server, but " + "couldn't reach server. We'll retry later... 
" + "Error: {}.".format(e) ) - with open(model_path, "wb") as file: - file.write(await resp.read()) - logger.debug("Saved model to '{}'".format(os.path.abspath(model_path))) - # return the new fingerprint - return resp.headers.get("ETag") - except aiohttp.ClientError as e: - logger.debug( - "Tried to fetch model from server, but " - "couldn't reach server. We'll retry later... " - "Error: {}.".format(e) - ) - return None + return None async def _run_model_pulling_worker(model_server: EndpointConfig, agent: Agent) -> None: diff --git a/rasa/core/constants.py b/rasa/core/constants.py index 973e4e7b3a99..40d65c3299bb 100644 --- a/rasa/core/constants.py +++ b/rasa/core/constants.py @@ -24,6 +24,8 @@ DEFAULT_LOCK_LIFETIME = 60 # in seconds +DEFAULT_KEEP_ALIVE_TIMEOUT = 120 # in seconds + BEARER_TOKEN_PREFIX = "Bearer " # The lowest priority is intended to be used by machine learning policies. diff --git a/rasa/core/run.py b/rasa/core/run.py index 5270162809dd..3a8133613c3f 100644 --- a/rasa/core/run.py +++ b/rasa/core/run.py @@ -1,9 +1,19 @@ import asyncio import logging import uuid +import platform import os from functools import partial -from typing import Any, List, Optional, TYPE_CHECKING, Text, Union, Dict +from typing import ( + Any, + Callable, + List, + Optional, + Text, + Tuple, + Union, + Dict, +) import rasa.core.utils from rasa.plugin import plugin_manager @@ -23,8 +33,6 @@ from sanic import Sanic from asyncio import AbstractEventLoop -if TYPE_CHECKING: - from aiohttp import ClientSession logger = logging.getLogger() # get the root logger @@ -80,6 +88,14 @@ def _create_app_without_api(cors: Optional[Union[Text, List[Text]]] = None) -> S return app +def _is_apple_silicon_system() -> bool: + # check if the system is MacOS + if platform.system().lower() != "darwin": + return False + # check for arm architecture, indicating apple silicon + return platform.machine().startswith("arm") or os.uname().machine.startswith("arm") + + def configure_app( input_channels: Optional[List["InputChannel"]] = None, cors: Optional[Union[Text, List[Text], None]] = None, @@ -99,6 +115,9 @@ def configure_app( syslog_port: Optional[int] = None, syslog_protocol: Optional[Text] = None, request_timeout: Optional[int] = None, + server_listeners: Optional[List[Tuple[Callable, Text]]] = None, + use_uvloop: Optional[bool] = True, + keep_alive_timeout: int = constants.DEFAULT_KEEP_ALIVE_TIMEOUT, ) -> Sanic: """Run the agent.""" rasa.core.utils.configure_file_logging( @@ -118,6 +137,14 @@ def configure_app( else: app = _create_app_without_api(cors) + app.config.KEEP_ALIVE_TIMEOUT = keep_alive_timeout + if _is_apple_silicon_system() or not use_uvloop: + app.config.USE_UVLOOP = False + # some library still sets the loop to uvloop, even if disabled for sanic + # using uvloop leads to breakingio errors, see + # https://rasahq.atlassian.net/browse/ENG-667 + asyncio.set_event_loop_policy(None) + if input_channels: channels.channel.register(input_channels, app, route=route) else: @@ -150,6 +177,10 @@ async def run_cmdline_io(running_app: Sanic) -> None: app.add_task(run_cmdline_io) + if server_listeners: + for (listener, event) in server_listeners: + app.register_listener(listener, event) + return app @@ -179,6 +210,7 @@ def serve_application( syslog_port: Optional[int] = None, syslog_protocol: Optional[Text] = None, request_timeout: Optional[int] = None, + server_listeners: Optional[List[Tuple[Callable, Text]]] = None, ) -> None: """Run the API entrypoint.""" if not channel and not credentials: @@ -204,6 +236,7 @@ def 
serve_application( syslog_port=syslog_port, syslog_protocol=syslog_protocol, request_timeout=request_timeout, + server_listeners=server_listeners, ) ssl_context = server.create_ssl_context( @@ -217,7 +250,7 @@ def serve_application( partial(load_agent_on_start, model_path, endpoints, remote_storage), "before_server_start", ) - app.register_listener(create_connection_pools, "after_server_start") + app.register_listener(close_resources, "after_server_stop") number_of_workers = rasa.core.utils.number_of_sanic_workers( @@ -279,44 +312,3 @@ async def close_resources(app: Sanic, _: AbstractEventLoop) -> None: event_broker = current_agent.tracker_store.event_broker if event_broker: await event_broker.close() - - action_endpoint = current_agent.action_endpoint - if action_endpoint: - await action_endpoint.session.close() - - model_server = current_agent.model_server - if model_server: - await model_server.session.close() - - -async def create_connection_pools(app: Sanic, _: AbstractEventLoop) -> None: - """Create connection pools for the agent's action server and model server.""" - current_agent = getattr(app.ctx, "agent", None) - if not current_agent: - logger.debug("No agent found after server start.") - return None - - create_action_endpoint_connection_pool(current_agent) - create_model_server_connection_pool(current_agent) - - return None - - -def create_action_endpoint_connection_pool(agent: Agent) -> Optional["ClientSession"]: - """Create a connection pool for the action endpoint.""" - action_endpoint = agent.action_endpoint - if not action_endpoint: - logger.debug("No action endpoint found after server start.") - return None - - return action_endpoint.session - - -def create_model_server_connection_pool(agent: Agent) -> Optional["ClientSession"]: - """Create a connection pool for the model server.""" - model_server = agent.model_server - if not model_server: - logger.debug("No model server endpoint found after server start.") - return None - - return model_server.session diff --git a/rasa/utils/endpoints.py b/rasa/utils/endpoints.py index 5e1032778e6b..31d1ea7228bc 100644 --- a/rasa/utils/endpoints.py +++ b/rasa/utils/endpoints.py @@ -1,8 +1,6 @@ import ssl -from functools import cached_property import aiohttp -import logging import os from aiohttp.client_exceptions import ContentTypeError from sanic.request import Request @@ -11,10 +9,11 @@ from rasa.shared.exceptions import FileNotFoundException import rasa.shared.utils.io import rasa.utils.io +import structlog from rasa.core.constants import DEFAULT_REQUEST_TIMEOUT -logger = logging.getLogger(__name__) +structlogger = structlog.get_logger() def read_endpoint_config( @@ -32,9 +31,13 @@ def read_endpoint_config( return EndpointConfig.from_dict(content[endpoint_type]) except FileNotFoundError: - logger.error( - "Failed to read endpoint configuration " - "from {}. No such file.".format(os.path.abspath(filename)) + structlogger.error( + "endpoint.read.failed_no_such_file", + filename=os.path.abspath(filename), + event_info=( + "Failed to read endpoint configuration file - " + "the file was not found." + ), ) return None @@ -56,9 +59,13 @@ def concat_url(base: Text, subpath: Optional[Text]) -> Text: """ if not subpath: if base.endswith("/"): - logger.debug( - f"The URL '{base}' has a trailing slash. Please make sure the " - f"target server supports trailing slashes for this endpoint." + structlogger.debug( + "endpoint.concat_url.trailing_slash", + url=base, + event_info=( + "The URL has a trailing slash. 
Please make sure the " + "target server supports trailing slashes for this endpoint." + ), ) return base @@ -95,7 +102,6 @@ def __init__( self.cafile = cafile self.kwargs = kwargs - @cached_property def session(self) -> aiohttp.ClientSession: """Creates and returns a configured aiohttp client session.""" # create authentication parameters @@ -164,23 +170,26 @@ async def request( f"'{os.path.abspath(self.cafile)}' does not exist." ) from e - async with self.session.request( - method, - url, - headers=headers, - params=self.combine_parameters(kwargs), - compress=compress, - ssl=sslcontext, - **kwargs, - ) as response: - if response.status >= 400: - raise ClientResponseError( - response.status, response.reason, await response.content.read() - ) - try: - return await response.json() - except ContentTypeError: - return None + async with self.session() as session: + async with session.request( + method, + url, + headers=headers, + params=self.combine_parameters(kwargs), + compress=compress, + ssl=sslcontext, + **kwargs, + ) as response: + if response.status >= 400: + raise ClientResponseError( + response.status, + response.reason, + await response.content.read(), + ) + try: + return await response.json() + except ContentTypeError: + return None @classmethod def from_dict(cls, data: Dict[Text, Any]) -> "EndpointConfig": @@ -263,7 +272,7 @@ def float_arg( try: return float(str(arg)) except (ValueError, TypeError): - logger.warning(f"Failed to convert '{arg}' to float.") + structlogger.warning("endpoint.float_arg.convert_failed", arg=arg, key=key) return default @@ -291,5 +300,6 @@ def int_arg( try: return int(str(arg)) except (ValueError, TypeError): - logger.warning(f"Failed to convert '{arg}' to int.") + + structlogger.warning("endpoint.int_arg.convert_failed", arg=arg, key=key) return default diff --git a/tests/core/test_run.py b/tests/core/test_run.py index 1ac276d43772..8eda15058c0d 100644 --- a/tests/core/test_run.py +++ b/tests/core/test_run.py @@ -1,7 +1,6 @@ import warnings from unittest.mock import Mock -import aiohttp import pytest from typing import Text @@ -84,8 +83,6 @@ async def test_close_resources(loop: AbstractEventLoop): broker = SQLEventBroker() app = Mock() app.ctx.agent.tracker_store.event_broker = broker - app.ctx.agent.action_endpoint.session = aiohttp.ClientSession() - app.ctx.agent.model_server.session = aiohttp.ClientSession() with warnings.catch_warnings() as record: await run.close_resources(app, loop) diff --git a/tests/utils/test_endpoints.py b/tests/utils/test_endpoints.py index 071e54ee9318..711f2fd25faa 100644 --- a/tests/utils/test_endpoints.py +++ b/tests/utils/test_endpoints.py @@ -1,4 +1,4 @@ -import logging +import structlog from pathlib import Path from typing import Text, Optional, Union from unittest.mock import Mock @@ -35,13 +35,14 @@ def test_concat_url(base, subpath, expected_result): assert endpoint_utils.concat_url(base, subpath) == expected_result -def test_warning_for_base_paths_with_trailing_slash(caplog): +def test_warning_for_base_paths_with_trailing_slash(): test_path = "base/" - - with caplog.at_level(logging.DEBUG, logger="rasa.utils.endpoints"): + with structlog.testing.capture_logs() as caplog: assert endpoint_utils.concat_url(test_path, None) == test_path - assert len(caplog.records) == 1 + assert len(caplog) == 1 + assert caplog[0]["event"] == "endpoint.concat_url.trailing_slash" + assert caplog[0]["log_level"] == "debug" async def test_endpoint_config(): @@ -88,7 +89,7 @@ async def test_endpoint_config(): # unfortunately, the mock 
library won't report any headers stored on # the session object, so we need to verify them separately - async with endpoint.session as s: + async with endpoint.session() as s: assert s._default_headers.get("X-Powered-By") == "Rasa" assert s._default_auth.login == "user" assert s._default_auth.password == "pass" @@ -231,32 +232,3 @@ def test_int_arg(value: Optional[Union[int, str]], default: int, expected_result if value is not None: request.args = {"key": value} assert endpoint_utils.int_arg(request, "key", default) == expected_result - - -async def test_endpoint_config_caches_session() -> None: - """Test that the EndpointConfig session is cached. - - Assert identity of the session object, which should not be recreated when calling - the property `session` multiple times. - """ - endpoint = endpoint_utils.EndpointConfig("https://example.com/") - session = endpoint.session - - assert endpoint.session is session - - # teardown - await endpoint.session.close() - - -async def test_endpoint_config_constructor_does_not_create_session_cached_property() -> None: # noqa: E501 - """Test that the instantiation of EndpointConfig does not create the session cached property.""" # noqa: E501 - endpoint = endpoint_utils.EndpointConfig("https://example.com/") - - assert endpoint.__dict__.get("url") == "https://example.com/" - assert endpoint.__dict__.get("session") is None - - # the property is created when it is accessed - async with endpoint.session as session: - assert session is not None - - assert endpoint.__dict__.get("session") is session From 7cfc7d07e3f5502ed7333ad1910d5d85891eb473 Mon Sep 17 00:00:00 2001 From: Varun Shankar S Date: Thu, 30 Nov 2023 19:51:17 +0100 Subject: [PATCH 08/33] prepared release of version 3.6.15 (#12967) * prepared release of version 3.6.15 --- CHANGELOG.mdx | 7 +++++++ Makefile | 2 +- pyproject.toml | 2 +- rasa/version.py | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index b574a76b1b96..fce125567d98 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> +## [3.6.15] - 2023-11-30 + +Rasa 3.6.15 (2023-11-30) +### Bugfixes +- [#12965](https://github.com/rasahq/rasa/issues/12965): Fixed connection timeout to action server by setting KEEP_ALIVE_TIMEOUT to 120, and reverting changes introduced in #12886. 
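A rough sketch of how the restored keep-alive default reaches Sanic, based on the `configure_app` changes earlier in this patch series (the override value is illustrative):

```python
from rasa.core.run import configure_app

# configure_app now accepts keep_alive_timeout (default 120 s via
# DEFAULT_KEEP_ALIVE_TIMEOUT) and assigns it to app.config.KEEP_ALIVE_TIMEOUT.
app = configure_app(keep_alive_timeout=300)
```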
+ + ## [3.6.14] - 2023-11-17 Rasa 3.6.14 (2023-11-17) diff --git a/Makefile b/Makefile index f3ebb6135e25..23b5799e1d5f 100644 --- a/Makefile +++ b/Makefile @@ -136,7 +136,7 @@ prepare-tests-windows: # It will retry the installation 5 times if it fails # See: https://github.com/actions/virtual-environments/blob/main/images/win/scripts/ImageHelpers/ChocoHelpers.ps1 prepare-tests-windows-gha: - powershell -command "Choco-Install wget graphviz" + powershell -command "Install-ChocoPackage wget graphviz" test: clean # OMP_NUM_THREADS can improve overall performance using one thread by process (on tensorflow), avoiding overload diff --git a/pyproject.toml b/pyproject.toml index 4bf33f2a32d5..92d55359da8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.14" +version = "3.6.15" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index e4512ce41043..3d8e9f0ee007 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.6.14" +__version__ = "3.6.15" From 865902f3c576a4a66b9b9492f3e8234261c14e17 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Mon, 11 Dec 2023 10:29:05 +0100 Subject: [PATCH 09/33] Additional load testing recommendations --- .../monitoring/load-testing-guidelines.mdx | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/docs/monitoring/load-testing-guidelines.mdx b/docs/docs/monitoring/load-testing-guidelines.mdx index ff40486853b5..23b104917177 100644 --- a/docs/docs/monitoring/load-testing-guidelines.mdx +++ b/docs/docs/monitoring/load-testing-guidelines.mdx @@ -17,6 +17,29 @@ In our tests we used the Rasa [HTTP-API](https://rasa.com/docs/rasa/pages/http-a | Up to 50,000 | 6vCPU | 16 GB | | Up to 80,000 | 6vCPU, with almost 90% CPU usage | 16 GB | +:::info This is the most optimal AWS setup tested on EKS with + +ec2: c5.2xlarge - 9.2rps/node throughput +ec2: c5.4xlarge - 19.5rps/node throughput +You can always choose a bigger compute efficient instance like c5.4xlarge with more CPU per node to maximize throughput per node + +::: + +| AWS | RasaPro | Rasa Action Server | +|--------------------------|----------------------------------------------|-------------------------------------------| +| EC2: C52xlarge | 3vCPU, 10Gb Memory, 3 Sanic Threads | 3vCPU, 2Gb Memory, 3 Sanic Threads | +| EC2: C54xlarge | 7vCPU, 16Gb Memory, 7 Sanic Threads | 7vCPU, 12Gb Memory, 7 Sanic Threads | + +### Some recommendations to improve latency +- Running action as a sidecar, saves about ~100ms on average trips from the action server on the concluded tests. Results may vary depending on the number of calls made to the action server. +- Sanic Workers must be mapped 1:1 to CPU for both Rasa Pro and Rasa Action Server +- Create `async` actions to avoid any blocking I/O +- Use KEDA for pre-emptive autoscaling of rasa pods in production based on http requests +- `enable_selective_domain: true` : Domain is only sent for actions that needs it. This massively trims the payload between the two pods. 
+- Consider using c5n.nxlarge machines which are more compute optimized and support better parallelization on http requests. + However, as they are low on memory, models need to be trained lightweight. + Not suitable if you want to run transformers + ### Debugging bot related issues while scaling up From 96eead79cce0aa7936dfd4827fc2dba5a56ed901 Mon Sep 17 00:00:00 2001 From: Tawakalt Date: Fri, 29 Dec 2023 22:49:27 +0100 Subject: [PATCH 10/33] update cryptography --- poetry.lock | 50 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/poetry.lock b/poetry.lock index b347db8c13bf..0f266f79a5ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1090,35 +1090,35 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "41.0.3" +version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = 
"cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = 
"cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [package.dependencies] @@ -7231,4 +7231,4 @@ transformers = ["sentencepiece", "transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.11" -content-hash = "71bfb81a213fc85cbf7fe0ac10a9ac16363fd14c18647e1f0ee26b35e73e0747" +content-hash = "d38a12f16b69e12490c4ad2cebda4f8b8a9bdb45f6cdd883b44b4754dbf424c5" diff --git a/pyproject.toml b/pyproject.toml index 904736791eeb..efa2bce4abc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -154,7 +154,7 @@ structlog-sentry = "^2.0.2" dnspython = "2.3.0" wheel = ">=0.38.1" certifi = ">=2023.7.22" -cryptography = ">=41.0.2" +cryptography = ">=41.0.7" [[tool.poetry.dependencies.tensorflow-io-gcs-filesystem]] version = "==0.31" markers = "sys_platform == 'win32'" From 45cbefd0a901160376af4ceef83583ad8244ccc2 Mon Sep 17 00:00:00 2001 From: Tawakalt Date: Tue, 2 Jan 2024 11:34:22 +0100 Subject: [PATCH 11/33] add changelog --- changelog/12983.bugfix.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/12983.bugfix.md diff --git a/changelog/12983.bugfix.md b/changelog/12983.bugfix.md new file mode 100644 index 000000000000..5de8934cfe59 --- /dev/null +++ b/changelog/12983.bugfix.md @@ -0,0 +1 @@ +Upgrade Cryptography to fix improper certificate validation. \ No newline at end of file From 652c1756ab695b55eb0c199ebe3a190482fdb29c Mon Sep 17 00:00:00 2001 From: sancharigr Date: Thu, 4 Jan 2024 20:25:20 +0530 Subject: [PATCH 12/33] Review changes made --- .../monitoring/load-testing-guidelines.mdx | 23 ++++++------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/docs/docs/monitoring/load-testing-guidelines.mdx b/docs/docs/monitoring/load-testing-guidelines.mdx index 23b104917177..a794d73639da 100644 --- a/docs/docs/monitoring/load-testing-guidelines.mdx +++ b/docs/docs/monitoring/load-testing-guidelines.mdx @@ -12,33 +12,24 @@ In order to gather metrics on our system's ability to handle increased loads and In each test case we spawned the following number of concurrent users at peak concurrency using a [spawn rate](https://docs.locust.io/en/1.5.0/configuration.html#all-available-configuration-options) of 1000 users per second. In our tests we used the Rasa [HTTP-API](https://rasa.com/docs/rasa/pages/http-api) and the [Locust](https://locust.io/) open source load testing tool. 
+ | Users | CPU | Memory | |--------------------------|----------------------------------------------|---------------| | Up to 50,000 | 6vCPU | 16 GB | | Up to 80,000 | 6vCPU, with almost 90% CPU usage | 16 GB | -:::info This is the most optimal AWS setup tested on EKS with - -ec2: c5.2xlarge - 9.2rps/node throughput -ec2: c5.4xlarge - 19.5rps/node throughput -You can always choose a bigger compute efficient instance like c5.4xlarge with more CPU per node to maximize throughput per node - -::: - -| AWS | RasaPro | Rasa Action Server | -|--------------------------|----------------------------------------------|-------------------------------------------| -| EC2: C52xlarge | 3vCPU, 10Gb Memory, 3 Sanic Threads | 3vCPU, 2Gb Memory, 3 Sanic Threads | -| EC2: C54xlarge | 7vCPU, 16Gb Memory, 7 Sanic Threads | 7vCPU, 12Gb Memory, 7 Sanic Threads | ### Some recommendations to improve latency -- Running action as a sidecar, saves about ~100ms on average trips from the action server on the concluded tests. Results may vary depending on the number of calls made to the action server. - Sanic Workers must be mapped 1:1 to CPU for both Rasa Pro and Rasa Action Server - Create `async` actions to avoid any blocking I/O -- Use KEDA for pre-emptive autoscaling of rasa pods in production based on http requests - `enable_selective_domain: true` : Domain is only sent for actions that needs it. This massively trims the payload between the two pods. -- Consider using c5n.nxlarge machines which are more compute optimized and support better parallelization on http requests. +- Consider using compute efficient machines on cloud which are optimized for high performance computing such as the C5 instances on AWS. However, as they are low on memory, models need to be trained lightweight. 
- Not suitable if you want to run transformers + + +| Machine | RasaPro | Rasa Action Server | +|--------------------------------|------------------------------------------------|--------------------------------------------------| +| AWS C5 or Azure F or Gcloud C2 | 3-7vCPU, 10-16Gb Memory, 3-7 Sanic Threads | 3-7vCPU, 2-12Gb Memory, 3-7 Sanic Threads | ### Debugging bot related issues while scaling up From 64546fcf70be2754e75550016c9e8916e898aa09 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Fri, 5 Jan 2024 11:28:01 +0530 Subject: [PATCH 13/33] Add missing CI step condition --- .github/workflows/continous-integration.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/continous-integration.yml b/.github/workflows/continous-integration.yml index b50753b1dffb..587ad2ad26e4 100644 --- a/.github/workflows/continous-integration.yml +++ b/.github/workflows/continous-integration.yml @@ -290,6 +290,7 @@ jobs: - name: Prevent race condition in poetry build # More context about race condition during poetry build can be found here: # https://github.com/python-poetry/poetry/issues/7611#issuecomment-1747836233 + if: needs.changes.outputs.backend == 'true' run: | poetry config installer.max-workers 1 From 8f916e94faf43e4a9d5a240b29634afe701c6a30 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Thu, 11 Jan 2024 18:32:19 +0530 Subject: [PATCH 14/33] Edit docs workflow to release docs for 3.6.x --- .github/workflows/documentation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 3e1a31913f6d..076f44d0c612 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -82,7 +82,7 @@ jobs: echo "build_docs=true" >> $GITHUB_OUTPUT else # Get latest tagged Rasa version - git fetch --depth=1 origin "+refs/tags/*:refs/tags/*" + git describe --tags --match="3.6.[0-9]*" --abbrev=0 HEAD # Fetch branch history TAG_NAME=${GITHUB_REF#refs/tags/} git fetch --prune --unshallow From e970b3a721ed87186edfb6e38069a49e971bea22 Mon Sep 17 00:00:00 2001 From: m-vdb Date: Tue, 16 Jan 2024 08:47:09 +0100 Subject: [PATCH 15/33] update changelog with entries from 3.5.x releases --- CHANGELOG.mdx | 113 ++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 81 insertions(+), 32 deletions(-) diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index fce125567d98..60752d9d683d 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -17,37 +17,37 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> ## [3.6.15] - 2023-11-30 - -Rasa 3.6.15 (2023-11-30) + +Rasa 3.6.15 (2023-11-30) ### Bugfixes - [#12965](https://github.com/rasahq/rasa/issues/12965): Fixed connection timeout to action server by setting KEEP_ALIVE_TIMEOUT to 120, and reverting changes introduced in #12886. ## [3.6.14] - 2023-11-17 - -Rasa 3.6.14 (2023-11-17) + +Rasa 3.6.14 (2023-11-17) ### Bugfixes - [#12948](https://github.com/rasahq/rasa/issues/12948): Fixed UnexpecTEDIntentlessPolicy training errors that resulted from a change to batching behavior. Changed the batching behavior back to the original for all components. Made the changed batching behavior accessible in DietClassifier using `drop_small_last_batch: True`. ## [3.6.13] - 2023-10-23 - -Rasa 3.6.13 (2023-10-23) + +Rasa 3.6.13 (2023-10-23) ### Bugfixes - [#12927](https://github.com/rasahq/rasa/issues/12927): Fix wrong conflicts that occur when rasa validate stories is run with slots that have active_loop set to null in mapping conditions. 
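For illustration, a domain fragment of the kind this fix concerns, a slot mapping whose condition sets `active_loop` to null (slot and entity names are invented):

```yaml
slots:
  cuisine:
    type: text
    mappings:
      - type: from_entity
        entity: cuisine
        conditions:
          - active_loop: null
```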
## [3.6.12] - 2023-10-10 - -Rasa 3.6.12 (2023-10-10) + +Rasa 3.6.12 (2023-10-10) ### Bugfixes - [#12904](https://github.com/rasahq/rasa/issues/12904): Refresh headers used in requests (e.g. action server requests) made by `EndpointConfig` using its `headers` attribute. - [#12906](https://github.com/rasahq/rasa/issues/12906): Upgrade `pillow` to `10.0.1` to address security vulnerability CVE-2023-4863 found in `10.0.0` version. ## [3.6.11] - 2023-10-05 - -Rasa 3.6.11 (2023-10-05) + +Rasa 3.6.11 (2023-10-05) ### Bugfixes - [#12722](https://github.com/rasahq/rasa/issues/12722): Intent names will not be falsely abbreviated in interactive training (fixes OSS-413). @@ -59,8 +59,8 @@ Rasa 3.6.11 (2023-10-05) ## [3.6.10] - 2023-09-26 - -Rasa 3.6.10 (2023-09-26) + +Rasa 3.6.10 (2023-09-26) ### Improvements - [#12827](https://github.com/rasahq/rasa/issues/12827): Improved handling of last batch during DIET and TED training. The last batch is discarded if it contains less than half a batch size of data. - [#12852](https://github.com/rasahq/rasa/issues/12852): Added `username` to the connection parameters for `RedisLockStore` and `RedisTrackerStore` @@ -71,8 +71,8 @@ Rasa 3.6.10 (2023-09-26) ## [3.6.9] - 2023-09-15 - -Rasa 3.6.9 (2023-09-15) + +Rasa 3.6.9 (2023-09-15) ### Improvements - [#12778](https://github.com/rasahq/rasa/issues/12778): Added additional method `fingerprint_addon` to the `GraphComponent` interface to allow inclusion of external data into the fingerprint calculation of a component @@ -81,29 +81,29 @@ Rasa 3.6.9 (2023-09-15) ## [3.6.8] - 2023-08-30 - -Rasa 3.6.8 (2023-08-30) + +Rasa 3.6.8 (2023-08-30) No significant changes. ## [3.6.7] - 2023-08-29 - -Rasa 3.6.7 (2023-08-29) + +Rasa 3.6.7 (2023-08-29) ### Bugfixes - [#12768](https://github.com/rasahq/rasa/issues/12768): Updated certifi, cryptography, and scipy packages to address security vulnerabilities. ## [3.6.6] - 2023-08-23 - -Rasa 3.6.6 (2023-08-23) + +Rasa 3.6.6 (2023-08-23) ### Bugfixes - [#12755](https://github.com/rasahq/rasa/issues/12755): Updated setuptools and wheel to address security vulnerabilities. ## [3.6.5] - 2023-08-17 - -Rasa 3.6.5 (2023-08-17) + +Rasa 3.6.5 (2023-08-17) ### Improvements - [#12696](https://github.com/rasahq/rasa/issues/12696): Use the same session across requests in `RasaNLUHttpInterpreter` @@ -116,8 +116,8 @@ Rasa 3.6.5 (2023-08-17) ## [3.6.4] - 2023-07-21 - -Rasa 3.6.4 (2023-07-21) + +Rasa 3.6.4 (2023-07-21) ### Bugfixes - [#12575](https://github.com/rasahq/rasa/issues/12575): Extract conditional response variation and channel variation filtering logic into a separate component. Enable usage of this component in the NaturalLanguageGenerator subclasses (e.g. CallbackNaturalLanguageGenerator, TemplatedNaturalLanguageGenerator). @@ -128,8 +128,8 @@ Rasa 3.6.4 (2023-07-21) ## [3.6.3] - 2023-07-20 - -Rasa 3.6.3 (2023-07-20) + +Rasa 3.6.3 (2023-07-20) ### Improvements - [#12637](https://github.com/rasahq/rasa/issues/12637): Added a human readable component to structlog using the `event_info` key and made it the default rendered key if present. @@ -144,15 +144,15 @@ Rasa 3.6.3 (2023-07-20) ## [3.6.2] - 2023-07-06 - -Rasa 3.6.2 (2023-07-06) + +Rasa 3.6.2 (2023-07-06) ### Bugfixes - [#12602](https://github.com/rasahq/rasa/issues/12602): Resolves the issue of importing TensorFlow on Docker for ARM64 architecture. 
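As an illustration of the `username` option mentioned in the 3.6.10 entry above, an `endpoints.yml` fragment of roughly this shape would exercise it (host, database and credentials are placeholders):

```yaml
tracker_store:
  type: redis
  url: localhost
  port: 6379
  db: 0
  username: rasa_bot
  password: ${REDIS_PASSWORD}

lock_store:
  type: redis
  url: localhost
  port: 6379
  username: rasa_bot
  password: ${REDIS_PASSWORD}
```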
## [3.6.1] - 2023-07-03 - -Rasa 3.6.1 (2023-07-03) + +Rasa 3.6.1 (2023-07-03) ### Improvements - [#12533](https://github.com/rasahq/rasa/issues/12533): Add building multi-platform Docker image (amd64/arm64) - [#12543](https://github.com/rasahq/rasa/issues/12543): Switch struct log to `FilteringBoundLogger` in order to retain log level set in the config. @@ -174,8 +174,8 @@ Rasa 3.6.1 (2023-07-03) ## [3.6.0] - 2023-06-14 - -Rasa 3.6.0 (2023-06-14) + +Rasa 3.6.0 (2023-06-14) ### Deprecations and Removals - [#12355](https://github.com/rasahq/rasa/issues/12355): Removed Python 3.7 support as [it reaches its end of life in June 2023](https://devguide.python.org/versions/) @@ -219,6 +219,55 @@ Rasa 3.6.0 (2023-06-14) ### Miscellaneous internal changes - [#12291](https://github.com/rasahq/rasa/issues/12291), [#12329](https://github.com/rasahq/rasa/issues/12329), [#12332](https://github.com/rasahq/rasa/issues/12332), [#12365](https://github.com/rasahq/rasa/issues/12365), [#12372](https://github.com/rasahq/rasa/issues/12372), [#12386](https://github.com/rasahq/rasa/issues/12386), [#12492](https://github.com/rasahq/rasa/issues/12492) +## [3.5.17] - 2023-12-05 + +Rasa 3.5.17 (2023-12-05) +### Improvements +- [#12851](https://github.com/rasahq/rasa/issues/12851): Added `username` to the connection parameters for `RedisLockStore` and `RedisTrackerStore` +- [#1493](https://github.com/rasahq/rasa/issues/1493): Telemetry data is only send for licensed users. + + +## [3.5.16] - 2023-08-30 + +Rasa 3.5.16 (2023-08-30) + +No significant changes. + + +## [3.5.15] - 2023-07-21 + +Rasa 3.5.15 (2023-07-21) + +No significant changes. + + +## [3.5.14] - 2023-07-12 + +Rasa 3.5.14 (2023-07-12) +### Bugfixes +- [#12639](https://github.com/rasahq/rasa/issues/12639): Fix the issue with the most recent model not being selected if the owner or permissions where modified on the model file. + +### Miscellaneous internal changes +- [#12649](https://github.com/rasahq/rasa/issues/12649) + + +## [3.5.13] - 2023-07-05 + +Rasa 3.5.13 (2023-07-05) +### Bugfixes +- [#12549](https://github.com/rasahq/rasa/issues/12549): Introduce a validation step in `rasa data validate` command to identify non-existent paths and empty domains. + + +## [3.5.12] - 2023-06-23 + +Rasa 3.5.12 (2023-06-23) +### Bugfixes +- [#12534](https://github.com/rasahq/rasa/issues/12534): Rich responses containing buttons with parentheses characters are now correctly parsed. + Previously any characters found between the first identified pair of `()` in response button took precedence. 
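For context, a response of the kind affected by the button fix (response name, titles and payloads are invented):

```yaml
responses:
  utter_choose_plan:
    - text: "Which plan would you like?"
      buttons:
        - title: "Premium (billed yearly)"
          payload: '/select_plan{"plan": "premium_yearly"}'
        - title: "Basic (free)"
          payload: '/select_plan{"plan": "basic"}'
```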
+ +### Miscellaneous internal changes +- [#12512](https://github.com/rasahq/rasa/issues/12512) + ## [3.5.11] - 2023-06-08 From ce1f5d2a31e55723fde1f522a5338eb51b840385 Mon Sep 17 00:00:00 2001 From: Shailendra Paliwal Date: Fri, 19 Jan 2024 11:56:21 +0100 Subject: [PATCH 16/33] Get Full Retrieval Intent Name (#12998) * update full retrieval intent name * format * add test for test_update_full_retrieval_intent * fix linting * fix the structure of dict --- rasa/core/processor.py | 22 +++++++++++++ tests/core/test_processor.py | 64 +++++++++++++++++++++++++++++++++++- 2 files changed, 85 insertions(+), 1 deletion(-) diff --git a/rasa/core/processor.py b/rasa/core/processor.py index ec6ef1260a3e..fc628c7a7247 100644 --- a/rasa/core/processor.py +++ b/rasa/core/processor.py @@ -66,7 +66,11 @@ ENTITIES, INTENT, INTENT_NAME_KEY, + INTENT_RESPONSE_KEY, PREDICTED_CONFIDENCE_KEY, + FULL_RETRIEVAL_INTENT_NAME_KEY, + RESPONSE_SELECTOR, + RESPONSE, TEXT, ) from rasa.utils.endpoints import EndpointConfig @@ -721,6 +725,7 @@ async def parse_message( message, tracker, only_output_properties ) + self._update_full_retrieval_intent(parse_data) structlogger.debug( "processor.message.parse", parse_data_text=copy.deepcopy(parse_data["text"]), @@ -732,6 +737,23 @@ async def parse_message( return parse_data + def _update_full_retrieval_intent(self, parse_data: Dict[Text, Any]) -> None: + """Update the parse data with the full retrieval intent. + + Args: + parse_data: Message parse data to update. + """ + intent_name = parse_data.get(INTENT, {}).get(INTENT_NAME_KEY) + response_selector = parse_data.get(RESPONSE_SELECTOR, {}) + all_retrieval_intents = response_selector.get("all_retrieval_intents", []) + if intent_name and intent_name in all_retrieval_intents: + retrieval_intent = ( + response_selector.get(intent_name, {}) + .get(RESPONSE, {}) + .get(INTENT_RESPONSE_KEY) + ) + parse_data[INTENT][FULL_RETRIEVAL_INTENT_NAME_KEY] = retrieval_intent + def _parse_message_with_graph( self, message: UserMessage, diff --git a/tests/core/test_processor.py b/tests/core/test_processor.py index 392d85c29745..d0581b1800ef 100644 --- a/tests/core/test_processor.py +++ b/tests/core/test_processor.py @@ -70,7 +70,12 @@ from rasa.core.http_interpreter import RasaNLUHttpInterpreter from rasa.core.processor import MessageProcessor from rasa.shared.core.trackers import DialogueStateTracker -from rasa.shared.nlu.constants import INTENT_NAME_KEY, METADATA_MODEL_ID +from rasa.shared.nlu.constants import ( + INTENT, + INTENT_NAME_KEY, + FULL_RETRIEVAL_INTENT_NAME_KEY, + METADATA_MODEL_ID, +) from rasa.shared.nlu.training_data.message import Message from rasa.utils.endpoints import EndpointConfig from rasa.shared.core.constants import ( @@ -1928,3 +1933,60 @@ async def test_run_anonymization_pipeline_mocked_pipeline( await processor.run_anonymization_pipeline(tracker) event_diff.assert_called_once() + + +async def test_update_full_retrieval_intent( + default_processor: MessageProcessor, +) -> None: + parse_data = { + "text": "I like sunny days in berlin", + "intent": {"name": "chitchat", "confidence": 0.9}, + "entities": [], + "response_selector": { + "all_retrieval_intents": ["faq", "chitchat"], + "faq": { + "response": { + "responses": [{"text": "Our return policy lasts 30 days."}], + "confidence": 1.0, + "intent_response_key": "faq/what_is_return_policy", + "utter_action": "utter_faq/what_is_return_policy", + }, + "ranking": [ + { + "confidence": 1.0, + "intent_response_key": "faq/what_is_return_policy", + }, + { + "confidence": 
2.3378809862799945e-19, + "intent_response_key": "faq/how_can_i_track_my_order", + }, + ], + }, + "chitchat": { + "response": { + "responses": [ + { + "text": "The sun is out today! Isn't that great?", + }, + ], + "confidence": 1.0, + "intent_response_key": "chitchat/ask_weather", + "utter_action": "utter_chitchat/ask_weather", + }, + "ranking": [ + { + "confidence": 1.0, + "intent_response_key": "chitchat/ask_weather", + }, + {"confidence": 0.0, "intent_response_key": "chitchat/ask_name"}, + ], + }, + }, + } + + default_processor._update_full_retrieval_intent(parse_data) + + assert parse_data[INTENT][INTENT_NAME_KEY] == "chitchat" + # assert that parse_data["intent"] has a key called response + assert FULL_RETRIEVAL_INTENT_NAME_KEY in parse_data[INTENT] + assert parse_data[INTENT][FULL_RETRIEVAL_INTENT_NAME_KEY] == "chitchat/ask_weather" From 5339fae00c1cb527ba778916fd8b5e5d785b7d50 Mon Sep 17 00:00:00 2001 From: Shailendra Paliwal Date: Fri, 19 Jan 2024 12:23:27 +0100 Subject: [PATCH 17/33] add changelog (#13000) --- changelog/12998.bugfix.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/12998.bugfix.md diff --git a/changelog/12998.bugfix.md b/changelog/12998.bugfix.md new file mode 100644 index 000000000000..63280d49c42d --- /dev/null +++ b/changelog/12998.bugfix.md @@ -0,0 +1 @@ +Fixes a bug that caused the `full_retrieval_intent_name` key to be missing in the published event. Rasa Analytics makes use of this key to get the Retrieval Intent Name \ No newline at end of file From 26210847931f4df166896f6ce4eb0334e093e23c Mon Sep 17 00:00:00 2001 From: Shailendra Paliwal Date: Fri, 19 Jan 2024 14:06:31 +0100 Subject: [PATCH 18/33] prepared release of version 3.6.16 (#13003) --- CHANGELOG.mdx | 11 +++++++++++ changelog/12983.bugfix.md | 1 - changelog/12998.bugfix.md | 1 - changelog/712.misc.md | 1 - pyproject.toml | 2 +- rasa/version.py | 2 +- 6 files changed, 13 insertions(+), 5 deletions(-) delete mode 100644 changelog/12983.bugfix.md delete mode 100644 changelog/12998.bugfix.md delete mode 100644 changelog/712.misc.md diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index 60752d9d683d..b3411889c069 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,17 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> +## [3.6.16] - 2024-01-19 + +Rasa 3.6.16 (2024-01-19) +### Bugfixes +- [#12983](https://github.com/rasahq/rasa/issues/12983): Upgrade Cryptography to fix improper certificate validation. +- [#12998](https://github.com/rasahq/rasa/issues/12998): Fixes a bug that caused the `full_retrieval_intent_name` key to be missing in the published event. Rasa Analytics makes use of this key to get the Retrieval Intent Name + +### Miscellaneous internal changes +- [#712](https://github.com/rasahq/rasa/issues/712) + + ## [3.6.15] - 2023-11-30 Rasa 3.6.15 (2023-11-30) diff --git a/changelog/12983.bugfix.md b/changelog/12983.bugfix.md deleted file mode 100644 index 5de8934cfe59..000000000000 --- a/changelog/12983.bugfix.md +++ /dev/null @@ -1 +0,0 @@ -Upgrade Cryptography to fix improper certificate validation. \ No newline at end of file diff --git a/changelog/12998.bugfix.md b/changelog/12998.bugfix.md deleted file mode 100644 index 63280d49c42d..000000000000 --- a/changelog/12998.bugfix.md +++ /dev/null @@ -1 +0,0 @@ -Fixes a bug that caused the `full_retrieval_intent_name` key to be missing in the published event. 
Rasa Analytics makes use of this key to get the Retrieval Intent Name \ No newline at end of file diff --git a/changelog/712.misc.md b/changelog/712.misc.md deleted file mode 100644 index 020a19afe7f1..000000000000 --- a/changelog/712.misc.md +++ /dev/null @@ -1 +0,0 @@ -Prevent race condition in poetry build to fix dependency install failures on windows. diff --git a/pyproject.toml b/pyproject.toml index bd34f9e7ec4b..7b695117b412 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.15" +version = "3.6.16" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index 3d8e9f0ee007..fc234e58601b 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.6.15" +__version__ = "3.6.16" From ece9224ac456575b67abfb12413c8640fb4de8be Mon Sep 17 00:00:00 2001 From: OgnjenFrancuski Date: Mon, 12 Feb 2024 16:00:02 +0100 Subject: [PATCH 19/33] Update aiohttp and aioresponse packages, due to reported vulnerability on aiohttp --- poetry.lock | 241 +++++++++++++++++++++++++------------------------ pyproject.toml | 4 +- 2 files changed, 125 insertions(+), 120 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0f266f79a5ae..e3eb767e760a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -45,134 +45,121 @@ files = [ [[package]] name = "aiogram" -version = "2.25.1" +version = "2.15" description = "Is a pretty simple and fully asynchronous framework for Telegram Bot API" category = "main" optional = false -python-versions = ">=3.7" +python-versions = "*" files = [ - {file = "aiogram-2.25.1-py3-none-any.whl", hash = "sha256:7bb770cd0459f1dbaea00578bf13fb2e6a1812f22adf94a988c11a7c0d5f33e1"}, - {file = "aiogram-2.25.1.tar.gz", hash = "sha256:59ad78fc0ebbef1fd471c15778a4594b60117e0d7373bc2ce7bcd192074d527d"}, + {file = "aiogram-2.15-py3-none-any.whl", hash = "sha256:5d4dae610625893fe53e07c01c9e95671fd863718caab692baac948e3746ab87"}, + {file = "aiogram-2.15.tar.gz", hash = "sha256:13c740c52ee1301af8a9905e0a412754bcff03deb83dfdf1c578d9249ab35026"}, ] [package.dependencies] -aiohttp = ">=3.8.0,<3.9.0" -Babel = ">=2.9.1,<2.10.0" -certifi = ">=2021.10.8" -magic-filter = ">=1.0.9" +aiohttp = ">=3.7.2,<4.0.0" +Babel = ">=2.8.0" +certifi = ">=2020.6.20" [package.extras] -fast = ["ujson (>=1.35)", "uvloop (>=0.16.0,<0.17.0)"] +fast = ["ujson (>=1.35)", "uvloop (>=0.14.0,<0.15.0)"] proxy = ["aiohttp-socks (>=0.5.3,<0.6.0)"] [[package]] name = "aiohttp" -version = "3.8.4" +version = "3.9.3" description = "Async http client/server framework (asyncio)" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, - {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, - {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, - {file = 
"aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, - {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, - {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, - {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, - {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, - {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, - {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, - {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, - {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, - {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, - {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, - {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, - {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, - {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, - {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, - {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, - {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, - {file = 
"aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, - {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, - {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, - {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, - {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, - {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, - {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, - {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, - {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, - {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, - {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, - {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, - {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, - {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, - {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = 
"aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = 
"aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" 
+async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiohttp-retry" @@ -191,18 +178,18 @@ aiohttp = "*" [[package]] name = "aioresponses" -version = "0.7.4" +version = "0.7.6" description = "Mock out requests made by ClientSession from aiohttp package" category = "dev" optional = false python-versions = "*" files = [ - {file = "aioresponses-0.7.4-py2.py3-none-any.whl", hash = "sha256:1160486b5ea96fcae6170cf2bdef029b9d3a283b7dbeabb3d7f1182769bfb6b7"}, - {file = "aioresponses-0.7.4.tar.gz", hash = "sha256:9b8c108b36354c04633bad0ea752b55d956a7602fe3e3234b939fc44af96f1d8"}, + {file = "aioresponses-0.7.6-py2.py3-none-any.whl", hash = "sha256:d2c26defbb9b440ea2685ec132e90700907fd10bcca3e85ec2f157219f0d26f7"}, + {file = "aioresponses-0.7.6.tar.gz", hash = "sha256:f795d9dbda2d61774840e7e32f5366f45752d1adc1b74c9362afd017296c7ee1"}, ] [package.dependencies] -aiohttp = ">=2.0.0,<4.0.0" +aiohttp = ">=3.3.0,<4.0.0" [[package]] name = "aiormq" @@ -2716,18 +2703,6 @@ files = [ {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, ] -[[package]] -name = "magic-filter" -version = "1.0.9" -description = "This package provides magic filter based on dynamic attribute getter" -category = "main" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "magic-filter-1.0.9.tar.gz", hash = "sha256:d0f1ffa5ff1fbe5105fd5f293c79b5d3795f336ea0f6129c636959a687bf422a"}, - {file = "magic_filter-1.0.9-py3-none-any.whl", hash = "sha256:51002312a8972fa514b998b7ff89340c98be3fc499967c1f5f2af98d13baf8d5"}, -] - [[package]] name = "markdown" version = "3.4.3" @@ -4347,6 +4322,15 @@ files = [ {file = "python_crfsuite-0.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ab333b7cda21b1b5ab76f8e16e5f00654360df057e2092b273681d64850f714"}, {file = "python_crfsuite-0.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee6153ed8a26adaea645445b997c55d67505476976dfc40f4bbd46200b66de15"}, {file = "python_crfsuite-0.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c32292f722e293cee87aac0d793abd8b53dc05126e4b6f0202c41e0e7d027005"}, + {file = "python_crfsuite-0.9.9-cp311-cp311-win32.whl", hash = "sha256:3e8bbacff1d86cbc18e1d52f617c85521029127a09c86ed428cd8238384a9db3"}, + {file = "python_crfsuite-0.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:83f87b652108110263aa83c220baccc36c911f04cf422cf6632a5f42121bce6a"}, + {file = "python_crfsuite-0.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:92f72eb554dac53218805958747b4cd417bd76039f083f66cc9881987d88e167"}, + {file = "python_crfsuite-0.9.9-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cdd1e823fa9dfe611a573a0c1371941e887ad8b8ffbc25e2d87e4cd6d4f22af"}, + {file = "python_crfsuite-0.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54f94d6fabb14ad8106dc65b5d38665bb0abb16527d4a6aa2ba233670c3480db"}, + {file = "python_crfsuite-0.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:65ebfab892c49c49b5e1030318d144c559e449bdf86b12983fa7ba0e88f7abdf"}, + {file = "python_crfsuite-0.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5c1bb3d2802bb777affbe2855fc723aa32345ead2fe26ad18c7e1417bc104c53"}, + {file 
= "python_crfsuite-0.9.9-cp312-cp312-win32.whl", hash = "sha256:abef974a2b520c0204cb15b0b799fdbc1c1a0af4be2b9ad7548800de95975345"}, + {file = "python_crfsuite-0.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:ccc4c4f1cd47c74553d03d915c7dd7c06fc23b41310f30f35f2e5c09cdeb9297"}, {file = "python_crfsuite-0.9.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:01d279f1c8225aaf66290563312708c1905dce84f70ee5e374ecfb2dec1c2343"}, {file = "python_crfsuite-0.9.9-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f29e94fcb9e2f8f52c7323668752b34dcffef91751b0d1e0789ecbbc0069842"}, {file = "python_crfsuite-0.9.9-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497a8b9c0df152ada10732c07853942a8725cb66fe5b6fe1a64f768ecf583291"}, @@ -4491,6 +4475,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -5562,6 +5547,7 @@ files = [ {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, @@ -5571,26 +5557,35 @@ files = [ {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = 
"sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, 
{file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, @@ -7040,6 +7035,16 @@ files = [ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecee4132c6cd2ce5308e21672015ddfed1ff975ad0ac8d27168ea82e71413f55"}, + {file = "wrapt-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2020f391008ef874c6d9e208b24f28e31bcb85ccff4f335f15a3251d222b92d9"}, + {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2feecf86e1f7a86517cab34ae6c2f081fd2d0dac860cb0c0ded96d799d20b335"}, + {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:240b1686f38ae665d1b15475966fe0472f78e71b1b4903c143a842659c8e4cb9"}, + {file = 
"wrapt-1.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9008dad07d71f68487c91e96579c8567c98ca4c3881b9b113bc7b33e9fd78b8"}, + {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6447e9f3ba72f8e2b985a1da758767698efa72723d5b59accefd716e9e8272bf"}, + {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:acae32e13a4153809db37405f5eba5bac5fbe2e2ba61ab227926a22901051c0a"}, + {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49ef582b7a1152ae2766557f0550a9fcbf7bbd76f43fbdc94dd3bf07cc7168be"}, + {file = "wrapt-1.14.1-cp311-cp311-win32.whl", hash = "sha256:358fe87cc899c6bb0ddc185bf3dbfa4ba646f05b1b0b9b5a27c2cb92c2cea204"}, + {file = "wrapt-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:26046cd03936ae745a502abf44dac702a5e6880b2b01c29aea8ddf3353b68224"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, @@ -7231,4 +7236,4 @@ transformers = ["sentencepiece", "transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.11" -content-hash = "d38a12f16b69e12490c4ad2cebda4f8b8a9bdb45f6cdd883b44b4754dbf424c5" +content-hash = "c554d0f11eb367109ccd80c3694de7f07d35b52a09b1279d2b9743e6c155f43c" diff --git a/pyproject.toml b/pyproject.toml index 7b695117b412..01b534b61908 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -120,7 +120,7 @@ sanic-jwt = "^1.6.0" sanic-routing = "^0.7.2" websockets = ">=10.0,<11.0" cloudpickle = ">=1.2,<2.3" -aiohttp = ">=3.6,!=3.7.4.post0,<3.9" +aiohttp = ">=3.9.0,<3.10" questionary = ">=1.5.1,<1.11.0" prompt-toolkit = "^3.0,<3.0.29" python-socketio = ">=4.4,<6" @@ -313,7 +313,7 @@ pytest-xdist = "^3.2.1" pytest = "^7.1.3" freezegun = "^1.0.0" responses = "^0.22.0" -aioresponses = "^0.7.2" +aioresponses = "^0.7.6" moto = "~=4.1.2" fakeredis = "^2.11.2" mongomock = "^4.1.2" From 9386a7345490fb8e8e27175b99a0a71da9485e90 Mon Sep 17 00:00:00 2001 From: OgnjenFrancuski Date: Mon, 12 Feb 2024 16:05:07 +0100 Subject: [PATCH 20/33] Add changelog --- changelog/13007.misc.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelog/13007.misc.md diff --git a/changelog/13007.misc.md b/changelog/13007.misc.md new file mode 100644 index 000000000000..4329f20aed22 --- /dev/null +++ b/changelog/13007.misc.md @@ -0,0 +1,2 @@ +aiohttp - update from 3.6 to 3.9 +aioresponses - update from 0.7.4 to 0.7.6 \ No newline at end of file From d7a26bc23e42deedd773479fd03de453bc6c83e1 Mon Sep 17 00:00:00 2001 From: OgnjenFrancuski Date: Tue, 13 Feb 2024 13:36:16 +0100 Subject: [PATCH 21/33] prepared release of version 3.6.17 --- CHANGELOG.mdx | 7 +++++++ changelog/13007.misc.md | 2 -- pyproject.toml | 2 +- rasa/version.py | 2 +- 4 files changed, 9 insertions(+), 4 deletions(-) delete mode 100644 changelog/13007.misc.md diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index b3411889c069..8efe6d303a21 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . 
--> +## [3.6.17] - 2024-02-13 + +Rasa 3.6.17 (2024-02-13) +### Miscellaneous internal changes +- [#13007](https://github.com/rasahq/rasa/issues/13007) + + ## [3.6.16] - 2024-01-19 Rasa 3.6.16 (2024-01-19) diff --git a/changelog/13007.misc.md b/changelog/13007.misc.md deleted file mode 100644 index 4329f20aed22..000000000000 --- a/changelog/13007.misc.md +++ /dev/null @@ -1,2 +0,0 @@ -aiohttp - update from 3.6 to 3.9 -aioresponses - update from 0.7.4 to 0.7.6 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 01b534b61908..913610027cf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.16" +version = "3.6.17" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index fc234e58601b..6a7745b5540c 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.6.16" +__version__ = "3.6.17" From 56c0c483fc1176812d8d05c11e6b0d3379ab920d Mon Sep 17 00:00:00 2001 From: sancharigr Date: Mon, 19 Feb 2024 16:50:21 +0100 Subject: [PATCH 22/33] Delete auto main to branch merge workflow --- .../automatic-release-to-main-merger.yml | 113 ------------------ 1 file changed, 113 deletions(-) delete mode 100644 .github/workflows/automatic-release-to-main-merger.yml diff --git a/.github/workflows/automatic-release-to-main-merger.yml b/.github/workflows/automatic-release-to-main-merger.yml deleted file mode 100644 index 6ed4cca20af8..000000000000 --- a/.github/workflows/automatic-release-to-main-merger.yml +++ /dev/null @@ -1,113 +0,0 @@ -name: Automatic main branch merger -on: - # whenever a pull request is merged into a release branch, - # open a pull request to merge changes down to the main branch - pull_request: - branches: - - '[0-9]+.[0-9]+.x' - # Don't merge 2.8.x into main - - '!2.8.x' - # Don't merge 3.0, 3.1 and 3.2 into main - - '!3.0.x' - - '!3.1.x' - - '!3.2.x' - - types: - # means that the PR is closed, we still have to check if it was merged - - closed - -env: - # keep this in sync with the automatic-pr-approver workflow - LABEL_TYPE: type:release-branch-port - LABEL_STATUS: status:ready-to-merge - -jobs: - update_merge_pr: - runs-on: ubuntu-22.04 - - # only run this workflow if a pull request has been merged - # don't run this workflow on pull request from forks, permissions will be missing anyway - # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflows-in-forked-repositories - if: github.event.pull_request.merged == true && github.event.pull_request.head.repo.full_name == 'RasaHQ/rasa' - - steps: - - name: Checkout git repository 🕝 - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - - - name: Fetch git tags 🎨 - # see https://github.com/actions/checkout/issues/206#issuecomment-617937725 - run: git fetch --prune --unshallow --tags - - - name: Get branch name ✍️ - id: get-branch-name - run: | - GITHUB_BRANCH=${GITHUB_REF/refs\/heads\//} - echo "release_branch=${GITHUB_BRANCH}" >> $GITHUB_OUTPUT - echo "new_branch=merge-${GITHUB_BRANCH}-main-${GITHUB_SHA:0:7}" >> $GITHUB_OUTPUT - - - name: Get GitHub labels 🏷 - id: 
get-github-labels - run: | - LATEST_RASA_MINOR=$(git tag --list | grep -P '^\d+\.\d+\.\d+$' | tail -n1 | sed -e 's/.\([0-9]\)*$/.0/g') - echo "Latest minor: ${LATEST_RASA_MINOR}" - # bash doesn't support nested variable access - CURRENT_RASA_MINOR=${GITHUB_REF/refs\/heads\//} - CURRENT_RASA_MINOR=${CURRENT_RASA_MINOR/\.x/\.0} - - if [[ ${LATEST_RASA_MINOR} == ${CURRENT_RASA_MINOR} ]] - then - echo "labels=${LABEL_TYPE},${LABEL_STATUS}" >> $GITHUB_OUTPUT - else - echo "labels=${LABEL_TYPE}" >> $GITHUB_OUTPUT - fi - - - name: Create new branch 🐣 - id: create-new-branch - if: always() - uses: peterjgrainger/action-create-branch@64aa569aea81305305c6e92bd236d8c427debff8 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - branch: ${{ steps.get-branch-name.outputs.new_branch }} - - - name: Open pull request ☄️ - if: ${{ steps.create-new-branch.conclusion == 'success' }} - uses: repo-sync/pull-request@7e79a9f5dc3ad0ce53138f01df2fad14a04831c5 - with: - # using this token to make sure it triggers other actions - github_token: ${{ secrets.RASABOT_GITHUB_TOKEN }} - source_branch: ${{ steps.get-branch-name.outputs.new_branch }} - destination_branch: main - pr_title: Merge ${{ steps.get-branch-name.outputs.release_branch }} into main - pr_template: .github/PULL_REQUEST_AUTOMATIC_TEMPLATE.md - pr_label: ${{ steps.get-github-labels.outputs.labels }} - pr_reviewer: ${{ github.event.pull_request.user.login }} - - - name: Close outdated release-merge PRs 🧹 - id: close-outdated-release-merge-prs - run: | - # fetch all open merge-PRs that have been opened from the current release branch - gh pr list -S "is:open label:${LABEL_TYPE} head:merge-${{ steps.get-branch-name.outputs.release_branch }}-main" > prs.txt - less prs.txt - - # delete newly opened PR from the list - awk '!/${{ steps.get-branch-name.outputs.new_branch }}/' prs.txt > temp && mv temp prs.txt - - # extract the PR ids - awk '{print $1}' prs.txt > pr_ids.txt - - # close all outdated PRs - while read id; do - gh pr close $id -d - done Date: Fri, 23 Feb 2024 13:29:56 +0000 Subject: [PATCH 23/33] [ATO-2122] Backport `rasa export` Kafka bugfix to `3.6.x` (#13017) * backport bugfix, update CI workflow * add changelog entry, update dead link in Docker build docs --- .github/workflows/continous-integration.yml | 110 +++++++++++++--- Makefile | 2 +- changelog/13017.bugfix.md | 1 + docs/docs/docker/building-in-docker.mdx | 2 +- rasa/core/brokers/kafka.py | 4 +- .../core/brokers/test_kafka.py | 14 ++- .../core/brokers/test_pika.py | 3 + tests/integration_tests/core/test_exporter.py | 117 ++++++++++++++++++ 8 files changed, 228 insertions(+), 25 deletions(-) create mode 100644 changelog/13017.bugfix.md create mode 100644 tests/integration_tests/core/test_exporter.py diff --git a/.github/workflows/continous-integration.yml b/.github/workflows/continous-integration.yml index 587ad2ad26e4..7a69bbb4e807 100644 --- a/.github/workflows/continous-integration.yml +++ b/.github/workflows/continous-integration.yml @@ -618,10 +618,6 @@ jobs: POSTGRES_PORT: 5432 POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres - RABBITMQ_HOST: localhost - RABBITMQ_PORT: 5672 - RABBITMQ_USER: guest - RABBITMQ_PASSWORD: guest services: redis: @@ -653,12 +649,6 @@ jobs: # mapping container ports to the host - 5432:5432 - rabbitmq: - # see https://github.com/docker-library/healthcheck/blob/master/rabbitmq/docker-healthcheck - image: healthcheck/rabbitmq - ports: - - 5672:5672 - mongodb: image: mongodb/mongodb-community-server:6.0.4-ubuntu2204 options: >- @@ -728,6 +718,94 @@ 
jobs: if grep 'The lock file is not up to date' .output; then exit 1; fi make prepare-tests-ubuntu + - name: Test Code with Services 🩺 + if: needs.changes.outputs.backend == 'true' + env: + JOBS: 2 + INTEGRATION_TEST_PYTEST_MARKERS: '"(not sequential) and (not broker)"' + PYTHONIOENCODING: "utf-8" + run: | + make test-integration + + broker_integration_test: + name: Run Broker Integration Tests + if: github.ref_type != 'tag' + runs-on: ubuntu-22.04 + timeout-minutes: 60 + needs: [changes] + env: + RABBITMQ_HOST: localhost + RABBITMQ_PORT: 5672 + RABBITMQ_USER: guest + RABBITMQ_PASSWORD: guest + + services: + rabbitmq: + # see https://github.com/docker-library/healthcheck/blob/master/rabbitmq/docker-healthcheck + image: healthcheck/rabbitmq + ports: + - 5672:5672 + + steps: + - name: Checkout git repository 🕝 + if: needs.changes.outputs.backend == 'true' + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + + - name: Set up Python ${{ env.DEFAULT_PYTHON_VERSION }} 🐍 + if: needs.changes.outputs.backend == 'true' + uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b + with: + python-version: ${{ env.DEFAULT_PYTHON_VERSION }} + + - name: Read Poetry Version 🔢 + if: needs.changes.outputs.backend == 'true' + run: | + echo "POETRY_VERSION=$(scripts/poetry-version.sh)" >> $GITHUB_ENV + shell: bash + + - name: Install poetry 🦄 + if: needs.changes.outputs.backend == 'true' + uses: Gr1N/setup-poetry@15821dc8a61bc630db542ae4baf6a7c19a994844 # v8 + with: + poetry-version: ${{ env.POETRY_VERSION }} + + - name: Load Poetry Cached Libraries ⬇ + id: cache-poetry + if: needs.changes.outputs.backend == 'true' + uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 + with: + path: .venv + key: ${{ runner.os }}-poetry-${{ env.POETRY_VERSION }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('**/poetry.lock') }}-venv-${{ secrets.POETRY_CACHE_VERSION }}-${{ env.pythonLocation }} + + - name: Clear Poetry cache + if: steps.cache-poetry.outputs.cache-hit == 'true' && needs.changes.outputs.backend == 'true' && contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests') + run: rm -r .venv + + # Poetry >= 1.1.0b uses virtualenv to create a virtual environment. + # The virtualenv simply doesn't work on Windows with our setup, + # that's why we use venv to create virtual environment + - name: Create virtual environment + if: (steps.cache-poetry.outputs.cache-hit != 'true' || contains(github.event.pull_request.labels.*.name, 'tools:clear-poetry-cache-unit-tests')) && needs.changes.outputs.backend == 'true' + run: python -m venv create .venv + + - name: Set up virtual environment + if: needs.changes.outputs.backend == 'true' + # Poetry on Windows cannot pick up the virtual environments directory properly, + # and it creates a new one every time the pipeline runs. + # This step solves this problem — it tells poetry to always use `.venv` directory inside + # the project itself, which also makes it easier for us to determine the correct directory + # that needs to be cached. 
+ run: poetry config virtualenvs.in-project true + + - name: Install Dependencies (Linux) 📦 + if: needs.changes.outputs.backend == 'true' + run: | + sudo apt-get -y install libpq-dev + make install-full | tee .output + if grep 'The lock file is not up to date' .output; then exit 1; fi + make prepare-tests-ubuntu + make prepare-spacy + - name: Run kafka and zookeeper containers for integration testing if: needs.changes.outputs.backend == 'true' run: | @@ -737,11 +815,16 @@ jobs: if: needs.changes.outputs.backend == 'true' env: JOBS: 2 - INTEGRATION_TEST_PYTEST_MARKERS: '"not sequential"' + INTEGRATION_TEST_PYTEST_MARKERS: "broker" PYTHONIOENCODING: "utf-8" run: | make test-integration + - name: Stop kafka and zookeeper containers for integration testing + if: needs.changes.outputs.backend == 'true' + run: | + docker-compose -f tests_deployment/docker-compose.kafka.yml down + sequential_integration_test: name: Run Sequential Integration Tests if: github.ref_type != 'tag' @@ -841,11 +924,6 @@ jobs: run: | make test-integration - - name: Stop kafka and zookeeper containers for integration testing - if: needs.changes.outputs.backend == 'true' - run: | - docker-compose -f tests_deployment/docker-compose.kafka.yml down - build_docker_base_images_and_set_env: name: Build Docker base images and setup environment runs-on: ubuntu-22.04 diff --git a/Makefile b/Makefile index 23b5799e1d5f..62367bc17c4f 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ JOBS ?= 1 INTEGRATION_TEST_FOLDER = tests/integration_tests/ -INTEGRATION_TEST_PYTEST_MARKERS ?= "sequential or not sequential" +INTEGRATION_TEST_PYTEST_MARKERS ?= "sequential or broker or ((not sequential) and (not broker))" PLATFORM ?= "linux/amd64" help: diff --git a/changelog/13017.bugfix.md b/changelog/13017.bugfix.md new file mode 100644 index 000000000000..654e62aafa2c --- /dev/null +++ b/changelog/13017.bugfix.md @@ -0,0 +1 @@ +Flush messages when Kafka producer is closed. This is to ensure that all messages in the producer's internal queue are sent to the broker. diff --git a/docs/docs/docker/building-in-docker.mdx b/docs/docs/docker/building-in-docker.mdx index a6027c51f7e2..9866ad95e063 100644 --- a/docs/docs/docker/building-in-docker.mdx +++ b/docs/docs/docker/building-in-docker.mdx @@ -67,7 +67,7 @@ The initial project files should all be there, as well as a `models` directory t :::note If you run into permission errors, it may be because the `rasa/rasa` images run as user `1001` as a best practice, to avoid giving the container `root` permissions. -Hence, all files created by these containers will be owned by user `1001`. See the [Docker documentation](https://docs.docker.com/edge/engine/reference/commandline/run/) +Hence, all files created by these containers will be owned by user `1001`. See the [Docker documentation](https://docs.docker.com/reference/cli/docker/container/run/) if you want to run the containers as a different user. 
::: diff --git a/rasa/core/brokers/kafka.py b/rasa/core/brokers/kafka.py index 7183be12746a..66e77c2ca385 100644 --- a/rasa/core/brokers/kafka.py +++ b/rasa/core/brokers/kafka.py @@ -260,9 +260,11 @@ def _publish(self, event: Dict[Text, Any]) -> None: on_delivery=delivery_report, ) - def _close(self) -> None: + async def close(self) -> None: self._cancelled = True self._poll_thread.join() + if self.producer: + self.producer.flush() @rasa.shared.utils.common.lazy_property def rasa_environment(self) -> Optional[Text]: diff --git a/tests/integration_tests/core/brokers/test_kafka.py b/tests/integration_tests/core/brokers/test_kafka.py index 89fcdbb2d7bd..6be6eaa89d48 100644 --- a/tests/integration_tests/core/brokers/test_kafka.py +++ b/tests/integration_tests/core/brokers/test_kafka.py @@ -1,9 +1,12 @@ +import pytest + from rasa.core.brokers.kafka import KafkaEventBroker from pytest import LogCaptureFixture import logging.config -def test_kafka_event_broker_valid(): +@pytest.mark.broker +async def test_kafka_event_broker_valid(): broker = KafkaEventBroker( url="localhost", topic="rasa", @@ -19,11 +22,11 @@ def test_kafka_event_broker_valid(): ) assert broker.producer.poll() == 1 finally: - broker.producer.flush() - broker._close() + await broker.close() -def test_kafka_event_broker_buffer_error_is_handled(caplog: LogCaptureFixture): +@pytest.mark.broker +async def test_kafka_event_broker_buffer_error_is_handled(caplog: LogCaptureFixture): broker = KafkaEventBroker( url="localhost", topic="rasa", @@ -48,5 +51,4 @@ def test_kafka_event_broker_buffer_error_is_handled(caplog: LogCaptureFixture): assert "Queue full" in caplog.text assert broker.producer.poll() == 1 finally: - broker.producer.flush() - broker._close() + await broker.close() diff --git a/tests/integration_tests/core/brokers/test_pika.py b/tests/integration_tests/core/brokers/test_pika.py index eb27f9ba9f09..b514b1f91c09 100644 --- a/tests/integration_tests/core/brokers/test_pika.py +++ b/tests/integration_tests/core/brokers/test_pika.py @@ -16,6 +16,7 @@ ) +@pytest.mark.broker async def test_pika_event_broker_connect(): broker = PikaEventBroker( host=RABBITMQ_HOST, @@ -31,6 +32,7 @@ async def test_pika_event_broker_connect(): await broker.close() +@pytest.mark.broker @pytest.mark.xdist_group("rabbitmq") async def test_pika_event_broker_publish_after_restart( docker_client: docker.DockerClient, @@ -102,6 +104,7 @@ async def test_pika_event_broker_publish_after_restart( rabbitmq_container.remove() +@pytest.mark.broker @pytest.mark.xdist_group("rabbitmq") @pytest.mark.parametrize("host_component", ["localhost", "myuser:mypassword@localhost"]) async def test_pika_event_broker_connect_with_path_and_query_params_in_url( diff --git a/tests/integration_tests/core/test_exporter.py b/tests/integration_tests/core/test_exporter.py new file mode 100644 index 000000000000..2b3e8b83edb4 --- /dev/null +++ b/tests/integration_tests/core/test_exporter.py @@ -0,0 +1,117 @@ +import textwrap +from pathlib import Path +from unittest.mock import Mock + +import pytest + +from pytest import MonkeyPatch + +from rasa.core.brokers.kafka import KafkaEventBroker +from rasa.core.exporter import Exporter +from rasa.core.tracker_store import InMemoryTrackerStore +from rasa.shared.core.domain import Domain +from rasa.shared.core.events import ActionExecuted +from rasa.shared.core.trackers import DialogueStateTracker + + +@pytest.mark.broker +async def test_exporter_publishes_to_kafka_broker_success( + tmp_path: Path, +) -> None: + tracker_store = 
InMemoryTrackerStore(domain=Domain.empty()) + tracker = DialogueStateTracker.from_events( + "test_export", + [ + ActionExecuted("action_listen"), + ], + ) + + await tracker_store.save(tracker) + + kafka_broker = KafkaEventBroker( + url="localhost", + topic="rasa", + sasl_username="admin", + sasl_password="password", + partition_by_sender=True, + ) + + endpoints_file = tmp_path / "endpoints.yml" + endpoints_file.write_text( + textwrap.dedent( + """ + event_broker: + type: kafka + topic: rasa + url: localhost:9092 + client_id: kafka-python-rasa + partition_by_sender: true + security_protocol: SASL_PLAINTEXT + sasl_username: admin + sasl_password: password + sasl_mechanism: PLAIN + """ + ) + ) + + exporter = Exporter(tracker_store, kafka_broker, str(endpoints_file)) + + published_events = await exporter.publish_events() + assert published_events == 1 + + +@pytest.mark.broker +async def test_exporter_publishes_to_kafka_broker_fail( + tmp_path: Path, + monkeypatch: MonkeyPatch, +) -> None: + tracker_store = InMemoryTrackerStore(domain=Domain.empty()) + tracker = DialogueStateTracker.from_events( + "test_export", + [ + ActionExecuted("action_listen"), + ], + ) + + await tracker_store.save(tracker) + + kafka_broker = KafkaEventBroker( + url="localhost", + topic="rasa", + sasl_username="admin", + sasl_password="password", + partition_by_sender=True, + ) + + endpoints_file = tmp_path / "endpoints.yml" + endpoints_file.write_text( + textwrap.dedent( + """ + event_broker: + type: kafka + topic: rasa + url: localhost:9092 + client_id: kafka-python-rasa + partition_by_sender: true + security_protocol: SASL_PLAINTEXT + sasl_username: admin + sasl_password: password + sasl_mechanism: PLAIN + """ + ) + ) + + exporter = Exporter(tracker_store, kafka_broker, str(endpoints_file)) + + # patch the exporter to raise an exception when publishing events + monkeypatch.setattr(exporter, "publish_events", Mock(side_effect=Exception)) + + with pytest.raises(Exception) as error: + await exporter.publish_events() + assert "Producer terminating with 1 messages" in str(error.value) + assert ( + "still in queue or transit: use flush() to wait for " + "outstanding message delivery" in str(error.value) + ) + # necessary for producer teardown + await kafka_broker.close() From 50667b5469a20798cde508d16893ddbaa267ad96 Mon Sep 17 00:00:00 2001 From: Anca Lita <27920906+ancalita@users.noreply.github.com> Date: Fri, 23 Feb 2024 14:35:36 +0000 Subject: [PATCH 24/33] prepared release of version 3.6.18 --- CHANGELOG.mdx | 7 +++++++ changelog/13017.bugfix.md | 1 - pyproject.toml | 2 +- rasa/version.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) delete mode 100644 changelog/13017.bugfix.md diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index 8efe6d303a21..a0d7ebd27de6 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . --> +## [3.6.18] - 2024-02-23 + +Rasa 3.6.18 (2024-02-23) +### Bugfixes +- [#13017](https://github.com/rasahq/rasa/issues/13017): Flush messages when Kafka producer is closed. This is to ensure that all messages in the producer's internal queue are sent to the broker. + + ## [3.6.17] - 2024-02-13 Rasa 3.6.17 (2024-02-13) diff --git a/changelog/13017.bugfix.md b/changelog/13017.bugfix.md deleted file mode 100644 index 654e62aafa2c..000000000000 --- a/changelog/13017.bugfix.md +++ /dev/null @@ -1 +0,0 @@ -Flush messages when Kafka producer is closed. This is to ensure that all messages in the producer's internal queue are sent to the broker. 
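For context, the behaviour the bugfix above relies on is confluent-kafka's asynchronous produce: messages are only buffered locally until the producer is polled or flushed. A minimal sketch of that pattern follows; the broker address, topic, and payload are illustrative assumptions, not taken from the patch.

from confluent_kafka import Producer

# Producer.produce() only enqueues messages in an internal buffer; nothing is
# guaranteed to reach the broker until poll() or flush() is called.
producer = Producer({"bootstrap.servers": "localhost:9092"})

def delivery_report(err, msg):
    # Invoked once per message to report delivery success or failure.
    if err is not None:
        print(f"Delivery failed: {err}")

producer.produce(
    "rasa",
    value=b'{"event": "action", "name": "action_listen"}',
    on_delivery=delivery_report,
)
producer.poll(0)  # serve delivery callbacks without blocking

# Without this call, messages still sitting in the internal queue would be
# dropped when the process exits; flush() blocks until they are delivered.
producer.flush()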
diff --git a/pyproject.toml b/pyproject.toml index 913610027cf5..f52ef4094521 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.17" +version = "3.6.18" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index 6a7745b5540c..bbd3bcf024cb 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.6.17" +__version__ = "3.6.18" From 0df5212ad0a925216357f1226142484ca5883735 Mon Sep 17 00:00:00 2001 From: Shailendra Paliwal Date: Mon, 4 Mar 2024 17:26:53 +0100 Subject: [PATCH 25/33] SQL event.timestamp is not always unique (#13019) * return ID instead of timestamp * order by both timestamp and ID * order events by ID * add timestamp ordering to events * fix linting error * change event timestamps in test * add context in docstring and comment * add test to tracker store * undo changes to assertion * add changelog --- changelog/13019.bugfix.md | 1 + rasa/core/exporter.py | 8 +++++++- rasa/core/tracker_store.py | 5 ++++- tests/core/test_exporter.py | 6 +++--- tests/core/test_tracker_stores.py | 30 ++++++++++++++++++++++++++++++ 5 files changed, 45 insertions(+), 5 deletions(-) create mode 100644 changelog/13019.bugfix.md diff --git a/changelog/13019.bugfix.md b/changelog/13019.bugfix.md new file mode 100644 index 000000000000..9ce820d61dbe --- /dev/null +++ b/changelog/13019.bugfix.md @@ -0,0 +1 @@ +Changed the ordering of returned events to order by ID (previously timestamp) in SQL Tracker Store diff --git a/rasa/core/exporter.py b/rasa/core/exporter.py index 58c567dbdbe2..2961b6b6fc94 100644 --- a/rasa/core/exporter.py +++ b/rasa/core/exporter.py @@ -160,7 +160,7 @@ def _validate_all_requested_ids_exist( self, conversation_ids_in_tracker_store: Set[Text] ) -> None: """Warn user if `self.requested_conversation_ids` contains IDs not found in - `conversation_ids_in_tracker_store` + `conversation_ids_in_tracker_store`. Args: conversation_ids_in_tracker_store: Set of conversation IDs contained in @@ -241,6 +241,12 @@ async def _fetch_events_within_time_range(self) -> AsyncIterator[Dict[Text, Any] continue events = self._get_events_for_conversation_id(_events, conversation_id) + + # the order of events was changed after ATO-2192 + # more context: https://github.com/RasaHQ/rasa/pull/13019 + # we should sort the events by timestamp to keep the order + events.sort(key=lambda x: x["timestamp"]) + # the conversation IDs are needed in the event publishing for event in events: if ( diff --git a/rasa/core/tracker_store.py b/rasa/core/tracker_store.py index 93632cd3d779..a91f9f5c6b87 100644 --- a/rasa/core/tracker_store.py +++ b/rasa/core/tracker_store.py @@ -1291,6 +1291,9 @@ def _event_query( self, session: "Session", sender_id: Text, fetch_events_from_all_sessions: bool ) -> "Query": """Provide the query to retrieve the conversation events for a specific sender. + The events are ordered by ID to ensure correct sequence of events. + As `timestamp` is not guaranteed to be unique and low-precision (float), it + cannot be used to order the events. Args: session: Current database session. 
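# A rough standalone sketch of the ordering issue described in the docstring
# above (not part of the diff): a float `timestamp` column can hold identical
# values for events written in the same session, so ordering by it is not
# deterministic, while the auto-incrementing primary key preserves insertion
# order. The model, table, and column names below are assumptions for the
# example only.
from sqlalchemy import Column, Float, Integer, String, Text, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Event(Base):
    __tablename__ = "events"
    id = Column(Integer, primary_key=True)    # assigned in insertion order
    sender_id = Column(String(255), nullable=False)
    timestamp = Column(Float)                 # low precision, values may collide
    data = Column(Text)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([
        Event(sender_id="test", timestamp=2.0, data="user: Hola"),
        Event(sender_id="test", timestamp=2.0, data="bot: Hi"),
    ])
    session.commit()
    # Ordering by the primary key keeps the original sequence even though the
    # timestamps collide; ordering by `timestamp` could return either order.
    events = (
        session.query(Event)
        .filter_by(sender_id="test")
        .order_by(Event.id)
        .all()
    )
    assert [e.data for e in events] == ["user: Hola", "bot: Hi"]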
@@ -1325,7 +1328,7 @@ def _event_query( ) ) - return event_query.order_by(self.SQLEvent.timestamp) + return event_query.order_by(self.SQLEvent.id) async def save(self, tracker: DialogueStateTracker) -> None: """Update database with events from the current conversation.""" diff --git a/tests/core/test_exporter.py b/tests/core/test_exporter.py index b74849c6bb33..0ea78f4b397f 100644 --- a/tests/core/test_exporter.py +++ b/tests/core/test_exporter.py @@ -73,9 +73,9 @@ async def test_fetch_events_within_time_range(): conversation_ids = ["some-id", "another-id"] # prepare events from different senders and different timestamps - event_1 = random_user_uttered_event(3) - event_2 = random_user_uttered_event(2) - event_3 = random_user_uttered_event(1) + event_1 = random_user_uttered_event(1) + event_2 = random_user_uttered_event(3) + event_3 = random_user_uttered_event(2) events = {conversation_ids[0]: [event_1, event_2], conversation_ids[1]: [event_3]} def _get_tracker(conversation_id: Text) -> DialogueStateTracker: diff --git a/tests/core/test_tracker_stores.py b/tests/core/test_tracker_stores.py index fb4a891b097e..4a05f370e5ff 100644 --- a/tests/core/test_tracker_stores.py +++ b/tests/core/test_tracker_stores.py @@ -614,6 +614,36 @@ async def test_sql_additional_events_with_session_start(domain: Domain): assert isinstance(additional_events[0], UserUttered) +async def test_tracker_store_retrieve_ordered_by_id( + domain: Domain, +): + tracker_store_kwargs = {"host": "sqlite:///"} + tracker_store = SQLTrackerStore(domain, **tracker_store_kwargs) + events = [ + SessionStarted(timestamp=1), + UserUttered("Hola", {"name": "greet"}, timestamp=2), + BotUttered("Hi", timestamp=2), + UserUttered("How are you?", {"name": "greet"}, timestamp=2), + BotUttered("I am good, whats up", timestamp=2), + UserUttered("Ciao", {"name": "greet"}, timestamp=2), + BotUttered("Bye", timestamp=2), + ] + sender_id = "test_sql_tracker_store_events_order" + tracker = DialogueStateTracker.from_events(sender_id, events) + await tracker_store.save(tracker) + + # Save other tracker to ensure that we don't run into problems with other senders + other_tracker = DialogueStateTracker.from_events("other-sender", [SessionStarted()]) + await tracker_store.save(other_tracker) + + # Retrieve tracker with events since latest SessionStarted + tracker = await tracker_store.retrieve(sender_id) + + assert len(tracker.events) == 7 + # assert the order of events is same as the order in which they were added + assert all((event == tracker.events[i] for i, event in enumerate(events))) + + @pytest.mark.parametrize( "tracker_store_type,tracker_store_kwargs", [(MockedMongoTrackerStore, {}), (SQLTrackerStore, {"host": "sqlite:///"})], From c90c2211264cde9c3e319b93b2a7de2e0baf2a85 Mon Sep 17 00:00:00 2001 From: Shailendra Paliwal Date: Mon, 4 Mar 2024 17:49:17 +0100 Subject: [PATCH 26/33] prepared release of version 3.6.19 (#13020) --- CHANGELOG.mdx | 7 +++++++ changelog/13019.bugfix.md | 1 - pyproject.toml | 2 +- rasa/version.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) delete mode 100644 changelog/13019.bugfix.md diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx index a0d7ebd27de6..a9a2a21fb56b 100644 --- a/CHANGELOG.mdx +++ b/CHANGELOG.mdx @@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . 
--> +## [3.6.19] - 2024-03-04 + +Rasa 3.6.19 (2024-03-04) +### Bugfixes +- [#13019](https://github.com/rasahq/rasa/issues/13019): Changed the ordering of returned events to order by ID (previously timestamp) in SQL Tracker Store + + ## [3.6.18] - 2024-02-23 Rasa 3.6.18 (2024-02-23) diff --git a/changelog/13019.bugfix.md b/changelog/13019.bugfix.md deleted file mode 100644 index 9ce820d61dbe..000000000000 --- a/changelog/13019.bugfix.md +++ /dev/null @@ -1 +0,0 @@ -Changed the ordering of returned events to order by ID (previously timestamp) in SQL Tracker Store diff --git a/pyproject.toml b/pyproject.toml index f52ef4094521..b9c98cf2486f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))" [tool.poetry] name = "rasa" -version = "3.6.18" +version = "3.6.19" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" authors = [ "Rasa Technologies GmbH ",] maintainers = [ "Tom Bocklisch ",] diff --git a/rasa/version.py b/rasa/version.py index bbd3bcf024cb..12d8ae480621 100644 --- a/rasa/version.py +++ b/rasa/version.py @@ -1,3 +1,3 @@ # this file will automatically be changed, # do not add anything but the version number here! -__version__ = "3.6.18" +__version__ = "3.6.19" From 9dd97dc6f1abbd8419dc04861a9f96959c6c554e Mon Sep 17 00:00:00 2001 From: sancharigr Date: Wed, 13 Mar 2024 17:49:07 +0100 Subject: [PATCH 27/33] Update slack release notifications channels --- .github/workflows/continous-integration.yml | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/.github/workflows/continous-integration.yml b/.github/workflows/continous-integration.yml index 7a69bbb4e807..8b6c0ab7dbe2 100644 --- a/.github/workflows/continous-integration.yml +++ b/.github/workflows/continous-integration.yml @@ -1264,10 +1264,9 @@ jobs: sentry-cli releases set-commits --auto "rasa-$GITHUB_TAG" sentry-cli releases finalize "rasa-$GITHUB_TAG" - - name: Notify Slack & Publish Release Notes 🗞 + - name: Publish Release Notes 🗞 env: GH_RELEASE_NOTES_TOKEN: ${{ secrets.GH_RELEASE_NOTES_TOKEN }} - SLACK_WEBHOOK_TOKEN: ${{ secrets.SLACK_WEBHOOK_TOKEN }} GITHUB_TAG: ${{ github.ref }} GITHUB_REPO_SLUG: ${{ github.repository }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -1275,7 +1274,16 @@ jobs: GITHUB_TAG=${GITHUB_TAG/refs\/tags\//} pip install -U github3.py pep440-version-utils python3 scripts/publish_gh_release_notes.py - ./scripts/ping_slack_about_package_release.sh + + - name: Notify Slack of successful release + # notification will be sent to the #product channel on slack, webhook url is added as repository secret + if: success() + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ASSISTANT_RELEASE_WEBHOOK }} + uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a + with: + args: "💥 New *Rasa Open Source * version `${{ github.ref_name }}` has been released!" 
+ send_slack_notification_for_release_on_failure: name: Notify Slack & Publish Release Notes @@ -1290,7 +1298,7 @@ jobs: # send notification if 'deploy' is skipped (previous needed job failed) or failed if: needs.deploy.result != 'success' env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_TOKEN }} + SLACK_WEBHOOK: ${{ secrets.SLACK_RELEASE_ASSISTANT_DEV_TRIBE_WEBHOOK }} uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a with: args: "⛔️ *Rasa Open Source* version `${{ github.ref_name }}` could not be released 😱! Please check out GitHub Actions: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" From 3a6ddaaef686540e8965918c80d108bf95a2f351 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Fri, 12 Apr 2024 14:38:44 +0200 Subject: [PATCH 28/33] Update poetry lock for 1.8.2 --- poetry.lock | 310 +++---------------------------------------------- pyproject.toml | 1 + 2 files changed, 16 insertions(+), 295 deletions(-) diff --git a/poetry.lock b/poetry.lock index e3eb767e760a..a99ed6046eab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "absl-py" version = "1.4.0" description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "aio-pika" version = "8.2.3" description = "Wrapper for the aiormq for asyncio and humans." -category = "main" optional = false python-versions = ">3.6, <4" files = [ @@ -35,7 +33,6 @@ develop = ["aiomisc (>=16.0,<17.0)", "coverage (!=4.3)", "coveralls", "nox", "py name = "aiofiles" version = "23.1.0" description = "File support for asyncio." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -47,7 +44,6 @@ files = [ name = "aiogram" version = "2.15" description = "Is a pretty simple and fully asynchronous framework for Telegram Bot API" -category = "main" optional = false python-versions = "*" files = [ @@ -68,7 +64,6 @@ proxy = ["aiohttp-socks (>=0.5.3,<0.6.0)"] name = "aiohttp" version = "3.9.3" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -165,7 +160,6 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiohttp-retry" version = "2.8.3" description = "Simple retry client for aiohttp" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -180,7 +174,6 @@ aiohttp = "*" name = "aioresponses" version = "0.7.6" description = "Mock out requests made by ClientSession from aiohttp package" -category = "dev" optional = false python-versions = "*" files = [ @@ -195,7 +188,6 @@ aiohttp = ">=3.3.0,<4.0.0" name = "aiormq" version = "6.4.2" description = "Pure python AMQP asynchronous client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -214,7 +206,6 @@ develop = ["aiomisc (>=16.0,<17.0)", "coverage (!=4.3)", "coveralls", "pylava", name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -229,7 +220,6 @@ frozenlist = ">=1.1.0" name = "analytics-python" version = "1.4.post1" description = "The hassle-free way to integrate analytics into any python application." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -251,7 +241,6 @@ test = ["flake8 (==3.7.9)", "mock (==2.0.0)", "pylint (==1.9.3)"] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -273,7 +262,6 @@ trio = ["trio (<0.22)"] name = "apscheduler" version = "3.9.1.post1" description = "In-process task scheduler with Cron-like capabilities" -category = "main" optional = false python-versions = "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" files = [ @@ -285,7 +273,7 @@ files = [ pytz = "*" setuptools = ">=0.7" six = ">=1.4.0" -tzlocal = ">=2.0,<3.0.0 || >=4.0.0" +tzlocal = ">=2.0,<3.dev0 || >=4.dev0" [package.extras] asyncio = ["trollius"] @@ -304,7 +292,6 @@ zookeeper = ["kazoo"] name = "astunparse" version = "1.6.3" description = "An AST unparser for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -320,7 +307,6 @@ wheel = ">=0.23.0,<1.0" name = "async-generator" version = "1.10" description = "Async generators and context managers for Python 3.5+" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -332,7 +318,6 @@ files = [ name = "async-timeout" version = "4.0.2" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -344,7 +329,6 @@ files = [ name = "attrs" version = "22.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -362,7 +346,6 @@ tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy name = "azure-core" version = "1.27.1" description = "Microsoft Azure Core Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -382,7 +365,6 @@ aio = ["aiohttp (>=3.0)"] name = "azure-storage-blob" version = "12.15.0" description = "Microsoft Azure Blob Storage Client Library for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -403,7 +385,6 @@ aio = ["azure-core[aio] (>=1.26.0,<2.0.0)"] name = "babel" version = "2.9.1" description = "Internationalization utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -418,7 +399,6 @@ pytz = ">=2015.7" name = "backoff" version = "1.10.0" description = "Function decoration for backoff and retry" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -430,7 +410,6 @@ files = [ name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -459,7 +438,6 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.5" description = "Security oriented static analyser for python code." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -483,7 +461,6 @@ yaml = ["PyYAML"] name = "bidict" version = "0.22.1" description = "The bidirectional mapping library for Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -500,7 +477,6 @@ test = ["hypothesis", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "py name = "black" version = "22.12.0" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -536,7 +512,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "blis" version = "0.7.9" description = "The Blis BLAS-like linear algebra library, as a self-contained C-extension." -category = "main" optional = true python-versions = "*" files = [ @@ -577,7 +552,6 @@ numpy = ">=1.15.0" name = "boto3" version = "1.27.1" description = "The AWS SDK for Python" -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -597,7 +571,6 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "botocore" version = "1.30.1" description = "Low-level, data-driven core of boto 3." -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -617,7 +590,6 @@ crt = ["awscrt (==0.16.9)"] name = "cachecontrol" version = "0.12.14" description = "httplib2 caching for requests" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -637,7 +609,6 @@ redis = ["redis (>=2.10.5)"] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -649,7 +620,6 @@ files = [ name = "catalogue" version = "2.0.8" description = "Super lightweight function registries for your library" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -661,7 +631,6 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -673,7 +642,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -750,7 +718,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -835,7 +802,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -850,7 +816,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-default-group" version = "1.2.2" description = "Extends click.Group to invoke a command without explicit subcommand name" -category = "dev" optional = false python-versions = "*" files = [ @@ -864,7 +829,6 @@ click = "*" name = "cloudpickle" version = "2.2.1" description = "Extended pickling support for Python objects" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -876,7 +840,6 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -888,7 +851,6 @@ files = [ name = "colorclass" version = "2.2.2" description = "Colorful worry-free console applications for Linux, Mac OS X, and Windows." 
-category = "main" optional = false python-versions = ">=2.6" files = [ @@ -900,7 +862,6 @@ files = [ name = "coloredlogs" version = "15.0.1" description = "Colored terminal output for Python's logging module" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -918,7 +879,6 @@ cron = ["capturer (>=2.4)"] name = "colorhash" version = "1.2.1" description = "Generate color based on any object" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -930,7 +890,6 @@ files = [ name = "confection" version = "0.1.0" description = "The sweetest config system for Python" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -946,7 +905,6 @@ srsly = ">=2.4.0,<3.0.0" name = "confluent-kafka" version = "2.1.1" description = "Confluent's Python client for Apache Kafka" -category = "main" optional = false python-versions = "*" files = [ @@ -993,7 +951,6 @@ schema-registry = ["requests"] name = "coverage" version = "6.5.0" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1059,7 +1016,6 @@ toml = ["tomli"] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" -category = "dev" optional = false python-versions = ">= 3.5" files = [ @@ -1068,7 +1024,7 @@ files = [ ] [package.dependencies] -coverage = ">=4.1,<6.0.0 || >6.1,<6.1.1 || >6.1.1,<7.0" +coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || >6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" @@ -1079,7 +1035,6 @@ yaml = ["PyYAML (>=3.10)"] name = "cryptography" version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1125,7 +1080,6 @@ test-randomorder = ["pytest-randomly"] name = "cycler" version = "0.11.0" description = "Composable style cycles" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1137,7 +1091,6 @@ files = [ name = "cymem" version = "2.0.7" description = "Manage calls to calloc/free through Cython" -category = "main" optional = true python-versions = "*" files = [ @@ -1175,7 +1128,6 @@ files = [ name = "dask" version = "2022.10.2" description = "Parallel PyData with Task Scheduling" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1204,7 +1156,6 @@ test = ["pandas[test]", "pre-commit", "pytest", "pytest-rerunfailures", "pytest- name = "databind" version = "1.5.3" description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. The `databind` package will install the full suite of databind packages. Compatible with Python 3.7 and newer." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1220,7 +1171,6 @@ files = [ name = "databind-core" version = "1.5.3" description = "Databind is a library inspired by jackson-databind to de-/serialize Python dataclasses. Compatible with Python 3.7 and newer." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1237,7 +1187,6 @@ typing-extensions = ">=3.10.0" name = "databind-json" version = "1.5.3" description = "De-/serialize Python dataclasses to or from JSON payloads. Compatible with Python 3.7 and newer." 
-category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1254,7 +1203,6 @@ typing-extensions = ">=3.10.0" name = "datadog" version = "0.45.0" description = "The Datadog Python library" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -1269,7 +1217,6 @@ requests = ">=2.6.0" name = "datadog-api-client" version = "2.14.0" description = "Collection of all Datadog Public endpoints" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1293,7 +1240,6 @@ zstandard = ["zstandard"] name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1311,7 +1257,6 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "dnspython" version = "2.3.0" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1332,7 +1277,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1354,7 +1298,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "main" optional = false python-versions = "*" files = [ @@ -1365,7 +1308,6 @@ files = [ name = "docspec" version = "2.1.2" description = "Docspec is a JSON object specification for representing API documentation of programming languages." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1381,7 +1323,6 @@ Deprecated = ">=1.2.12,<2.0.0" name = "docspec-python" version = "2.0.2" description = "A parser based on lib2to3 producing docspec data from Python source code." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1397,7 +1338,6 @@ docspec = ">=2.0.2,<3.0.0" name = "docstring-parser" version = "0.11" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1411,7 +1351,6 @@ test = ["black", "pytest"] name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1426,7 +1365,6 @@ test = ["pytest (>=6)"] name = "execnet" version = "1.9.0" description = "execnet: rapid multi-Python deployment" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1441,7 +1379,6 @@ testing = ["pre-commit"] name = "fakeredis" version = "2.16.0" description = "Python implementation of redis API, can be used for testing purposes." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1461,7 +1398,6 @@ lua = ["lupa (>=1.14,<2.0)"] name = "fbmessenger" version = "6.0.0" description = "A python library to communicate with the Facebook Messenger API's" -category = "main" optional = false python-versions = "*" files = [ @@ -1476,7 +1412,6 @@ requests = ">=2.0" name = "filelock" version = "3.12.2" description = "A platform independent file lock." 
-category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1492,7 +1427,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "fire" version = "0.5.0" description = "A library for automatically generating command line interfaces." -category = "main" optional = false python-versions = "*" files = [ @@ -1507,7 +1441,6 @@ termcolor = "*" name = "flatbuffers" version = "23.5.26" description = "The FlatBuffers serialization format for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1519,7 +1452,6 @@ files = [ name = "fonttools" version = "4.40.0" description = "Tools to manipulate font files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1577,7 +1509,6 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1592,7 +1523,6 @@ python-dateutil = ">=2.7" name = "frozenlist" version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1676,7 +1606,6 @@ files = [ name = "fsspec" version = "2023.6.0" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1712,7 +1641,6 @@ tqdm = ["tqdm"] name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1723,7 +1651,6 @@ files = [ name = "gast" version = "0.4.0" description = "Python AST that abstracts the underlying Python version" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1735,7 +1662,6 @@ files = [ name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1750,7 +1676,6 @@ smmap = ">=3.0.1,<6" name = "github3-py" version = "3.2.0" description = "Python wrapper for the GitHub API(http://developer.github.com/v3)" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1768,7 +1693,6 @@ uritemplate = ">=3.0.0" name = "gitpython" version = "3.1.31" description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1783,7 +1707,6 @@ gitdb = ">=4.0.1,<5" name = "google-api-core" version = "2.11.1" description = "Google API client core library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1806,7 +1729,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.21.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1832,7 +1754,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-oauthlib" version = "1.0.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1851,7 +1772,6 @@ tool = ["click (>=6.0.0)"] name = "google-cloud-core" version = "2.3.3" description = "Google Cloud API client core library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1860,7 +1780,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.6,<2.0.0 || 
>2.3.0,<3.0.0dev" +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] @@ -1870,7 +1790,6 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] name = "google-cloud-storage" version = "2.10.0" description = "Google Cloud Storage API client library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1879,7 +1798,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-resumable-media = ">=2.3.2" @@ -1892,7 +1811,6 @@ protobuf = ["protobuf (<5.0.0dev)"] name = "google-crc32c" version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1973,7 +1891,6 @@ testing = ["pytest"] name = "google-pasta" version = "0.2.0" description = "pasta is an AST-based Python refactoring library" -category = "main" optional = false python-versions = "*" files = [ @@ -1989,7 +1906,6 @@ six = "*" name = "google-resumable-media" version = "2.5.0" description = "Utilities for Google Media Downloads and Resumable Uploads" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2008,7 +1924,6 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] name = "googleapis-common-protos" version = "1.59.1" description = "Common protobufs used in Google APIs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2026,7 +1941,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -2104,7 +2018,6 @@ test = ["objgraph", "psutil"] name = "grpcio" version = "1.56.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2162,7 +2075,6 @@ protobuf = ["grpcio-tools (>=1.56.0)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2174,7 +2086,6 @@ files = [ name = "h5py" version = "3.9.0" description = "Read and write HDF5 files from Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2208,7 +2119,6 @@ numpy = ">=1.17.3" name = "httpcore" version = "0.16.3" description = "A minimal low-level HTTP client." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2220,17 +2130,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httptools" version = "0.5.0" description = "A collection of framework independent HTTP protocol utils." -category = "main" optional = false python-versions = ">=3.5.0" files = [ @@ -2284,7 +2193,6 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "httpx" version = "0.23.3" description = "The next generation HTTP client." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2300,15 +2208,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" version = "0.16.2" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -2341,7 +2248,6 @@ typing = ["pydantic", "types-PyYAML", "types-requests", "types-simplejson", "typ name = "humanfriendly" version = "10.0" description = "Human friendly output for text interfaces using Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2356,7 +2262,6 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2368,7 +2273,6 @@ files = [ name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2388,7 +2292,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2407,7 +2310,6 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "incremental" version = "22.10.0" description = "\"A small library that versions your Python projects.\"" -category = "dev" optional = false python-versions = "*" files = [ @@ -2423,7 +2325,6 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2435,7 +2336,6 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "dev" optional = false python-versions = "*" files = [ @@ -2450,7 +2350,6 @@ six = "*" name = "jax" version = "0.4.13" description = "Differentiate, compile, and transform Numpy code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2481,7 +2380,6 @@ tpu = ["jaxlib (==0.4.13)", "libtpu-nightly (==0.1.dev20230622)"] name = "jieba" version = "0.42.1" description = "Chinese Words Segmentation Utilities" -category = "main" optional = true python-versions = "*" files = [ @@ -2492,7 +2390,6 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2510,7 +2407,6 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2522,7 +2418,6 @@ files = [ name = "joblib" version = "1.2.0" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2534,7 +2429,6 @@ files = [ name = "jsonpickle" version = "3.0.1" description = "Python library for serializing any arbitrary object graph into JSON" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2551,7 +2445,6 @@ testing-libs = ["simplejson", "ujson"] name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2573,7 +2466,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "keras" version = "2.12.0" description = "Deep learning for humans." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2584,7 +2476,6 @@ files = [ name = "kiwisolver" version = "1.4.4" description = "A fast implementation of the Cassowary constraint solver" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2662,7 +2553,6 @@ files = [ name = "langcodes" version = "3.3.0" description = "Tools for labeling human languages with IETF language tags" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -2677,7 +2567,6 @@ data = ["language-data (>=1.1,<2.0)"] name = "libclang" version = "16.0.0" description = "Clang Python Bindings, mirrored from the official LLVM repo: https://github.com/llvm/llvm-project/tree/main/clang/bindings/python, to make the installation process easier." -category = "main" optional = false python-versions = "*" files = [ @@ -2695,7 +2584,6 @@ files = [ name = "locket" version = "1.0.0" description = "File-based locks for Python on Linux and Windows" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2707,7 +2595,6 @@ files = [ name = "markdown" version = "3.4.3" description = "Python implementation of John Gruber's Markdown." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2725,7 +2612,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2750,7 +2636,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2820,7 +2705,6 @@ files = [ name = "matplotlib" version = "3.5.3" description = "Python plotting package" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2875,7 +2759,6 @@ python-dateutil = ">=2.7" name = "mattermostwrapper" version = "2.2" description = "A mattermost api v4 wrapper to interact with api" -category = "main" optional = false python-versions = "*" files = [ @@ -2889,7 +2772,6 @@ requests = "*" name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2901,7 +2783,6 @@ files = [ name = "memory-profiler" version = "0.61.0" description = "A module for monitoring memory usage of a python program" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2916,7 +2797,6 @@ psutil = "*" name = "ml-dtypes" version = "0.2.0" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2941,8 +2821,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">1.20", markers = "python_version <= \"3.9\""}, {version = ">=1.21.2", markers = "python_version > \"3.9\""}, + {version = ">1.20", markers = "python_version <= \"3.9\""}, ] [package.extras] @@ -2952,7 +2832,6 @@ dev = ["absl-py", "pyink", "pylint (>=2.6.0)", "pytest", "pytest-xdist"] name = "mongomock" version = "4.1.2" description = "Fake pymongo stub for testing simple MongoDB-dependent code" -category = "dev" optional = false python-versions = "*" files = [ @@ -2968,7 +2847,6 @@ sentinels = "*" name = "monotonic" version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" -category = "dev" optional = false python-versions = "*" files = [ @@ -2980,7 +2858,6 @@ files = [ name = "moto" version = "4.1.12" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3027,7 +2904,6 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "msgpack" version = "1.0.5" description = "MessagePack serializer" -category = "main" optional = false python-versions = "*" files = [ @@ -3100,7 +2976,6 @@ files = [ name = "multidict" version = "5.2.0" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3182,7 +3057,6 @@ files = [ name = "murmurhash" version = "1.0.9" description = "Cython bindings for MurmurHash" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -3220,7 +3094,6 @@ files = [ name = "mypy" version = "1.0.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3267,7 +3140,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "0.4.4" description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -3278,7 +3150,6 @@ files = [ name = "networkx" version = "2.6.3" description = "Python package for creating and manipulating graphs and networks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3297,7 +3168,6 @@ test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] name = "nr-util" version = "0.8.12" description = "General purpose Python utility library." 
-category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3313,7 +3183,6 @@ typing-extensions = ">=3.0.0" name = "numpy" version = "1.22.3" description = "NumPy is the fundamental package for array computing with Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3343,7 +3212,6 @@ files = [ name = "numpy" version = "1.23.5" description = "NumPy is the fundamental package for array computing with Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3381,7 +3249,6 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3398,7 +3265,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "opt-einsum" version = "3.3.0" description = "Optimizing numpys einsum function" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3417,7 +3283,6 @@ tests = ["pytest", "pytest-cov", "pytest-pep8"] name = "packaging" version = "20.9" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3432,7 +3297,6 @@ pyparsing = ">=2.0.2" name = "pamqp" version = "3.2.1" description = "RabbitMQ Focused AMQP low-level library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3448,7 +3312,6 @@ testing = ["coverage", "flake8", "flake8-comprehensions", "flake8-deprecated", " name = "partd" version = "1.4.0" description = "Appendable key-value storage" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3467,7 +3330,6 @@ complete = ["blosc", "numpy (>=1.9.0)", "pandas (>=0.19.0)", "pyzmq"] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3479,7 +3341,6 @@ files = [ name = "pathy" version = "0.10.2" description = "pathlib.Path subclasses for local and cloud bucket storage" -category = "main" optional = true python-versions = ">= 3.6" files = [ @@ -3502,7 +3363,6 @@ test = ["mock", "pytest", "pytest-coverage", "typer-cli"] name = "pbr" version = "5.11.1" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -3514,7 +3374,6 @@ files = [ name = "pep440-version-utils" version = "0.3.0" description = "Utilities to deal with pep440 versioning" -category = "dev" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -3529,7 +3388,6 @@ packaging = ">=20.3,<21.0" name = "pillow" version = "10.0.1" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3597,7 +3455,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3609,7 +3466,6 @@ files = [ name = "platformdirs" version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3625,7 +3481,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3641,7 +3496,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3661,7 +3515,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "preshed" version = "3.0.8" description = "Cython hash table that trusts the keys are pre-hashed" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -3703,7 +3556,6 @@ murmurhash = ">=0.28.0,<1.1.0" name = "prompt-toolkit" version = "3.0.28" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.6.2" files = [ @@ -3718,7 +3570,6 @@ wcwidth = "*" name = "protobuf" version = "4.23.3" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3741,7 +3592,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3768,7 +3618,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2-binary" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3840,7 +3689,6 @@ files = [ name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -3852,7 +3700,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -3867,7 +3714,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3879,7 +3725,6 @@ files = [ name = "pydantic" version = "1.10.9" description = "Data validation and settings management using python type hints" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3932,7 +3777,6 @@ email = ["email-validator (>=1.0.3)"] name = "pydoc-markdown" version = "4.7.0" description = "Create Python API documentation in Markdown format." -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3959,7 +3803,6 @@ yapf = ">=0.30.0" name = "pydot" version = "1.4.2" description = "Python interface to Graphviz's Dot" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3974,7 +3817,6 @@ pyparsing = ">=2.1.4" name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3989,7 +3831,6 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.7.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4010,7 +3851,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pykwalify" version = "1.8.0" description = "Python lib/cli for JSON/YAML schema validation" -category = "main" optional = false python-versions = "*" files = [ @@ -4027,7 +3867,6 @@ python-dateutil = ">=2.8.0" name = "pymongo" version = "4.3.3" description = "Python driver for MongoDB " -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4122,7 +3961,6 @@ zstd = ["zstandard"] name = "pyparsing" version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -4137,7 +3975,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyreadline3" version = "3.4.1" description = "A python implementation of GNU readline." -category = "main" optional = false python-versions = "*" files = [ @@ -4149,7 +3986,6 @@ files = [ name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4186,7 +4022,6 @@ files = [ name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4209,7 +4044,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.20.3" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4228,7 +4062,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4247,7 +4080,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-sanic" version = "1.9.1" description = "a pytest plugin for Sanic" -category = "dev" optional = false python-versions = ">=3.7" files = [] @@ -4269,7 +4101,6 @@ resolved_reference = "4092e8005fbbdc29b892cd8b09399894d86b1ac7" name = "pytest-timeout" version = "2.1.0" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4284,7 +4115,6 @@ pytest = ">=5.0.0" name = "pytest-xdist" version = "3.3.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4305,7 +4135,6 @@ testing = ["filelock"] name = "python-crfsuite" version = "0.9.9" description = "Python binding for CRFsuite" -category = "main" optional = false python-versions = "*" files = [ @@ -4365,7 +4194,6 @@ files = [ name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -4380,7 +4208,6 @@ six = ">=1.5" name = "python-engineio" version = "4.5.1" description = "Engine.IO server and client for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4397,7 +4224,6 @@ docs = ["sphinx"] name = "python-socketio" version = "5.8.0" description = "Socket.IO server and client for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4417,7 +4243,6 @@ client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] name = "pytz" version = "2022.7.1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -4429,7 +4254,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -4453,7 +4277,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4514,7 +4337,6 @@ files = [ name = "questionary" version = "1.10.0" description = "Python library to build pretty command line user prompts ⭐️" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -4532,7 +4354,6 @@ docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphin name = "randomname" version = "0.1.5" description = "Generate random adj-noun names like docker and github." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4546,7 +4367,6 @@ fire = "*" name = "rasa-sdk" version = "3.6.2" description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants" -category = "main" optional = false python-versions = ">=3.8,<3.11" files = [ @@ -4570,7 +4390,6 @@ wheel = ">=0.38.1" name = "redis" version = "4.6.0" description = "Python client for Redis database and key-value store" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4589,7 +4408,6 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "regex" version = "2022.10.31" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4687,7 +4505,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4709,7 +4526,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4728,7 +4544,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -4743,7 +4558,6 @@ requests = ">=2.0.1,<3.0.0" name = "responses" version = "0.22.0" description = "A utility library for mocking out the `requests` Python library." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4764,7 +4578,6 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rfc3986" version = "1.5.0" description = "Validating URI References per RFC 3986" -category = "dev" optional = false python-versions = "*" files = [ @@ -4782,7 +4595,6 @@ idna2008 = ["idna"] name = "rich" version = "13.4.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -4802,7 +4614,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rocketchat-api" version = "1.30.0" description = "Python API wrapper for Rocket.Chat" -category = "main" optional = false python-versions = "*" files = [ @@ -4818,7 +4629,6 @@ requests = "*" name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -4833,7 +4643,6 @@ pyasn1 = ">=0.1.3" name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "main" optional = false python-versions = ">=3" files = [ @@ -4852,7 +4661,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4899,7 +4707,6 @@ files = [ name = "ruff" version = "0.0.255" description = "An extremely fast Python linter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4926,7 +4733,6 @@ files = [ name = "s3transfer" version = "0.6.1" description = "An Amazon S3 Transfer Manager" -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -4944,7 +4750,6 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] name = "sanic" version = "21.12.2" description = "A web server and web framework that's written to go fast. Build fast. Run fast." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4962,17 +4767,16 @@ uvloop = {version = ">=0.5.3", markers = "sys_platform != \"win32\" and implemen websockets = ">=10.0" [package.extras] -all = ["bandit", "beautifulsoup4", "black", "chardet (>=3.0.0,<4.0.0)", "coverage (==5.3)", "cryptography", "docutils", "flake8", "gunicorn (==20.0.4)", "isort (>=5.0.0)", "m2r2", "mistune (<2.0.0)", "mypy (>=0.901,<0.910)", "pygments", "pytest (==6.2.5)", "pytest-benchmark", "pytest-cov", "pytest-sanic", "pytest-sugar", "sanic-testing (>=0.7.0)", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)", "towncrier", "tox", "types-ujson", "uvicorn (<0.15.0)"] -dev = ["bandit", "beautifulsoup4", "black", "chardet (>=3.0.0,<4.0.0)", "coverage (==5.3)", "cryptography", "docutils", "flake8", "gunicorn (==20.0.4)", "isort (>=5.0.0)", "mypy (>=0.901,<0.910)", "pygments", "pytest (==6.2.5)", "pytest-benchmark", "pytest-cov", "pytest-sanic", "pytest-sugar", "sanic-testing (>=0.7.0)", "towncrier", "tox", "types-ujson", "uvicorn (<0.15.0)"] +all = ["bandit", "beautifulsoup4", "black", "chardet (==3.*)", "coverage (==5.3)", "cryptography", "docutils", "flake8", "gunicorn (==20.0.4)", "isort (>=5.0.0)", "m2r2", "mistune (<2.0.0)", "mypy (>=0.901,<0.910)", "pygments", "pytest (==6.2.5)", "pytest-benchmark", "pytest-cov", "pytest-sanic", "pytest-sugar", "sanic-testing (>=0.7.0)", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)", "towncrier", "tox", "types-ujson", "uvicorn (<0.15.0)"] +dev = ["bandit", "beautifulsoup4", "black", "chardet (==3.*)", "coverage (==5.3)", "cryptography", "docutils", "flake8", "gunicorn (==20.0.4)", "isort (>=5.0.0)", "mypy (>=0.901,<0.910)", "pygments", "pytest (==6.2.5)", "pytest-benchmark", "pytest-cov", "pytest-sanic", "pytest-sugar", "sanic-testing (>=0.7.0)", "towncrier", "tox", "types-ujson", "uvicorn (<0.15.0)"] docs = ["docutils", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)"] ext = ["sanic-ext"] -test = ["bandit", "beautifulsoup4", "black", "chardet (>=3.0.0,<4.0.0)", "coverage (==5.3)", "docutils", "flake8", "gunicorn (==20.0.4)", "isort (>=5.0.0)", "mypy (>=0.901,<0.910)", "pygments", "pytest (==6.2.5)", "pytest-benchmark", "pytest-cov", "pytest-sanic", "pytest-sugar", "sanic-testing (>=0.7.0)", "types-ujson", "uvicorn (<0.15.0)"] +test = ["bandit", "beautifulsoup4", "black", "chardet (==3.*)", "coverage (==5.3)", "docutils", "flake8", "gunicorn (==20.0.4)", "isort (>=5.0.0)", "mypy (>=0.901,<0.910)", "pygments", "pytest (==6.2.5)", "pytest-benchmark", "pytest-cov", "pytest-sanic", "pytest-sugar", "sanic-testing (>=0.7.0)", "types-ujson", "uvicorn (<0.15.0)"] [[package]] name = "sanic-cors" version = "2.0.1" description = "A Sanic extension adding a decorator for CORS support. Based on flask-cors by Cory Dolphin." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4987,7 +4791,6 @@ sanic = ">=21.9.3" name = "sanic-jwt" version = "1.8.0" description = "JWT oauth flow for Sanic" -category = "main" optional = false python-versions = "*" files = [ @@ -5006,7 +4809,6 @@ docs = ["Jinja2 (<3.1)", "Sphinx"] name = "sanic-routing" version = "0.7.2" description = "Core routing component for Sanic" -category = "main" optional = false python-versions = "*" files = [ @@ -5018,7 +4820,6 @@ files = [ name = "sanic-testing" version = "22.6.0" description = "Core testing clients for Sanic" -category = "dev" optional = false python-versions = "*" files = [ @@ -5033,7 +4834,6 @@ httpx = ">=0.18,<0.24" name = "scikit-learn" version = "1.1.3" description = "A set of python modules for machine learning and data mining" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5076,7 +4876,6 @@ tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.2)", "mypy ( name = "scipy" version = "1.10.1" description = "Fundamental algorithms for scientific computing in Python" -category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -5115,7 +4914,6 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "sentencepiece" version = "0.1.99" description = "SentencePiece python wrapper" -category = "main" optional = true python-versions = "*" files = [ @@ -5170,7 +4968,6 @@ files = [ name = "sentinels" version = "1.0.0" description = "Various objects to denote special meanings in python" -category = "dev" optional = false python-versions = "*" files = [ @@ -5181,7 +4978,6 @@ files = [ name = "sentry-sdk" version = "1.14.0" description = "Python client for Sentry (https://sentry.io)" -category = "main" optional = false python-versions = "*" files = [ @@ -5220,7 +5016,6 @@ tornado = ["tornado (>=5)"] name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5237,7 +5032,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -5249,7 +5043,6 @@ files = [ name = "sklearn-crfsuite" version = "0.3.6" description = "CRFsuite (python-crfsuite) wrapper which provides interface simlar to scikit-learn" -category = "main" optional = false python-versions = "*" files = [ @@ -5267,7 +5060,6 @@ tqdm = ">=2.0" name = "slack-sdk" version = "3.21.3" description = "The Slack API Platform SDK for Python" -category = "main" optional = false python-versions = ">=3.6.0" files = [ @@ -5283,7 +5075,6 @@ testing = ["Flask (>=1,<2)", "Flask-Sockets (>=0.2,<1)", "Jinja2 (==3.0.3)", "We name = "smart-open" version = "6.3.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -category = "main" optional = true python-versions = ">=3.6,<4.0" files = [ @@ -5305,7 +5096,6 @@ webhdfs = ["requests"] name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -5317,7 +5107,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "dev" optional = false 
python-versions = ">=3.7" files = [ @@ -5329,7 +5118,6 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "dev" optional = false python-versions = "*" files = [ @@ -5341,7 +5129,6 @@ files = [ name = "spacy" version = "3.4.4" description = "Industrial-strength Natural Language Processing (NLP) in Python" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -5428,7 +5215,6 @@ transformers = ["spacy-transformers (>=1.1.2,<1.2.0)"] name = "spacy" version = "3.5.4" description = "Industrial-strength Natural Language Processing (NLP) in Python" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -5515,7 +5301,6 @@ transformers = ["spacy-transformers (>=1.1.2,<1.3.0)"] name = "spacy-legacy" version = "3.0.12" description = "Legacy registered functions for spaCy backwards compatibility" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -5527,7 +5312,6 @@ files = [ name = "spacy-loggers" version = "1.0.4" description = "Logging utilities for SpaCy" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -5539,7 +5323,6 @@ files = [ name = "sqlalchemy" version = "1.4.49" description = "Database Abstraction Library" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -5594,7 +5377,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -5621,7 +5404,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "srsly" version = "2.4.6" description = "Modern high-performance serialization utilities for Python" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -5662,7 +5444,6 @@ catalogue = ">=2.0.3,<2.1.0" name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -5677,7 +5458,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "structlog" version = "23.1.0" description = "Structured Logging for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5695,7 +5475,6 @@ typing = ["mypy", "rich", "twisted"] name = "structlog-sentry" version = "2.0.3" description = "Sentry integration for structlog" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -5711,7 +5490,6 @@ structlog = "*" name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5726,7 +5504,6 @@ widechars = ["wcwidth"] name = "tarsafe" version = "0.0.4" description = "A safe subclass of the TarFile class for 
interacting with tar files. Can be used as a direct drop-in replacement for safe usage of extractall()" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -5738,7 +5515,6 @@ files = [ name = "tensorboard" version = "2.12.3" description = "TensorBoard lets you watch Tensors Flow" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5763,7 +5539,6 @@ wheel = ">=0.26" name = "tensorboard-data-server" version = "0.7.1" description = "Fast data loading for TensorBoard" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5776,7 +5551,6 @@ files = [ name = "tensorflow" version = "2.12.0" description = "TensorFlow is an open source machine learning framework for everyone." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5826,7 +5600,6 @@ wrapt = ">=1.11.0,<1.15" name = "tensorflow-cpu-aws" version = "2.12.0" description = "TensorFlow is an open source machine learning framework for everyone." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5864,7 +5637,6 @@ wrapt = ">=1.11.0,<1.15" name = "tensorflow-estimator" version = "2.12.0" description = "TensorFlow Estimator." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -5875,7 +5647,6 @@ files = [ name = "tensorflow-hub" version = "0.13.0" description = "TensorFlow Hub is a library to foster the publication, discovery, and consumption of reusable parts of machine learning models." -category = "main" optional = false python-versions = "*" files = [ @@ -5894,7 +5665,6 @@ make-nearest-neighbour-index = ["annoy", "apache-beam"] name = "tensorflow-intel" version = "2.12.0" description = "TensorFlow is an open source machine learning framework for everyone." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -5932,7 +5702,6 @@ wrapt = ">=1.11.0,<1.15" name = "tensorflow-io-gcs-filesystem" version = "0.31.0" description = "TensorFlow IO" -category = "main" optional = false python-versions = ">=3.7, <3.12" files = [ @@ -5968,7 +5737,6 @@ tensorflow-rocm = ["tensorflow-rocm (>=2.11.0,<2.12.0)"] name = "tensorflow-io-gcs-filesystem" version = "0.32.0" description = "TensorFlow IO" -category = "main" optional = false python-versions = ">=3.7, <3.12" files = [ @@ -5999,7 +5767,6 @@ tensorflow-rocm = ["tensorflow-rocm (>=2.12.0,<2.13.0)"] name = "tensorflow-macos" version = "2.12.0" description = "TensorFlow is an open source machine learning framework for everyone." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6041,7 +5808,6 @@ wrapt = ">=1.11.0,<1.15" name = "tensorflow-metal" version = "0.8.0" description = "TensorFlow acceleration for Mac GPUs." -category = "main" optional = true python-versions = "*" files = [ @@ -6063,7 +5829,6 @@ wheel = ">=0.35,<1.0" name = "tensorflow-text" version = "2.12.0" description = "TF.Text is a TensorFlow library of text related ops, modules, and subgraphs." -category = "main" optional = false python-versions = "*" files = [ @@ -6089,7 +5854,6 @@ tests = ["absl-py", "pytest", "tensorflow-datasets (>=3.2.0)"] name = "termcolor" version = "2.3.0" description = "ANSI color formatting for output in terminal" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6104,7 +5868,6 @@ tests = ["pytest", "pytest-cov"] name = "terminaltables" version = "3.1.10" description = "Generate simple tables in terminals from a nested list of strings." 
-category = "main" optional = false python-versions = ">=2.6" files = [ @@ -6116,7 +5879,6 @@ files = [ name = "thinc" version = "8.1.10" description = "A refreshing functional take on deep learning, compatible with your favorite libraries" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -6192,7 +5954,6 @@ torch = ["torch (>=1.6.0)"] name = "threadpoolctl" version = "3.1.0" description = "threadpoolctl" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -6204,7 +5965,6 @@ files = [ name = "tokenizers" version = "0.13.3" description = "Fast and Customizable Tokenizers" -category = "main" optional = true python-versions = "*" files = [ @@ -6259,7 +6019,6 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -6271,7 +6030,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6283,7 +6041,6 @@ files = [ name = "tomli-w" version = "1.0.0" description = "A lil' TOML writer" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6295,7 +6052,6 @@ files = [ name = "toolz" version = "0.12.0" description = "List processing tools and functional utilities" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -6307,7 +6063,6 @@ files = [ name = "towncrier" version = "22.12.0" description = "Building newsfiles for your project." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6330,7 +6085,6 @@ dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6351,7 +6105,6 @@ telegram = ["requests"] name = "transformers" version = "4.26.0" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -6418,7 +6171,6 @@ vision = ["Pillow"] name = "twilio" version = "8.2.2" description = "Twilio API client and TwiML generator" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -6437,7 +6189,6 @@ requests = ">=2.0.0" name = "typer" version = "0.7.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -6458,7 +6209,6 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -6480,7 +6230,6 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
name = "types-pyopenssl" version = "23.2.0.1" description = "Typing stubs for pyOpenSSL" -category = "dev" optional = false python-versions = "*" files = [ @@ -6495,7 +6244,6 @@ cryptography = ">=35.0.0" name = "types-python-dateutil" version = "2.8.19.13" description = "Typing stubs for python-dateutil" -category = "dev" optional = false python-versions = "*" files = [ @@ -6507,7 +6255,6 @@ files = [ name = "types-pytz" version = "2022.7.1.2" description = "Typing stubs for pytz" -category = "dev" optional = false python-versions = "*" files = [ @@ -6519,7 +6266,6 @@ files = [ name = "types-redis" version = "4.6.0.2" description = "Typing stubs for redis" -category = "dev" optional = false python-versions = "*" files = [ @@ -6535,7 +6281,6 @@ types-pyOpenSSL = "*" name = "types-requests" version = "2.31.0.1" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -6550,7 +6295,6 @@ types-urllib3 = "*" name = "types-setuptools" version = "67.8.0.0" description = "Typing stubs for setuptools" -category = "dev" optional = false python-versions = "*" files = [ @@ -6562,7 +6306,6 @@ files = [ name = "types-toml" version = "0.10.8.6" description = "Typing stubs for toml" -category = "dev" optional = false python-versions = "*" files = [ @@ -6574,7 +6317,6 @@ files = [ name = "types-urllib3" version = "1.26.25.13" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -6586,7 +6328,6 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6598,7 +6339,6 @@ files = [ name = "typing-utils" version = "0.1.0" description = "utils to inspect Python type annotations" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -6613,7 +6353,6 @@ test = ["pytest"] name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -6625,7 +6364,6 @@ files = [ name = "tzlocal" version = "5.0.1" description = "tzinfo object for the local timezone" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6644,7 +6382,6 @@ devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pyte name = "ujson" version = "5.8.0" description = "Ultra fast JSON encoder and decoder for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6715,7 +6452,6 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -6727,7 +6463,6 @@ files = [ name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -6744,7 +6479,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvloop" version = "0.17.0" description = "Fast implementation of asyncio event loop on top of libuv" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6789,7 +6523,6 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my name = "wasabi" version = "0.10.1" description = "A lightweight console printing and formatting toolkit" -category = "main" optional = true python-versions = "*" files = [ @@ -6801,7 +6534,6 @@ files = [ name = "wasabi" version = "1.1.2" description = "A lightweight console printing and formatting toolkit" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -6813,7 +6545,6 @@ files = [ name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6853,7 +6584,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -6865,7 +6595,6 @@ files = [ name = "webexteamssdk" version = "1.6.1" description = "Community-developed Python SDK for the Webex Teams APIs" -category = "main" optional = false python-versions = "*" files = [ @@ -6883,7 +6612,6 @@ requests-toolbelt = "*" name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -6900,7 +6628,6 @@ test = ["websockets"] name = "websockets" version = "10.4" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -6979,7 +6706,6 @@ files = [ name = "werkzeug" version = "2.3.6" description = "The comprehensive WSGI web application library." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -6997,7 +6723,6 @@ watchdog = ["watchdog (>=2.3)"] name = "wheel" version = "0.40.0" description = "A built-package format for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7012,7 +6737,6 @@ test = ["pytest (>=6.0.0)"] name = "wrapt" version = "1.14.1" description = "Module for decorators, wrappers and monkey patching." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -7096,7 +6820,6 @@ files = [ name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -7108,7 +6831,6 @@ files = [ name = "yapf" version = "0.40.1" description = "A formatter for Python code." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -7125,7 +6847,6 @@ tomli = ">=2.0.1" name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7213,7 +6934,6 @@ multidict = ">=4.0" name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -7236,4 +6956,4 @@ transformers = ["sentencepiece", "transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.11" -content-hash = "c554d0f11eb367109ccd80c3694de7f07d35b52a09b1279d2b9743e6c155f43c" +content-hash = "8e1bef6b78365110ec598219ee747edb505f82a76fd03bdcffcec7a299b39513" diff --git a/pyproject.toml b/pyproject.toml index b9c98cf2486f..53c05df1c74e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ license = "Apache-2.0" [[tool.poetry.source]] name = "internal repository mirroring psycopg binary for macos" url = "https://europe-west3-python.pkg.dev/rasa-releases/psycopg-binary/simple/" +priority = "supplemental" [tool.towncrier] package = "rasa" From 635afda18016583f5da4f0f9f2369bc0d2000495 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Fri, 12 Apr 2024 14:44:10 +0200 Subject: [PATCH 29/33] Update CI and dockerfiles --- .github/poetry_version.txt | 2 +- docker/docker-bake.hcl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/poetry_version.txt b/.github/poetry_version.txt index 8856dd07eb95..271f9b9b1d5e 100644 --- a/.github/poetry_version.txt +++ b/.github/poetry_version.txt @@ -1,2 +1,2 @@ # The poetry version is stored in a separate file due to the https://github.com/python-poetry/poetry/issues/3316 -poetry-version=1.4.2 +poetry-version=1.8.2 diff --git a/docker/docker-bake.hcl b/docker/docker-bake.hcl index 00e1471d0804..89934a81accf 100644 --- a/docker/docker-bake.hcl +++ b/docker/docker-bake.hcl @@ -21,7 +21,7 @@ variable "BASE_BUILDER_IMAGE_HASH" { # keep this in sync with the version in .github/poetry_version.txt # the variable is set automatically for builds in CI variable "POETRY_VERSION" { - default = "1.4.2" + default = "1.8.2" } group "base-images" { From d87389690f0c0fc34cde927e6f967483b857c301 Mon Sep 17 00:00:00 2001 From: sancharigr Date: Fri, 12 Apr 2024 14:49:09 +0200 Subject: [PATCH 30/33] Add a changelog entry --- changelog/13030.misc.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/13030.misc.md diff --git a/changelog/13030.misc.md b/changelog/13030.misc.md new file mode 100644 index 000000000000..6dd2c81ee85b --- /dev/null +++ b/changelog/13030.misc.md @@ -0,0 +1 @@ +Update poetry version to `1.8.2` From 772c387c29452ec522240b213c230001248903a5 Mon Sep 17 00:00:00 2001 From: Anca Lita <27920906+ancalita@users.noreply.github.com> Date: Thu, 18 Apr 2024 15:25:58 +0100 Subject: [PATCH 31/33] Add upper limit version constraint for scipy on 3.6.x (#13031) * update pyproject and poetry.lock on 3.6.x * add changelog entry --- changelog/13031.misc.md | 1 + poetry.lock | 2 +- pyproject.toml | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog/13031.misc.md diff --git a/changelog/13031.misc.md b/changelog/13031.misc.md new file mode 100644 index 000000000000..9a6c1b307a41 --- /dev/null +++ b/changelog/13031.misc.md @@ -0,0 +1 @@ +Add upper limit constraint for scipy dependency version in `pyproject.toml`. 
diff --git a/poetry.lock b/poetry.lock
index a99ed6046eab..58af70bada4b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -6956,4 +6956,4 @@ transformers = ["sentencepiece", "transformers"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8,<3.11"
-content-hash = "8e1bef6b78365110ec598219ee747edb505f82a76fd03bdcffcec7a299b39513"
+content-hash = "4c84d994449f859816e48dd00d77f31f6f9d964e29a9f6060300c51d923786e0"
diff --git a/pyproject.toml b/pyproject.toml
index 53c05df1c74e..7b983f4be729 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -194,7 +194,7 @@ version = ">=1.4.1,<1.7.3"
 python = "~=3.7.0"
 
 [[tool.poetry.dependencies.scipy]]
-version = ">=1.10.0"
+version = ">=1.10.0,<1.11.0"
 python = ">=3.8,<3.11"
 
 [[tool.poetry.dependencies.scikit-learn]]

From c195a25166a4d63ff7fdd51b1e7cba19f0137f4c Mon Sep 17 00:00:00 2001
From: Anca Lita <27920906+ancalita@users.noreply.github.com>
Date: Thu, 18 Apr 2024 15:27:41 +0100
Subject: [PATCH 32/33] prepared release of version 3.6.20

---
 CHANGELOG.mdx           | 7 +++++++
 changelog/13030.misc.md | 1 -
 changelog/13031.misc.md | 1 -
 pyproject.toml          | 2 +-
 rasa/version.py         | 2 +-
 5 files changed, 9 insertions(+), 4 deletions(-)
 delete mode 100644 changelog/13030.misc.md
 delete mode 100644 changelog/13031.misc.md

diff --git a/CHANGELOG.mdx b/CHANGELOG.mdx
index a9a2a21fb56b..a97640d0772b 100644
--- a/CHANGELOG.mdx
+++ b/CHANGELOG.mdx
@@ -16,6 +16,13 @@ https://github.com/RasaHQ/rasa/tree/main/changelog/ . -->
 
+## [3.6.20] - 2024-04-18
+
+Rasa 3.6.20 (2024-04-18)
+### Miscellaneous internal changes
+- [#13030](https://github.com/rasahq/rasa/issues/13030), [#13031](https://github.com/rasahq/rasa/issues/13031)
+
+
 ## [3.6.19] - 2024-03-04
 
 Rasa 3.6.19 (2024-03-04)
diff --git a/changelog/13030.misc.md b/changelog/13030.misc.md
deleted file mode 100644
index 6dd2c81ee85b..000000000000
--- a/changelog/13030.misc.md
+++ /dev/null
@@ -1 +0,0 @@
-Update poetry version to `1.8.2`
diff --git a/changelog/13031.misc.md b/changelog/13031.misc.md
deleted file mode 100644
index 9a6c1b307a41..000000000000
--- a/changelog/13031.misc.md
+++ /dev/null
@@ -1 +0,0 @@
-Add upper limit constraint for scipy dependency version in `pyproject.toml`.
diff --git a/pyproject.toml b/pyproject.toml
index 7b983f4be729..0944c09460d6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,7 +9,7 @@ exclude = "((.eggs | .git | .pytest_cache | build | dist))"
 
 [tool.poetry]
 name = "rasa"
-version = "3.6.19"
+version = "3.6.20"
 description = "Open source machine learning framework to automate text- and voice-based conversations: NLU, dialogue management, connect to Slack, Facebook, and more - Create chatbots and voice assistants"
 authors = [ "Rasa Technologies GmbH ",]
 maintainers = [ "Tom Bocklisch ",]
diff --git a/rasa/version.py b/rasa/version.py
index 12d8ae480621..2e1490a6a8c3 100644
--- a/rasa/version.py
+++ b/rasa/version.py
@@ -1,3 +1,3 @@
 # this file will automatically be changed,
 # do not add anything but the version number here!
-__version__ = "3.6.19" +__version__ = "3.6.20" From a51504b8cc73036c5c20e75e6c846061f80343a0 Mon Sep 17 00:00:00 2001 From: Vlada Anicic <115999837+rasa-aadlv@users.noreply.github.com> Date: Mon, 25 Nov 2024 16:04:01 +0100 Subject: [PATCH 33/33] [INFRA-1124] Update deprecated dependencies --- .github/workflows/continous-integration.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/continous-integration.yml b/.github/workflows/continous-integration.yml index 8b6c0ab7dbe2..00f2f8aab5f8 100644 --- a/.github/workflows/continous-integration.yml +++ b/.github/workflows/continous-integration.yml @@ -396,7 +396,7 @@ jobs: - name: Store coverage reports if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 #v4.4.3 with: name: ${{ matrix.test }}-coverage path: | @@ -547,7 +547,7 @@ jobs: - name: Store coverage reports if: needs.changes.outputs.backend == 'true' && matrix.os == 'ubuntu-22.04' - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce + uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 #v4.4.3 with: name: ${{ matrix.test }}-coverage path: | @@ -574,7 +574,7 @@ jobs: - name: Get backend coverage reports if: needs.changes.outputs.backend == 'true' - uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #v4.1.8 with: path: ${{ github.workspace }}/tests_coverage