diff --git a/examples/from_sql.py b/examples/from_sql.py
new file mode 100644
index 000000000..c8d435559
--- /dev/null
+++ b/examples/from_sql.py
@@ -0,0 +1,45 @@
+"""Example of using PandasAI with SQL database connectors."""
+
+from pandasai import SmartDatalake
+from pandasai.llm import OpenAI
+from pandasai.connectors import MySQLConnector, PostgreSQLConnector
+
+# With a MySQL database
+loan_connector = MySQLConnector(
+    config={
+        "host": "localhost",
+        "port": 3306,
+        "database": "mydb",
+        "username": "root",
+        "password": "root",
+        "table": "loans",
+        "where": [
+            # this is optional and filters the data to
+            # reduce the size of the dataframe
+            ["loan_status", "=", "PAIDOFF"],
+        ],
+    }
+)
+
+# With a PostgreSQL database
+payment_connector = PostgreSQLConnector(
+    config={
+        "host": "localhost",
+        "port": 5432,
+        "database": "mydb",
+        "username": "root",
+        "password": "root",
+        "table": "payments",
+        "where": [
+            # this is optional and filters the data to
+            # reduce the size of the dataframe
+            ["payment_status", "=", "PAIDOFF"],
+        ],
+    }
+)
+
+llm = OpenAI()
+df = SmartDatalake([loan_connector, payment_connector], config={"llm": llm})
+response = df.chat("How many loans are from men and have been paid off?")
+print(response)
+# Output: 247 loans have been paid off by men.
diff --git a/pandasai/connectors/__init__.py b/pandasai/connectors/__init__.py
new file mode 100644
index 000000000..00d6e2083
--- /dev/null
+++ b/pandasai/connectors/__init__.py
@@ -0,0 +1,17 @@
+"""
+Connectors are used to connect to databases, external APIs, and other data sources.
+
+The connectors package contains all the connectors that are used by the application.
+"""
+
+from .base import BaseConnector
+from .sql import SQLConnector, MySQLConnector, PostgreSQLConnector
+from .yahoo_finance import YahooFinanceConnector
+
+__all__ = [
+    "BaseConnector",
+    "SQLConnector",
+    "MySQLConnector",
+    "PostgreSQLConnector",
+    "YahooFinanceConnector",
+]
diff --git a/pandasai/connectors/base.py b/pandasai/connectors/base.py
new file mode 100644
index 000000000..55701b43f
--- /dev/null
+++ b/pandasai/connectors/base.py
@@ -0,0 +1,136 @@
+"""
+Base connector class to be extended by all connectors.
+"""
+
+from abc import ABC, abstractmethod
+from ..helpers.df_info import DataFrameType
+from ..helpers.logger import Logger
+from pydantic import BaseModel
+from typing import Optional
+
+
+class ConnectorConfig(BaseModel):
+    """
+    Connector configuration.
+    """
+
+    dialect: Optional[str] = None
+    driver: Optional[str] = None
+    username: str
+    password: str
+    host: str
+    port: int
+    database: str
+    table: str
+    where: Optional[list[list[str]]] = None
+
+
+class BaseConnector(ABC):
+    """
+    Base connector class to be extended by all connectors.
+    """
+
+    _config = None
+    _logger: Logger = None
+    _additional_filters: list[list[str]] = None
+
+    def __init__(self, config):
+        """
+        Initialize the connector with the given configuration.
+
+        Args:
+            config (dict): The configuration for the connector.
+        """
+        self._config = config
+
+    @abstractmethod
+    def head(self):
+        """
+        Return the head of the data source that the connector is connected to.
+        This information is passed to the LLM to provide the schema of the
+        data source.
+        """
+        pass
+
+    @abstractmethod
+    def execute(self) -> DataFrameType:
+        """
+        Execute the given query on the data source that the connector is
+        connected to.
+        """
+        pass
+
+    def set_additional_filters(self, filters: list[list[str]]):
+        """
+        Add additional filters to the connector.
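+        Filters reuse the triple ``[column, operator, value]`` format of the
+        ``where`` config entries; an illustrative call would be
+        ``connector.set_additional_filters([["loan_status", "=", "PAIDOFF"]])``.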
+
+        Args:
+            filters (list[list[str]]): The additional filters to add to the
+                connector, each expressed as ``[column, operator, value]``.
+        """
+        self._additional_filters = filters if filters else []
+
+    @property
+    def rows_count(self):
+        """
+        Return the number of rows in the data source that the connector is
+        connected to.
+        """
+        raise NotImplementedError
+
+    @property
+    def columns_count(self):
+        """
+        Return the number of columns in the data source that the connector is
+        connected to.
+        """
+        raise NotImplementedError
+
+    @property
+    def column_hash(self):
+        """
+        Return the hash code that is unique to the columns of the data source
+        that the connector is connected to.
+        """
+        raise NotImplementedError
+
+    @property
+    def path(self):
+        """
+        Return the path of the data source that the connector is connected to.
+        """
+        # Connection-string-like path: <ClassName>://<host>:<port>/<database>/<table>
+        return (
+            self.__class__.__name__
+            + "://"
+            + self._config.host
+            + ":"
+            + str(self._config.port)
+            + "/"
+            + self._config.database
+            + "/"
+            + self._config.table
+        )
+
+    @property
+    def logger(self):
+        """
+        Return the logger for the connector.
+        """
+        return self._logger
+
+    @logger.setter
+    def logger(self, logger: Logger):
+        """
+        Set the logger for the connector.
+
+        Args:
+            logger (Logger): The logger for the connector.
+        """
+        self._logger = logger
+
+    @property
+    def fallback_name(self):
+        """
+        Return the name of the table that the connector is connected to.
+        """
+        raise NotImplementedError
diff --git a/pandasai/connectors/sql.py b/pandasai/connectors/sql.py
new file mode 100644
index 000000000..f8c2afff6
--- /dev/null
+++ b/pandasai/connectors/sql.py
@@ -0,0 +1,393 @@
+"""
+SQL connectors are used to connect to SQL databases in different dialects.
+"""
+
+import re
+import os
+import pandas as pd
+from .base import BaseConnector, ConnectorConfig
+from sqlalchemy import create_engine, sql, text, select, asc
+from functools import cached_property, cache
+import hashlib
+from ..helpers.path import find_project_root
+from typing import Union
+import time
+
+
+class SQLConnector(BaseConnector):
+    """
+    SQL connectors are used to connect to SQL databases in different dialects.
+    """
+
+    _engine = None
+    _connection = None
+    _rows_count: int = None
+    _columns_count: int = None
+    _cache_interval: int = 600  # 10 minutes
+
+    def __init__(self, config: Union[ConnectorConfig, dict], cache_interval: int = 600):
+        """
+        Initialize the SQL connector with the given configuration.
+
+        Args:
+            config (ConnectorConfig): The configuration for the SQL connector.
+            cache_interval (int): The lifetime of cached results, in seconds.
+        """
+        if isinstance(config, dict):
+            config = ConnectorConfig(**config)
+        super().__init__(config)
+
+        if config.dialect is None:
+            raise ValueError("SQL dialect must be specified")
+
+        if config.driver:
+            self._engine = create_engine(
+                f"{config.dialect}+{config.driver}://{config.username}:{config.password}"
+                f"@{config.host}:{str(config.port)}/{config.database}"
+            )
+        else:
+            self._engine = create_engine(
+                f"{config.dialect}://{config.username}:{config.password}@{config.host}"
+                f":{str(config.port)}/{config.database}"
+            )
+        self._connection = self._engine.connect()
+        self._cache_interval = cache_interval
+
+    def __del__(self):
+        """
+        Close the connection to the SQL database.
+        """
+        self._connection.close()
+
+    def __repr__(self):
+        """
+        Return the string representation of the SQL connector.
+
+        Returns:
+            str: The string representation of the SQL connector.
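+
+            Note: the representation (illustratively,
+            ``<MySQLConnector dialect=mysql driver=pymysql ... table=loans>``)
+            includes the configured password verbatim, so avoid logging it in
+            shared environments.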
+ """ + return ( + f"<{self.__class__.__name__} dialect={self._config.dialect} " + f"driver={self._config.driver} username={self._config.username} " + f"password={self._config.password} host={self._config.host} " + f"port={str(self._config.port)} database={self._config.database} " + f"table={self._config.table}>" + ) + + def _validate_column_name(self, column_name): + regex = r"^[a-zA-Z0-9_]+$" + if not re.match(regex, column_name): + raise ValueError("Invalid column name: {}".format(column_name)) + + def _build_query(self, limit=None, order=None): + base_query = select("*").select_from(text(self._config.table)) + valid_operators = ["=", ">", "<", ">=", "<=", "LIKE", "!=", "IN", "NOT IN"] + + if self._config.where or self._additional_filters: + # conditions is the list of wher + additional filters + conditions = [] + if self._config.where: + conditions += self._config.where + if self._additional_filters: + conditions += self._additional_filters + + query_params = {} + condition_strings = [] + + for i, condition in enumerate(conditions): + if len(condition) == 3: + column_name, operator, value = condition + if operator in valid_operators: + self._validate_column_name(column_name) + + condition_strings.append(f"{column_name} {operator} :value_{i}") + query_params[f"value_{i}"] = value + + if condition_strings: + where_clause = " AND ".join(condition_strings) + base_query = base_query.where( + text(where_clause).bindparams(**query_params) + ) + + if order: + base_query = base_query.order_by(asc(text(order))) + + if limit: + base_query = base_query.limit(limit) + + return base_query + + @cache + def head(self): + """ + Return the head of the data source that the connector is connected to. + This information is passed to the LLM to provide the schema of the data source. + + Returns: + DataFrame: The head of the data source. + """ + + if self.logger: + self.logger.log( + f"Getting head of {self._config.table} " + f"using dialect {self._config.dialect}" + ) + + # Run a SQL query to get all the columns names and 5 random rows + query = self._build_query(limit=5, order="RAND()") + + # Return the head of the data source + return pd.read_sql(query, self._connection) + + def _get_cache_path(self, include_additional_filters: bool = False): + """ + Return the path of the cache file. + + Returns: + str: The path of the cache file. + """ + try: + cache_dir = os.path.join((find_project_root()), "cache") + except ValueError: + cache_dir = os.path.join(os.getcwd(), "cache") + + os.makedirs(cache_dir, mode=0o777, exist_ok=True) + + filename = ( + self._get_column_hash(include_additional_filters=include_additional_filters) + + ".csv" + ) + path = os.path.join(cache_dir, filename) + + return path + + def _cached(self, include_additional_filters: bool = False): + """ + Return the cached data if it exists and is not older than the cache interval. + + Returns: + DataFrame|bool: The cached data if it exists and is not older than + the cache interval, False otherwise. 
+ """ + filename = self._get_cache_path( + include_additional_filters=include_additional_filters + ) + if not os.path.exists(filename): + return False + + # If the file is older than 1 day, delete it + if os.path.getmtime(filename) < time.time() - self._cache_interval: + if self.logger: + self.logger.log(f"Deleting expired cached data from {filename}") + os.remove(filename) + return False + + if self.logger: + self.logger.log(f"Loading cached data from {filename}") + + return filename + + def _save_cache(self, df): + """ + Save the given DataFrame to the cache. + + Args: + df (DataFrame): The DataFrame to save to the cache. + """ + + filename = self._get_cache_path( + include_additional_filters=self._additional_filters is not None + and len(self._additional_filters) > 0 + ) + df.to_csv(filename, index=False) + + def execute(self): + """ + Execute the SQL query and return the result. + + Returns: + DataFrame: The result of the SQL query. + """ + + # try to load the generic cache first, then the cache with additional + # filters as a fallback + cached = self._cached() or self._cached(include_additional_filters=True) + if cached: + return pd.read_csv(cached) + + if self.logger: + self.logger.log( + f"Loading the table {self._config.table} " + f"using dialect {self._config.dialect}" + ) + + # Run a SQL query to get all the results + query = self._build_query() + + # Get the result of the query + result = pd.read_sql(query, self._connection) + + # Save the result to the cache + self._save_cache(result) + + # Return the result + return result + + @cached_property + def rows_count(self): + """ + Return the number of rows in the SQL table. + + Returns: + int: The number of rows in the SQL table. + """ + + if self._rows_count is not None: + return self._rows_count + + if self.logger: + self.logger.log( + "Getting the number of rows in the table " + f"{self._config.table} using dialect " + f"{self._config.dialect}" + ) + + # Run a SQL query to get the number of rows + query = sql.text( + "SELECT COUNT(*) FROM information_schema.columns " + "WHERE table_name = :table_name" + ).bindparams(table_name=self._config.table) + + # Return the number of rows + self._rows_count = self._connection.execute(query).fetchone()[0] + return self._rows_count + + @cached_property + def columns_count(self): + """ + Return the number of columns in the SQL table. + + Returns: + int: The number of columns in the SQL table. + """ + + if self._columns_count is not None: + return self._columns_count + + if self.logger: + self.logger.log( + "Getting the number of columns in the table " + f"{self._config.table} using dialect " + f"{self._config.dialect}" + ) + + # Run a SQL query to get the number of columns + query = sql.text( + "SELECT COUNT(*) FROM information_schema.columns " + f"WHERE table_name = '{self._config.table}'" + ) + + # Return the number of columns + self._columns_count = self._connection.execute(query).fetchone()[0] + return self._columns_count + + def _get_column_hash(self, include_additional_filters: bool = False): + """ + Return the hash of the SQL table columns. + + Args: + include_additional_filters (bool, optional): Whether to include the + additional filters in the hash. Defaults to False. + + Returns: + str: The hash of the SQL table columns. 
+ """ + + # Return the hash of the columns and the where clause + columns_str = "".join(self.head().columns) + if ( + self._config.where + or include_additional_filters + and self._additional_filters is not None + ): + columns_str += "WHERE" + if self._config.where: + # where clause is a list of lists + for condition in self._config.where: + columns_str += f"{condition[0]} {condition[1]} {condition[2]}" + if include_additional_filters and self._additional_filters: + for condition in self._additional_filters: + columns_str += f"{condition[0]} {condition[1]} {condition[2]}" + + hash_object = hashlib.sha256(columns_str.encode()) + return hash_object.hexdigest() + + @cached_property + def column_hash(self): + """ + Return the hash of the SQL table columns. + + Returns: + str: The hash of the SQL table columns. + """ + return self._get_column_hash() + + @property + def fallback_name(self): + return self._config.table + + +class MySQLConnector(SQLConnector): + """ + MySQL connectors are used to connect to MySQL databases. + """ + + def __init__(self, config: ConnectorConfig): + """ + Initialize the MySQL connector with the given configuration. + + Args: + config (ConnectorConfig): The configuration for the MySQL connector. + """ + config["dialect"] = "mysql" + config["driver"] = "pymysql" + + if "host" not in config and os.getenv("MYSQL_HOST"): + config["host"] = os.getenv("MYSQL_HOST") + if "port" not in config and os.getenv("MYSQL_PORT"): + config["port"] = os.getenv("MYSQL_PORT") + if "database" not in config and os.getenv("MYSQL_DATABASE"): + config["database"] = os.getenv("MYSQL_DATABASE") + if "username" not in config and os.getenv("MYSQL_USERNAME"): + config["username"] = os.getenv("MYSQL_USERNAME") + if "password" not in config and os.getenv("MYSQL_PASSWORD"): + config["password"] = os.getenv("MYSQL_PASSWORD") + + super().__init__(config) + + +class PostgreSQLConnector(SQLConnector): + """ + PostgreSQL connectors are used to connect to PostgreSQL databases. + """ + + def __init__(self, config: ConnectorConfig): + """ + Initialize the PostgreSQL connector with the given configuration. + + Args: + config (ConnectorConfig): The configuration for the PostgreSQL connector. + """ + config["dialect"] = "postgresql" + config["driver"] = "psycopg2" + + if "host" not in config and os.getenv("POSTGRESQL_HOST"): + config["host"] = os.getenv("POSTGRESQL_HOST") + if "port" not in config and os.getenv("POSTGRESQL_PORT"): + config["port"] = os.getenv("POSTGRESQL_PORT") + if "database" not in config and os.getenv("POSTGRESQL_DATABASE"): + config["database"] = os.getenv("POSTGRESQL_DATABASE") + if "username" not in config and os.getenv("POSTGRESQL_USERNAME"): + config["username"] = os.getenv("POSTGRESQL_USERNAME") + if "password" not in config and os.getenv("POSTGRESQL_PASSWORD"): + config["password"] = os.getenv("POSTGRESQL_PASSWORD") + + super().__init__(config) diff --git a/pandasai/connectors/yahoo_finance.py b/pandasai/connectors/yahoo_finance.py new file mode 100644 index 000000000..9beccd66f --- /dev/null +++ b/pandasai/connectors/yahoo_finance.py @@ -0,0 +1,164 @@ +import os +import yfinance as yf +import pandas as pd +from .base import ConnectorConfig, BaseConnector +import time +from ..helpers.path import find_project_root +import hashlib + + +class YahooFinanceConnector(BaseConnector): + """ + Yahoo Finance connector for retrieving stock data. 
+ """ + + _cache_interval: int = 600 # 10 minutes + + def __init__(self, stock_ticker, where=None, cache_interval: int = 600): + yahoo_finance_config = ConnectorConfig( + dialect="yahoo_finance", + username="", + password="", + host="yahoo.finance.com", + port=443, + database="stock_data", + table=stock_ticker, + where=where, + ) + self._cache_interval = cache_interval + super().__init__(yahoo_finance_config) + + def head(self): + """ + Return the head of the data source that the connector is connected to. + + Returns: + DataFrameType: The head of the data source that the connector is + connected to. + """ + ticker = yf.Ticker(self._config.table) + head_data = ticker.history(period="5d") + return head_data + + def _get_cache_path(self, include_additional_filters: bool = False): + """ + Return the path of the cache file. + + Returns: + str: The path of the cache file. + """ + cache_dir = os.path.join(os.getcwd(), "") + try: + cache_dir = os.path.join((find_project_root()), "cache") + except ValueError: + cache_dir = os.path.join(os.getcwd(), "cache") + + return os.path.join(cache_dir, f"{self._config.table}_data.csv") + + def _get_cache_path(self): + """ + Return the path of the cache file for Yahoo Finance data. + """ + try: + cache_dir = os.path.join((find_project_root()), "cache") + except ValueError: + cache_dir = os.path.join(os.getcwd(), "cache") + + os.makedirs(cache_dir, mode=0o777, exist_ok=True) + + return os.path.join(cache_dir, f"{self._config.table}_data.csv") + + def _cached(self): + """ + Return the cached Yahoo Finance data if it exists and is not older than the + cache interval. + + Returns: + DataFrame|None: The cached data if it exists and is not older than the cache + interval, None otherwise. + """ + cache_path = self._get_cache_path() + if not os.path.exists(cache_path): + return None + + # If the file is older than 1 day, delete it + if os.path.getmtime(cache_path) < time.time() - self._cache_interval: + if self.logger: + self.logger.log(f"Deleting expired cached data from {cache_path}") + os.remove(cache_path) + return None + + if self.logger: + self.logger.log(f"Loading cached data from {cache_path}") + + return cache_path + + def execute(self): + """ + Execute the connector and return the result. + + Returns: + DataFrameType: The result of the connector. + """ + cached_path = self._cached() + if cached_path: + return pd.read_csv(cached_path) + + # Use yfinance to retrieve historical stock data + ticker = yf.Ticker(self._config.table) + stock_data = ticker.history(period="max") + + # Save the result to the cache + stock_data.to_csv(self._get_cache_path(), index=False) + + return stock_data + + @property + def rows_count(self): + """ + Return the number of rows in the data source that the connector is + connected to. + + Returns: + int: The number of rows in the data source that the connector is + connected to. + """ + stock_data = self.execute() + return len(stock_data) + + @property + def columns_count(self): + """ + Return the number of columns in the data source that the connector is + connected to. + + Returns: + int: The number of columns in the data source that the connector is + connected to. + """ + stock_data = self.execute() + return len(stock_data.columns) + + @property + def column_hash(self): + """ + Return the hash code that is unique to the columns of the data source + that the connector is connected to. + + Returns: + int: The hash code that is unique to the columns of the data source + that the connector is connected to. 
+ """ + stock_data = self.execute() + columns_str = "|".join(stock_data.columns) + return hashlib.sha256(columns_str.encode("utf-8")).hexdigest() + + @property + def fallback_name(self): + """ + Return the fallback name of the connector. + + Returns: + str: The fallback name of the connector. + """ + return self._config.table diff --git a/pandasai/helpers/__init__.py b/pandasai/helpers/__init__.py index 9d5ee4972..21586679b 100644 --- a/pandasai/helpers/__init__.py +++ b/pandasai/helpers/__init__.py @@ -5,6 +5,7 @@ from .notebook import Notebook from .anonymizer import Anonymizer from .data_sampler import DataSampler +from .logger import Logger __all__ = [ "get_openai_callback", @@ -15,4 +16,5 @@ "Notebook", "Anonymizer", "DataSampler", + "Logger", ] diff --git a/pandasai/helpers/code_manager.py b/pandasai/helpers/code_manager.py index 415cc64f6..9979607e5 100644 --- a/pandasai/helpers/code_manager.py +++ b/pandasai/helpers/code_manager.py @@ -1,7 +1,11 @@ import re import ast +from collections import defaultdict + import astor import pandas as pd + +from .node_visitors import AssignmentVisitor, CallVisitor from .save_chart import add_save_chart from .optional import import_dependency from ..exceptions import BadImportError @@ -11,7 +15,7 @@ WHITELISTED_LIBRARIES, ) from ..middlewares.charts import ChartsMiddleware -from typing import Union, List, Optional +from typing import Union, List, Optional, Generator from ..helpers.logger import Logger from ..schemas.df_config import Config import logging @@ -24,7 +28,19 @@ class CodeManager: _config: Union[Config, dict] _logger: Logger = None _additional_dependencies: List[dict] = [] - + _ast_comparatos_map: dict = { + ast.Eq: "=", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", + } + _current_code_executed: str = None _last_code_executed: str = None def __init__( @@ -142,6 +158,27 @@ def _handle_error( if not use_error_correction_framework: raise exc + def _required_dfs(self, code: str) -> List[str]: + """ + List the index of the DataFrames that are needed to execute the code. The goal + is to avoid to run the connectors if the code does not need them. + + Args: + code (str): Python code to execute + + Returns: + List[int]: A list of the index of the DataFrames that are needed to execute + the code. + """ + + required_dfs = [] + for i, df in enumerate(self._dfs): + if f"dfs[{i}]" in code: + required_dfs.append(df) + else: + required_dfs.append(None) + return required_dfs + def execute_code( self, code: str, @@ -163,6 +200,7 @@ def execute_code( on the generated code. 
""" + self._current_code_executed = code for middleware in self._middlewares: code = middleware(code) @@ -187,7 +225,11 @@ def execute_code( ```""" ) + # List the required dfs, so we can avoid to run the connectors + # if the code does not need them + dfs = self._required_dfs(code_to_run) environment: dict = self._get_environment() + environment["dfs"] = self._get_samples(dfs) caught_error = self._execute_catching_errors(code_to_run, environment) if caught_error is not None: @@ -200,17 +242,51 @@ def execute_code( analyze_data = environment.get("analyze_data", None) - return analyze_data(self._get_original_dfs()) + return analyze_data(self._get_originals(dfs)) + + def _get_samples(self, dfs): + """ + Get samples from the dfs + + Args: + dfs (list): List of dfs - def _get_original_dfs(self): - dfs = [] - for df in self._dfs: - if df.engine == "polars": - dfs.append(df.original.to_pandas()) + Returns: + list: List of samples + """ + samples = [] + for df in dfs: + if df is not None: + samples.append(df.head_df) else: - dfs.append(df.original) + samples.append(None) + return samples - return dfs + def _get_originals(self, dfs): + """ + Get original dfs + + Args: + dfs (list): List of dfs + + Returns: + list: List of dfs + """ + original_dfs = [] + for index, df in enumerate(dfs): + if df is None: + original_dfs.append(None) + continue + + if df.has_connector: + extracted_filters = self._extract_filters(self._current_code_executed) + filters = extracted_filters.get(f"dfs[{index}]", []) + df.connector.set_additional_filters(filters) + df.load_connector(temporary=len(filters) > 0) + + original_dfs.append(df.dataframe) + + return original_dfs def _get_environment(self) -> dict: """ @@ -219,11 +295,8 @@ def _get_environment(self) -> dict: Returns (dict): A dictionary of environment variables """ - dfs = self._get_original_dfs() - return { "pd": pd, - "dfs": dfs, **{ lib["alias"]: getattr(import_dependency(lib["module"]), lib["name"]) if hasattr(import_dependency(lib["module"]), lib["name"]) @@ -394,6 +467,222 @@ def _check_imports(self, node: Union[ast.Import, ast.ImportFrom]): if library not in WHITELISTED_BUILTINS: raise BadImportError(library) + @staticmethod + def _get_nearest_func_call( + current_lineno: int, calls: list[ast.Call], func_name: str + ) -> ast.Call: + """ + Utility function to get the nearest previous call node. + + Sort call nodes list (copy of the list) by line number. + Iterate over the call nodes list. If the call node's function name + equals to `func_name`, set `nearest_call` to the node object. + + Args: + current_lineno (int): Number of the current processed line. + calls (list[ast.Assign]): List of call nodes. + func_name (str): Name of the target function. + + Returns: + ast.Call: The node of the nearest previous call `()`. + """ + calls = sorted(calls, key=lambda node: node.lineno) + nearest_call = None + for call_node in calls: + if call_node.lineno > current_lineno: + return nearest_call + try: + if call_node.func.attr == func_name: + nearest_call = call_node + except AttributeError: + continue + + @staticmethod + def _tokenize_operand(operand_node: ast.expr) -> Generator[str, None, None]: + """ + Utility generator function to get subscript slice contants. + + Args: + operand_node (ast.expr): + The node to be tokenized. + Yields: + str: Token string. + + Examples: + >>> code = ''' + ... foo = [1, [2, 3], [[4, 5], [6, 7]]] + ... print(foo[2][1][0]) + ... 
+            >>> tree = ast.parse(code)
+            >>> res = CodeManager._tokenize_operand(tree.body[1].value.args[0])
+            >>> print(list(res))
+            ['foo', 2, 1, 0]
+        """
+        if isinstance(operand_node, ast.Subscript):
+            slice_ = operand_node.slice.value
+            yield from CodeManager._tokenize_operand(operand_node.value)
+            yield slice_
+
+        if isinstance(operand_node, ast.Name):
+            yield operand_node.id
+
+        if isinstance(operand_node, ast.Constant):
+            yield operand_node.value
+
+    @staticmethod
+    def _get_df_id_by_nearest_assignment(
+        current_lineno: int, assignments: list[ast.Assign], target_name: str
+    ):
+        """
+        Utility function to get the df label by finding the nearest assignment.
+
+        Sort assignment nodes list (copy of the list) by line number.
+        Iterate over the assignment nodes list. If the assignment node's value
+        looks like `dfs[<index>]` and the target label equals `target_name`,
+        set `nearest_assignment` to "dfs[<index>]".
+
+        Args:
+            current_lineno (int): Number of the current processed line.
+            assignments (list[ast.Assign]): List of assignment nodes.
+            target_name (str): Name of the target variable. The assignment
+                node is supposed to assign to this name.
+
+        Returns:
+            str: The string representing the df label, like "dfs[<index>]".
+        """
+        nearest_assignment = None
+        assignments = sorted(assignments, key=lambda node: node.lineno)
+        for assignment in assignments:
+            if assignment.lineno > current_lineno:
+                return nearest_assignment
+            try:
+                is_subscript = isinstance(assignment.value, ast.Subscript)
+                dfs_on_the_right = assignment.value.value.id == "dfs"
+                assign_to_target = assignment.targets[0].id == target_name
+                if is_subscript and dfs_on_the_right and assign_to_target:
+                    nearest_assignment = f"dfs[{assignment.value.slice.value}]"
+            except AttributeError:
+                continue
+        return nearest_assignment
+
+    def _extract_comparisons(self, tree: ast.Module) -> dict[str, list]:
+        """
+        Process nodes from the passed tree to extract filters.
+
+        Collects all assignments in the tree.
+        Collects all function calls in the tree.
+        Walk over the tree and handle each comparison node.
+        For each comparison node, determine which `df` the node relates to.
+        Parse constant values from the comparison node.
+        Add to the result dict.
+
+        Args:
+            tree (ast.Module): The parsed tree of the code to be processed.
+
+        Returns:
+            dict: The `defaultdict(list)` instance containing all filters
+                parsed from the passed instructions tree. The dictionary has
+                the following structure:
+                {
+                    "<df_name>": [
+                        ("<left_operand>", "<operator>", "<right_operand>")
+                    ]
+                }
+        """
+        comparisons = defaultdict(list)
+        current_df = "dfs[0]"
+
+        visitor = AssignmentVisitor()
+        visitor.visit(tree)
+        assignments = visitor.assignment_nodes
+
+        call_visitor = CallVisitor()
+        call_visitor.visit(tree)
+        calls = call_visitor.call_nodes
+
+        for node in ast.walk(tree):
+            if isinstance(node, ast.Compare):
+                is_call_on_left = isinstance(node.left, ast.Call)
+                is_polars = False
+                is_calling_col = False
+                try:
+                    is_polars = node.left.func.value.id in ("pl", "polars")
+                    is_calling_col = node.left.func.attr == "col"
+                except AttributeError:
+                    pass
+
+                if is_call_on_left and is_polars and is_calling_col:
+                    df_name = self._get_nearest_func_call(
+                        node.lineno, calls, "filter"
+                    ).func.value.id
+                    current_df = self._get_df_id_by_nearest_assignment(
+                        node.lineno, assignments, df_name
+                    )
+                    left_str = node.left.args[0].value
+
+                    for op, right in zip(node.ops, node.comparators):
+                        op_str = self._ast_comparators_map.get(type(op), "Unknown")
+                        right_str = right.value
+
+                        comparisons[current_df].append((left_str, op_str, right_str))
+                elif isinstance(node.left, ast.Subscript):
+                    name, *slices = self._tokenize_operand(node.left)
+                    current_df = (
+                        self._get_df_id_by_nearest_assignment(
+                            node.lineno, assignments, name
+                        )
+                        or current_df
+                    )
+                    left_str = name if not slices else slices[-1]
+
+                    for op, right in zip(node.ops, node.comparators):
+                        op_str = self._ast_comparators_map.get(type(op), "Unknown")
+                        name, *slices = self._tokenize_operand(right)
+                        right_str = name if not slices else slices[-1]
+
+                        comparisons[current_df].append((left_str, op_str, right_str))
+        return comparisons
+
+    def _extract_filters(self, code) -> dict[str, list]:
+        """
+        Extract filters to be applied to the dataframe from the passed code.
+
+        Args:
+            code (str): A snippet of code to be parsed.
+
+        Returns:
+            dict: The dictionary containing all filters parsed from
+                the passed code. The dictionary has the following structure:
+                {
+                    "<df_name>": [
+                        ("<left_operand>", "<operator>", "<right_operand>")
+                    ]
+                }
+
+        Raises:
+            SyntaxError: If the code is unable to be parsed by `ast.parse()`.
+            Exception: If any exception is raised during working with nodes
+                of the code tree.
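+
+        Example (illustrative):
+            Code like ``df = dfs[0]`` followed by
+            ``df[df["loan_status"] == "PAIDOFF"]`` yields
+            ``{"dfs[0]": [("loan_status", "=", "PAIDOFF")]}``.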
+ """ + try: + parsed_tree = ast.parse(code) + except SyntaxError: + self._logger.log( + "Invalid code passed for extracting filters", level=logging.ERROR + ) + self._logger.log(f"{traceback.format_exc()}", level=logging.DEBUG) + raise + + try: + filters = self._extract_comparisons(parsed_tree) + except Exception: + self._logger.log( + "Unable to extract filters for passed code", level=logging.ERROR + ) + self._logger.log(f"{traceback.format_exc()}", level=logging.DEBUG) + raise + + return filters + @property def middlewares(self): return self._middlewares diff --git a/pandasai/helpers/df_config_manager.py b/pandasai/helpers/df_config_manager.py index 09d9f018a..e48e5055a 100644 --- a/pandasai/helpers/df_config_manager.py +++ b/pandasai/helpers/df_config_manager.py @@ -36,7 +36,7 @@ def _create_csv_save_path(self): directory_path = os.path.join(find_project_root(), "cache") create_directory(directory_path) - csv_file_path = os.path.join(directory_path, f"{self.name}.csv") + csv_file_path = os.path.join(directory_path, f"{self._sdf.table_name}.csv") return csv_file_path def _check_for_duplicates(self, saved_dfs): @@ -47,19 +47,28 @@ def _check_for_duplicates(self, saved_dfs): saved_dfs (List[dict]): List of saved dataframes """ - if any(df_info["name"] == self.name for df_info in saved_dfs): - raise ValueError(f"Duplicate dataframe found: {self.name}") + if any(df_info["name"] == self._sdf.table_name for df_info in saved_dfs): + raise ValueError(f"Duplicate dataframe found: {self._sdf.table_name}") def _get_import_path(self): """ Gets the import path for the dataframe """ + # Handle connectors + if self._sdf.connector is not None: + return self._sdf.connector.path + # Return if already a string if isinstance(self.original_import, str): # Check if it is a csv or xlsx file - if self.original_import.endswith(".csv") or self.original_import.endswith( - ".xlsx" + if ( + self.original_import.endswith(".csv") + or self.original_import.endswith(".parquet") + or self.original_import.endswith(".xlsx") + or self.original_import.startswith( + "https://docs.google.com/spreadsheets/" + ) ): return self.original_import @@ -70,11 +79,11 @@ def _get_import_path(self): dataframe_type = df_type(self.original_import) if dataframe_type == "pandas": csv_file_path = self._create_csv_save_path() - self._sdf.original.to_csv(csv_file_path) + self._sdf.dataframe.to_csv(csv_file_path) elif dataframe_type == "polars": csv_file_path = self._create_csv_save_path() with open(csv_file_path, "w") as f: - self._sdf.original.write_csv(f) + self._sdf.dataframe.write_csv(f) else: raise ValueError("Unknown dataframe type") @@ -83,6 +92,12 @@ def _get_import_path(self): def save(self, name=None): """ Saves the dataframe object to used for later + + Args: + name (str, optional): Name of the dataframe. Defaults to None. 
+ + Raises: + ValueError: If the dataframe name already exists """ file_path = find_closest("pandasai.json") @@ -103,8 +118,8 @@ def save(self, name=None): pandas_json[saved_df_keys].append( { "name": name if name is not None else self.name, - "description": self.description, - "sample": self.head_csv, + "description": self._sdf.table_description, + "sample": self._sdf.head_csv, "import_path": import_path, } ) @@ -114,6 +129,15 @@ def save(self, name=None): json_file.truncate() def load(self, name) -> dict: + """ + Loads a dataframe from the config file + + Args: + name (str): Name of the dataframe + + Returns: + dict: Dictionary with dataframe information + """ file_path = find_closest("pandasai.json") with open(file_path, "r") as json_file: @@ -130,17 +154,17 @@ def head_csv(self): @property def name(self): - name = self._sdf.name + name = self._sdf.table_name if name is None: # Generate random hash - hash_object = hashlib.sha256(self.head_csv.encode()) + hash_object = hashlib.sha256(self._sdf.head_csv.encode()) name = hash_object.hexdigest() return name @property def description(self): - return self._sdf.description + return self._sdf.table_description @property def original_import(self): - return self._sdf.original_import + return self._sdf._original_import diff --git a/pandasai/helpers/df_info.py b/pandasai/helpers/df_info.py index ac3de91f3..a22dd5b90 100644 --- a/pandasai/helpers/df_info.py +++ b/pandasai/helpers/df_info.py @@ -11,7 +11,7 @@ DataFrameType = Union[pd.DataFrame, str] -def df_type(df: DataFrameType) -> str: +def df_type(df: DataFrameType) -> Union[str, None]: """ Returns the type of the dataframe. diff --git a/pandasai/helpers/memory.py b/pandasai/helpers/memory.py index 47d55ebff..5c7e01c8e 100644 --- a/pandasai/helpers/memory.py +++ b/pandasai/helpers/memory.py @@ -24,8 +24,9 @@ def last(self) -> dict: def get_conversation(self, limit: int = 1) -> str: return "\n".join( [ - f"{'User' if message['is_user'] else 'Bot'}: {message['message']}" - for message in self._messages[-limit:] + f"{f'User {i+1}' if message['is_user'] else f'Assistant {i}'}: " + f"{message['message']}" + for i, message in enumerate(self._messages[-limit:]) ] ) diff --git a/pandasai/helpers/node_visitors.py b/pandasai/helpers/node_visitors.py new file mode 100644 index 000000000..87543de86 --- /dev/null +++ b/pandasai/helpers/node_visitors.py @@ -0,0 +1,19 @@ +import ast + + +class AssignmentVisitor(ast.NodeVisitor): + def __init__(self): + self.assignment_nodes = [] + + def visit_Assign(self, node): # noqa: N802 + self.assignment_nodes.append(node) + self.generic_visit(node) + + +class CallVisitor(ast.NodeVisitor): + def __init__(self): + self.call_nodes = [] + + def visit_Call(self, node): # noqa: N802 + self.call_nodes.append(node) + self.generic_visit(node) diff --git a/pandasai/prompts/base.py b/pandasai/prompts/base.py index 1441c6256..f53ac9082 100644 --- a/pandasai/prompts/base.py +++ b/pandasai/prompts/base.py @@ -28,19 +28,20 @@ def _generate_dataframes(self, dfs): """ dataframes = [] for index, df in enumerate(dfs, start=1): - description = "Dataframe " - if df.name is not None: - description += f"{df.name} (dfs[{index-1}])" + description = """ +Dataframe """ + if df.table_name is not None: + description += f"{df.table_name} (dfs[{index-1}])" else: description += f"dfs[{index-1}]" description += ( f", with {df.rows_count} rows and {df.columns_count} columns." 
) - if df.description is not None: - description += f"\nDescription: {df.description}" + if df.table_description is not None: + description += f"\nDescription: {df.table_description}" description += f""" This is the metadata of the dataframe dfs[{index-1}]: -{df.head_csv}""" # noqa: E501 +{df.head_csv}""" # noqa: E501 dataframes.append(description) return "\n\n".join(dataframes) diff --git a/pandasai/prompts/correct_error_prompt.py b/pandasai/prompts/correct_error_prompt.py index de865946e..3a961761c 100644 --- a/pandasai/prompts/correct_error_prompt.py +++ b/pandasai/prompts/correct_error_prompt.py @@ -1,11 +1,11 @@ """ Prompt to correct Python Code on Error ``` -You are provided with a pandas dataframe (df) with {num_rows} rows and {num_columns} columns. -This is the metadata of the dataframe: -{df_head}. +You are provided with the following pandas DataFrames with the following metadata: + +{dataframes} The user asked the following question: -{question} +{conversation} You generated this python code: {code} @@ -13,9 +13,7 @@ It fails with the following error: {error_returned} -Correct the python code and return a new python code (do not import anything) that fixes the above -mentioned error. Do not generate the same code again. -``` +Correct the python code and return a new python code (do not import anything) that fixes the above mentioned error. Do not generate the same code again. """ # noqa: E501 from .base import Prompt @@ -25,9 +23,9 @@ class CorrectErrorPrompt(Prompt): """Prompt to Correct Python code on Error""" text: str = """ -You are provided with a pandas dataframe (df) with {num_rows} rows and {num_columns} columns. -This is the metadata of the dataframe: -{df_head}. +You are provided with the following {engine} DataFrames with the following metadata: + +{dataframes} The user asked the following question: {conversation} @@ -40,8 +38,3 @@ class CorrectErrorPrompt(Prompt): Correct the python code and return a new python code (do not import anything) that fixes the above mentioned error. Do not generate the same code again. """ # noqa: E501 - - def __init__(self, **kwargs): - super().__init__( - **kwargs, - ) diff --git a/pandasai/prompts/generate_python_code.py b/pandasai/prompts/generate_python_code.py index 902f2c841..baa39e3ad 100644 --- a/pandasai/prompts/generate_python_code.py +++ b/pandasai/prompts/generate_python_code.py @@ -1,31 +1,32 @@ """ Prompt to generate Python code ``` -You are provided with the following pandas DataFrames with the following metadata: +You are provided with the following pandas DataFrames: {dataframes} + +{conversation} + + This is the initial python code to be updated: ```python # TODO import all the dependencies required {default_import} -# Analyze the data -# 1. Prepare: Preprocessing and cleaning data if necessary -# 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) -# 3. Analyze: Conducting the actual analysis (if the user asks to create a chart save it to an image in exports/charts/temp_chart.png and do not show the chart.) -# 4. Output: return a dictionary of: -# - type (possible values "text", "number", "dataframe", "plot") -# - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) -# Example output: {{ "type": "text", "value": "The average loan amount is $15,000." }} def analyze_data(dfs: list[{engine_df_name}]) -> dict: - # Code goes here (do not add comments) - - -# Declare a result variable -result = analyze_data(dfs) + \"\"\" + Analyze the data + 1. 
Prepare: Preprocessing and cleaning data if necessary + 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) + 3. Analyze: Conducting the actual analysis (if the user asks to plot a chart save it to an image in {save_charts_path}/temp_chart.png and do not show the chart.) + 4. Output: return a dictionary of: + - type (possible values "text", "number", "dataframe", "plot") + - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) + Example output: {{ "type": "text", "value": "The average loan amount is $15,000." }} + \"\"\" ``` -Using the provided dataframes (`dfs`), update the python code based on the last user question: +Using the provided dataframes (`dfs`), update the python code based on the last question in the conversation. {conversation} Updated code: @@ -39,33 +40,33 @@ class GeneratePythonCodePrompt(Prompt): """Prompt to generate Python code""" text: str = """ -You are provided with the following pandas DataFrames with the following metadata: +You are provided with the following pandas DataFrames: {dataframes} + +{conversation} + + This is the initial python code to be updated: ```python # TODO import all the dependencies required {default_import} -# Analyze the data -# 1. Prepare: Preprocessing and cleaning data if necessary -# 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) -# 3. Analyze: Conducting the actual analysis (if the user asks to create a chart save it to an image in {save_charts_path}/temp_chart.png and do not show the chart.) -# 4. Output: return a dictionary of: -# - type (possible values "text", "number", "dataframe", "plot") -# - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) -# Example output: {{ "type": "text", "value": "The average loan amount is $15,000." }} def analyze_data(dfs: list[{engine_df_name}]) -> dict: - # Code goes here (do not add comments) - - -# Declare a result variable -result = analyze_data(dfs) + \"\"\" + Analyze the data + 1. Prepare: Preprocessing and cleaning data if necessary + 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) + 3. Analyze: Conducting the actual analysis (if the user asks to plot a chart save it to an image in {save_charts_path}/temp_chart.png and do not show the chart.) + 4. Output: return a dictionary of: + - type (possible values "text", "number", "dataframe", "plot") + - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) + Example output: {{ "type": "text", "value": "The average loan amount is $15,000." }} + \"\"\" ``` -Using the provided dataframes (`dfs`), update the python code based on the last user question: -{conversation} +Using the provided dataframes (`dfs`), update the python code based on the last question in the conversation. 
Updated code: """ # noqa: E501 diff --git a/pandasai/schemas/df_config.py b/pandasai/schemas/df_config.py index ae3ba7711..8cea0661c 100644 --- a/pandasai/schemas/df_config.py +++ b/pandasai/schemas/df_config.py @@ -19,6 +19,7 @@ class Config(BaseModel): max_retries: int = 3 middlewares: List[Middleware] = Field(default_factory=list) callback: Optional[BaseCallback] = None + lazy_load_connector: bool = True llm: Any = None class Config: diff --git a/pandasai/smart_dataframe/__init__.py b/pandasai/smart_dataframe/__init__.py index f5af2d56f..e539c64e5 100644 --- a/pandasai/smart_dataframe/__init__.py +++ b/pandasai/smart_dataframe/__init__.py @@ -22,6 +22,7 @@ from io import StringIO import pandas as pd +from functools import cached_property import pydantic from pandasai.helpers.df_validator import DfValidator @@ -40,88 +41,55 @@ from .abstract_df import DataframeAbstract from ..callbacks.base import BaseCallback from ..llm import LLM, LangchainLLM +from ..connectors.base import BaseConnector -class SmartDataframe(DataframeAbstract, Shortcuts): - _engine: str - _original_import: any - _name: str - _description: str - _df: pd.DataFrame - _dl: SmartDatalake - _sample_head: str = None - - def __init__( - self, - df: DataFrameType, - name: str = None, - description: str = None, - config: Optional[Union[Config, dict]] = None, - sample_head: pd.DataFrame = None, - logger: Logger = None, - ): - """ - Args: - df (Union[pd.DataFrame, pl.DataFrame]): Pandas or Polars dataframe - name (str, optional): Name of the dataframe. Defaults to None. - description (str, optional): Description of the dataframe. Defaults to "". - config (Union[Config, dict], optional): Config to be used. Defaults to None. - logger (Logger, optional): Logger to be used. Defaults to None. - """ - self._original_import = df - self._name = name - self._description = description - - self._load_df(df) +class SmartDataframeCore: + """ + A smart dataframe class is a wrapper around the pandas/polars dataframe that allows + you to query it using natural language. It uses the LLMs to generate Python code + from natural language and then executes it on the dataframe. + """ - self._load_engine() + _df = None + _df_loaded: bool = True + _temporary_loaded: bool = False + _connector: BaseConnector = None + _engine: str = None + _logger: Logger = None - self._dl = SmartDatalake([self], config=config, logger=logger) + def __init__(self, df: DataFrameType, logger: Logger = None): + self._logger = logger + self._load_dataframe(df) - if sample_head is not None: - self._sample_head = sample_head.to_csv(index=False) - - def _load_df(self, df: DataFrameType): + def _load_dataframe(self, df): """ - Load a dataframe into the smart dataframe + Load the dataframe from a file or a connector. Args: - df (DataFrameType): Pandas or Polars dataframe or path to a file + df (Union[pd.DataFrame, pl.DataFrame, BaseConnector]): + Pandas or Polars dataframe or a connector. """ - if isinstance(df, str): - if not ( - df.endswith(".csv") - or df.endswith(".parquet") - or df.endswith(".xlsx") - or df.startswith("https://docs.google.com/spreadsheets/") - ): - df_config = self._load_from_config(df) - if df_config: - if self._name is None: - self._name = df_config["name"] - if self._description is None: - self._description = df_config["description"] - df = df_config["import_path"] - else: - raise ValueError( - "Could not find a saved dataframe configuration " - "with the given name." 
- ) - - self._df = self._import_from_file(df) + if isinstance(df, BaseConnector): + self.dataframe = None + self.connector = df + self.connector.logger = self._logger + self._df_loaded = False + elif isinstance(df, str): + self.dataframe = self._import_from_file(df) elif isinstance(df, pd.Series): - self._df = df.to_frame() + self.dataframe = df.to_frame() elif isinstance(df, (list, dict)): # if the list can be converted to a dataframe, convert it # otherwise, raise an error try: - self._df = pd.DataFrame(df) + self.dataframe = pd.DataFrame(df) except ValueError: raise ValueError( "Invalid input data. We cannot convert it to a dataframe." ) else: - self._df = df + self.dataframe = df def _import_from_file(self, file_path: str): """ @@ -146,36 +114,196 @@ def _import_from_file(self, file_path: str): raise ValueError("Invalid file format.") def _load_engine(self): - self._engine = df_type(self._df) + """ + Load the engine of the dataframe (Pandas or Polars) + """ + engine = df_type(self._df) - if self._engine is None: + if engine is None: raise ValueError( "Invalid input data. Must be a Pandas or Polars dataframe." ) - def __getattr__(self, name): - if name in self.__dict__: - return self.__dict__[name] - elif name in dir(SmartDataframe): - return object.__getattribute__(self, name) - elif hasattr(self._df, name): - return getattr(self._df, name) + self._engine = engine + + def _validate_and_convert_dataframe(self, df: DataFrameType) -> DataFrameType: + """ + Validate the dataframe and convert it to a Pandas or Polars dataframe. + + Args: + df (DataFrameType): Pandas or Polars dataframe or path to a file + + Returns: + DataFrameType: Pandas or Polars dataframe + """ + if isinstance(df, str): + return self._import_from_file(df) + elif isinstance(df, (list, dict)): + # if the list or dictionary can be converted to a dataframe, convert it + # otherwise, raise an error + try: + return pd.DataFrame(df) + except ValueError: + raise ValueError( + "Invalid input data. We cannot convert it to a dataframe." + ) else: - raise AttributeError( - f"'{name}' is not a valid attribute for SmartDataframe" - ) + return df - def __dir__(self): - return dir(self._df) + def load_connector(self, temporary: bool = False): + """ + Load a connector into the smart dataframe - def __getitem__(self, key): - return self._df[key] + Args: + connector (BaseConnector): Connector to be loaded + """ + self.dataframe = self.connector.execute() + self._df_loaded = True + self._temporary_loaded = temporary - def __setitem__(self, key, value): - self._df[key] = value + def _unload_connector(self): + """ + Unload the connector from the smart dataframe. + This is done when a partial dataframe is loaded from a connector (i.e. + because of a filter) and we want to load the full dataframe or a different + partial dataframe. 
+ """ + self._df = None + self._df_loaded = False + self._temporary_loaded = False - def __repr__(self): - return self._df.__repr__() + @property + def dataframe(self) -> DataFrameType: + if self._df_loaded: + return_df = None + + if self._engine == "polars": + return_df = self._df.clone() + elif self._engine == "pandas": + return_df = self._df.copy() + + if self.has_connector and self._df_loaded and self._temporary_loaded: + self._unload_connector() + + return return_df + elif self.has_connector: + return None + + @dataframe.setter + def dataframe(self, df: DataFrameType): + """ + Load a dataframe into the smart dataframe + + Args: + df (DataFrameType): Pandas or Polars dataframe or path to a file + """ + df = self._validate_and_convert_dataframe(df) + self._df = df + + if df is not None: + self._load_engine() + + @property + def engine(self) -> str: + return self._engine + + @property + def connector(self): + return self._connector + + @connector.setter + def connector(self, connector: BaseConnector): + self._connector = connector + + @property + def has_connector(self): + return self._connector is not None + + +class SmartDataframe(DataframeAbstract, Shortcuts): + _table_name: str + _table_description: str + _sample_head: str = None + _original_import: any + _core: SmartDataframeCore + _lake: SmartDatalake + + def __init__( + self, + df: DataFrameType, + name: str = None, + description: str = None, + sample_head: pd.DataFrame = None, + config: Config = None, + logger: Logger = None, + ): + """ + Args: + df (Union[pd.DataFrame, pl.DataFrame]): Pandas or Polars dataframe + name (str, optional): Name of the dataframe. Defaults to None. + description (str, optional): Description of the dataframe. Defaults to "". + sample_head (pd.DataFrame, optional): Sample head of the dataframe. + config (Config, optional): Config to be used. Defaults to None. + logger (Logger, optional): Logger to be used. Defaults to None. + """ + self._original_import = df + + if isinstance(df, str): + if not ( + df.endswith(".csv") + or df.endswith(".parquet") + or df.endswith(".xlsx") + or df.startswith("https://docs.google.com/spreadsheets/") + ): + df_config = self._load_from_config(df) + if df_config: + if "://" in df_config["import_path"]: + connector_name = df_config["import_path"].split("://")[0] + connector_path = df_config["import_path"].split("://")[1] + connector_host = connector_path.split(":")[0] + connector_port = connector_path.split(":")[1].split("/")[0] + connector_database = connector_path.split(":")[1].split("/")[1] + connector_table = connector_path.split(":")[1].split("/")[2] + + # instantiate the connector + df = getattr( + __import__( + "pandasai.connectors", fromlist=[connector_name] + ), + connector_name, + )( + { + "host": connector_host, + "port": connector_port, + "database": connector_database, + "table": connector_table, + } + ) + else: + df = df_config["import_path"] + + if name is None: + name = df_config["name"] + if description is None: + description = df_config["description"] + else: + raise ValueError( + "Could not find a saved dataframe configuration " + "with the given name." 
+ ) + + self._core = SmartDataframeCore(df, logger) + + self._table_name = name + self._table_description = description + self._lake = SmartDatalake([self], config, logger) + + # If no name is provided, use the fallback name provided the connector + if self._table_name is None and self.connector: + self._table_name = self.connector.fallback_name + + if sample_head is not None: + self._sample_head = sample_head.to_csv(index=False) def add_middlewares(self, *middlewares: Optional[Middleware]): """ @@ -185,7 +313,7 @@ def add_middlewares(self, *middlewares: Optional[Middleware]): *middlewares: Middlewares to be added """ - self._dl.add_middlewares(*middlewares) + self.lake.add_middlewares(*middlewares) def chat(self, query: str): """ @@ -197,21 +325,99 @@ def chat(self, query: str): Raises: ValueError: If the query is empty """ - return self._dl.chat(query) + return self.lake.chat(query) def column_hash(self) -> str: - columns_str = "".join(self._df.columns) + """ + Get the hash of the columns of the dataframe. + + Returns: + str: Hash of the columns of the dataframe + """ + if not self._core._df_loaded and self.connector: + return self.connector.column_hash + + columns_str = "".join(self.dataframe.columns) hash_object = hashlib.sha256(columns_str.encode()) return hash_object.hexdigest() def save(self, name: str = None): """ Saves the dataframe configuration to be used for later + + Args: + name (str, optional): Name of the dataframe configuration. Defaults to None. """ config_manager = DfConfigManager(self) config_manager.save(name) + def load_connector(self, temporary: bool = False): + """ + Load a connector into the smart dataframe + + Args: + temporary (bool, optional): Whether the connector is temporary or not. + Defaults to False. + """ + self._core.load_connector(temporary) + + def _truncate_head_columns(self, df: DataFrameType, max_size=25) -> DataFrameType: + """ + Truncate the columns of the dataframe to a maximum of 20 characters. + + Args: + df (DataFrameType): Pandas or Polars dataframe + + Returns: + DataFrameType: Pandas or Polars dataframe + """ + + if df_type(df) == "pandas": + df_trunc = df.copy() + + for col in df.columns: + if df[col].dtype == "object": + first_val = df[col].iloc[0] + if isinstance(first_val, str) and len(first_val) > max_size: + df_trunc[col] = df_trunc[col].str.slice(0, max_size - 3) + "..." + elif df_type(df) == "polars": + try: + import polars as pl + + df_trunc = df.clone() + + for col in df.columns: + if df[col].dtype == pl.Utf8: + first_val = df[col][0] + if isinstance(first_val, str) and len(df_trunc[col]) > max_size: + df_trunc[col] = ( + df_trunc[col].str.slice(0, max_size - 3) + "..." + ) + except ImportError: + raise ImportError( + "Polars is not installed. " + "Please install Polars to use this feature." 
+ ) + + return df_trunc + + def _get_sample_head(self) -> DataFrameType: + head = None + rows_to_display = 0 if self.lake.config.enforce_privacy else 5 + if not self._core._df_loaded and self.connector: + head = self.connector.head() + else: + head = self.dataframe.head(rows_to_display) + + if head is None: + return None + + sampler = DataSampler(head) + sampled_head = sampler.sample(rows_to_display) + + return self._truncate_head_columns(sampled_head) + def _load_from_config(self, name: str): """ Loads a saved dataframe configuration @@ -220,23 +426,22 @@ def _load_from_config(self, name: str): config_manager = DfConfigManager(self) return config_manager.load(name) - def _get_head_csv(self): - """ - Get the head of the dataframe as a CSV string. - - Returns: - str: CSV string - """ - if self._sample_head is not None: - return self._sample_head + @property + def dataframe(self) -> DataFrameType: + return self._core.dataframe - rows_to_display = 0 if self._dl.config.enforce_privacy else 5 + @property + def engine(self): + return self._core.engine - sample = DataSampler(self._df) - df_head = sample.sample(rows_to_display) + @property + def connector(self): + return self._core.connector - self._sample_head = df_head.to_csv(index=False) - return self._sample_head + @connector.setter + def connector(self, connector: BaseConnector): + connector.logger = self.logger + self._core.connector = connector def validate(self, schema: pydantic.BaseModel): """ @@ -245,182 +450,212 @@ def validate(self, schema: pydantic.BaseModel): schema: Pydantic schema class verbose: Print Errors """ - df_validator = DfValidator(self.original_import) + df_validator = DfValidator(self.dataframe) return df_validator.validate(schema) @property - def datalake(self): - return self._dl + def lake(self) -> SmartDatalake: + return self._lake + + @lake.setter + def lake(self, lake: SmartDatalake): + self._lake = lake @property def rows_count(self): - return self._df.shape[0] + if self._core._df_loaded: + return self.dataframe.shape[0] + elif self.connector is not None: + return self.connector.rows_count + else: + raise ValueError( + "Cannot determine rows_count. No dataframe or connector loaded." + ) @property def columns_count(self): - return self._df.shape[1] + if self._core._df_loaded: + return self.dataframe.shape[1] + elif self.connector is not None: + return self.connector.columns_count + else: + raise ValueError( + "Cannot determine columns_count. No dataframe or connector loaded." + ) - @property + @cached_property + def head_df(self): + """ + Get the head of the dataframe as a dataframe. + + Returns: + DataFrameType: Pandas or Polars dataframe + """ + return self._get_sample_head() + + @cached_property def head_csv(self): - return self._get_head_csv() + """ + Get the head of the dataframe as a CSV string. 
+ + Returns: + str: CSV string + """ + df_head = self._get_sample_head() + return df_head.to_csv(index=False) @property def last_prompt(self): - return self._dl.last_prompt + return self.lake.last_prompt @property def last_prompt_id(self) -> str: - return self._dl.last_prompt_id + return self.lake.last_prompt_id @property def last_code_generated(self): - return self._dl.last_code_generated + return self.lake.last_code_generated @property def last_code_executed(self): - return self._dl.last_code_executed + return self.lake.last_code_executed @property def last_result(self): - return self._dl.last_result + return self.lake.last_result @property def last_error(self): - return self._dl.last_error + return self.lake.last_error @property - def original(self): - return self._df + def cache(self): + return self.lake.cache @property + def middlewares(self): + return self.lake.middlewares + + @property def original_import(self): return self._original_import @property - def name(self): - return self._name + def logger(self): + return self.lake.logger - @property - def engine(self): - return self._engine - - @property - def description(self): - return self._description - - @property - def config(self): - return self._dl.config - - @property - def cache(self): - return self._dl.cache - - @property - def middlewares(self): - return self._dl.middlewares + @logger.setter + def logger(self, logger: Logger): + self.lake.logger = logger @property def logs(self): - return self._dl.logs + return self.lake.logs @property def verbose(self): - return self._dl.verbose + return self.lake.verbose @verbose.setter def verbose(self, verbose: bool): - self._dl.verbose = verbose + self.lake.verbose = verbose @property def save_logs(self): - return self._dl.save_logs + return self.lake.save_logs @save_logs.setter def save_logs(self, save_logs: bool): - self._dl.save_logs = save_logs + self.lake.save_logs = save_logs @property def callback(self): - return self._dl.callback + return self.lake.callback @callback.setter def callback(self, callback: BaseCallback): - self._dl.callback = callback + self.lake.callback = callback @property def enforce_privacy(self): - return self._dl.enforce_privacy + return self.lake.enforce_privacy @enforce_privacy.setter def enforce_privacy(self, enforce_privacy: bool): - self._dl.enforce_privacy = enforce_privacy + self.lake.enforce_privacy = enforce_privacy @property def enable_cache(self): - return self._dl.enable_cache + return self.lake.enable_cache @enable_cache.setter def enable_cache(self, enable_cache: bool): - self._dl.enable_cache = enable_cache + self.lake.enable_cache = enable_cache @property def use_error_correction_framework(self): - return self._dl.use_error_correction_framework + return self.lake.use_error_correction_framework @use_error_correction_framework.setter def use_error_correction_framework(self, use_error_correction_framework: bool): - self._dl.use_error_correction_framework = use_error_correction_framework + self.lake.use_error_correction_framework = use_error_correction_framework @property def custom_prompts(self): - return self._dl.custom_prompts + return self.lake.custom_prompts @custom_prompts.setter def custom_prompts(self, custom_prompts: dict): - self._dl.custom_prompts = custom_prompts + self.lake.custom_prompts = custom_prompts @property def save_charts(self): - return self._dl.save_charts + return self.lake.save_charts @save_charts.setter def save_charts(self, save_charts: bool): - self._dl.save_charts = save_charts + self.lake.save_charts = save_charts @property def
save_charts_path(self): - return self._dl.save_charts_path + return self.lake.save_charts_path @save_charts_path.setter def save_charts_path(self, save_charts_path: str): - self._dl.save_charts_path = save_charts_path + self.lake.save_charts_path = save_charts_path @property def custom_whitelisted_dependencies(self): - return self._dl.custom_whitelisted_dependencies + return self.lake.custom_whitelisted_dependencies @custom_whitelisted_dependencies.setter def custom_whitelisted_dependencies( self, custom_whitelisted_dependencies: List[str] ): - self._dl.custom_whitelisted_dependencies = custom_whitelisted_dependencies + self.lake.custom_whitelisted_dependencies = custom_whitelisted_dependencies @property def max_retries(self): - return self._dl.max_retries + return self.lake.max_retries @max_retries.setter def max_retries(self, max_retries: int): - self._dl.max_retries = max_retries + self.lake.max_retries = max_retries @property def llm(self): - return self._dl.llm + return self.lake.llm @llm.setter def llm(self, llm: Union[LLM, LangchainLLM]): - self._dl.llm = llm + self.lake.llm = llm + + @property + def table_name(self): + return self._table_name + + @property + def table_description(self): + return self._table_description @property def sample_head(self): @@ -430,3 +665,26 @@ def sample_head(self): @sample_head.setter def sample_head(self, sample_head: pd.DataFrame): self._sample_head = sample_head.to_csv(index=False) + + def __getattr__(self, name): + if name in self._core.__dir__(): + return getattr(self._core, name) + elif name in self.dataframe.__dir__(): + return getattr(self.dataframe, name) + else: + return self.__getattribute__(name) + + def __getitem__(self, key): + return self.dataframe.__getitem__(key) + + def __setitem__(self, key, value): + return self.dataframe.__setitem__(key, value) + + def __dir__(self): + return dir(self._core) + dir(self.dataframe) + dir(self.__class__) + + def __repr__(self): + return self.dataframe.__repr__() + + def __len__(self): + return len(self.dataframe) diff --git a/pandasai/smart_dataframe/abstract_df.py b/pandasai/smart_dataframe/abstract_df.py index 4bc50a0c7..04274c8eb 100644 --- a/pandasai/smart_dataframe/abstract_df.py +++ b/pandasai/smart_dataframe/abstract_df.py @@ -1,169 +1,332 @@ -class DataframeAbstract: +from abc import ABC + + +class DataframeAbstract(ABC): + _engine: str + + @property + def dataframe(self): + raise NotImplementedError("This method must be implemented in the child class") + # Columns @property def columns(self) -> list: - raise NotImplementedError + return self.dataframe.columns def rename(self, columns): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.rename()`. + """ + return self.dataframe.rename(columns=columns) # Index @property def index(self): - raise NotImplementedError + return self.dataframe.index def set_index(self, keys): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.set_index()`. + """ + return self.dataframe.set_index(keys=keys) def reset_index(self, drop=False): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.reset_index()`. + """ + return self.dataframe.reset_index(drop=drop) # Data def head(self, n): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.head()`. + """ + return self.dataframe.head(n=n) def tail(self, n): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.tail()`. 
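# ---------------------------------------------------------------------------
# Editor's sketch (not part of the diff): the effect of the `__getattr__`,
# `__getitem__`, `__len__` and `__repr__` plumbing added above -- the wrapper
# stays usable as if it were the underlying dataframe. Minimal stand-in class
# with invented names:
import pandas as pd

class Wrapper:
    def __init__(self, df: pd.DataFrame):
        self._df = df

    def __getattr__(self, name):
        # only invoked when normal attribute lookup fails,
        # so wrapper-specific attributes keep priority
        return getattr(self._df, name)

    def __getitem__(self, key):
        return self._df[key]

    def __len__(self):
        return len(self._df)

w = Wrapper(pd.DataFrame({"a": [1, 2, 3]}))
print(len(w), w["a"].sum(), w.shape)  # 3 6 (3, 1)
# ---------------------------------------------------------------------------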
+ """ + return self.dataframe.tail(n=n) def sample(self, n): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.sample()`. + """ + return self.dataframe.sample(n=n) def describe(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.describe()`. + """ + return self.dataframe.describe() # Missing data def isna(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.isna()`. + """ + return self.dataframe.isna() def notna(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.notna()`. + """ + return self.dataframe.notna() def dropna(self, axis): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.dropna()`. + """ + return self.dataframe.dropna(axis=axis) def fillna(self, value): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.fillna()`. + """ + return self.dataframe.fillna(value=value) # Duplicates def duplicated(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.duplicated()`. + """ + return self.dataframe.duplicated() def drop_duplicates(self, subset): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.drop_duplicates()`. + """ + return self.dataframe.drop_duplicates(subset=subset) # Transform def apply(self, func): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.apply()`. + """ + return self.dataframe.apply(func=func) def applymap(self, func): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.applymap()`. + """ + return self.dataframe.applymap(func=func) def pipe(self, func): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.pipe()`. + """ + return self.dataframe.pipe(func=func) # Groupby def groupby(self, by): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.groupby()`. + """ + return self.dataframe.groupby(by=by) def pivot(self, index, columns, values): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.pivot()`. + """ + return self.dataframe.pivot(index=index, columns=columns, values=values) def unstack(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.unstack()`. + """ + return self.dataframe.unstack() # Join/Merge def append(self, other): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.append()`. + """ + return self.dataframe.append(other=other) def join(self, other): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.join()`. + """ + return self.dataframe.join(other=other) def merge(self, other): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.merge()`. + """ + return self.dataframe.merge(other=other) # Combine def concat(self, others): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.concat()`. + """ + return self.dataframe.concat(others=others) # Statistical def count(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.count()`. + """ + return self.dataframe.count() def mean(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.mean()`. + """ + return self.dataframe.mean() def median(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.median()`. + """ + return self.dataframe.median() def std(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.std()`. + """ + return self.dataframe.std() def min(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.min()`. 
+ """ + return self.dataframe.min() def max(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.max()`. + """ + return self.dataframe.max() def abs(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.abs()`. + """ + return self.dataframe.abs() def prod(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.prod()`. + """ + return self.dataframe.prod() def sum(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.sum()`. + """ + return self.dataframe.sum() def nunique(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.nunique()`. + """ + return self.dataframe.nunique() def value_counts(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.value_counts()`. + """ + return self.dataframe.value_counts() def corr(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.corr()`. + """ + return self.dataframe.corr() def cov(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.cov()`. + """ + return self.dataframe.cov() # Window def rolling(self, window): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.window()`. + """ + return self.dataframe.rolling(window=window) def expanding(self, min_periods): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.expanding()`. + """ + return self.dataframe.expanding(min_periods=min_periods) def resample(self, rule): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.resample()`. + """ + return self.dataframe.resample(rule=rule) # Plotting def plot(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.plot()`. + """ + return self.dataframe.plot() def hist(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.hist()`. + """ + return self.dataframe.hist() # Exporting def to_csv(self, path): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.to_csv()`. + """ + return self.dataframe.to_csv(path=path) def to_json(self, path): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.to_json()`. + """ + return self.dataframe.to_json(path=path) def to_sql(self, name, con): - raise NotImplementedError - - def to_dict(self, orient): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.to_sql()`. + """ + return self.dataframe.to_sql(name=name, con=con) + + def to_dict(self, orient="dict", into=dict, as_series=True): + """ + A proxy-call to the dataframe's `.to_dict()`. + """ + if self._engine == "pandas": + return self.dataframe.to_dict(orient=orient, into=into) + elif self._engine == "polars": + return self.dataframe.to_dict(as_series=as_series) + raise RuntimeError( + f"{self.__class__} object has unknown engine type. " + f"Possible engines: 'pandas', 'polars'. Actual '{self._engine}'." + ) def to_numpy(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.to_numpy()`. + """ + return self.dataframe.to_numpy() def to_markdown(self): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.to_markdown()`. + """ + return self.dataframe.to_markdown() # Query def query(self, expr): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.query()`. + """ + return self.dataframe.query(expr=expr) def filter(self, expr): - raise NotImplementedError + """ + A proxy-call to the dataframe's `.filter()`. 
+ """ + return self.dataframe.filter(items=expr) diff --git a/pandasai/smart_datalake/__init__.py b/pandasai/smart_datalake/__init__.py index 4650a7d94..fc87d6c63 100644 --- a/pandasai/smart_datalake/__init__.py +++ b/pandasai/smart_datalake/__init__.py @@ -76,9 +76,9 @@ def __init__( self._load_config(config) if logger: - self._logger = logger + self.logger = logger else: - self._logger = Logger( + self.logger = Logger( save_logs=self._config.save_logs, verbose=self._config.verbose ) @@ -92,7 +92,7 @@ def __init__( self._code_manager = CodeManager( dfs=self._dfs, config=self._config, - logger=self._logger, + logger=self.logger, ) if self._config.enable_cache: @@ -129,7 +129,7 @@ def _load_dfs(self, dfs: List[Union[DataFrameType, Any]]): for df in dfs: if not isinstance(df, SmartDataframe): smart_dfs.append( - SmartDataframe(df, config=self._config, logger=self._logger) + SmartDataframe(df, config=self._config, logger=self.logger) ) else: smart_dfs.append(df) @@ -187,7 +187,9 @@ def _assign_prompt_id(self): """Assign a prompt ID""" self._last_prompt_id = uuid.uuid4() - self._logger.log(f"Prompt ID: {self._last_prompt_id}") + + if self.logger: + self.logger.log(f"Prompt ID: {self._last_prompt_id}") def _is_running_in_console(self) -> bool: """ @@ -205,6 +207,18 @@ def _get_prompt( default_prompt: Type[Prompt], default_values: Optional[dict] = None, ) -> Prompt: + """ + Return a prompt by key. + + Args: + key (str): The key of the prompt + default_prompt (Type[Prompt]): The default prompt to use + default_values (Optional[dict], optional): The default values to use for the + prompt. Defaults to None. + + Returns: + Prompt: The prompt + """ if default_values is None: default_values = {} @@ -222,11 +236,18 @@ def _get_prompt( return prompt def _get_cache_key(self) -> str: + """ + Return the cache key for the current conversation. 
+ + Returns: + str: The cache key for the current conversation + """ cache_key = self._memory.get_conversation() # make the cache key unique for each combination of dfs for df in self._dfs: - cache_key += df.column_hash() + hash = df.column_hash() + cache_key += str(hash) return cache_key @@ -243,8 +264,8 @@ def chat(self, query: str): self._start_timer() - self._logger.log(f"Question: {query}") - self._logger.log(f"Running PandasAI with {self._llm.type} LLM...") + self.logger.log(f"Question: {query}") + self.logger.log(f"Running PandasAI with {self._llm.type} LLM...") self._assign_prompt_id() @@ -256,7 +277,7 @@ def chat(self, query: str): and self._cache and self._cache.get(self._get_cache_key()) ): - self._logger.log("Using cached response") + self.logger.log("Using cached response") code = self._cache.get(self._get_cache_key()) else: default_values = { @@ -279,13 +300,12 @@ def chat(self, query: str): self._config.callback.on_code(code) self.last_code_generated = code - self._logger.log( - f""" - Code generated: - ``` - {code} - ``` - """ + self.logger.log( + f"""Code generated: +``` +{code} +``` +""" ) # TODO: figure out what to do with this @@ -322,7 +342,7 @@ def chat(self, query: str): if result is not None: self.last_result = result - self._logger.log(f"Answer: {result}") + self.logger.log(f"Answer: {result}") except Exception as exception: self.last_error = str(exception) return ( @@ -331,24 +351,37 @@ def chat(self, query: str): f"\n{exception}\n" ) - self._logger.log(f"Executed in: {time.time() - self._start_time}s") + self.logger.log(f"Executed in: {time.time() - self._start_time}s") self._add_result_to_memory(result) return self._format_results(result) def _add_result_to_memory(self, result: dict): + """ + Add the result to the memory. + + Args: + result (dict): The result to add to the memory + """ if result is None: return - if result["type"] == "string": + if result["type"] == "string" or result["type"] == "number": self._memory.add(result["value"], False) - elif result["type"] == "dataframe": - self._memory.add("Here is the data you requested.", False) - elif result["type"] == "plot": - self._memory.add("Here is the plot you requested.", False) + elif result["type"] == "dataframe" or result["type"] == "plot": + self._memory.add("Ok here it is", False) def _format_results(self, result: dict): + """ + Format the results based on the type of the result. + + Args: + result (dict): The result to format + + Returns: + str: The formatted result + """ if result is None: return @@ -365,7 +398,7 @@ def _format_results(self, result: dict): return SmartDataframe( df, config=self._config.__dict__, - logger=self._logger, + logger=self.logger, ) elif result["type"] == "plot": import matplotlib.pyplot as plt @@ -394,15 +427,12 @@ def _retry_run_code(self, code: str, e: Exception): Returns (str): A python code """ - self._logger.log(f"Failed with error: {e}. Retrying") + self.logger.log(f"Failed with error: {e}. 
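# ---------------------------------------------------------------------------
# Editor's sketch (not part of the diff): how `_add_result_to_memory` above
# treats the result dict -- string/number answers are stored verbatim, while
# dataframe/plot results are replaced with a short acknowledgement. `Memory`
# is a stand-in invented for this example.
from typing import Optional

class Memory:
    def __init__(self):
        self.messages = []

    def add(self, message, is_user: bool):
        self.messages.append(("user" if is_user else "ai", message))

def add_result_to_memory(memory: Memory, result: Optional[dict]) -> None:
    if result is None:
        return
    if result["type"] in ("string", "number"):
        memory.add(result["value"], False)
    elif result["type"] in ("dataframe", "plot"):
        memory.add("Ok here it is", False)

m = Memory()
add_result_to_memory(m, {"type": "number", "value": 42})
add_result_to_memory(m, {"type": "plot", "value": "chart.png"})
print(m.messages)  # [('ai', 42), ('ai', 'Ok here it is')]
# ---------------------------------------------------------------------------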
Retrying", logging.ERROR) default_values = { + "engine": self._dfs[0].engine, "code": code, "error_returned": e, - # TODO: find a better way to determine these values - "df_head": self._dfs[0].head_csv, - "num_rows": self._dfs[0].rows_count, - "num_columns": self._dfs[0].columns_count, } error_correcting_instruction = self._get_prompt( "correct_error", @@ -432,7 +462,15 @@ def last_prompt_id(self) -> str: @property def logs(self): - return self._logger.logs + return self.logger.logs + + @property + def logger(self): + return self._logger + + @logger.setter + def logger(self, logger): + self._logger = logger @property def config(self): @@ -470,7 +508,7 @@ def callback(self): @callback.setter def callback(self, callback: Any): - self._config.callback = callback + self.config.callback = callback @property def enforce_privacy(self): @@ -557,7 +595,7 @@ def last_code_generated(self): @last_code_generated.setter def last_code_generated(self, last_code_generated: str): - self._last_code_generated = last_code_generated + self._code_manager._last_code_generated = last_code_generated @property def last_code_executed(self): diff --git a/poetry.lock b/poetry.lock index 1824a57f4..01cdc7b4a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -124,26 +124,38 @@ frozenlist = ">=1.1.0" [[package]] name = "altair" -version = "5.0.1" +version = "5.1.1" description = "Vega-Altair: A declarative statistical visualization library for Python." optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "altair-5.0.1-py3-none-any.whl", hash = "sha256:9f3552ed5497d4dfc14cf48a76141d8c29ee56eae2873481b4b28134268c9bbe"}, - {file = "altair-5.0.1.tar.gz", hash = "sha256:087d7033cb2d6c228493a053e12613058a5d47faf6a36aea3ff60305fd8b4cb0"}, + {file = "altair-5.1.1-py3-none-any.whl", hash = "sha256:bb421459b53c80ad45f2bd009c87da2a81165b8f7d5a90658e0fc1ffc741bf34"}, + {file = "altair-5.1.1.tar.gz", hash = "sha256:ad6cd6983c8db69a34dd68e42653f6172b7fc3775b7190005107f1b4fc60d64d"}, ] [package.dependencies] jinja2 = "*" jsonschema = ">=3.0" numpy = "*" -pandas = ">=0.18" +packaging = "*" +pandas = ">=0.25" toolz = "*" typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] -dev = ["black (<24)", "hatch", "ipython", "m2r", "mypy", "pandas-stubs", "pytest", "pytest-cov", "ruff", "types-jsonschema", "types-setuptools", "vega-datasets", "vl-convert-python"] -doc = ["docutils", "geopandas", "jinja2", "myst-parser", "numpydoc", "pillow", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinxext-altair"] +dev = ["anywidget", "black (<24)", "hatch", "ipython", "m2r", "mypy", "pandas-stubs", "pyarrow (>=11)", "pytest", "pytest-cov", "ruff", "types-jsonschema", "types-setuptools", "vega-datasets", "vegafusion[embed] (>=1.4.0)", "vl-convert-python (>=0.13.0)"] +doc = ["docutils", "geopandas", "jinja2", "myst-parser", "numpydoc", "pillow (>=9,<10)", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinxext-altair"] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = true +python-versions = "*" +files = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] [[package]] name = "appnope" @@ -169,17 +181,17 @@ files = [ [[package]] name = "asttokens" -version = "2.2.1" +version = "2.3.0" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = "asttokens-2.3.0-py2.py3-none-any.whl", hash = "sha256:bef1a51bc256d349e9f94e7e40e44b705ed1162f55294220dd561d24583d9877"}, + {file = "asttokens-2.3.0.tar.gz", hash = "sha256:2552a88626aaa7f0f299f871479fc755bd4e7c11e89078965e928fb7bb9a6afe"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] test = ["astroid", "pytest"] @@ -668,18 +680,21 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.12.3" description = "A platform independent file lock." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, + {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.11\""} + [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] [[package]] name = "fonttools" @@ -738,6 +753,52 @@ ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=15.0.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +[[package]] +name = "frozendict" +version = "2.3.8" +description = "A simple immutable dictionary" +optional = true +python-versions = ">=3.6" +files = [ + {file = "frozendict-2.3.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d188d062084fba0e4bf32719ff7380b26c050b932ff164043ce82ab90587c52b"}, + {file = "frozendict-2.3.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2a4e818ac457f6354401dcb631527af25e5a20fcfc81e6b5054b45fc245caca"}, + {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a506d807858fa961aaa5b48dab6154fdc6bd045bbe9310788bbff141bb42d13"}, + {file = "frozendict-2.3.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:750632cc890d8ee9484fe6d31b261159144b6efacc08e1317fe46accd1410373"}, + {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ee5fe2658a8ac9a57f748acaf563f6a47f80b8308cbf0a04fac0ba057d41f75"}, + {file = "frozendict-2.3.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23c4bb46e6b8246e1e7e49b5593c2bc09221db0d8f31f7c092be8dfb42b9e620"}, + {file = "frozendict-2.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:c31abc8acea309b132dde441856829f6003a3d242da8b54bce4c0f2a3c8c63f0"}, + {file = "frozendict-2.3.8-cp310-cp310-win_arm64.whl", hash = "sha256:9ea5520e85447ff8d4681e181941e482662817ccba921b7cb3f87922056d892a"}, + {file = "frozendict-2.3.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f83fed36497af9562ead5e9fb8443224ba2781786bd3b92b1087cb7d0ff20135"}, + {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e27c5c1d29d0eda7979253ec88abc239da1313b38f39f4b16984db3b3e482300"}, + {file = "frozendict-2.3.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c785de7f1a13f15963945f400656b18f057c2fc76c089dacf127a2bb188c03"}, + {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8cf35ddd25513428ec152614def9696afb93ae5ec0eb54fa6aa6206eda77ac4c"}, + {file = "frozendict-2.3.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ffc684773de7c88724788fa9787d0016fd75830412d58acbd9ed1a04762c675b"}, + {file = "frozendict-2.3.8-cp36-cp36m-win_amd64.whl", hash = "sha256:4c258aab9c8488338634f2ec670ef049dbf0ab0e7a2fa9bc2c7b5009cb614801"}, + {file = "frozendict-2.3.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47fc26468407fdeb428cfc89495b7921419e670355c21b383765482fdf6c5c14"}, + {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ea638228692db2bf94bce40ea4b25f4077588497b516bd16576575560094bd9"}, + {file = "frozendict-2.3.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a75bf87e76c4386caecdbdd02a99e53ad43a6b5c38fb3d5a634a9fc9ce41462"}, + {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed5a6c5c7a0f57269577c2a338a6002949aea21a23b7b7d06da7e7dced8b605b"}, + {file = "frozendict-2.3.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d086440328a465dea9bef2dbad7548d75d1a0a0d21f43a08c03e1ec79ac5240e"}, + {file = "frozendict-2.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:0bc4767e2f83db5b701c787e22380296977368b0c57e485ca71b2eedfa11c4a3"}, + {file = "frozendict-2.3.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:638cf363d3cbca31a341503cf2219eac52a5f5140449676fae3d9644cd3c5487"}, + {file = "frozendict-2.3.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b2fd8ce36277919b36e3c834d2389f3cd7ac068ae730c312671dd4439a5dd65"}, + {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3957d52f1906b0c85f641a1911d214255873f6408ab4e5ad657cc27a247fb145"}, + {file = "frozendict-2.3.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72cfe08ab8ae524e54848fa90b22d02c1b1ecfb3064438696bcaa4b953f18772"}, + {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4742e76c4111bd09198d3ab66cef94be8506212311338f9182d6ef5f5cb60493"}, + {file = "frozendict-2.3.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:313ed8d9ba6bac35d7635cd9580ee5721a0fb016f4d2d20f0efa05dbecbdb1be"}, + {file = "frozendict-2.3.8-cp38-cp38-win_amd64.whl", hash = 
"sha256:d3c6ce943946c2a61501c8cf116fff0892d11dd579877eb36e2aea2c27fddfef"}, + {file = "frozendict-2.3.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0f573dc4861dd7ec9e055c8cceaf45355e894e749f621f199aab7b311ac4bdb"}, + {file = "frozendict-2.3.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b3435e5f1ca5ae68a5e95e64b09d6d5c645cadd6b87569a0b3019dd248c8d00"}, + {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:145afd033ebfade28416093335261b8ec1af5cccc593482309e7add062ec8668"}, + {file = "frozendict-2.3.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da98427de26b5a2865727947480cbb53860089c4d195baa29c539da811cea617"}, + {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e82befa7c385a668d569cebbebbdf49cee6fea4083f08e869a1b08cfb640a9f"}, + {file = "frozendict-2.3.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80abe81d36e889ceec665e06ec764a7638000fa3e7be09786ac4d3ddc64b76db"}, + {file = "frozendict-2.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:8ccc94ac781710db44e142e1a11ff9b31d02c032c01c6868d51fcbef73086225"}, + {file = "frozendict-2.3.8-cp39-cp39-win_arm64.whl", hash = "sha256:e72dbc1bcc2203cef38d205f692396f5505921a5680f66aa9a7e8bb71fd38f28"}, + {file = "frozendict-2.3.8-py311-none-any.whl", hash = "sha256:ba41a7ed019bd03b62d63ed3f8dea35b8243d1936f7c9ed4b5298ca45a01928e"}, + {file = "frozendict-2.3.8.tar.gz", hash = "sha256:5526559eca8f1780a4ee5146896f59afc31435313560208dd394a3a5e537d3ff"}, +] + [[package]] name = "frozenlist" version = "1.4.0" @@ -810,13 +871,13 @@ files = [ [[package]] name = "fsspec" -version = "2023.6.0" +version = "2023.9.0" description = "File-system specification" optional = true python-versions = ">=3.8" files = [ - {file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"}, - {file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"}, + {file = "fsspec-2023.9.0-py3-none-any.whl", hash = "sha256:d55b9ab2a4c1f2b759888ae9f93e40c2aa72c0808132e87e282b549f9e6c4254"}, + {file = "fsspec-2023.9.0.tar.gz", hash = "sha256:4dbf0fefee035b7c6d3bbbe6bc99b2f201f40d4dca95b67c2b719be77bcd917f"}, ] [package.extras] @@ -898,13 +959,13 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.32" +version = "3.1.34" description = "GitPython is a Python library used to interact with Git repositories" optional = true python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, - {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, + {file = "GitPython-3.1.34-py3-none-any.whl", hash = "sha256:5d3802b98a3bae1c2b8ae0e1ff2e4aa16bcdf02c145da34d092324f599f01395"}, + {file = "GitPython-3.1.34.tar.gz", hash = "sha256:85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd"}, ] [package.dependencies] @@ -944,11 +1005,11 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = 
">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" @@ -961,37 +1022,38 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.17.3" +version = "2.22.0" description = "Google Authentication Library" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" +python-versions = ">=3.6" files = [ - {file = "google-auth-2.17.3.tar.gz", hash = "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc"}, - {file = "google_auth-2.17.3-py2.py3-none-any.whl", hash = "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f"}, + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" -rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} +rsa = ">=3.1.4,<5" six = ">=1.9.0" +urllib3 = "<2.0" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0dev)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-aiplatform" -version = "1.31.0" +version = "1.31.1" description = "Vertex AI API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-aiplatform-1.31.0.tar.gz", hash = "sha256:a5de8f5cb9bcd66db08a404cf74e7ed252d5d4038649a58f37588ccb4e2785f0"}, - {file = "google_cloud_aiplatform-1.31.0-py2.py3-none-any.whl", hash = "sha256:19429dfb6098414f758810fde1690d8e8170aff7add4281681dc61de79b4112b"}, + {file = "google-cloud-aiplatform-1.31.1.tar.gz", hash = "sha256:6de8d7d647990cc0ee601d938d3a1693e3ef50f3d54d735397b2e31ca8eeb946"}, + {file = "google_cloud_aiplatform-1.31.1-py2.py3-none-any.whl", hash = "sha256:360d95c4c6f6a27fc2a4a071741a66588f0f0ca245509315839cfa320d6862e2"}, ] [package.dependencies] @@ -1321,13 +1383,13 @@ test = ["objgraph", "psutil"] [[package]] name = "griffe" -version = "0.34.0" +version = "0.36.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.34.0-py3-none-any.whl", hash = "sha256:d8bca9bd4a0880e7f71dc152de4222171d941a32b5504d77450a71a7908dfc1d"}, - {file = "griffe-0.34.0.tar.gz", hash = "sha256:48c667ad51a7f756238f798866203aeb8f9fa02d4192e25970f57f813bb37f26"}, + {file = "griffe-0.36.0-py3-none-any.whl", hash = "sha256:4235df397b7b56192cbfda601e458526279bdaf3bf1f59d0be368abac72bf42d"}, + {file = "griffe-0.36.0.tar.gz", hash = "sha256:ccf062126041d19cc4d9850ca46a555a656e738f5e83feb78f36b05fec5974ad"}, ] [package.dependencies] @@ -1422,6 +1484,27 @@ googleapis-common-protos = ">=1.5.5" grpcio = ">=1.57.0" protobuf = ">=4.21.6" +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["chardet (>=2.2)", "genshi", "lxml"] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml"] + [[package]] name = "huggingface-hub" version = "0.16.4" @@ -1529,13 +1612,13 @@ files = [ [[package]] name = "ipython" -version = "8.14.0" +version = "8.15.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" files = [ - {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, - {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, + {file = "ipython-8.15.0-py3-none-any.whl", hash = "sha256:45a2c3a529296870a97b7de34eda4a31bee16bc7bf954e07d39abe49caf8f887"}, + {file = "ipython-8.15.0.tar.gz", hash = "sha256:2baeb5be6949eeebf532150f81746f8333e2ccce02de1c7eedde3f23ed5e9f1e"}, ] [package.dependencies] @@ -1543,6 +1626,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} @@ -1554,9 +1638,9 @@ traitlets = ">=5" typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", 
"testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -1665,79 +1749,115 @@ files = [ [[package]] name = "kiwisolver" -version = "1.4.4" +version = "1.4.5" description = "A fast implementation of the Cassowary constraint solver" optional = false python-versions = ">=3.7" files = [ - {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6"}, - {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c"}, - {file = "kiwisolver-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3"}, - {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938"}, - {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d"}, - {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09"}, - {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de"}, - {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32"}, - {file = "kiwisolver-1.4.4-cp310-cp310-win32.whl", hash = "sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408"}, - {file = "kiwisolver-1.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004"}, - {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ea21f66820452a3f5d1655f8704a60d66ba1191359b96541eaf457710a5fc6"}, - {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc9db8a3efb3e403e4ecc6cd9489ea2bac94244f80c78e27c31dcc00d2790ac2"}, - {file = "kiwisolver-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5b61785a9ce44e5a4b880272baa7cf6c8f48a5180c3e81c59553ba0cb0821ca"}, - {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2dbb44c3f7e6c4d3487b31037b1bdbf424d97687c1747ce4ff2895795c9bf69"}, - {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295ecd49304dcf3bfbfa45d9a081c96509e95f4b9d0eb7ee4ec0530c4a96514"}, - {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bd472dbe5e136f96a4b18f295d159d7f26fd399136f5b17b08c4e5f498cd494"}, - {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf7d9fce9bcc4752ca4a1b80aabd38f6d19009ea5cbda0e0856983cf6d0023f5"}, - {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d6601aed50c74e0ef02f4204da1816147a6d3fbdc8b3872d263338a9052c51"}, - {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:877272cf6b4b7e94c9614f9b10140e198d2186363728ed0f701c6eee1baec1da"}, - {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:db608a6757adabb32f1cfe6066e39b3706d8c3aa69bbc353a5b61edad36a5cb4"}, - {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5853eb494c71e267912275e5586fe281444eb5e722de4e131cddf9d442615626"}, - {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f0a1dbdb5ecbef0d34eb77e56fcb3e95bbd7e50835d9782a45df81cc46949750"}, - {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:283dffbf061a4ec60391d51e6155e372a1f7a4f5b15d59c8505339454f8989e4"}, - {file = "kiwisolver-1.4.4-cp311-cp311-win32.whl", hash = "sha256:d06adcfa62a4431d404c31216f0f8ac97397d799cd53800e9d3efc2fbb3cf14e"}, - {file = "kiwisolver-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e7da3fec7408813a7cebc9e4ec55afed2d0fd65c4754bc376bf03498d4e92686"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-win32.whl", hash = "sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3"}, - {file = "kiwisolver-1.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166"}, - {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454"}, - {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0"}, - {file = "kiwisolver-1.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c"}, - {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae"}, - {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0"}, - {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1"}, - {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d"}, - {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c"}, - {file = "kiwisolver-1.4.4-cp38-cp38-win32.whl", hash = 
"sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191"}, - {file = "kiwisolver-1.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766"}, - {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8"}, - {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897"}, - {file = "kiwisolver-1.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824"}, - {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29"}, - {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f"}, - {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd"}, - {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac"}, - {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9"}, - {file = "kiwisolver-1.4.4-cp39-cp39-win32.whl", hash = "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea"}, - {file = "kiwisolver-1.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b"}, - {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a"}, - {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d"}, - {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a"}, - {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871"}, - {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28bc5b299f48150b5f822ce68624e445040595a4ac3d59251703779836eceff9"}, - {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81e38381b782cc7e1e46c4e14cd997ee6040768101aefc8fa3c24a4cc58e98f8"}, - {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2a66fdfb34e05b705620dd567f5a03f239a088d5a3f321e7b6ac3239d22aa286"}, - {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:872b8ca05c40d309ed13eb2e582cab0c5a05e81e987ab9c521bf05ad1d5cf5cb"}, - {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:70e7c2e7b750585569564e2e5ca9845acfaa5da56ac46df68414f29fea97be9f"}, - {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9f85003f5dfa867e86d53fac6f7e6f30c045673fa27b603c397753bebadc3008"}, - {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:2e307eb9bd99801f82789b44bb45e9f541961831c7311521b13a6c85afc09767"}, - {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1792d939ec70abe76f5054d3f36ed5656021dcad1322d1cc996d4e54165cef9"}, - {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cb459eea32a4e2cf18ba5fcece2dbdf496384413bc1bae15583f19e567f3b2"}, - {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36dafec3d6d6088d34e2de6b85f9d8e2324eb734162fba59d2ba9ed7a2043d5b"}, - {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, + {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, + {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, + {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, + {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, + {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, + {file = 
"kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, + {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, + {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, + {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, + {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, + 
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, + {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, + {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, + {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, + {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, + {file = 
"kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, + {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, + {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, + {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, + {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, + {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, + {file = 
"kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, + {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, + {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, + {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, + {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, + {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, ] [[package]] @@ -1793,6 +1913,113 @@ pydantic = ">=1,<2" requests = ">=2,<3" tenacity = ">=8.1.0,<9.0.0" +[[package]] +name = "lxml" +version = "4.9.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = 
"lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + [[package]] name = "markdown" version = "3.3.7" @@ -2081,13 +2308,13 @@ mkdocs = ">=1.1" [[package]] name = "mkdocstrings" -version = "0.22.0" +version = "0.23.0" description = "Automatic documentation from sources, for MkDocs." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocstrings-0.22.0-py3-none-any.whl", hash = "sha256:2d4095d461554ff6a778fdabdca3c00c468c2f1459d469f7a7f622a2b23212ba"}, - {file = "mkdocstrings-0.22.0.tar.gz", hash = "sha256:82a33b94150ebb3d4b5c73bab4598c3e21468c79ec072eff6931c8f3bfc38256"}, + {file = "mkdocstrings-0.23.0-py3-none-any.whl", hash = "sha256:051fa4014dfcd9ed90254ae91de2dbb4f24e166347dae7be9a997fe16316c65e"}, + {file = "mkdocstrings-0.23.0.tar.gz", hash = "sha256:d9c6a37ffbe7c14a7a54ef1258c70b8d394e6a33a1c80832bce40b9567138d1c"}, ] [package.dependencies] @@ -2203,6 +2430,17 @@ files = [ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, ] +[[package]] +name = "multitasking" +version = "0.0.11" +description = "Non-blocking Python methods using decorators" +optional = true +python-versions = "*" +files = [ + {file = "multitasking-0.0.11-py3-none-any.whl", hash = "sha256:1e5b37a5f8fc1e6cfaafd1a82b6b1cc6d2ed20037d3b89c25a84f499bd7b3dd4"}, + {file = "multitasking-0.0.11.tar.gz", hash = "sha256:4d6bc3cc65f9b2dca72fb5a787850a88dae8f620c2b36ae9b55248e51bcd6026"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -2306,13 +2544,13 @@ files = [ [[package]] name = "openai" -version = "0.27.9" +version = "0.27.10" description = "Python client library for the OpenAI API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-0.27.9-py3-none-any.whl", hash = "sha256:6a3cf8e276d1a6262b50562fbc0cba7967cfebb78ed827d375986b48fdad6475"}, - {file = "openai-0.27.9.tar.gz", hash = "sha256:b687761c82f5ebb6f61efc791b2083d2d068277b94802d4d1369efe39851813d"}, + {file = "openai-0.27.10-py3-none-any.whl", hash = "sha256:beabd1757e3286fa166dde3b70ebb5ad8081af046876b47c14c41e203ed22a14"}, + {file = "openai-0.27.10.tar.gz", hash = "sha256:60e09edf7100080283688748c6803b7b3b52d5a55d21890f3815292a0552d83b"}, ] [package.dependencies] @@ -2403,8 +2641,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.1" @@ -2593,13 +2831,13 @@ tenacity = ">=6.2.0" [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = 
"sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -2640,13 +2878,13 @@ xlsxwriter = ["xlsxwriter"] [[package]] name = "pre-commit" -version = "3.3.3" +version = "3.4.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, + {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, ] [package.dependencies] @@ -2689,24 +2927,44 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.24.1" +version = "4.24.2" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.1-cp310-abi3-win32.whl", hash = "sha256:d414199ca605eeb498adc4d2ba82aedc0379dca4a7c364ff9bc9a179aa28e71b"}, - {file = "protobuf-4.24.1-cp310-abi3-win_amd64.whl", hash = "sha256:5906c5e79ff50fe38b2d49d37db5874e3c8010826f2362f79996d83128a8ed9b"}, - {file = "protobuf-4.24.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:970c701ee16788d74f3de20938520d7a0aebc7e4fff37096a48804c80d2908cf"}, - {file = "protobuf-4.24.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fc361148e902949dcb953bbcb148c99fe8f8854291ad01107e4120361849fd0e"}, - {file = "protobuf-4.24.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:5d32363d14aca6e5c9e9d5918ad8fb65b091b6df66740ae9de50ac3916055e43"}, - {file = "protobuf-4.24.1-cp37-cp37m-win32.whl", hash = "sha256:df015c47d6855b8efa0b9be706c70bf7f050a4d5ac6d37fb043fbd95157a0e25"}, - {file = "protobuf-4.24.1-cp37-cp37m-win_amd64.whl", hash = "sha256:d4af4fd9e9418e819be30f8df2a16e72fbad546a7576ac7f3653be92a6966d30"}, - {file = "protobuf-4.24.1-cp38-cp38-win32.whl", hash = "sha256:302e8752c760549ed4c7a508abc86b25d46553c81989343782809e1a062a2ef9"}, - {file = "protobuf-4.24.1-cp38-cp38-win_amd64.whl", hash = "sha256:06437f0d4bb0d5f29e3d392aba69600188d4be5ad1e0a3370e581a9bf75a3081"}, - {file = "protobuf-4.24.1-cp39-cp39-win32.whl", hash = "sha256:0b2b224e9541fe9f046dd7317d05f08769c332b7e4c54d93c7f0f372dedb0b1a"}, - {file = "protobuf-4.24.1-cp39-cp39-win_amd64.whl", hash = "sha256:bd39b9094a4cc003a1f911b847ab379f89059f478c0b611ba1215053e295132e"}, - {file = "protobuf-4.24.1-py3-none-any.whl", hash = "sha256:55dd644adc27d2a624339332755fe077c7f26971045b469ebb9732a69ce1f2ca"}, - {file = "protobuf-4.24.1.tar.gz", hash = "sha256:44837a5ed9c9418ad5d502f89f28ba102e9cd172b6668bc813f21716f9273348"}, + {file = "protobuf-4.24.2-cp310-abi3-win32.whl", hash = "sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924"}, + {file = "protobuf-4.24.2-cp310-abi3-win_amd64.whl", hash = "sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3"}, + {file = "protobuf-4.24.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880"}, + {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c"}, + {file = "protobuf-4.24.2-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74"}, + {file = "protobuf-4.24.2-cp37-cp37m-win32.whl", hash = "sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1"}, + {file = "protobuf-4.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099"}, + {file = "protobuf-4.24.2-cp38-cp38-win32.whl", hash = "sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e"}, + {file = "protobuf-4.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16"}, + {file = "protobuf-4.24.2-cp39-cp39-win32.whl", hash = "sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b"}, + {file = "protobuf-4.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd"}, + {file = "protobuf-4.24.2-py3-none-any.whl", hash = "sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e"}, + {file = "protobuf-4.24.2.tar.gz", hash = "sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e"}, +] + +[[package]] +name = "psycopg2" +version = "2.9.7" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = true +python-versions = ">=3.6" +files = [ + {file = "psycopg2-2.9.7-cp310-cp310-win32.whl", hash = "sha256:1a6a2d609bce44f78af4556bea0c62a5e7f05c23e5ea9c599e07678995609084"}, + {file = "psycopg2-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:b22ed9c66da2589a664e0f1ca2465c29b75aaab36fa209d4fb916025fb9119e5"}, + {file = "psycopg2-2.9.7-cp311-cp311-win32.whl", hash = "sha256:44d93a0109dfdf22fe399b419bcd7fa589d86895d3931b01fb321d74dadc68f1"}, + {file = "psycopg2-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:91e81a8333a0037babfc9fe6d11e997a9d4dac0f38c43074886b0d9dead94fe9"}, + {file = "psycopg2-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:d1210fcf99aae6f728812d1d2240afc1dc44b9e6cba526a06fb8134f969957c2"}, + {file = "psycopg2-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e9b04cbef584310a1ac0f0d55bb623ca3244c87c51187645432e342de9ae81a8"}, + {file = "psycopg2-2.9.7-cp38-cp38-win32.whl", hash = "sha256:d5c5297e2fbc8068d4255f1e606bfc9291f06f91ec31b2a0d4c536210ac5c0a2"}, + {file = "psycopg2-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:8275abf628c6dc7ec834ea63f6f3846bf33518907a2b9b693d41fd063767a866"}, + {file = "psycopg2-2.9.7-cp39-cp39-win32.whl", hash = "sha256:c7949770cafbd2f12cecc97dea410c514368908a103acf519f2a346134caa4d5"}, + {file = "psycopg2-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:b6bd7d9d3a7a63faae6edf365f0ed0e9b0a1aaf1da3ca146e6b043fb3eb5d723"}, + {file = "psycopg2-2.9.7.tar.gz", hash = "sha256:f00cc35bd7119f1fed17b85bd1007855194dde2cbd8de01ab8ebb17487440ad8"}, ] [[package]] @@ -2887,19 +3145,22 @@ plugins = ["importlib-metadata"] [[package]] name = "pymdown-extensions" -version = "10.1" +version = "10.3" description = "Extension pack for Python Markdown." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.1-py3-none-any.whl", hash = "sha256:ef25dbbae530e8f67575d222b75ff0649b1e841e22c2ae9a20bad9472c2207dc"}, - {file = "pymdown_extensions-10.1.tar.gz", hash = "sha256:508009b211373058debb8247e168de4cbcb91b1bff7b5e961b2c3e864e00b195"}, + {file = "pymdown_extensions-10.3-py3-none-any.whl", hash = "sha256:77a82c621c58a83efc49a389159181d570e370fff9f810d3a4766a75fc678b66"}, + {file = "pymdown_extensions-10.3.tar.gz", hash = "sha256:94a0d8a03246712b64698af223848fd80aaf1ae4c4be29c8c61939b0467b5722"}, ] [package.dependencies] markdown = ">=3.2" pyyaml = "*" +[package.extras] +extra = ["pygments (>=2.12)"] + [[package]] name = "pympler" version = "1.0.1" @@ -2911,6 +3172,21 @@ files = [ {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"}, ] +[[package]] +name = "pymysql" +version = "1.1.0" +description = "Pure Python MySQL Driver" +optional = true +python-versions = ">=3.7" +files = [ + {file = "PyMySQL-1.1.0-py3-none-any.whl", hash = "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7"}, + {file = "PyMySQL-1.1.0.tar.gz", hash = "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96"}, +] + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + [[package]] name = "pyparsing" version = "3.0.9" @@ -2927,13 +3203,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, + {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, ] [package.dependencies] @@ -3153,108 +3429,108 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.9.2" +version = "0.10.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = true python-versions = ">=3.8" files = [ - {file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"}, - {file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"}, - {file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = "sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"}, - {file = "rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"}, - {file = "rpds_py-0.9.2-cp311-none-win32.whl", hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"}, - {file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"}, - {file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"}, - {file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"}, - {file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"}, - {file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"}, - {file = "rpds_py-0.9.2.tar.gz", hash = 
"sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, + {file = "rpds_py-0.10.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c1e0e9916301e3b3d970814b1439ca59487f0616d30f36a44cead66ee1748c31"}, + {file = "rpds_py-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ce8caa29ebbdcde67e5fd652c811d34bc01f249dbc0d61e5cc4db05ae79a83b"}, + {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad277f74b1c164f7248afa968700e410651eb858d7c160d109fb451dc45a2f09"}, + {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e1c68303ccf7fceb50fbab79064a2636119fd9aca121f28453709283dbca727"}, + {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:780fcb855be29153901c67fc9c5633d48aebef21b90aa72812fa181d731c6b00"}, + {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbd7b24d108509a1b9b6679fcc1166a7dd031dbef1f3c2c73788f42e3ebb3beb"}, + {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0700c2133ba203c4068aaecd6a59bda22e06a5e46255c9da23cbf68c6942215d"}, + {file = "rpds_py-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576da63eae7809f375932bfcbca2cf20620a1915bf2fedce4b9cc8491eceefe3"}, + {file = "rpds_py-0.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23750a9b8a329844ba1fe267ca456bb3184984da2880ed17ae641c5af8de3fef"}, + {file = "rpds_py-0.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d08395595c42bcd82c3608762ce734504c6d025eef1c06f42326a6023a584186"}, + {file = "rpds_py-0.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1d7b7b71bcb82d8713c7c2e9c5f061415598af5938666beded20d81fa23e7640"}, + {file = "rpds_py-0.10.0-cp310-none-win32.whl", hash = "sha256:97f5811df21703446b42303475b8b855ee07d6ab6cdf8565eff115540624f25d"}, + {file = "rpds_py-0.10.0-cp310-none-win_amd64.whl", hash = "sha256:cdbed8f21204398f47de39b0a9b180d7e571f02dfb18bf5f1b618e238454b685"}, + {file = "rpds_py-0.10.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:7a3a3d3e4f1e3cd2a67b93a0b6ed0f2499e33f47cc568e3a0023e405abdc0ff1"}, + {file = "rpds_py-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc72ae476732cdb7b2c1acb5af23b478b8a0d4b6fcf19b90dd150291e0d5b26b"}, + {file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0583f69522732bdd79dca4cd3873e63a29acf4a299769c7541f2ca1e4dd4bc6"}, + {file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8b9a7cd381970e64849070aca7c32d53ab7d96c66db6c2ef7aa23c6e803f514"}, + {file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d292cabd7c8335bdd3237ded442480a249dbcdb4ddfac5218799364a01a0f5c"}, + {file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6903cdca64f1e301af9be424798328c1fe3b4b14aede35f04510989fc72f012"}, + {file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bed57543c99249ab3a4586ddc8786529fbc33309e5e8a1351802a06ca2baf4c2"}, + {file = "rpds_py-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15932ec5f224b0e35764dc156514533a4fca52dcfda0dfbe462a1a22b37efd59"}, + {file = "rpds_py-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb2d59bc196e6d3b1827c7db06c1a898bfa0787c0574af398e65ccf2e97c0fbe"}, + {file 
= "rpds_py-0.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f99d74ddf9d3b6126b509e81865f89bd1283e3fc1b568b68cd7bd9dfa15583d7"}, + {file = "rpds_py-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f70bec8a14a692be6dbe7ce8aab303e88df891cbd4a39af091f90b6702e28055"}, + {file = "rpds_py-0.10.0-cp311-none-win32.whl", hash = "sha256:5f7487be65b9c2c510819e744e375bd41b929a97e5915c4852a82fbb085df62c"}, + {file = "rpds_py-0.10.0-cp311-none-win_amd64.whl", hash = "sha256:748e472345c3a82cfb462d0dff998a7bf43e621eed73374cb19f307e97e08a83"}, + {file = "rpds_py-0.10.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:d4639111e73997567343df6551da9dd90d66aece1b9fc26c786d328439488103"}, + {file = "rpds_py-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f4760e1b02173f4155203054f77a5dc0b4078de7645c922b208d28e7eb99f3e2"}, + {file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a6420a36975e0073acaeee44ead260c1f6ea56812cfc6c31ec00c1c48197173"}, + {file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58fc4d66ee349a23dbf08c7e964120dc9027059566e29cf0ce6205d590ed7eca"}, + {file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:063411228b852fb2ed7485cf91f8e7d30893e69b0acb207ec349db04cccc8225"}, + {file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65af12f70355de29e1092f319f85a3467f4005e959ab65129cb697169ce94b86"}, + {file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298e8b5d8087e0330aac211c85428c8761230ef46a1f2c516d6a2f67fb8803c5"}, + {file = "rpds_py-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b9bf77008f2c55dabbd099fd3ac87009471d223a1c7ebea36873d39511b780a"}, + {file = "rpds_py-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c7853f27195598e550fe089f78f0732c66ee1d1f0eaae8ad081589a5a2f5d4af"}, + {file = "rpds_py-0.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:75dbfd41a61bc1fb0536bf7b1abf272dc115c53d4d77db770cd65d46d4520882"}, + {file = "rpds_py-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b25136212a3d064a8f0b9ebbb6c57094c5229e0de76d15c79b76feff26aeb7b8"}, + {file = "rpds_py-0.10.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:9affee8cb1ec453382c27eb9043378ab32f49cd4bc24a24275f5c39bf186c279"}, + {file = "rpds_py-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d55528ef13af4b4e074d067977b1f61408602f53ae4537dccf42ba665c2c7bd"}, + {file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7865df1fb564092bcf46dac61b5def25342faf6352e4bc0e61a286e3fa26a3d"}, + {file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f5cc8c7bc99d2bbcd704cef165ca7d155cd6464c86cbda8339026a42d219397"}, + {file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbae50d352e4717ffc22c566afc2d0da744380e87ed44a144508e3fb9114a3f4"}, + {file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fccbf0cd3411719e4c9426755df90bf3449d9fc5a89f077f4a7f1abd4f70c910"}, + {file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d10c431073dc6ebceed35ab22948a016cc2b5120963c13a41e38bdde4a7212"}, + {file = "rpds_py-0.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:1b401e8b9aece651512e62c431181e6e83048a651698a727ea0eb0699e9f9b74"}, + {file = "rpds_py-0.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:7618a082c55cf038eede4a918c1001cc8a4411dfe508dc762659bcd48d8f4c6e"}, + {file = "rpds_py-0.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b3226b246facae14909b465061ddcfa2dfeadb6a64f407f24300d42d69bcb1a1"}, + {file = "rpds_py-0.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a8edd467551c1102dc0f5754ab55cd0703431cd3044edf8c8e7d9208d63fa453"}, + {file = "rpds_py-0.10.0-cp38-none-win32.whl", hash = "sha256:71333c22f7cf5f0480b59a0aef21f652cf9bbaa9679ad261b405b65a57511d1e"}, + {file = "rpds_py-0.10.0-cp38-none-win_amd64.whl", hash = "sha256:a8ab1adf04ae2d6d65835995218fd3f3eb644fe20655ca8ee233e2c7270ff53b"}, + {file = "rpds_py-0.10.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:87c93b25d538c433fb053da6228c6290117ba53ff6a537c133b0f2087948a582"}, + {file = "rpds_py-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7996aed3f65667c6dcc8302a69368435a87c2364079a066750a2eac75ea01e"}, + {file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8856aa76839dc234d3469f1e270918ce6bec1d6a601eba928f45d68a15f04fc3"}, + {file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00215f6a9058fbf84f9d47536902558eb61f180a6b2a0fa35338d06ceb9a2e5a"}, + {file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23a059143c1393015c68936370cce11690f7294731904bdae47cc3e16d0b2474"}, + {file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e5c26905aa651cc8c0ddc45e0e5dea2a1296f70bdc96af17aee9d0493280a17"}, + {file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c651847545422c8131660704c58606d841e228ed576c8f1666d98b3d318f89da"}, + {file = "rpds_py-0.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80992eb20755701753e30a6952a96aa58f353d12a65ad3c9d48a8da5ec4690cf"}, + {file = "rpds_py-0.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ffcf18ad3edf1c170e27e88b10282a2c449aa0358659592462448d71b2000cfc"}, + {file = "rpds_py-0.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:08e08ccf5b10badb7d0a5c84829b914c6e1e1f3a716fdb2bf294e2bd01562775"}, + {file = "rpds_py-0.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7150b83b3e3ddaac81a8bb6a9b5f93117674a0e7a2b5a5b32ab31fdfea6df27f"}, + {file = "rpds_py-0.10.0-cp39-none-win32.whl", hash = "sha256:3455ecc46ea443b5f7d9c2f946ce4017745e017b0d0f8b99c92564eff97e97f5"}, + {file = "rpds_py-0.10.0-cp39-none-win_amd64.whl", hash = "sha256:afe6b5a04b2ab1aa89bad32ca47bf71358e7302a06fdfdad857389dca8fb5f04"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:b1cb078f54af0abd835ca76f93a3152565b73be0f056264da45117d0adf5e99c"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8e7e2b3577e97fa43c2c2b12a16139b2cedbd0770235d5179c0412b4794efd9b"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae46a50d235f1631d9ec4670503f7b30405103034830bc13df29fd947207f795"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f869e34d2326e417baee430ae998e91412cc8e7fdd83d979277a90a0e79a5b47"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3d544a614055b131111bed6edfa1cb0fb082a7265761bcb03321f2dd7b5c6c48"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9c2f6ca9774c2c24bbf7b23086264e6b5fa178201450535ec0859739e6f78d"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2da4a8c6d465fde36cea7d54bf47b5cf089073452f0e47c8632ecb9dec23c07"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac00c41dd315d147b129976204839ca9de699d83519ff1272afbe4fb9d362d12"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0155c33af0676fc38e1107679be882077680ad1abb6303956b97259c3177e85e"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:db6585b600b2e76e98131e0ac0e5195759082b51687ad0c94505970c90718f4a"}, + {file = "rpds_py-0.10.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:7b6975d3763d0952c111700c0634968419268e6bbc0b55fe71138987fa66f309"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:6388e4e95a26717b94a05ced084e19da4d92aca883f392dffcf8e48c8e221a24"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:18f87baa20e02e9277ad8960cd89b63c79c05caf106f4c959a9595c43f2a34a5"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f05fc7d832e970047662b3440b190d24ea04f8d3c760e33e7163b67308c878"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:291c9ce3929a75b45ce8ddde2aa7694fc8449f2bc8f5bd93adf021efaae2d10b"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:861d25ae0985a1dd5297fee35f476b60c6029e2e6e19847d5b4d0a43a390b696"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:668d2b45d62c68c7a370ac3dce108ffda482b0a0f50abd8b4c604a813a59e08f"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344b89384c250ba6a4ce1786e04d01500e4dac0f4137ceebcaad12973c0ac0b3"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:885e023e73ce09b11b89ab91fc60f35d80878d2c19d6213a32b42ff36543c291"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:841128a22e6ac04070a0f84776d07e9c38c4dcce8e28792a95e45fc621605517"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:899b5e7e2d5a8bc92aa533c2d4e55e5ebba095c485568a5e4bedbc163421259a"}, + {file = "rpds_py-0.10.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e7947d9a6264c727a556541b1630296bbd5d0a05068d21c38dde8e7a1c703ef0"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4992266817169997854f81df7f6db7bdcda1609972d8ffd6919252f09ec3c0f6"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:26d9fd624649a10e4610fab2bc820e215a184d193e47d0be7fe53c1c8f67f370"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0028eb0967942d0d2891eae700ae1a27b7fd18604cfcb16a1ef486a790fee99e"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9e7e493ded7042712a374471203dd43ae3fff5b81e3de1a0513fa241af9fd41"}, + {file = 
"rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d68a8e8a3a816629283faf82358d8c93fe5bd974dd2704152394a3de4cec22a"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6d5f061f6a2aa55790b9e64a23dfd87b6664ab56e24cd06c78eb43986cb260b"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c7c4266c1b61eb429e8aeb7d8ed6a3bfe6c890a1788b18dbec090c35c6b93fa"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:80772e3bda6787510d9620bc0c7572be404a922f8ccdfd436bf6c3778119464c"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b98e75b21fc2ba5285aef8efaf34131d16af1c38df36bdca2f50634bea2d3060"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:d63787f289944cc4bde518ad2b5e70a4f0d6e2ce76324635359c74c113fd188f"}, + {file = "rpds_py-0.10.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:872f3dcaa8bf2245944861d7311179d2c0c9b2aaa7d3b464d99a7c2e401f01fa"}, + {file = "rpds_py-0.10.0.tar.gz", hash = "sha256:e36d7369363d2707d5f68950a64c4e025991eb0177db01ccb6aa6facae48b69f"}, ] [[package]] @@ -3493,13 +3769,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -3553,7 +3829,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} typing-extensions = ">=4.2.0" [package.extras] @@ -3631,8 +3907,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.22.3", markers = "python_version == \"3.10\" and platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""}, {version = ">=1.18", markers = "python_version != \"3.10\" or platform_system != \"Windows\" or platform_python_implementation == \"PyPy\""}, + {version = ">=1.22.3", markers = "python_version == \"3.10\" and platform_system == \"Windows\" and platform_python_implementation != \"PyPy\""}, ] packaging = ">=21.3" pandas = ">=1.0" @@ -3646,13 +3922,13 @@ docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "n [[package]] name = "streamlit" -version = "1.25.0" +version = "1.26.0" description = "A faster way to build and share data apps" optional = true python-versions = ">=3.8, !=3.9.7" files = [ - {file = 
"streamlit-1.25.0-py2.py3-none-any.whl", hash = "sha256:3c561dca1b5430e73b7f2d66bff1d26103936bb4223912ab563ffee881fccc30"}, - {file = "streamlit-1.25.0.tar.gz", hash = "sha256:8a7c93bee8703869045804afe22e9373c4e974fdb2a3e9abe3b027df3de03119"}, + {file = "streamlit-1.26.0-py2.py3-none-any.whl", hash = "sha256:2bfdac041816e2e1ba27f061d40112afe61e0d4e72d25f354b38ba81107b4cb3"}, + {file = "streamlit-1.26.0.tar.gz", hash = "sha256:25475fb15a3cc9fb184945f3fc936f011998bd8386e0c892febe14c9625bf47a"}, ] [package.dependencies] @@ -3870,51 +4146,51 @@ devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pyte [[package]] name = "urllib3" -version = "2.0.4" +version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "validators" -version = "0.21.2" +version = "0.22.0" description = "Python Data Validation for Humans™" optional = true python-versions = ">=3.8" files = [ - {file = "validators-0.21.2-py3-none-any.whl", hash = "sha256:6ad95131005a9d4c734a69dd4ef08cf66961e61222e60da25a9b5137cecd6fd4"}, - {file = "validators-0.21.2.tar.gz", hash = "sha256:002ba1552076535176824e43149c18c06f6b611bc8b597ddbcf8770bcf5f9f5c"}, + {file = "validators-0.22.0-py3-none-any.whl", hash = "sha256:61cf7d4a62bbae559f2e54aed3b000cea9ff3e2fdbe463f51179b92c58c9585a"}, + {file = "validators-0.22.0.tar.gz", hash = "sha256:77b2689b172eeeb600d9605ab86194641670cdb73b60afd577142a9397873370"}, ] [package.extras] docs-offline = ["myst-parser (>=2.0.0)", "pypandoc-binary (>=1.11)", "sphinx (>=7.1.1)"] -docs-online = ["mkdocs (>=1.5.2)", "mkdocs-material (>=9.1.21)", "mkdocstrings[python] (>=0.22.0)", "pyaml (>=23.7.0)"] +docs-online = ["mkdocs (>=1.5.2)", "mkdocs-git-revision-date-localized-plugin (>=1.2.0)", "mkdocs-material (>=9.2.6)", "mkdocstrings[python] (>=0.22.0)", "pyaml (>=23.7.0)"] hooks = ["pre-commit (>=3.3.3)"] -runner = ["tox (>=4.6.4)"] +package = ["build (>=1.0.0)", "twine (>=4.0.2)"] +runner = ["tox (>=4.11.1)"] sast = ["bandit[toml] (>=1.7.5)"] testing = ["pytest (>=7.4.0)"] -tooling = ["black (>=23.7.0)", "pyright (>=1.1.320)", "ruff (>=0.0.280)"] +tooling = ["black (>=23.7.0)", "pyright (>=1.1.325)", "ruff (>=0.0.287)"] tooling-extras = ["pyaml (>=23.7.0)", "pypandoc-binary (>=1.11)", "pytest (>=7.4.0)"] [[package]] name = "virtualenv" -version = "20.24.3" +version = "20.24.4" description = "Virtual Python 
Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"}, - {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"}, + {file = "virtualenv-20.24.4-py3-none-any.whl", hash = "sha256:29c70bb9b88510f6414ac3e55c8b413a1f96239b6b789ca123437d5e892190cb"}, + {file = "virtualenv-20.24.4.tar.gz", hash = "sha256:772b05bfda7ed3b8ecd16021ca9716273ad9f4467c801f27e83ac73430246dca"}, ] [package.dependencies] @@ -3923,7 +4199,7 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<4" [package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] @@ -3976,6 +4252,17 @@ files = [ {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, ] +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = true +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + [[package]] name = "yarl" version = "1.9.2" @@ -4063,6 +4350,29 @@ files = [ idna = ">=2.0" multidict = ">=4.0" +[[package]] +name = "yfinance" +version = "0.2.28" +description = "Download market data from Yahoo! 
Finance API" +optional = true +python-versions = "*" +files = [ + {file = "yfinance-0.2.28-py2.py3-none-any.whl", hash = "sha256:9697dcbc38d67d1172a905fd5787c65bf049d87e18a709be788485ad7886791c"}, + {file = "yfinance-0.2.28.tar.gz", hash = "sha256:be89cd2a4d55c0a2a6227c32915b355bde59f85c569dca9123320322445901ff"}, +] + +[package.dependencies] +appdirs = ">=1.4.4" +beautifulsoup4 = ">=4.11.1" +frozendict = ">=2.3.4" +html5lib = ">=1.1" +lxml = ">=4.9.1" +multitasking = ">=0.0.7" +numpy = ">=1.16.5" +pandas = ">=1.3.0" +pytz = ">=2022.5" +requests = ">=2.31" + [[package]] name = "zipp" version = "3.16.2" @@ -4079,6 +4389,7 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] +connectors = ["psycopg2", "pymysql", "sqlalchemy"] excel = ["openpyxl"] ggplot = ["ggplot"] google-ai = ["google-cloud-aiplatform", "google-generativeai"] @@ -4092,8 +4403,9 @@ seaborn = ["seaborn"] statsmodels = ["statsmodels"] streamlit = ["streamlit"] text-generation = ["text-generation"] +yfinance = ["yfinance"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.9.7 || >3.9.7,<4.0" -content-hash = "f8bf8fc7c8e491d31f05ca2c8acdf1f0338c1eb8f36fe37d965806157db8c2d0" +content-hash = "857f7beff7301ff88ed3a482fbeca2b65574495428d54082b5aee13c421f2d21" diff --git a/pyproject.toml b/pyproject.toml index a929d9d04..d10ff5392 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,10 @@ streamlit = {version = "^1.23.1", optional = true} beautifulsoup4 = { version = "^4.12.2", optional = true } text-generation = { version = ">=0.6.0", optional = true } openpyxl = { version = "^3.0.7", optional = true } +sqlalchemy = { version = "^2.0.19", optional = true } +pymysql = { version = "^1.1.0", optional = true } +psycopg2 = { version = "^2.9.7", optional = true } +yfinance = { version = "^0.2.28", optional = true } [tool.poetry.group.dev.dependencies] black = "^23.3.0" @@ -47,6 +51,7 @@ coverage = "^7.2.7" google-cloud-aiplatform = "^1.26.1" [tool.poetry.extras] +connectors = ["sqlalchemy", "pymysql", "psycopg2"] google-ai = ["google-generativeai", "google-cloud-aiplatform"] google-sheets = ["beautifulsoup4"] excel = ["openpyxl"] @@ -60,6 +65,7 @@ statsmodels = ["statsmodels"] scikit-learn = ["scikit-learn"] streamlit = ["streamlit"] text-generation = ["fsspec", "huggingface-hub", "text-generation"] +yfinance = ["yfinance"] [tool.poetry.group.docs.dependencies] mkdocs = "1.4.0" diff --git a/tests/connectors/__init__.py b/tests/connectors/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/connectors/test_base.py b/tests/connectors/test_base.py new file mode 100644 index 000000000..c110375d3 --- /dev/null +++ b/tests/connectors/test_base.py @@ -0,0 +1,85 @@ +import pytest +from pandasai.connectors import BaseConnector +from pandasai.helpers import Logger + + +class MockConfig: + def __init__(self, host, port, database, table): + self.host = host + self.port = port + self.database = database + self.table = table + + +# Mock subclass of BaseConnector for testing +class MockConnector(BaseConnector): + def head(self): + pass + + def execute(self): + pass + + @property + def rows_count(self): + return 100 + + @property + def columns_count(self): + return 5 + + @property + def column_hash(self): + return 
"some_hash_value" + + @property + def fallback_name(self): + return "fallback_table_name" + + +# Mock Logger class for testing +class MockLogger(Logger): + def __init__(self): + pass + + +# Create a fixture for the configuration +@pytest.fixture +def mock_config(): + return MockConfig("localhost", 5432, "test_db", "test_table") + + +# Create a fixture for the connector with the configuration +@pytest.fixture +def mock_connector(mock_config): + return MockConnector(mock_config) + + +def test_base_connector_initialization(mock_config, mock_connector): + assert mock_connector._config == mock_config + + +def test_base_connector_path_property(mock_connector): + expected_path = "MockConnector://localhost:5432/test_db/test_table" + assert mock_connector.path == expected_path + + +def test_base_connector_logger_property(mock_connector): + logger = MockLogger() + mock_connector.logger = logger + assert mock_connector.logger == logger + + +def test_base_connector_rows_count_property(mock_connector): + assert mock_connector.rows_count == 100 + + +def test_base_connector_columns_count_property(mock_connector): + assert mock_connector.columns_count == 5 + + +def test_base_connector_column_hash_property(mock_connector): + assert mock_connector.column_hash == "some_hash_value" + + +def test_base_connector_fallback_name_property(mock_connector): + assert mock_connector.fallback_name == "fallback_table_name" diff --git a/tests/connectors/test_sql.py b/tests/connectors/test_sql.py new file mode 100644 index 000000000..f2152aa60 --- /dev/null +++ b/tests/connectors/test_sql.py @@ -0,0 +1,96 @@ +import unittest +import pandas as pd +from unittest.mock import Mock, patch +from pandasai.connectors.base import ConnectorConfig +from pandasai.connectors.sql import SQLConnector + + +class TestSQLConnector(unittest.TestCase): + @patch("pandasai.connectors.sql.create_engine", autospec=True) + @patch("pandasai.connectors.sql.sql", autospec=True) + def setUp(self, mock_sql, mock_create_engine): + # Create a mock engine and connection + self.mock_engine = Mock() + self.mock_connection = Mock() + self.mock_engine.connect.return_value = self.mock_connection + mock_create_engine.return_value = self.mock_engine + + # Define your ConnectorConfig instance here + self.config = ConnectorConfig( + dialect="mysql", + driver="pymysql", + username="your_username", + password="your_password", + host="your_host", + port=443, + database="your_database", + table="your_table", + where=[["column_name", "=", "value"]], + ).dict() + + # Create an instance of SQLConnector + self.connector = SQLConnector(self.config) + + def test_constructor_and_properties(self): + # Test constructor and properties + self.assertEqual(self.connector._config, self.config) + self.assertEqual(self.connector._engine, self.mock_engine) + self.assertEqual(self.connector._connection, self.mock_connection) + self.assertEqual(self.connector._cache_interval, 600) + + def test_repr_method(self): + # Test __repr__ method + expected_repr = ( + "" + ) + self.assertEqual(repr(self.connector), expected_repr) + + def test_build_query_method(self): + # Test _build_query method + query = self.connector._build_query(limit=5, order="RAND()") + expected_query = """SELECT * +FROM your_table +WHERE column_name = :value_0 ORDER BY RAND() ASC + LIMIT :param_1""" + + self.assertEqual(str(query), expected_query) + + @patch("pandasai.connectors.sql.pd.read_sql", autospec=True) + def test_head_method(self, mock_read_sql): + expected_data = pd.DataFrame({"Column1": [1, 2, 3], "Column2": 
diff --git a/tests/connectors/test_yahoo_finance.py b/tests/connectors/test_yahoo_finance.py new file mode 100644 index 000000000..68421d360 --- /dev/null +++ b/tests/connectors/test_yahoo_finance.py @@ -0,0 +1,101 @@ +from unittest.mock import patch +import pandas as pd +import pytest +import yfinance as yf +from pandasai.connectors.yahoo_finance import YahooFinanceConnector + + +@pytest.fixture +def stock_ticker(): + return "AAPL" + + +@pytest.fixture +def where(): + return [["column1", "=", "value1"], ["column2", ">", "value2"]] + + +@pytest.fixture +def cache_interval(): + return 600 + + +@pytest.fixture +def yahoo_finance_config(stock_ticker, where, cache_interval): + return { + "dialect": "yahoo_finance", + "username": "", + "password": "", + "host": "yahoo.finance.com", + "port": 443, + "database": "stock_data", + "table": stock_ticker, + "where": where, + } + + +@pytest.fixture +def yahoo_finance_connector(stock_ticker, where, cache_interval): + return YahooFinanceConnector(stock_ticker, where, cache_interval) + + +def test_head(yahoo_finance_connector): + with patch.object(yf.Ticker, "history") as mock_history: + mock_history.return_value = pd.DataFrame( + { + "Open": [1.0, 2.0, 3.0, 4.0, 5.0], + "High": [2.0, 3.0, 4.0, 5.0, 6.0], + "Low": [0.5, 1.5, 2.5, 3.5, 4.5], + "Close": [1.5, 2.5, 3.5, 4.5, 5.5], + "Volume": [100, 200, 300, 400, 500], + } + ) + expected_result = pd.DataFrame( + { + "Open": [1.0, 2.0, 3.0, 4.0, 5.0], + "High": [2.0, 3.0, 4.0, 5.0, 6.0], + "Low": [0.5, 1.5, 2.5, 3.5, 4.5], + "Close": [1.5, 2.5, 3.5, 4.5, 5.5], + "Volume": [100, 200, 300, 400, 500], + } + ) + assert yahoo_finance_connector.head().equals(expected_result) + + +def test_get_cache_path(yahoo_finance_connector): + with patch("os.path.join") as mock_join: + expected_result = "../AAPL_data.csv" + mock_join.return_value = expected_result + assert yahoo_finance_connector._get_cache_path() == expected_result + + +def test_rows_count(yahoo_finance_connector): + with patch.object(yf.Ticker, "history") as mock_history: + mock_history.return_value = pd.DataFrame( + { + "Open": [1.0, 2.0, 3.0, 4.0, 5.0], + "High": [2.0, 3.0, 4.0, 5.0, 6.0], + "Low": [0.5, 1.5, 2.5, 3.5, 4.5], + "Close": [1.5, 2.5, 3.5, 4.5, 5.5], + "Volume": [100, 200, 300, 400, 500], + } + ) + assert yahoo_finance_connector.rows_count == 5 + + +def test_columns_count(yahoo_finance_connector): 
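+ # Same offline setup as test_rows_count above: yf.Ticker.history is patched to return a canned five-column (OHLCV) frame, so columns_count is expected to be 5.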
with patch.object(yf.Ticker, "history") as mock_history: + mock_history.return_value = pd.DataFrame( + { + "Open": [1.0, 2.0, 3.0, 4.0, 5.0], + "High": [2.0, 3.0, 4.0, 5.0, 6.0], + "Low": [0.5, 1.5, 2.5, 3.5, 4.5], + "Close": [1.5, 2.5, 3.5, 4.5, 5.5], + "Volume": [100, 200, 300, 400, 500], + } + ) + assert yahoo_finance_connector.columns_count == 5 + + +def test_fallback_name(yahoo_finance_connector, stock_ticker): + assert yahoo_finance_connector.fallback_name == stock_ticker diff --git a/tests/prompts/test_correct_error_prompt.py b/tests/prompts/test_correct_error_prompt.py index 4245768fe..aaa29f66f 100644 --- a/tests/prompts/test_correct_error_prompt.py +++ b/tests/prompts/test_correct_error_prompt.py @@ -1,7 +1,9 @@ """Unit tests for the correct error prompt class""" - +import pandas as pd +from pandasai import SmartDataframe from pandasai.prompts import CorrectErrorPrompt +from pandasai.llm.fake import FakeLLM class TestCorrectErrorPrompt: @@ -9,23 +11,30 @@ class TestCorrectErrorPrompt: def test_str_with_args(self): """Test that the __str__ method is implemented""" - assert ( - str( - CorrectErrorPrompt( - conversation="What is the correct code?", - error_message="Error message", - code="df.head()", - answer="df.head(5)", - num_rows=5, - num_columns=5, - df_head="df.head()", - error_returned="error", - ) + + llm = FakeLLM("plt.show()") + dfs = [ + SmartDataframe( + pd.DataFrame({}), + config={"llm": llm}, ) + ] + prompt = CorrectErrorPrompt( + engine="pandas", code="df.head()", error_returned="Error message" + ) + prompt.set_var("dfs", dfs) + prompt.set_var("conversation", "What is the correct code?") + + assert ( + prompt.to_string() == """ -You are provided with a pandas dataframe (df) with 5 rows and 5 columns. -This is the metadata of the dataframe: -df.head(). +You are provided with the following pandas DataFrames with the following metadata: + + +Dataframe dfs[0], with 0 rows and 0 columns. +This is the metadata of the dataframe dfs[0]: + + The user asked the following question: What is the correct code? @@ -34,7 +43,7 @@ def test_str_with_args(self): df.head() It fails with the following error: -error +Error message Correct the python code and return a new python code (do not import anything) that fixes the above mentioned error. Do not generate the same code again. """ # noqa: E501 diff --git a/tests/prompts/test_generate_python_code_prompt.py b/tests/prompts/test_generate_python_code_prompt.py index 889723ddf..abad90140 100644 --- a/tests/prompts/test_generate_python_code_prompt.py +++ b/tests/prompts/test_generate_python_code_prompt.py @@ -27,37 +27,38 @@ def test_str_with_args(self): assert ( prompt.to_string() == """ -You are provided with the following pandas DataFrames with the following metadata: +You are provided with the following pandas DataFrames: + Dataframe dfs[0], with 1 rows and 2 columns. This is the metadata of the dataframe dfs[0]: a,b 1,4 + + +Question + This is the initial python code to be updated: ```python # TODO import all the dependencies required import pandas as pd -# Analyze the data -# 1. Prepare: Preprocessing and cleaning data if necessary -# 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) -# 3. Analyze: Conducting the actual analysis (if the user asks to create a chart save it to an image in exports/charts/temp_chart.png and do not show the chart.) -# 4. 
Output: return a dictionary of: -# - type (possible values "text", "number", "dataframe", "plot") -# - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) -# Example output: { "type": "text", "value": "The average loan amount is $15,000." } def analyze_data(dfs: list[pd.DataFrame]) -> dict: - # Code goes here (do not add comments) - - -# Declare a result variable -result = analyze_data(dfs) + \"\"\" + Analyze the data + 1. Prepare: Preprocessing and cleaning data if necessary + 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) + 3. Analyze: Conducting the actual analysis (if the user asks to plot a chart save it to an image in exports/charts/temp_chart.png and do not show the chart.) + 4. Output: return a dictionary of: + - type (possible values "text", "number", "dataframe", "plot") + - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) + Example output: { "type": "text", "value": "The average loan amount is $15,000." } + \"\"\" ``` -Using the provided dataframes (`dfs`), update the python code based on the last user question: -Question +Using the provided dataframes (`dfs`), update the python code based on the last question in the conversation. Updated code: """ # noqa: E501 @@ -82,37 +83,38 @@ def test_str_with_custom_save_charts_path(self): assert ( prompt.to_string() == """ -You are provided with the following pandas DataFrames with the following metadata: +You are provided with the following pandas DataFrames: + Dataframe dfs[0], with 1 rows and 2 columns. This is the metadata of the dataframe dfs[0]: a,b 1,4 + + +Question + This is the initial python code to be updated: ```python # TODO import all the dependencies required import pandas as pd -# Analyze the data -# 1. Prepare: Preprocessing and cleaning data if necessary -# 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) -# 3. Analyze: Conducting the actual analysis (if the user asks to create a chart save it to an image in custom_path/temp_chart.png and do not show the chart.) -# 4. Output: return a dictionary of: -# - type (possible values "text", "number", "dataframe", "plot") -# - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) -# Example output: { "type": "text", "value": "The average loan amount is $15,000." } def analyze_data(dfs: list[pd.DataFrame]) -> dict: - # Code goes here (do not add comments) - - -# Declare a result variable -result = analyze_data(dfs) + \"\"\" + Analyze the data + 1. Prepare: Preprocessing and cleaning data if necessary + 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) + 3. Analyze: Conducting the actual analysis (if the user asks to plot a chart save it to an image in custom_path/temp_chart.png and do not show the chart.) + 4. Output: return a dictionary of: + - type (possible values "text", "number", "dataframe", "plot") + - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) + Example output: { "type": "text", "value": "The average loan amount is $15,000." } + \"\"\" ``` -Using the provided dataframes (`dfs`), update the python code based on the last user question: -Question +Using the provided dataframes (`dfs`), update the python code based on the last question in the conversation. 
Updated code: """ # noqa: E501 diff --git a/tests/test_codemanager.py b/tests/test_codemanager.py index e2376f068..d318d1bdb 100644 --- a/tests/test_codemanager.py +++ b/tests/test_codemanager.py @@ -69,7 +69,7 @@ def smart_dataframe(self, llm, sample_df): @pytest.fixture def code_manager(self, smart_dataframe: SmartDataframe): - return smart_dataframe._dl._code_manager + return smart_dataframe.lake._code_manager def test_run_code_for_calculations(self, code_manager: CodeManager): code = """def analyze_data(dfs): @@ -174,7 +174,6 @@ def test_get_environment(self, code_manager: CodeManager, smart_dataframe): {"name": "numpy", "alias": "np", "module": "numpy"}, ] - assert smart_dataframe.equals(code_manager._get_environment()["dfs"][0]) assert "pd" in code_manager._get_environment() assert "plt" in code_manager._get_environment() assert "np" in code_manager._get_environment() @@ -290,3 +289,216 @@ def test_handle_error_name_error_not_whitelisted_lib( exc, code, environment, use_error_correction_framework=False ) assert "os" not in environment + + @pytest.mark.parametrize( + "df_name, code", + [ + ( + "df", + """ +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + df = dfs[0] + filtered_df = df.filter( + (pl.col('loan_status') == 'PAIDOFF') & (pl.col('Gender') == 'male') + ) + count = filtered_df.shape[0] + result = {'type': 'number', 'value': count} + return result + +result = analyze_data(dfs) + """, + ), + ( + "foobar", + """ +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + foobar = dfs[0] + filtered_df = foobar.filter( + (pl.col('loan_status') == 'PAIDOFF') & (pl.col('Gender') == 'male') + ) + count = filtered_df.shape[0] + result = {'type': 'number', 'value': count} + return result + +result = analyze_data(dfs) + """, + ), + ], + ) + def test_extract_filters_polars(self, df_name, code, code_manager: CodeManager): + filters = code_manager._extract_filters(code) + assert isinstance(filters, dict) + assert "dfs[0]" in filters + assert isinstance(filters["dfs[0]"], list) + assert len(filters["dfs[0]"]) == 2 + + assert filters["dfs[0]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[0]"][1] == ("Gender", "=", "male") + + def test_extract_filters_polars_multiple_df(self, code_manager: CodeManager): + code = """ +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + df = dfs[0] + filtered_paid_df_male = df.filter( + (pl.col('loan_status') == 'PAIDOFF') & (pl.col('Gender') == 'male') + ) + num_loans_paid_off_male = len(filtered_paid_df) + + df = dfs[1] + filtered_pend_df_male = df.filter( + (pl.col('loan_status') == 'PENDING') & (pl.col('Gender') == 'male') + ) + num_loans_pending_male = len(filtered_pend_df) + + df = dfs[2] + filtered_paid_df_female = df.filter( + (pl.col('loan_status') == 'PAIDOFF') & (pl.col('Gender') == 'female') + ) + num_loans_paid_off_female = len(filtered_pend_df) + + value = num_loans_paid_off + num_loans_pending + num_loans_paid_off_female + result = { + 'type': 'number', + 'value': value + } + return result + +result = analyze_data(dfs) +""" + filters = code_manager._extract_filters(code) + assert isinstance(filters, dict) + assert "dfs[0]" in filters + assert "dfs[1]" in filters + assert "dfs[2]" in filters + assert isinstance(filters["dfs[0]"], list) + assert len(filters["dfs[0]"]) == 2 + assert len(filters["dfs[1]"]) == 2 + + assert filters["dfs[0]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[0]"][1] == ("Gender", "=", "male") + + assert filters["dfs[1]"][0] == ("loan_status", "=", "PENDING") + assert filters["dfs[1]"][1] == 
("Gender", "=", "male") + + assert filters["dfs[2]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[2]"][1] == ("Gender", "=", "female") + + @pytest.mark.parametrize("df_name", ["df", "foobar"]) + def test_extract_filters_col_index(self, df_name, code_manager: CodeManager): + code = f""" +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + {df_name} = dfs[0] + filtered_df = ( + {df_name}[ + ({df_name}['loan_status'] == 'PAIDOFF') & ({df_name}['Gender'] == 'male') + ] + ) + num_loans = len(filtered_df) + result = {{'type': 'number', 'value': num_loans}} + return result + +result = analyze_data(dfs) +""" + filters = code_manager._extract_filters(code) + assert isinstance(filters, dict) + assert "dfs[0]" in filters + assert isinstance(filters["dfs[0]"], list) + assert len(filters["dfs[0]"]) == 2 + + assert filters["dfs[0]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[0]"][1] == ("Gender", "=", "male") + + @pytest.mark.parametrize( + "df_name, code", + [ + ( + "df", + """ +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + df = dfs[0] + filtered_df = df.filter( + (pl.col('loan_status') == 'PAIDOFF') & (pl.col('Gender') == 'male') + ) + count = filtered_df.shape[0] + result = {'type': 'number', 'value': count} + return result + +result = analyze_data(dfs) + """, + ), + ( + "foobar", + """ +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + foobar = dfs[0] + filtered_df = foobar[( + foobar['loan_status'] == 'PAIDOFF' + ) & (df['Gender'] == 'male')] + num_loans = len(filtered_df) + result = {'type': 'number', 'value': num_loans} + return result + +result = analyze_data(dfs) + """, + ), + ], + ) + def test_extract_filters_col_index_non_default_name( + self, df_name, code, code_manager: CodeManager + ): + filters = code_manager._extract_filters(code) + assert isinstance(filters, dict) + assert "dfs[0]" in filters + assert isinstance(filters["dfs[0]"], list) + assert len(filters["dfs[0]"]) == 2 + + assert filters["dfs[0]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[0]"][1] == ("Gender", "=", "male") + + def test_extract_filters_col_index_multiple_df(self, code_manager: CodeManager): + code = """ +def analyze_data(dfs: list[pd.DataFrame]) -> dict: + df = dfs[0] + filtered_paid_df_male = df[( + df['loan_status'] == 'PAIDOFF') & (df['Gender'] == 'male' + )] + num_loans_paid_off_male = len(filtered_paid_df) + + df = dfs[1] + filtered_pend_df_male = df[( + df['loan_status'] == 'PENDING') & (df['Gender'] == 'male' + )] + num_loans_pending_male = len(filtered_pend_df) + + df = dfs[2] + filtered_paid_df_female = df[( + df['loan_status'] == 'PAIDOFF') & (df['Gender'] == 'female' + )] + num_loans_paid_off_female = len(filtered_pend_df) + + value = num_loans_paid_off + num_loans_pending + num_loans_paid_off_female + result = { + 'type': 'number', + 'value': value + } + return result + +result = analyze_data(dfs) +""" + filters = code_manager._extract_filters(code) + assert isinstance(filters, dict) + assert "dfs[0]" in filters + assert "dfs[1]" in filters + assert "dfs[2]" in filters + assert isinstance(filters["dfs[0]"], list) + assert len(filters["dfs[0]"]) == 2 + assert len(filters["dfs[1]"]) == 2 + + assert filters["dfs[0]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[0]"][1] == ("Gender", "=", "male") + + assert filters["dfs[1]"][0] == ("loan_status", "=", "PENDING") + assert filters["dfs[1]"][1] == ("Gender", "=", "male") + + assert filters["dfs[2]"][0] == ("loan_status", "=", "PAIDOFF") + assert filters["dfs[2]"][1] == ("Gender", "=", 
"female") diff --git a/tests/test_pandasai.py b/tests/test_pandasai.py index ec98ee280..9bf00b151 100644 --- a/tests/test_pandasai.py +++ b/tests/test_pandasai.py @@ -63,7 +63,7 @@ def test_call_with_exception(self, _mocked_method, pai, df): def test_run_with_invalid_arguments(self, pai): with pytest.raises(ValueError) as e_info: - pai.run(None, "Question") + pai.run(0, "Question") assert ( str(e_info.value) == "Invalid input data. Must be a Pandas or Polars dataframe." diff --git a/tests/test_smartdataframe.py b/tests/test_smartdataframe.py index ba4863bc8..c71915590 100644 --- a/tests/test_smartdataframe.py +++ b/tests/test_smartdataframe.py @@ -2,6 +2,7 @@ import json import os import sys +from collections import defaultdict from typing import Optional from unittest.mock import patch, Mock from uuid import UUID @@ -107,6 +108,16 @@ def smart_dataframe(self, llm, sample_df, sample_head): sample_head=sample_head, ) + @pytest.fixture + def smart_dataframe_mocked_df(self, llm, sample_df, sample_head): + smart_df = SmartDataframe( + sample_df, + config={"llm": llm, "enable_cache": False}, + sample_head=sample_head, + ) + smart_df._core._df = Mock() + return smart_df + @pytest.fixture def custom_middleware(self): class CustomMiddleware(Middleware): @@ -117,10 +128,10 @@ def run(self, code): return CustomMiddleware def test_init(self, smart_dataframe): - assert smart_dataframe._name is None - assert smart_dataframe._description is None - assert smart_dataframe._engine is not None - assert smart_dataframe._df is not None + assert smart_dataframe._table_name is None + assert smart_dataframe._table_description is None + assert smart_dataframe.engine is not None + assert smart_dataframe.dataframe is not None def test_init_without_llm(self, sample_df): with pytest.raises(LLMNotFoundError): @@ -170,36 +181,37 @@ def test_run_with_privacy_enforcement(self, llm): df.enforce_privacy = True expected_prompt = """ -You are provided with the following pandas DataFrames with the following metadata: +You are provided with the following pandas DataFrames: + Dataframe dfs[0], with 0 rows and 1 columns. This is the metadata of the dataframe dfs[0]: country + + +User 1: How many countries are in the dataframe? + This is the initial python code to be updated: ```python # TODO import all the dependencies required import pandas as pd -# Analyze the data -# 1. Prepare: Preprocessing and cleaning data if necessary -# 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) -# 3. Analyze: Conducting the actual analysis (if the user asks to create a chart save it to an image in exports/charts/temp_chart.png and do not show the chart.) -# 4. Output: return a dictionary of: -# - type (possible values "text", "number", "dataframe", "plot") -# - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) -# Example output: { "type": "text", "value": "The average loan amount is $15,000." } def analyze_data(dfs: list[pd.DataFrame]) -> dict: - # Code goes here (do not add comments) - - -# Declare a result variable -result = analyze_data(dfs) + \"\"\" + Analyze the data + 1. Prepare: Preprocessing and cleaning data if necessary + 2. Process: Manipulating data for analysis (grouping, filtering, aggregating, etc.) + 3. Analyze: Conducting the actual analysis (if the user asks to plot a chart save it to an image in exports/charts/temp_chart.png and do not show the chart.) + 4. 
Output: return a dictionary of: + - type (possible values "text", "number", "dataframe", "plot") + - value (can be a string, a dataframe or the path of the plot, NOT a dictionary) + Example output: { "type": "text", "value": "The average loan amount is $15,000." } + \"\"\" ``` -Using the provided dataframes (`dfs`), update the python code based on the last user question: -User: How many countries are in the dataframe? +Using the provided dataframes (`dfs`), update the python code based on the last question in the conversation. Updated code: """ # noqa: E501 @@ -209,6 +221,47 @@ def analyze_data(dfs: list[pd.DataFrame]) -> dict: last_prompt = df.last_prompt.replace("\r\n", "\n") assert last_prompt == expected_prompt + def test_to_dict(self, smart_dataframe: SmartDataframe): + expected_keys = ("country", "gdp", "happiness_index") + + result_dict = smart_dataframe.to_dict() + + assert isinstance(result_dict, dict) + assert all(key in result_dict for key in expected_keys) + + @pytest.mark.parametrize( + "to_dict_params,expected_passing_params,engine_type", + [ + ({}, {"orient": "dict", "into": dict}, "pandas"), + ({}, {"as_series": True}, "polars"), + ({"orient": "dict"}, {"orient": "dict", "into": dict}, "pandas"), + ( + {"orient": "dict", "into": defaultdict}, + {"orient": "dict", "into": defaultdict}, + "pandas", + ), + ({"as_series": False}, {"as_series": False}, "polars"), + ( + {"as_series": False, "orient": "dict", "into": defaultdict}, + {"as_series": False}, + "polars", + ), + ], + ) + def test_to_dict_passing_parameters( + self, + smart_dataframe_mocked_df: SmartDataframe, + to_dict_params, + engine_type, + expected_passing_params, + ): + smart_dataframe_mocked_df._engine = engine_type + smart_dataframe_mocked_df.to_dict(**to_dict_params) + # noinspection PyUnresolvedReferences + smart_dataframe_mocked_df.dataframe.to_dict.assert_called_once_with( + **expected_passing_params + ) + def test_extract_code(self, llm): code = """```python result = {'happiness': 0.5, 'gdp': 0.8} @@ -328,7 +381,7 @@ def __init__(self, **kwargs): replacement_prompt = CustomPrompt(test="test value") df = SmartDataframe( - pd.DataFrame(), + pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}), config={ "llm": llm, "enable_cache": False, @@ -355,7 +408,7 @@ class ReplacementPrompt(Prompt): }, ) - df._dl._retry_run_code("wrong code", Exception()) + df.lake._retry_run_code("wrong code", Exception()) expected_last_prompt = replacement_prompt.to_string() assert llm.last_prompt == expected_last_prompt @@ -368,13 +421,13 @@ def test_saves_logs(self, smart_dataframe: SmartDataframe): error_msg = "Some error log" critical_msg = "Some critical log" - smart_dataframe._dl._logger.log(debug_msg, level=logging.DEBUG) + smart_dataframe.lake.logger.log(debug_msg, level=logging.DEBUG) - smart_dataframe._dl._logger.log(debug_msg, level=logging.DEBUG) - smart_dataframe._dl._logger.log(info_msg) # INFO should be default - smart_dataframe._dl._logger.log(warning_msg, level=logging.WARNING) - smart_dataframe._dl._logger.log(error_msg, level=logging.ERROR) - smart_dataframe._dl._logger.log(critical_msg, level=logging.CRITICAL) + smart_dataframe.lake.logger.log(debug_msg, level=logging.DEBUG) + smart_dataframe.lake.logger.log(info_msg) # INFO should be default + smart_dataframe.lake.logger.log(warning_msg, level=logging.WARNING) + smart_dataframe.lake.logger.log(error_msg, level=logging.ERROR) + smart_dataframe.lake.logger.log(critical_msg, level=logging.CRITICAL) logs = smart_dataframe.logs assert all("msg" in log and "level" in log for 
log in logs) @@ -385,20 +438,20 @@ def test_saves_logs(self, smart_dataframe: SmartDataframe): assert {"msg": critical_msg, "level": logging.CRITICAL} in logs def test_updates_verbose_config_with_setters(self, smart_dataframe: SmartDataframe): - assert smart_dataframe.config.verbose is False + assert smart_dataframe.verbose is False smart_dataframe.verbose = True assert smart_dataframe.verbose is True - assert smart_dataframe._dl._logger.verbose is True - assert len(smart_dataframe._dl._logger._logger.handlers) == 1 + assert smart_dataframe.lake._logger.verbose is True + assert len(smart_dataframe.lake._logger._logger.handlers) == 1 assert isinstance( - smart_dataframe._dl._logger._logger.handlers[0], logging.StreamHandler + smart_dataframe.lake._logger._logger.handlers[0], logging.StreamHandler ) smart_dataframe.verbose = False assert smart_dataframe.verbose is False - assert smart_dataframe._dl._logger.verbose is False - assert len(smart_dataframe._dl._logger._logger.handlers) == 0 + assert smart_dataframe.lake._logger.verbose is False + assert len(smart_dataframe.lake._logger._logger.handlers) == 0 def test_updates_save_logs_config_with_setters( self, smart_dataframe: SmartDataframe @@ -407,15 +460,15 @@ def test_updates_save_logs_config_with_setters( smart_dataframe.save_logs = False assert smart_dataframe.save_logs is False - assert smart_dataframe._dl._logger.save_logs is False - assert len(smart_dataframe._dl._logger._logger.handlers) == 0 + assert smart_dataframe.lake._logger.save_logs is False + assert len(smart_dataframe.lake._logger._logger.handlers) == 0 smart_dataframe.save_logs = True assert smart_dataframe.save_logs is True - assert smart_dataframe._dl._logger.save_logs is True - assert len(smart_dataframe._dl._logger._logger.handlers) == 1 + assert smart_dataframe.lake._logger.save_logs is True + assert len(smart_dataframe.lake._logger._logger.handlers) == 1 assert isinstance( - smart_dataframe._dl._logger._logger.handlers[0], logging.FileHandler + smart_dataframe.lake._logger._logger.handlers[0], logging.FileHandler ) def test_updates_enable_cache_config_with_setters( @@ -425,14 +478,14 @@ def test_updates_enable_cache_config_with_setters( smart_dataframe.enable_cache = True assert smart_dataframe.enable_cache is True - assert smart_dataframe._dl.enable_cache is True - assert smart_dataframe._dl.cache is not None - assert isinstance(smart_dataframe._dl._cache, Cache) + assert smart_dataframe.lake.enable_cache is True + assert smart_dataframe.lake.cache is not None + assert isinstance(smart_dataframe.lake._cache, Cache) smart_dataframe.enable_cache = False assert smart_dataframe.enable_cache is False - assert smart_dataframe._dl.enable_cache is False - assert smart_dataframe._dl.cache is None + assert smart_dataframe.lake.enable_cache is False + assert smart_dataframe.lake.cache is None def test_updates_configs_with_setters(self, smart_dataframe: SmartDataframe): assert smart_dataframe.callback is None @@ -485,23 +538,23 @@ def test_load_dataframe_from_list(self, smart_dataframe): {"column1": 3, "column2": 6}, ] - smart_dataframe._load_df(input_data) + smart_dataframe._load_dataframe(input_data) - assert isinstance(smart_dataframe._df, pd.DataFrame) + assert isinstance(smart_dataframe.dataframe, pd.DataFrame) def test_load_dataframe_from_dict(self, smart_dataframe): input_data = {"column1": [1, 2, 3], "column2": [4, 5, 6]} - smart_dataframe._load_df(input_data) + smart_dataframe._load_dataframe(input_data) - assert isinstance(smart_dataframe._df, pd.DataFrame) + assert 
isinstance(smart_dataframe.dataframe, pd.DataFrame) def test_load_dataframe_from_pandas_dataframe(self, smart_dataframe): pandas_df = pd.DataFrame({"column1": [1, 2, 3], "column2": [4, 5, 6]}) - smart_dataframe._load_df(pandas_df) + smart_dataframe._load_dataframe(pandas_df) - assert isinstance(smart_dataframe._df, pd.DataFrame) + assert isinstance(smart_dataframe.dataframe, pd.DataFrame) def test_load_dataframe_from_saved_dfs(self, sample_saved_dfs, mocker): expected_df = pd.DataFrame( @@ -522,16 +575,20 @@ def test_load_dataframe_from_saved_dfs(self, sample_saved_dfs, mocker): saved_df_name = "photo" smart_dataframe = SmartDataframe(saved_df_name) - assert isinstance(smart_dataframe._df, pd.DataFrame) - assert smart_dataframe._name == saved_df_name - assert smart_dataframe.original.equals(expected_df) + assert isinstance(smart_dataframe.dataframe, pd.DataFrame) + assert smart_dataframe.table_name == saved_df_name + assert smart_dataframe.dataframe.equals(expected_df) def test_load_dataframe_from_other_dataframe_type(self, smart_dataframe): polars_df = pl.DataFrame({"column1": [1, 2, 3], "column2": [4, 5, 6]}) - smart_dataframe._load_df(polars_df) + smart_dataframe._load_dataframe(polars_df) + + print(smart_dataframe.dataframe) + print(polars_df) - assert smart_dataframe._df is polars_df + assert isinstance(smart_dataframe.dataframe, pl.DataFrame) + assert smart_dataframe.dataframe.frame_equal(polars_df) def test_import_csv_file(self, smart_dataframe, mocker): mocker.patch.object( @@ -580,13 +637,13 @@ def test_invalid_file_format(self, smart_dataframe, file_path): with pytest.raises(ValueError): smart_dataframe._import_from_file(file_path) - def test_import_pandas_series(self, smart_dataframe): + def test_import_pandas_series(self, llm): pandas_series = pd.Series([1, 2, 3]) - smart_dataframe._load_df(pandas_series) + smart_dataframe = SmartDataframe(pandas_series, config={"llm": llm}) - assert isinstance(smart_dataframe._df, pd.DataFrame) - assert smart_dataframe._df.equals(pd.DataFrame({0: [1, 2, 3]})) + assert isinstance(smart_dataframe.dataframe, pd.DataFrame) + assert smart_dataframe.dataframe.equals(pd.DataFrame({0: [1, 2, 3]})) def test_save_pandas_dataframe(self, llm): with open("pandasai.json", "r") as json_file: diff --git a/tests/test_smartdatalake.py b/tests/test_smartdatalake.py index 836491e31..b2a29fc33 100644 --- a/tests/test_smartdatalake.py +++ b/tests/test_smartdatalake.py @@ -71,7 +71,7 @@ def smart_dataframe(self, llm, sample_df): @pytest.fixture def smart_datalake(self, smart_dataframe: SmartDataframe): - return smart_dataframe.datalake + return smart_dataframe.lake @pytest.fixture def custom_middleware(self): @@ -117,8 +117,8 @@ def test_last_result_is_saved(self, _mocked_method, smart_datalake: SmartDatalak def test_middlewares(self, smart_dataframe: SmartDataframe, custom_middleware): middleware = custom_middleware() - smart_dataframe._dl._code_manager._middlewares = [middleware] - assert smart_dataframe._dl.middlewares == [middleware] + smart_dataframe.lake._code_manager._middlewares = [middleware] + assert smart_dataframe.lake.middlewares == [middleware] assert ( smart_dataframe.chat("How many countries are in the dataframe?") == "Overwritten by middleware" @@ -131,11 +131,14 @@ def test_retry_on_error_with_single_df( code = """def analyze_data(df): return { "type": "text", "value": "Hello World" }""" - smart_dataframe._get_head_csv = Mock( - return_value="""country,gdp,happiness_index -China,0654881226,6.66 -Japan,9009692259,7.16 -Spain,8446903488,6.38""" + 
smart_dataframe._get_sample_head = Mock( + return_value=pd.DataFrame( + { + "country": ["China", "Japan", "Spain"], + "gdp": [654881226, 9009692259, 8446903488], + "happiness_index": [6.66, 7.16, 6.38], + } + ) ) smart_datalake._retry_run_code( @@ -146,12 +149,16 @@ def test_retry_on_error_with_single_df( assert ( smart_datalake.last_prompt == """ -You are provided with a pandas dataframe (df) with 10 rows and 3 columns. -This is the metadata of the dataframe: +You are provided with the following pandas DataFrames with the following metadata: + + +Dataframe dfs[0], with 10 rows and 3 columns. +This is the metadata of the dataframe dfs[0]: country,gdp,happiness_index -China,0654881226,6.66 +China,654881226,6.66 Japan,9009692259,7.16 -Spain,8446903488,6.38. +Spain,8446903488,6.38 + The user asked the following question: