From bad80c347e0ca9c6136701580f8a1344305a616d Mon Sep 17 00:00:00 2001 From: Gabriele Venturi Date: Mon, 18 Mar 2024 03:03:04 -0700 Subject: [PATCH] feat: launch BambooLLM beta --- README.md | 27 +++--- docs/custom-response.md | 28 +++--- docs/examples.md | 132 +++++++++++++++----------- docs/getting-started.md | 38 +++++--- docs/index.md | 13 +-- docs/skills.md | 23 ++--- examples/agent.py | 10 +- examples/from_airtable.py | 11 ++- examples/from_csv.py | 15 +-- examples/from_databricks.py | 11 ++- examples/from_dataframe.py | 12 ++- examples/from_excel.py | 10 +- examples/from_google_sheets.py | 11 ++- examples/from_googlebigquery.py | 18 ++-- examples/from_parquet.py | 8 +- examples/from_snowflake.py | 11 ++- examples/from_sql.py | 12 ++- examples/from_yahoo_finance.py | 11 ++- examples/save_chart.py | 16 ++-- examples/show_chart.py | 12 ++- examples/skills_example.py | 9 +- examples/sql_direct_config.py | 9 +- examples/using_flask.py | 14 +-- examples/using_pandasai_log_server.py | 12 +-- examples/using_pipeline.py | 82 ---------------- examples/using_streamlit.py | 14 +-- examples/using_train.py | 6 +- examples/using_workspace_env.py | 14 +-- examples/with_azure.py | 5 +- examples/with_multiple_dataframes.py | 15 ++- examples/with_name_and_description.py | 15 +-- examples/with_privacy_enforced.py | 13 +-- examples/with_vertexai.py | 6 +- pandasai.json | 2 +- pandasai/llm/__init__.py | 2 + pandasai/llm/bamboo_llm.py | 9 +- pandasai/schemas/df_config.py | 4 +- 37 files changed, 317 insertions(+), 343 deletions(-) delete mode 100644 examples/using_pipeline.py diff --git a/README.md b/README.md index 09051d96e..7e4205c7b 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,7 @@ If you are interested in managed PandasAI Cloud or self-hosted Enterprise Offeri ### Ask questions ```python +import os import pandas as pd from pandasai import SmartDataframe @@ -55,12 +56,12 @@ sales_by_country = pd.DataFrame({ "sales": [5000, 3200, 2900, 4100, 2300, 2100, 2500, 2600, 4500, 7000] }) -# Instantiate a LLM -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -df = SmartDataframe(sales_by_country, config={"llm": llm}) -df.chat('Which are the top 5 countries by sales?') +agent = SmartDataframe(sales_by_country) +agent.chat('Which are the top 5 countries by sales?') ``` ``` @@ -72,7 +73,7 @@ China, United States, Japan, Germany, Australia Or you can ask more complex questions: ```python -df.chat( +agent.chat( "What is the total sales for the top 3 countries by sales?" ) ``` @@ -86,7 +87,7 @@ The total sales for the top 3 countries by sales is 16500. You can also ask PandasAI to generate charts for you: ```python -df.chat( +agent.chat( "Plot the histogram of countries showing for each the gdp, using different colors for each bar", ) ``` @@ -98,9 +99,9 @@ df.chat( You can also pass in multiple dataframes to PandasAI and ask questions relating them. ```python +import os import pandas as pd -from pandasai import SmartDatalake -from pandasai.llm import OpenAI +from pandasai import Agent employees_data = { 'EmployeeID': [1, 2, 3, 4, 5], @@ -116,10 +117,12 @@ salaries_data = { employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -llm = OpenAI() -dl = SmartDatalake([employees_df, salaries_df], config={"llm": llm}) -dl.chat("Who gets paid the most?") +agent = Agent([employees_df, salaries_df]) +agent.chat("Who gets paid the most?") ``` ``` diff --git a/docs/custom-response.md b/docs/custom-response.md index c12a065d6..b1f3f0c8e 100644 --- a/docs/custom-response.md +++ b/docs/custom-response.md @@ -8,10 +8,9 @@ You have the option to provide a custom parser, such as `StreamlitResponse`, to ```python +import os import pandas as pd - from pandasai import SmartDatalake -from pandasai.llm import OpenAI from pandasai.responses.response_parser import ResponseParser # This class overrides default behaviour how dataframe is returned @@ -41,13 +40,16 @@ salaries_df = pd.DataFrame( } ) -llm = OpenAI("OPENAI-KEY") -dl = SmartDatalake( +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = SmartDatalake( [employees_df, salaries_df], config={"llm": llm, "verbose": True, "response_parser": PandasDataFrame}, ) -response = dl.chat("Return a dataframe of name against salaries") +response = agent.chat("Return a dataframe of name against salaries") # Returns the response as Pandas DataFrame ``` @@ -56,13 +58,11 @@ response = dl.chat("Return a dataframe of name against salaries") ```python +import os import pandas as pd - from pandasai import SmartDatalake -from pandasai.llm import OpenAI from pandasai.responses.streamlit_response import StreamlitResponse - employees_df = pd.DataFrame( { "EmployeeID": [1, 2, 3, 4, 5], @@ -78,11 +78,15 @@ salaries_df = pd.DataFrame( } ) -llm = OpenAI() -dl = SmartDatalake( + +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = SmartDatalake( [employees_df, salaries_df], - config={"llm": llm, "verbose": True, "response_parser": StreamlitResponse}, + config={"verbose": True, "response_parser": StreamlitResponse}, ) -dl.chat("Plot salaries against name") +agent.chat("Plot salaries against name") ``` diff --git a/docs/examples.md b/docs/examples.md index d34e1c9de..5ec80542d 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -8,9 +8,9 @@ More [examples](https://github.com/Sinaptik-AI/pandas-ai/tree/main/examples) are Using PandasAI with a Pandas DataFrame ```python +import os from pandasai import SmartDataframe import pandas as pd -from pandasai.llm import OpenAI # pandas dataframe sales_by_country = pd.DataFrame({ @@ -18,12 +18,15 @@ sales_by_country = pd.DataFrame({ "sales": [5000, 3200, 2900, 4100, 2300, 2100, 2500, 2600, 4500, 7000] }) -llm = OpenAI(api_token="YOUR_API_TOKEN") + +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" # convert to SmartDataframe -df = SmartDataframe(sales_by_country, config={"llm": llm}) +sdf = SmartDataframe(sales_by_country) -response = df.chat('Which are the top 5 countries by sales?') +response = sdf.chat('Which are the top 5 countries by sales?') print(response) # Output: China, United States, Japan, Germany, Australia ``` @@ -33,15 +36,17 @@ print(response) Example of using PandasAI with a CSV file ```python +import os from pandasai import SmartDataframe -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. 
+# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" # You can instantiate a SmartDataframe with a path to a CSV file -df = SmartDataframe("data/Loan payments data.csv", config={"llm": llm}) +sdf = SmartDataframe("data/Loan payments data.csv") -response = df.chat("How many loans are from men and have been paid off?") +response = sdf.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. ``` @@ -57,15 +62,17 @@ pip install pandasai[excel] Then, you can use PandasAI with an Excel file as follows: ```python +import os from pandasai import SmartDataframe -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" # You can instantiate a SmartDataframe with a path to an Excel file -df = SmartDataframe("data/Loan payments data.xlsx", config={"llm": llm}) +sdf = SmartDataframe("data/Loan payments data.xlsx") -response = df.chat("How many loans are from men and have been paid off?") +response = sdf.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. ``` @@ -75,15 +82,17 @@ print(response) Example of using PandasAI with a Parquet file ```python +import os from pandasai import SmartDataframe -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" # You can instantiate a SmartDataframe with a path to a Parquet file -df = SmartDataframe("data/Loan payments data.parquet", config={"llm": llm}) +sdf = SmartDataframe("data/Loan payments data.parquet") -response = df.chat("How many loans are from men and have been paid off?") +response = sdf.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. ``` @@ -99,14 +108,16 @@ pip install pandasai[google-sheet] Then, you can use PandasAI with a Google Sheet as follows: ```python +import os from pandasai import SmartDataframe -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" # You can instantiate a SmartDataframe with a path to a Google Sheet -df = SmartDataframe("https://docs.google.com/spreadsheets/d/fake/edit#gid=0", config={"llm": llm}) -response = df.chat("How many loans are from men and have been paid off?") +sdf = SmartDataframe("https://docs.google.com/spreadsheets/d/fake/edit#gid=0") +response = sdf.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. ``` @@ -124,12 +135,14 @@ pip install pandasai[modin] Then, you can use PandasAI with a Modin DataFrame as follows: ```python +import os import pandasai from pandasai import SmartDataframe import modin.pandas as pd -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" sales_by_country = pd.DataFrame({ "country": ["United States", "United Kingdom", "France", "Germany", "Italy", "Spain", "Canada", "Australia", "Japan", "China"], @@ -137,8 +150,8 @@ sales_by_country = pd.DataFrame({ }) pandasai.set_pd_engine("modin") -df = SmartDataframe(df, config={"llm": llm}) -response = df.chat('Which are the top 5 countries by sales?') +sdf = SmartDataframe(sales_by_country) +response = sdf.chat('Which are the top 5 countries by sales?') print(response) # Output: China, United States, Japan, Germany, Australia @@ -157,11 +170,13 @@ pip install pandasai[polars] Then, you can use PandasAI with a Polars DataFrame as follows: ```python +import os from pandasai import SmartDataframe import polars as pl -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" # You can instantiate a SmartDataframe with a Polars DataFrame sales_by_country = pl.DataFrame({ @@ -169,8 +184,8 @@ sales_by_country = pl.DataFrame({ "sales": [5000, 3200, 2900, 4100, 2300, 2100, 2500, 2600, 4500, 7000] }) -df = SmartDataframe(df, config={"llm": llm}) -response = df.chat("How many loans are from men and have been paid off?") +sdf = SmartDataframe(sales_by_country) +response = sdf.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. ``` @@ -180,13 +195,15 @@ print(response) Example of using PandasAI to plot a chart from a Pandas DataFrame ```python +import os from pandasai import SmartDataframe -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -df = SmartDataframe("data/Countries.csv", config={"llm": llm}) -response = df.chat( +sdf = SmartDataframe("data/Countries.csv") +response = sdf.chat( "Plot the histogram of countries showing for each the gpd, using different colors for each bar", ) print(response) @@ -201,17 +218,18 @@ Below is the example to Save Charts with user defined location. ```python import os from pandasai import SmartDataframe -from pandasai.llm import OpenAI user_defined_path = os.getcwd() -llm = OpenAI(api_token="YOUR_API_TOKEN") -df = SmartDataframe("data/Countries.csv", config={ +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +sdf = SmartDataframe("data/Countries.csv", config={ "save_charts": True, "save_charts_path": user_defined_path, - "llm": llm }) -response = df.chat( +response = sdf.chat( "Plot the histogram of countries showing for each the gpd," " using different colors for each bar", ) @@ -224,9 +242,9 @@ print(response) Example of using PandasAI with multiple dataframes. In order to use multiple dataframes as a data source, you need to use a `SmartDatalake` instead of a `SmartDataframe`. 
You can instantiate a `SmartDatalake` as follows: ```python +import os from pandasai import SmartDatalake import pandas as pd -from pandasai.llm import OpenAI employees_data = { 'EmployeeID': [1, 2, 3, 4, 5], @@ -242,10 +260,12 @@ salaries_data = { employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -df = SmartDatalake([employees_df, salaries_df], config={"llm": llm}) -response = df.chat("Who gets paid the most?") +lake = SmartDatalake([employees_df, salaries_df]) +response = lake.chat("Who gets paid the most?") print(response) # Output: Olivia gets paid the most. ``` @@ -265,11 +285,10 @@ With the chat agent, you can engage in dynamic conversations where the agent ret Feel free to initiate conversations, seek clarifications, and explore explanations to enhance your interactions with the chat agent! ```python +import os import pandas as pd from pandasai import Agent -from pandasai.llm.openai import OpenAI - employees_data = { "EmployeeID": [1, 2, 3, 4, 5], "Name": ["John", "Emma", "Liam", "Olivia", "William"], @@ -285,8 +304,11 @@ employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) -llm = OpenAI("OpenAI_API_KEY") -agent = Agent([employees_df, salaries_df], config={"llm": llm}, memory_size=10) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = Agent([employees_df, salaries_df], memory_size=10) query = "Who gets paid the most?" @@ -315,15 +337,15 @@ Some examples of descriptions can be: - Act as a data analyst. Every time I ask you a question, you should provide the code to visualize the answer using plotly ```python +import os from pandasai import Agent -from pandasai.llm.openai import OpenAI - -llm = OpenAI("YOUR_API_KEY") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" agent = Agent( "data.csv", - config={"llm": llm}, description="You are a data analysis agent. Your main goal is to help non-technical users to analyze data", ) ``` @@ -333,12 +355,12 @@ agent = Agent( You can add customs functions for the agent to use, allowing the agent to expand its capabilities. These custom functions can be seamlessly integrated with the agent's skills, enabling a wide range of user-defined operations. ```python +import os import pandas as pd from pandasai import Agent - -from pandasai.llm.openai import OpenAI from pandasai.skills import skill + employees_data = { "EmployeeID": [1, 2, 3, 4, 5], "Name": ["John", "Emma", "Liam", "Olivia", "William"], @@ -369,14 +391,14 @@ def plot_salaries(merged_df: pd.DataFrame): plt.savefig("temp_chart.png") plt.close() +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -llm = OpenAI("YOUR_API_KEY") -agent = Agent([employees_df, salaries_df], config={"llm": llm}, memory_size=10) - +agent = Agent([employees_df, salaries_df], memory_size=10) agent.add_skills(plot_salaries) # Chat with the agent response = agent.chat("Plot the employee salaries against names") print(response) - ``` diff --git a/docs/getting-started.md b/docs/getting-started.md index af5211816..b5d812a5e 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -43,9 +43,9 @@ You can replace `extra-dependency-name` with any of the following: The `SmartDataframe` class is the main class of `pandasai`. It is used to interact with a single dataframe. Below is simple example to get started with `pandasai`. ```python +import os import pandas as pd from pandasai import SmartDataframe -from pandasai.llm import OpenAI # Sample DataFrame sales_by_country = pd.DataFrame({ @@ -53,10 +53,11 @@ sales_by_country = pd.DataFrame({ "sales": [5000, 3200, 2900, 4100, 2300, 2100, 2500, 2600, 4500, 7000] }) -# Instantiate a LLM -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -df = SmartDataframe(sales_by_country, config={"llm": llm}) +df = SmartDataframe(sales_by_country) df.chat('Which are the top 5 countries by sales?') # Output: China, United States, Japan, Germany, Australia ``` @@ -65,6 +66,14 @@ If you want to learn more about the `SmartDataframe` class, check out this video [![Intro to SmartDataframe](https://cdn.loom.com/sessions/thumbnails/1ec1b8fbaa0e4ae0ab99b728b8b05fdb-00001.jpg)](https://www.loom.com/embed/1ec1b8fbaa0e4ae0ab99b728b8b05fdb?sid=7370854b-57c3-4f00-801b-69811a98d970 "Intro to the SmartDataframe") +### Hot to generate a BambooLLM API Token + +In order to use BambooLLM, you need to generate an API token. Follow these simple steps to generate a token with [PandaBI](https://pandabi.ai): + +1. Go to https://pandabi.ai and signup with your email address or connect your Google Account. +2. Go to the API section on the settings page. +3. Select Create new API key. + ### How to generate an OpenAI API Token In order to use the OpenAI language model, users are required to generate a token. Follow these simple steps to generate a token with [openai](https://platform.openai.com/overview): @@ -91,9 +100,9 @@ PandasAI also supports queries with multiple dataframes. To perform such queries Similarly to a `SmartDataframe`, you can instantiate a `SmartDatalake` as follows: ```python +import os import pandas as pd from pandasai import SmartDatalake -from pandasai.llm import OpenAI employees_data = { 'EmployeeID': [1, 2, 3, 4, 5], @@ -109,10 +118,12 @@ salaries_data = { employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -llm = OpenAI() -dl = SmartDatalake([employees_df, salaries_df], config={"llm": llm}) -dl.chat("Who gets paid the most?") +lake = SmartDatalake([employees_df, salaries_df]) +lake.chat("Who gets paid the most?") # Output: Olivia gets paid the most ``` @@ -127,10 +138,9 @@ While a `SmartDataframe` or a `SmartDatalake` can be used to answer a single que To instantiate an agent, you can use the following code: ```python +import os from pandasai import Agent import pandas as pd -from pandasai.llm import OpenAI - # Sample DataFrames sales_by_country = pd.DataFrame({ @@ -140,8 +150,12 @@ sales_by_country = pd.DataFrame({ "deals_closed": [120, 70, 60, 80, 50, 40, 30, 20, 100, 110] }) -llm = OpenAI(api_token="YOUR_API_TOKEN") -agent = Agent([sales_by_country], config={"llm": llm}) + +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = Agent(sales_by_country) agent.chat('Which are the top 5 countries by sales?') # Output: China, United States, Japan, Germany, Australia ``` diff --git a/docs/index.md b/docs/index.md index e9ded5348..d4638d63e 100644 --- a/docs/index.md +++ b/docs/index.md @@ -44,8 +44,9 @@ pip install pandasai Once you have installed PandasAI, you can start using it by importing the `SmartDataframe` class and instantiating it with your data. You can then use the `chat` method to ask questions to your data in natural language. ```python +import os import pandas as pd -from pandasai import SmartDataframe +from pandasai import Agent # Sample DataFrame sales_by_country = pd.DataFrame({ @@ -53,12 +54,12 @@ sales_by_country = pd.DataFrame({ "sales": [5000, 3200, 2900, 4100, 2300, 2100, 2500, 2600, 4500, 7000] }) -# Instantiate a LLM -from pandasai.llm import OpenAI -llm = OpenAI(api_token="YOUR_API_TOKEN") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -df = SmartDataframe(sales_by_country, config={"llm": llm}) -df.chat('Which are the top 5 countries by sales?') +agent = Agent(sales_by_country) +agent.chat('Which are the top 5 countries by sales?') ## Output # China, United States, Japan, Germany, Australia ``` diff --git a/docs/skills.md b/docs/skills.md index 4c5f1bd51..d25a6397e 100644 --- a/docs/skills.md +++ b/docs/skills.md @@ -5,11 +5,9 @@ You can add customs functions for the agent to use, allowing the agent to expand ## Example Usage ```python - +import os import pandas as pd from pandasai import Agent - -from pandasai.llm.openai import OpenAI from pandasai.skills import skill employees_data = { @@ -44,26 +42,24 @@ def plot_salaries(names: list[str], salaries: list[int]): plt.title("Employee Salaries") plt.xticks(rotation=45) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" - -llm = OpenAI("YOUR_API_KEY") -agent = Agent([employees_df, salaries_df], config={"llm": llm}, memory_size=10) - +agent = Agent([employees_df, salaries_df], memory_size=10) agent.add_skills(plot_salaries) # Chat with the agent response = agent.chat("Plot the employee salaries against names") - ``` ## Add Streamlit Skill ```python +import os import pandas as pd from pandasai import Agent - -from pandasai.llm.openai import OpenAI from pandasai.skills import skill import streamlit as st @@ -101,10 +97,11 @@ def plot_salaries(names: list[str], salaries: list[int]): fig = plt.gcf() st.pyplot(fig) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" -llm = OpenAI("YOUR_API_KEY") -agent = Agent([employees_df, salaries_df], config={"llm": llm}, memory_size=10) - +agent = Agent([employees_df, salaries_df], memory_size=10) agent.add_skills(plot_salaries) # Chat with the agent diff --git a/examples/agent.py b/examples/agent.py index 095a1ab96..d941dc871 100644 --- a/examples/agent.py +++ b/examples/agent.py @@ -1,7 +1,6 @@ +import os import pandas as pd - from pandasai import Agent -from pandasai.llm.openai import OpenAI employees_data = { "EmployeeID": [1, 2, 3, 4, 5], @@ -17,9 +16,11 @@ employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -llm = OpenAI("OPEN_API_KEY") -agent = Agent([employees_df, salaries_df], config={"llm": llm}, memory_size=10) +agent = Agent([employees_df, salaries_df], memory_size=10) # Chat with the agent response = agent.chat("Who gets paid the most?") @@ -35,7 +36,6 @@ response = agent.explain() print(response) - # Train with data queries = [ "Display the distribution of ages in the population.", diff --git a/examples/from_airtable.py b/examples/from_airtable.py index cf604ec09..055427f0c 100644 --- a/examples/from_airtable.py +++ b/examples/from_airtable.py @@ -1,6 +1,6 @@ +import os from pandasai import Agent from pandasai.connectors import AirtableConnector -from pandasai.llm import OpenAI airtable_connectors = AirtableConnector( config={ @@ -15,8 +15,11 @@ } ) -llm = OpenAI("OPENAI_API_KEY") -df = Agent([airtable_connectors], config={"llm": llm}) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -response = df.chat("How many rows are there in data ?") +agent = Agent(airtable_connectors) + +response = agent.chat("How many rows are there in data ?") print(response) diff --git a/examples/from_csv.py b/examples/from_csv.py index a5bfabecd..5fc7a0548 100644 --- a/examples/from_csv.py +++ b/examples/from_csv.py @@ -1,13 +1,14 @@ """Example of using PandasAI with a CSV file.""" +import os from pandasai import Agent -from pandasai.llm import OpenAI -llm = OpenAI() -df = Agent( - ["examples/data/Loan payments data.csv"], - config={"llm": llm, "enable_cache": False, "max_retries": 1}, -) -response = df.chat("How many loans are from men and have been paid off?") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "your-api-key" + +# llm = OpenAI() +agent = Agent("examples/data/Loan payments data.csv") +response = agent.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. diff --git a/examples/from_databricks.py b/examples/from_databricks.py index 190c03839..5c1dec1a3 100644 --- a/examples/from_databricks.py +++ b/examples/from_databricks.py @@ -1,10 +1,10 @@ """Example of using PandasAI with a DataBricks""" +import os from pandasai import Agent # A license might be required for using Snowflake with PandasAI from pandasai.ee.connectors import DatabricksConnector -from pandasai.llm import OpenAI databricks_connector = DatabricksConnector( config={ @@ -22,8 +22,11 @@ } ) -llm = OpenAI("OPEN_API_KEY") -df = Agent([databricks_connector], config={"llm": llm}) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -response = df.chat("How many people from the United states?") +agent = Agent(databricks_connector) + +response = agent.chat("How many people from the United states?") print(response) diff --git a/examples/from_dataframe.py b/examples/from_dataframe.py index fdaf727b6..1c08fbb19 100644 --- a/examples/from_dataframe.py +++ b/examples/from_dataframe.py @@ -1,15 +1,17 @@ """Example of using PandasAI with a Pandas DataFrame""" +import os import pandas as pd from data.sample_dataframe import dataframe - from pandasai import Agent -from pandasai.llm import OpenAI df = pd.DataFrame(dataframe) -llm = OpenAI() -df = Agent([pd.DataFrame(dataframe)], config={"llm": llm}) -response = df.chat("Calculate the sum of the gdp of north american countries") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" + +agent = Agent(dataframe) +response = agent.chat("Calculate the sum of the gdp of north american countries") print(response) # Output: 20901884461056 diff --git a/examples/from_excel.py b/examples/from_excel.py index 7b7f4b579..f1db4d857 100644 --- a/examples/from_excel.py +++ b/examples/from_excel.py @@ -1,11 +1,13 @@ """Example of using PandasAI with am Excel file.""" +import os from pandasai import Agent -from pandasai.llm import OpenAI -llm = OpenAI() +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -df = Agent(["examples/data/Loan payments data.xlsx"], config={"llm": llm}) -response = df.chat("How many loans are from men and have been paid off?") +agent = Agent("examples/data/Loan payments data.xlsx") +response = agent.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. 
diff --git a/examples/from_google_sheets.py b/examples/from_google_sheets.py index fda33b89e..9751c7e97 100644 --- a/examples/from_google_sheets.py +++ b/examples/from_google_sheets.py @@ -1,13 +1,16 @@ """Example of using PandasAI with am Excel file.""" +import os from pandasai import Agent -from pandasai.llm import OpenAI # Betas & Bludgers Writing Competitions List (source: https://heystacks.com/?type=sheets&tags=data) google_sheets_url = "https://docs.google.com/spreadsheets/d/1VKkhugv2eF87AoOm4OXjI0sQEHrNhxy6gPL3F7xyw7g/edit#gid=115719017" # noqa E501 -llm = OpenAI() -df = Agent([google_sheets_url], config={"llm": llm}) -response = df.chat("How many short stories are there?") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" + +agent = Agent(google_sheets_url) +response = agent.chat("How many short stories are there?") print(response) # Output: 35 diff --git a/examples/from_googlebigquery.py b/examples/from_googlebigquery.py index 6beac7b91..c283403b6 100644 --- a/examples/from_googlebigquery.py +++ b/examples/from_googlebigquery.py @@ -1,18 +1,16 @@ +import os import base64 import json - from pandasai import SmartDataframe # A license might be required for using Snowflake with PandasAI from pandasai.ee.connectors import GoogleBigQueryConnector -from pandasai.llm import OpenAI # ENV's # BIG_QUERY_DATABASE # KEYFILE_PATH # PROJECT_ID - # EXAMPLE 1 bigquery_connectors = GoogleBigQueryConnector( config={ @@ -24,10 +22,13 @@ } ) -llm = OpenAI("OPEN-API_KEY") -df = SmartDataframe(bigquery_connectors, config={"llm": llm}) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" + +sdf = SmartDataframe(bigquery_connectors) -response = df.chat("How many rows are there in data ?") +response = sdf.chat("How many rows are there in data ?") print(response) # EXAMPLE 2 @@ -50,8 +51,7 @@ } ) -llm = OpenAI("OPEN-API_KEY") -df = SmartDataframe(bigquery_connectors, config={"llm": llm}) +sdf = SmartDataframe(bigquery_connectors) -response = df.chat("How many rows are there in data ?") +response = sdf.chat("How many rows are there in data ?") print(response) diff --git a/examples/from_parquet.py b/examples/from_parquet.py index 966a1c147..ea2e46662 100644 --- a/examples/from_parquet.py +++ b/examples/from_parquet.py @@ -1,11 +1,13 @@ """Example of using PandasAI with a Parquet file.""" +import os from pandasai import Agent -from pandasai.llm import OpenAI -llm = OpenAI() +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -agent = Agent(["examples/data/Loan payments data.parquet"], config={"llm": llm}) +agent = Agent("examples/data/Loan payments data.parquet") response = agent.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. 
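Each of these examples notes that the key can also be configured in a `.env` file. A minimal sketch of that workflow, assuming the key only needs to end up in the process environment as `PANDASAI_API_KEY`, and using `python-dotenv` as one way to load it (swap in whatever mechanism your deployment already uses):

```python
import os

from dotenv import load_dotenv  # assumes python-dotenv is installed
from pandasai import Agent

# .env (kept out of version control) contains a single line:
# PANDASAI_API_KEY=YOUR_API_KEY
load_dotenv()  # copies the entries of .env into os.environ

if "PANDASAI_API_KEY" not in os.environ:
    raise RuntimeError("Set PANDASAI_API_KEY in .env or the shell before running")

agent = Agent("examples/data/Loan payments data.parquet")
print(agent.chat("How many loans are from men and have been paid off?"))
```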
diff --git a/examples/from_snowflake.py b/examples/from_snowflake.py index ad38aa157..b95199557 100644 --- a/examples/from_snowflake.py +++ b/examples/from_snowflake.py @@ -1,10 +1,10 @@ """Example of using PandasAI with a Snowflake""" +import os from pandasai import Agent # A license might be required for using Snowflake with PandasAI from pandasai.ee.connectors import SnowFlakeConnector -from pandasai.llm import OpenAI snowflake_connector = SnowFlakeConnector( config={ @@ -23,8 +23,11 @@ } ) -llm = OpenAI(api_token="OPEN_API_KEY") -df = Agent([snowflake_connector], config={"llm": llm}) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -response = df.chat("How many records has status 'F'?") +agent = Agent(snowflake_connector) + +response = agent.chat("How many records has status 'F'?") print(response) diff --git a/examples/from_sql.py b/examples/from_sql.py index 3a9bbae64..e042491b0 100644 --- a/examples/from_sql.py +++ b/examples/from_sql.py @@ -1,8 +1,8 @@ """Example of using PandasAI with a CSV file.""" +import os from pandasai import Agent from pandasai.connectors import MySQLConnector, PostgreSQLConnector, SqliteConnector -from pandasai.llm import OpenAI # With a MySQL database loan_connector = MySQLConnector( @@ -47,8 +47,12 @@ "where": [["status", "=", "pending"]], } ) -llm = OpenAI() -df = Agent([loan_connector, payment_connector, invoice_connector], config={"llm": llm}) -response = df.chat("How many people from the United states?") + +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" + +agent = Agent([loan_connector, payment_connector, invoice_connector]) +response = agent.chat("How many people from the United states?") print(response) # Output: 247 loans have been paid off by men. diff --git a/examples/from_yahoo_finance.py b/examples/from_yahoo_finance.py index 53ecfcb58..3fddcfbca 100644 --- a/examples/from_yahoo_finance.py +++ b/examples/from_yahoo_finance.py @@ -1,11 +1,14 @@ +import os from pandasai import Agent from pandasai.connectors.yahoo_finance import YahooFinanceConnector -from pandasai.llm import OpenAI yahoo_connector = YahooFinanceConnector("MSFT") -llm = OpenAI(api_token="OPEN_API_KEY") -df = Agent([yahoo_connector], config={"llm": llm}) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" -response = df.chat("What is the closing price for yesterday?") +agent = Agent(yahoo_connector) + +response = agent.chat("What is the closing price for yesterday?") print(response) diff --git a/examples/save_chart.py b/examples/save_chart.py index c246c0d20..b6f263890 100644 --- a/examples/save_chart.py +++ b/examples/save_chart.py @@ -1,34 +1,32 @@ """Example of using PandasAI to generate and save a chart from a Pandas DataFrame""" import os - import pandas as pd from data.sample_dataframe import dataframe - from pandasai import Agent from pandasai.helpers import path -from pandasai.llm import OpenAI df = pd.DataFrame(dataframe) -llm = OpenAI() - try: user_defined_path = path.find_project_root() except ValueError: user_defined_path = os.getcwd() +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "your-api-key" + user_defined_path = os.path.join(user_defined_path, "exports", "charts") -df = Agent( - [df], +agent = Agent( + df, config={ - "llm": llm, "save_charts_path": user_defined_path, "save_charts": True, "verbose": True, }, ) -response = df.chat( +response = agent.chat( "Plot the histogram of countries showing for each the gpd," " using different colors for each bar", ) diff --git a/examples/show_chart.py b/examples/show_chart.py index ed4e5b17a..44c1f7b0b 100644 --- a/examples/show_chart.py +++ b/examples/show_chart.py @@ -1,16 +1,18 @@ """Example of using PandasAI to generate a chart from a Pandas DataFrame""" +import os import pandas as pd from data.sample_dataframe import dataframe - from pandasai import Agent -from pandasai.llm import OpenAI df = pd.DataFrame(dataframe) -llm = OpenAI() -df = Agent([df], config={"llm": llm, "verbose": True}) -response = df.chat( +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" + +agent = Agent(df) +response = agent.chat( "Plot the histogram of countries showing for each the gpd," " using different colors for each bar", ) diff --git a/examples/skills_example.py b/examples/skills_example.py index ed7e3dd47..fc7cc1667 100644 --- a/examples/skills_example.py +++ b/examples/skills_example.py @@ -1,8 +1,7 @@ import pandas as pd - from pandasai import Agent -from pandasai.llm.openai import OpenAI from pandasai.skills import skill +import os employees_data = { "EmployeeID": [1, 2, 3, 4, 5], @@ -37,9 +36,11 @@ def plot_salaries(names: list[str], salaries: list[int]): plt.xticks(rotation=45) -llm = OpenAI("YOUR-API-KEY") -agent = Agent([employees_df, salaries_df], config={"llm": llm}, memory_size=10) +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" +agent = Agent([employees_df, salaries_df], memory_size=10) agent.add_skills(plot_salaries) # Chat with the agent diff --git a/examples/sql_direct_config.py b/examples/sql_direct_config.py index 9b14431da..65e2e0fee 100644 --- a/examples/sql_direct_config.py +++ b/examples/sql_direct_config.py @@ -1,7 +1,7 @@ """Example of using PandasAI with a CSV file.""" +import os from pandasai import Agent from pandasai.connectors import PostgreSQLConnector -from pandasai.llm import OpenAI # With a PostgreSQL database order = PostgreSQLConnector( @@ -37,12 +37,13 @@ } ) -llm = OpenAI("OPEN_API_KEY") - +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" agent = Agent( [order, products, order_details], - config={"llm": llm, "direct_sql": True}, + config={"direct_sql": True}, ) response = agent.chat("return orders with count of distinct products") diff --git a/examples/using_flask.py b/examples/using_flask.py index 76eff3dc8..674c3fe17 100644 --- a/examples/using_flask.py +++ b/examples/using_flask.py @@ -4,11 +4,10 @@ Usage: flask –-app using_flask.py run """ +import os import pandas as pd from flask import Flask, render_template, request - from pandasai import SmartDatalake -from pandasai.llm import OpenAI from pandasai.responses.response_parser import ResponseParser app = Flask(__name__) @@ -40,10 +39,13 @@ def format_dataframe(self, result): } ) -llm = OpenAI("OPENAI-KEY") -dl = SmartDatalake( +# Get your FREE API key signing up at https://pandabi.ai. 
+# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "your-api-key" + +agent = SmartDatalake( [employees_df, salaries_df], - config={"llm": llm, "verbose": True, "response_parser": PandasDataFrame}, + config={"verbose": True, "response_parser": PandasDataFrame}, ) @@ -52,7 +54,7 @@ def pandasai(): if request.method == "POST": # prompt question such as "Return a dataframe of name against salaries" query = request.form["query"] - response = dl.chat(query) + response = agent.chat(query) # Returns the response as Pandas DataFrame object in html return render_template("sample_flask_salaries.html", response=response) diff --git a/examples/using_pandasai_log_server.py b/examples/using_pandasai_log_server.py index e62fb827e..b29fc3db4 100644 --- a/examples/using_pandasai_log_server.py +++ b/examples/using_pandasai_log_server.py @@ -1,9 +1,6 @@ import os - import pandas as pd - from pandasai import Agent -from pandasai.llm.openai import OpenAI employees_data = { "EmployeeID": [1, 2, 3, 4, 5], @@ -19,16 +16,13 @@ employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) -# Example 1: Using Environment Variables -os.environ["PANDASAI_API_URL"] = "SERVER_URL" +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" - -llm = OpenAI("YOUR_API_KEY") agent = Agent( [employees_df, salaries_df], config={ - "llm": llm, "enable_cache": True, }, memory_size=10, @@ -40,11 +34,9 @@ # Example 2: Using Config -llm = OpenAI("YOUR_API_KEY") agent = Agent( [employees_df, salaries_df], config={ - "llm": llm, "enable_cache": True, "log_server": { "server_url": "SERVER_URL", diff --git a/examples/using_pipeline.py b/examples/using_pipeline.py deleted file mode 100644 index 75153900f..000000000 --- a/examples/using_pipeline.py +++ /dev/null @@ -1,82 +0,0 @@ -import pandas as pd - -from pandasai.llm.openai import OpenAI -from pandasai.pipelines.logic_units.output_logic_unit import ProcessOutput -from pandasai.pipelines.logic_units.prompt_execution import PromptExecution -from pandasai.pipelines.pipeline import Pipeline -from pandasai.pipelines.pipeline_context import PipelineContext -from pandasai.pipelines.synthetic_dataframe.generate_sdf_pipeline import ( - GenerateSDFPipeline, -) -from pandasai.pipelines.synthetic_dataframe.sdf_code_executor import ( - SDFCodeExecutor, -) -from pandasai.pipelines.synthetic_dataframe.synthetic_df_prompt import ( - SyntheticDataframePrompt, -) - -employees_df = pd.DataFrame( - { - "EmployeeID": [1, 2, 3, 4, 5], - "Name": ["John", "Emma", "Liam", "Olivia", "William"], - "Department": ["HR", "Sales", "IT", "Marketing", "Finance"], - } -) - -salaries_df = pd.DataFrame( - { - "EmployeeID": [1, 2, 3, 4, 5], - "Salary": [5000, 6000, 4500, 7000, 5500], - } -) - -llm = OpenAI("Your-API-Key") - -config = {"llm": llm, "verbose": True} - -context = PipelineContext([salaries_df], config) - -# Create your own pipeline -pipeline = Pipeline( - context=context, - steps=[ - SyntheticDataframePrompt(amount=15), - PromptExecution(), - SDFCodeExecutor(), - ProcessOutput(), - ], -) - -data_frame = pipeline.run() - -print(data_frame) - - -# Using defined Pipelines -context = PipelineContext([employees_df], config) - -pipeline = GenerateSDFPipeline( - amount=10, - context=context, -) - -data_frame = pipeline.run() - -print(data_frame) - - -# Without passing Context -pipeline = Pipeline( - [salaries_df], - config=config, - steps=[ - 
SyntheticDataframePrompt(amount=15), - PromptExecution(), - SDFCodeExecutor(), - ProcessOutput(), - ], -) - -data_frame = pipeline.run() - -print(data_frame) diff --git a/examples/using_streamlit.py b/examples/using_streamlit.py index 891226e0d..793f0ba23 100644 --- a/examples/using_streamlit.py +++ b/examples/using_streamlit.py @@ -4,10 +4,9 @@ Usage: streamlit run examples/using_streamlit.py """ +import os import pandas as pd - from pandasai import Agent -from pandasai.llm import OpenAI from pandasai.responses.streamlit_response import StreamlitResponse employees_df = pd.DataFrame( @@ -25,10 +24,13 @@ } ) -llm = OpenAI() -dl = Agent( +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = Agent( [employees_df, salaries_df], - config={"llm": llm, "verbose": True, "response_parser": StreamlitResponse}, + config={"verbose": True, "response_parser": StreamlitResponse}, ) -dl.chat("Plot salaries against employee name") +agent.chat("Plot salaries against employee name") diff --git a/examples/using_train.py b/examples/using_train.py index eeff523c0..57e020b00 100644 --- a/examples/using_train.py +++ b/examples/using_train.py @@ -1,9 +1,9 @@ import os - from pandasai import Agent -# Set your PandasAI API key (you can generate one signing up at https://pandabi.ai) -os.environ["PANDASAI_API_KEY"] = "YOUR_PANDASAI_API_KEY" +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" agent = Agent("examples/data/Loan payments data.csv") diff --git a/examples/using_workspace_env.py b/examples/using_workspace_env.py index 250ada63c..8ee2e4c6f 100644 --- a/examples/using_workspace_env.py +++ b/examples/using_workspace_env.py @@ -1,10 +1,6 @@ import os - import pandas as pd - from pandasai import Agent -from pandasai.llm.openai import OpenAI -from pandasai.schemas.df_config import Config employees_data = { "EmployeeID": [1, 2, 3, 4, 5], @@ -20,17 +16,13 @@ employees_df = pd.DataFrame(employees_data) salaries_df = pd.DataFrame(salaries_data) - +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" os.environ["PANDASAI_WORKSPACE"] = "workspace dir path" - -llm = OpenAI("YOUR_API_KEY") -config__ = {"llm": llm, "save_charts": False} - - agent = Agent( [employees_df, salaries_df], - config=Config(**config__), memory_size=10, ) diff --git a/examples/with_azure.py b/examples/with_azure.py index 0da001fa6..3715a0ed0 100644 --- a/examples/with_azure.py +++ b/examples/with_azure.py @@ -2,7 +2,6 @@ import pandas as pd from data.sample_dataframe import dataframe - from pandasai import Agent from pandasai.llm import AzureOpenAI @@ -22,7 +21,7 @@ # is_chat_model=False, # Comment in if you deployed a completion model ) -df = Agent([df], config={"llm": llm}) -response = df.chat("Calculate the sum of the gdp of north american countries") +agent = Agent(df, config={"llm": llm}) +response = agent.chat("Calculate the sum of the gdp of north american countries") print(response) # Output: 20901884461056 diff --git a/examples/with_multiple_dataframes.py b/examples/with_multiple_dataframes.py index 9676e1647..67eac4de4 100644 --- a/examples/with_multiple_dataframes.py +++ b/examples/with_multiple_dataframes.py @@ -1,9 +1,8 @@ """Example of using PandasAI on multiple Pandas DataFrame""" import pandas as pd - from pandasai import Agent -from pandasai.llm import OpenAI +import os employees_df = pd.DataFrame( { @@ -20,11 +19,11 @@ } ) -llm = OpenAI() -dl = Agent( - [employees_df, salaries_df], - config={"llm": llm, "verbose": True}, -) -response = dl.chat("Plot salaries against name") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = Agent([employees_df, salaries_df]) +response = agent.chat("Plot salaries against name") print(response) # Output: diff --git a/examples/with_name_and_description.py b/examples/with_name_and_description.py index 60022ad55..74a833515 100644 --- a/examples/with_name_and_description.py +++ b/examples/with_name_and_description.py @@ -2,20 +2,21 @@ import pandas as pd from data.sample_dataframe import dataframe - from pandasai import Agent -from pandasai.llm import OpenAI +import os df = pd.DataFrame(dataframe) -llm = OpenAI() -df = Agent( - [pd.DataFrame(dataframe)], +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. +os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = Agent( + dataframe, name="Countries", description="A dataframe with countries with their GDPs and happiness scores", - config={"llm": llm}, ) -response = df.chat("Calculate the sum of the gdp of north american countries") +response = agent.chat("Calculate the sum of the gdp of north american countries") print(response) print(df.last_prompt) # Output: 20901884461056 diff --git a/examples/with_privacy_enforced.py b/examples/with_privacy_enforced.py index 663289a50..0365eed18 100644 --- a/examples/with_privacy_enforced.py +++ b/examples/with_privacy_enforced.py @@ -1,14 +1,15 @@ """Example of using PandasAI with a Pandas DataFrame""" - +import os import pandas as pd - from pandasai import Agent -from pandasai.llm import OpenAI from .data.sample_dataframe import dataframe -llm = OpenAI() -df = Agent([pd.DataFrame(dataframe)], config={"llm": llm, "enforce_privacy": True}) -response = df.chat("Calculate the sum of the gdp of north american countries") +# Get your FREE API key signing up at https://pandabi.ai. +# You can also configure it in your .env file. 
+os.environ["PANDASAI_API_KEY"] = "YOUR_API_KEY" + +agent = Agent([pd.DataFrame(dataframe)], config={"enforce_privacy": True}) +response = agent.chat("Calculate the sum of the gdp of north american countries") print(response) # Output: 20901884461056 diff --git a/examples/with_vertexai.py b/examples/with_vertexai.py index 0b6de5d47..43c2473b1 100644 --- a/examples/with_vertexai.py +++ b/examples/with_vertexai.py @@ -1,9 +1,7 @@ """Example of using PandasAI with a CSV file and Google Vertexai.""" import os - import pandas as pd - from pandasai import Agent from pandasai.llm import GoogleVertexAI @@ -15,7 +13,7 @@ llm = GoogleVertexAI( project_id="generative-ai-training", location="us-central1", model="text-bison@001" ) -df = Agent([df], config={"llm": llm}) -response = df.chat("How many loans are from men and have been paid off?") +agent = Agent(df, config={"llm": llm}) +response = agent.chat("How many loans are from men and have been paid off?") print(response) # Output: 247 loans have been paid off by men. diff --git a/pandasai.json b/pandasai.json index 7d091d4d1..9216a2434 100644 --- a/pandasai.json +++ b/pandasai.json @@ -9,6 +9,6 @@ "save_charts": false, "save_charts_path": "exports/charts", "custom_whitelisted_dependencies": [], - "llm": "OpenAI", + "llm": "BambooLLM", "llm_options": null } diff --git a/pandasai/llm/__init__.py b/pandasai/llm/__init__.py index 8e9c3ae5f..c6e75a62c 100644 --- a/pandasai/llm/__init__.py +++ b/pandasai/llm/__init__.py @@ -7,9 +7,11 @@ from .huggingface_text_gen import HuggingFaceTextGen from .langchain import LangchainLLM from .openai import OpenAI +from .bamboo_llm import BambooLLM __all__ = [ "LLM", + "BambooLLM", "AzureOpenAI", "OpenAI", "GooglePalm", diff --git a/pandasai/llm/bamboo_llm.py b/pandasai/llm/bamboo_llm.py index a17986937..8e286003d 100644 --- a/pandasai/llm/bamboo_llm.py +++ b/pandasai/llm/bamboo_llm.py @@ -1,9 +1,8 @@ from typing import Optional -from pandasai.helpers.request import Session -from pandasai.llm.base import LLM -from pandasai.pipelines.pipeline_context import PipelineContext -from pandasai.prompts.base import BasePrompt +from ..helpers.request import Session +from .base import LLM +from ..prompts.base import BasePrompt class BambooLLM(LLM): @@ -14,7 +13,7 @@ def __init__( ): self._session = Session(endpoint_url=endpoint_url, api_key=api_key) - def call(self, instruction: BasePrompt, context: PipelineContext = None) -> str: + def call(self, instruction: BasePrompt, _context=None) -> str: data = instruction.to_json() response = self._session.post("/llm/chat", json=data) return response["data"] diff --git a/pandasai/schemas/df_config.py b/pandasai/schemas/df_config.py index a5d683a32..1ae5712d4 100644 --- a/pandasai/schemas/df_config.py +++ b/pandasai/schemas/df_config.py @@ -5,7 +5,7 @@ from pandasai.pydantic import BaseModel, Field, validator from ..exceptions import LLMNotFoundError -from ..llm import LLM, LangchainLLM +from ..llm import LLM, LangchainLLM, BambooLLM class LogServerConfig(TypedDict): @@ -26,7 +26,7 @@ class Config(BaseModel): max_retries: int = 3 lazy_load_connector: bool = True response_parser: Any = None - llm: Any = None + llm: LLM = BambooLLM() data_viz_library: Optional[str] = "" log_server: LogServerConfig = None direct_sql: bool = False