diff --git a/README.md b/README.md
index 5dcbc4c..ae33c19 100644
--- a/README.md
+++ b/README.md
@@ -69,7 +69,7 @@ Edit your `env.json` to add your API keys and customize your installation.
 
 Just:
 ```sh
-make run
+make run # or poetry run python main.py
 ```
 
 OADS will automatically generate the program source code in `./project` directory.
diff --git a/constants.py b/constants.py
index b2fd930..325e74e 100644
--- a/constants.py
+++ b/constants.py
@@ -1,7 +1,6 @@
-from autogen import config_list_from_json
+import os
 from dacite import from_dict
 from jsonc_parser.parser import JsoncParser
-import os
 
 from typedefs import ProjectConfig
 import utils
@@ -15,6 +14,11 @@
 COMMON_LLM_CONFIG = {
     # https://microsoft.github.io/autogen/docs/FAQ#set-your-api-endpoints
     "config_list": [utils.get_model_config_as_dict(PROJECT_CONFIG)],
+    "request_timeout": 600,
+    # "seed": 42,
+}
+
+FUNCTIONAL_LLM_CONFIG = COMMON_LLM_CONFIG | {
     "functions": [
         {
             "name": "fetch_web_page",
@@ -105,8 +109,6 @@
             },
         },
     ],
-    "request_timeout": 600,
-    "seed": 42,
 }
 
 PROJECT_DIRECTORY_NAME = "project"
diff --git a/env.sample.jsonc b/env.sample.jsonc
index f236d6b..81edf22 100644
--- a/env.sample.jsonc
+++ b/env.sample.jsonc
@@ -27,34 +27,6 @@
   // The `model` key must be unique.
   // https://microsoft.github.io/FLAML/docs/reference/autogen/oai/completion/#create
   "models": [
-    // Any model using Azure OpenAI API
-    {
-      "model": "[AZURE_OPENAI_STUDIO_DEPLOYMENT_NAME]",
-      "api_key": "[AZURE_OPENAI_API_KEY]",
-      "api_base": "https://[AZURE_OPENAI_RESOURCE_NAME].openai.azure.com",
-      "api_type": "azure",
-      // https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
-      "api_version": "2023-08-01-preview"
-    },
-
-    // `gpt-3.5-turbo-16k` using OpenAI API
-    {
-      "model": "gpt-3.5-turbo-16k",
-      "api_key": "[OPEN_AI_API_KEY]"
-    },
-
-    // `gpt-4` using OpenAI API
-    {
-      "model": "gpt-4",
-      "api_key": "[OPEN_AI_API_KEY]"
-    },
-
-    // `gpt-4-32k` using OpenAI API
-    {
-      "model": "gpt-4-32k",
-      "api_key": "[OPEN_AI_API_KEY]"
-    },
-
     // Open-source LLM deployment using "Text Generation Web UI" with `OpenAI` extension enabled:
     // https://github.com/ivangabriele/openai-autogen-dev-studio#open-source-llms
     // https://github.com/oobabooga/text-generation-webui/tree/main/extensions/openai#an-openedai-api-openai-like
@@ -65,23 +37,10 @@
     // regardless of the model you use behind it.
     {
       "model": "Open-Orca/Mistral-7B-OpenOrca",
-      "api_base": "https://[YOUR_CONTAINER_ID]-5001.proxy.runpod.net", // or your local/public endpoint
-      // Unless you have setup your endpoint with an API key, you can leave this dummy value:
+      "api_base": "https://[YOUR_CONTAINER_ID]-5001.proxy.runpod.net/v1", // <== The `/v1` is IMPORTANT!
+      // Unless you have specifically setup your endpoint with an API key, don't change this dummy value
       "api_key": "sk-111111111111111111111111111111111111111111111111",
       "api_type": "open_ai"
     }
   ]
-
-  // ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――
-  // Funtionary LLM API Endpoint Configuration
-
-  // This must be a secondary deployment. Don't use this endpoint in your models.
-  // You can deploy it in one click using this Github repository:
-  // https://github.com/ivangabriele/docker-functionary
-  // "functionary_model": {
-  //   "model": "musabgultekin/functionary-7b-v1",
-  //   "api_base": "https://eoefr8r4dxwu0n-8000.proxy.runpod.net//v1",
-  //   "api_key": "functionary",
-  //   "api_type": "open_ai"
-  // }
 }
diff --git a/main.py b/main.py
index 9dafa42..2d21d42 100644
--- a/main.py
+++ b/main.py
@@ -13,7 +13,7 @@
 import autogen
 
-import actions
+# import actions
 import agents
 from constants import COMMON_LLM_CONFIG, PROJECT_CONFIG, PROJECT_DIRECTORY_NAME
 
 
@@ -49,18 +49,18 @@
 )
 
 
-COMMON_FUNCTION_MAP = {
-    "fetch_web_page": actions.fetch_web_page,
-    "read_file": actions.read_file,
-    "run_bash_command": actions.run_bash_command,
-    "run_rust_file": actions.run_rust_file,
-    "search_web": actions.search_web,
-    "write_file": actions.write_file,
-}
+# COMMON_FUNCTION_MAP = {
+#     "fetch_web_page": actions.fetch_web_page,
+#     "read_file": actions.read_file,
+#     "run_bash_command": actions.run_bash_command,
+#     "run_rust_file": actions.run_rust_file,
+#     "search_web": actions.search_web,
+#     "write_file": actions.write_file,
+# }
 
-ceo_user_proxy_agent.register_function(
-    function_map=COMMON_FUNCTION_MAP,
-)
+# ceo_user_proxy_agent.register_function(
+#     function_map=COMMON_FUNCTION_MAP,
+# )
 
 
 functioneer = agents.Functioneer()
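Note: the `constants.py` hunks above split the function-calling schemas out of `COMMON_LLM_CONFIG` into a new `FUNCTIONAL_LLM_CONFIG`, built with a dict union (`|`, Python 3.9+). Below is a minimal sketch of how the two configs could be consumed, assuming autogen's `AssistantAgent` API; the agent names are illustrative and not taken from this patch:

```py
# Illustrative sketch only (not part of this patch). Assumes the
# COMMON_LLM_CONFIG / FUNCTIONAL_LLM_CONFIG constants introduced in
# constants.py and autogen's AssistantAgent API.
import autogen

from constants import COMMON_LLM_CONFIG, FUNCTIONAL_LLM_CONFIG

# An agent that should emit OpenAI function calls receives the config
# carrying the "functions" schema list.
function_calling_agent = autogen.AssistantAgent(
    name="Functioneer",
    llm_config=FUNCTIONAL_LLM_CONFIG,
)

# Chat-only agents keep the leaner base config (timeout only, no functions).
chat_agent = autogen.AssistantAgent(
    name="Product_Owner",
    llm_config=COMMON_LLM_CONFIG,
)
```

This split should keep the function schemas out of API requests made by agents that never call those functions.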