Skip to content

Commit

Permalink
improve plugins
Browse files Browse the repository at this point in the history
Signed-off-by: Agustín Ramiro Díaz <[email protected]>
  • Loading branch information
AgustinRamiroDiaz committed Sep 9, 2024
1 parent 03664e9 commit 13dca11
Show file tree
Hide file tree
Showing 9 changed files with 68 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,9 @@
"config": {
"temperature": 0.75,
"max_tokens": 500
},
"plugin_config": {
"api_key_env_var": "HEURISTAIAPIKEY",
"url": "https://llm-gateway.heurist.xyz"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,9 @@
"config": {
"temperature": 0.75,
"max_tokens": 500
},
"plugin_config": {
"api_key_env_var": "HEURISTAIAPIKEY",
"url": "https://llm-gateway.heurist.xyz"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,9 @@
"config": {
"temperature": 0.75,
"max_tokens": 500
},
"plugin_config": {
"api_key_env_var": "HEURISTAIAPIKEY",
"url": "https://llm-gateway.heurist.xyz"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,9 @@
"config": {
"temperature": 0.75,
"max_tokens": 500
},
"plugin_config": {
"api_key_env_var": "HEURISTAIAPIKEY",
"url": "https://llm-gateway.heurist.xyz"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,8 @@
"provider": "openai",
"plugin": "openai",
"model": "gpt-4",
"config": "<empty>"
"config": "<empty>",
"plugin_config": {
"api_key_env_var": "OPENAIKEY"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,8 @@
"provider": "openai",
"plugin": "openai",
"model": "gpt-4o-mini",
"config": "<empty>"
"config": "<empty>",
"plugin_config": {
"api_key_env_var": "OPENAIKEY"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,8 @@
"provider": "openai",
"plugin": "openai",
"model": "gpt-4o",
"config": "<empty>"
"config": "<empty>",
"plugin_config": {
"api_key_env_var": "OPENAIKEY"
}
}
35 changes: 33 additions & 2 deletions backend/node/create_nodes/providers_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@
"plugin": {
"$comment": "plugin to be loaded by the simulator to interact with the provider",
"enum": ["heuristai", "openai", "ollama"]
},
"plugin_config": {
"type": "object"
}
},
"allOf": [
Expand Down Expand Up @@ -173,6 +176,21 @@
"plugin": {
"const": "heuristai"
},
"plugin_config": {
"type": "object",
"additionalProperties": false,
"properties": {
"api_key_env_var": {
"type": "string",
"$comment": "Environment variable that contains the API key"
},
"url": {
"type": "string",
"$comment": "URL of the API endpoint"
}
},
"required": ["api_key_env_var", "url"]
},
"model": {
"enum": [
"mistralai/mixtral-8x7b-instruct",
Expand Down Expand Up @@ -202,7 +220,8 @@
},
"required": ["temperature", "max_tokens"]
}
}
},
"required": ["plugin_config"]
}
},
{
Expand All @@ -216,13 +235,25 @@
"plugin": {
"const": "openai"
},
"plugin_config": {
"type": "object",
"additionalProperties": false,
"properties": {
"api_key_env_var": {
"type": "string",
"$comment": "Environment variable that contains the API key"
}
},
"required": ["api_key_env_var"]
},
"model": {
"enum": ["gpt-3.5-turbo", "gpt-4", "gpt-4o", "gpt-4o-mini"]
},
"config": {
"const": "<empty>"
}
}
},
"required": ["plugin_config"]
}
}
],
Expand Down
11 changes: 7 additions & 4 deletions backend/node/genvm/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@

load_dotenv()

plugin_config_key = "plugin_config"


async def process_streaming_buffer(buffer: str, chunk: str, regex: str) -> str:
updated_buffer = buffer + chunk
Expand Down Expand Up @@ -77,7 +79,8 @@ async def call_openai(
regex: Optional[str],
return_streaming_channel: Optional[asyncio.Queue],
) -> str:
client = get_openai_client(os.environ.get("OPENAIKEY"))
api_key_env_var = model_config[plugin_config_key]["api_key_env_var"]
client = get_openai_client(os.environ.get(api_key_env_var))
# TODO: OpenAI exceptions need to be caught here
stream = get_openai_stream(client, prompt, model_config)

Expand All @@ -90,9 +93,9 @@ async def call_heuristai(
regex: Optional[str],
return_streaming_channel: Optional[asyncio.Queue],
) -> str:
client = get_openai_client(
os.environ.get("HEURISTAIAPIKEY"), os.environ.get("HEURISTAIURL")
)
api_key_env_var = model_config[plugin_config_key]["api_key_env_var"]
url = model_config[plugin_config_key]["url"]
    client = get_openai_client(os.environ.get(api_key_env_var), url)
stream = get_openai_stream(client, prompt, model_config)
# TODO: Get the line below working
# return await get_openai_output(stream, regex, return_streaming_channel)
Expand Down

0 comments on commit 13dca11

Please sign in to comment.