diff --git a/services/llm-client/adapters/db/__pycache__/interactions.cpython-39.pyc b/services/llm-client/adapters/db/__pycache__/interactions.cpython-39.pyc index a4231f8..2eedd9f 100644 Binary files a/services/llm-client/adapters/db/__pycache__/interactions.cpython-39.pyc and b/services/llm-client/adapters/db/__pycache__/interactions.cpython-39.pyc differ diff --git a/services/llm-client/adapters/db/interactions.py b/services/llm-client/adapters/db/interactions.py index acebf3b..492faa1 100644 --- a/services/llm-client/adapters/db/interactions.py +++ b/services/llm-client/adapters/db/interactions.py @@ -2,4 +2,4 @@ from utils import json def request_to_model(wire): - return InteractionRequest(wire.id, json.deserialize(wire.interaction, Interaction) , wire.response, wire.request_date, wire.status, wire.timeout) \ No newline at end of file + return InteractionRequest(wire.id, json.deserialize(wire.interaction, Interaction), wire.response, wire.request_date, wire.status, wire.timeout, wire.details) \ No newline at end of file diff --git a/services/llm-client/diplomat/db/__pycache__/interactions.cpython-39.pyc b/services/llm-client/diplomat/db/__pycache__/interactions.cpython-39.pyc index 8a7ca75..1548337 100644 Binary files a/services/llm-client/diplomat/db/__pycache__/interactions.cpython-39.pyc and b/services/llm-client/diplomat/db/__pycache__/interactions.cpython-39.pyc differ diff --git a/services/llm-client/diplomat/db/interactions.py b/services/llm-client/diplomat/db/interactions.py index 5cfc94c..6582c36 100644 --- a/services/llm-client/diplomat/db/interactions.py +++ b/services/llm-client/diplomat/db/interactions.py @@ -1,6 +1,7 @@ from components.scylla_connection import ScyllaConnection from datetime import datetime, timezone from adapters.db import interactions as interactions_db_adapter +from models.interactions import InteractionRequest from utils import json def get_interaction(interaction_id, scylla: ScyllaConnection): @@ -10,24 +11,26 @@ def 
get_interaction(interaction_id, scylla: ScyllaConnection): return interactions_db_adapter.request_to_model(rows[0]) if rows else None def insert(interaction, scylla: ScyllaConnection): - print(json.serialize(interaction)) - statement = """INSERT INTO execution.interaction_requests (id, interaction, response, request_date, status, timeout) - VALUES (?, ?, ?, ?, ?, ?)""" + statement = """INSERT INTO execution.interaction_requests (id, interaction, response, request_date, status, timeout, details) + VALUES (?, ?, ?, ?, ?, ?, ?)""" interaction_id = interaction.id interaction_value = json.serialize(interaction) response = None request_date = int(datetime.now(timezone.utc).timestamp() * 1000) status = 'pending' timeout = 30000 + details = None prepared_statement = scylla.session.prepare(statement) - scylla.session.execute(prepared_statement, (interaction_id, interaction_value, response, request_date, status, timeout)) + scylla.session.execute(prepared_statement, (interaction_id, interaction_value, response, request_date, status, timeout, details)) + return InteractionRequest(interaction_id,interaction, response, request_date, status, timeout, details) -def update_interaction(interaction_result, scylla: ScyllaConnection): +def update_interaction(interaction_request: InteractionRequest, scylla: ScyllaConnection): statement = """UPDATE execution.interaction_requests - SET response = %s, status = %s + SET response = %s, status = %s, details = %s WHERE id = %s""" - interaction_id = interaction_result.id - response = interaction_result.response - status = interaction_result.status - scylla.session.execute(statement, (response, status, interaction_id)) \ No newline at end of file + interaction_id = interaction_request.id + response = interaction_request.response + status = interaction_request.status + details = interaction_request.details + scylla.session.execute(statement, (response, status, details, interaction_id)) \ No newline at end of file diff --git 
a/services/llm-client/flows/__pycache__/interactions.cpython-39.pyc b/services/llm-client/flows/__pycache__/interactions.cpython-39.pyc index 6ba6356..fb1583c 100644 Binary files a/services/llm-client/flows/__pycache__/interactions.cpython-39.pyc and b/services/llm-client/flows/__pycache__/interactions.cpython-39.pyc differ diff --git a/services/llm-client/flows/interactions.py b/services/llm-client/flows/interactions.py index c225b0c..0297259 100644 --- a/services/llm-client/flows/interactions.py +++ b/services/llm-client/flows/interactions.py @@ -1,10 +1,11 @@ from components.scylla_connection import ScyllaConnection from datetime import datetime, timezone -from models.interactions import Interaction, InteractionResult +from models.interactions import Interaction from diplomat.db import interactions as interactions_db from diplomat.db import prompts as prompts_db from logic import interactions as interactions_logic +from logic import prompts as prompts_logic def new_interaction(interaction: Interaction, scylla: ScyllaConnection): existent_interaction = interactions_db.get_interaction(interaction.id, scylla) @@ -13,22 +14,36 @@ def new_interaction(interaction: Interaction, scylla: ScyllaConnection): return existent_interaction else: if(interactions_logic.is_timed_out(existent_interaction, int(datetime.now(timezone.utc).timestamp() * 1000))): - existent_interaction.failed() - interactions_db.update_interaction(InteractionResult(existent_interaction.id, 'failed', None), scylla) + existent_interaction.failed("Timed out") + interactions_db.update_interaction(existent_interaction, scylla) return existent_interaction else: return existent_interaction + interaction_request = interactions_db.insert(interaction, scylla) + prompt = prompts_db.get_prompt(interaction.prompt_name, scylla) if(prompt is None): - return None + interaction_request.failed("Prompt not found") + interactions_db.update_interaction(interaction_request, scylla) + return interaction_request - # fill 
prompt with variables - interactions_db.insert(interaction, scylla) + prompt_variables = prompts_logic.find_variables(prompt) + missing_variables = prompts_logic.find_missing_variables(prompt_variables, interaction.variables) + + if(missing_variables): + missing_variables = ", ".join(missing_variables) + interaction_request.failed(f"Variables missing: {missing_variables}") + interactions_db.update_interaction(interaction_request, scylla) + return interaction_request + + rendered_prompt = prompts_logic.replace_variables(prompt, interaction.variables) + + # request Open IA # update the database # return - return interaction + return rendered_prompt diff --git a/services/llm-client/logic/__pycache__/prompts.cpython-39.pyc b/services/llm-client/logic/__pycache__/prompts.cpython-39.pyc index c2e2390..8db0b9a 100644 Binary files a/services/llm-client/logic/__pycache__/prompts.cpython-39.pyc and b/services/llm-client/logic/__pycache__/prompts.cpython-39.pyc differ diff --git a/services/llm-client/logic/prompts.py b/services/llm-client/logic/prompts.py index 13a0725..c66c176 100644 --- a/services/llm-client/logic/prompts.py +++ b/services/llm-client/logic/prompts.py @@ -1,6 +1,18 @@ from models.prompts import Prompt +from typing import List, Dict, Any import re def find_variables(prompt : Prompt): pattern = r'\{\{(\w+)\}\}' - return re.findall(pattern, prompt.prompt) \ No newline at end of file + return re.findall(pattern, prompt.prompt) + +def find_missing_variables(prompt_variables: List[str], received_variables: Dict[str, Any]) -> List[str]: + missing_vars = [] + for var in prompt_variables: + if var not in received_variables or received_variables[var] is None: + missing_vars.append(var) + return missing_vars + +def replace_variables(prompt: Prompt, variables: dict[str, Any]) -> str: + pattern = re.compile(r"\{\{(.*?)\}\}") + return pattern.sub(lambda match: str(variables.get(match.group(1), match.group(0))), prompt.prompt) diff --git 
a/services/llm-client/migrations/scripts/001_create_executions_table.py b/services/llm-client/migrations/scripts/001_create_executions_table.py index 38f1b39..65ec8f8 100644 --- a/services/llm-client/migrations/scripts/001_create_executions_table.py +++ b/services/llm-client/migrations/scripts/001_create_executions_table.py @@ -12,6 +12,7 @@ def run_migration(session): response text, request_date bigint, status text, - timeout int + timeout int, + details text ); """) \ No newline at end of file diff --git a/services/llm-client/migrations/scripts/003_insert_tick_shopping_items_prompt.py b/services/llm-client/migrations/scripts/003_insert_tick_shopping_items_prompt.py index 44b3986..c1b0f08 100644 --- a/services/llm-client/migrations/scripts/003_insert_tick_shopping_items_prompt.py +++ b/services/llm-client/migrations/scripts/003_insert_tick_shopping_items_prompt.py @@ -3,6 +3,6 @@ def run_migration(session): session.execute(""" INSERT INTO configuration.prompts (prompt_name, prompt) - VALUES('tick_shopping_items', 'Are you able to tick items on a cart if I send you a picture?') + VALUES('tick_shopping_items', 'Are you able to tick items on a cart if I send you a {{item_name}}?') """) \ No newline at end of file diff --git a/services/llm-client/migrations/scripts/__pycache__/001_create_executions_table.cpython-39.pyc b/services/llm-client/migrations/scripts/__pycache__/001_create_executions_table.cpython-39.pyc index fa213de..3df9a7f 100644 Binary files a/services/llm-client/migrations/scripts/__pycache__/001_create_executions_table.cpython-39.pyc and b/services/llm-client/migrations/scripts/__pycache__/001_create_executions_table.cpython-39.pyc differ diff --git a/services/llm-client/migrations/scripts/__pycache__/003_insert_tick_shopping_items_prompt.cpython-39.pyc b/services/llm-client/migrations/scripts/__pycache__/003_insert_tick_shopping_items_prompt.cpython-39.pyc index 992d2e2..0acbe26 100644 Binary files 
a/services/llm-client/migrations/scripts/__pycache__/003_insert_tick_shopping_items_prompt.cpython-39.pyc and b/services/llm-client/migrations/scripts/__pycache__/003_insert_tick_shopping_items_prompt.cpython-39.pyc differ diff --git a/services/llm-client/models/__pycache__/interactions.cpython-39.pyc b/services/llm-client/models/__pycache__/interactions.cpython-39.pyc index 4c1022f..7e0caac 100644 Binary files a/services/llm-client/models/__pycache__/interactions.cpython-39.pyc and b/services/llm-client/models/__pycache__/interactions.cpython-39.pyc differ diff --git a/services/llm-client/models/interactions.py b/services/llm-client/models/interactions.py index 14273cd..6f2f1e7 100644 --- a/services/llm-client/models/interactions.py +++ b/services/llm-client/models/interactions.py @@ -9,21 +9,17 @@ def __init__(self, id: UUID, prompt_name:str, variables:dict, images:dict, messa self.message = message class InteractionRequest: - def __init__(self, id, interaction, response, request_date, status, timeout): + def __init__(self, id, interaction, response, request_date, status, timeout, details): self.id = id self.interaction = interaction self.response = response self.request_date = request_date self.status = status self.timeout = timeout + self.details = details - def failed(self): + def failed(self, details: str): self.status = 'failed' + self.details = details return self - - -class InteractionResult: - def __init__(self, id, status, response): - self.id = id - self.status = status - self.response = response \ No newline at end of file + \ No newline at end of file diff --git a/services/llm-client/pytest.ini b/services/llm-client/pytest.ini deleted file mode 100644 index 180c0c0..0000000 --- a/services/llm-client/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -python_paths = llm-client \ No newline at end of file diff --git a/services/llm-client/tests/logic/__pycache__/prompts_test.cpython-39-pytest-8.3.2.pyc 
b/services/llm-client/tests/logic/__pycache__/prompts_test.cpython-39-pytest-8.3.2.pyc index 8aadadb..29f8558 100644 Binary files a/services/llm-client/tests/logic/__pycache__/prompts_test.cpython-39-pytest-8.3.2.pyc and b/services/llm-client/tests/logic/__pycache__/prompts_test.cpython-39-pytest-8.3.2.pyc differ diff --git a/services/llm-client/tests/logic/prompts_test.py b/services/llm-client/tests/logic/prompts_test.py index 66c5cf9..1fe4766 100644 --- a/services/llm-client/tests/logic/prompts_test.py +++ b/services/llm-client/tests/logic/prompts_test.py @@ -1,5 +1,6 @@ import pytest from logic import prompts +from typing import List, Dict, Any from models.prompts import Prompt def test_find_variables_basic(): @@ -20,4 +21,64 @@ def test_edge_case_empty_string(): def test_edge_case_empty_braces(): prompt = Prompt("", "{{}} should not be considered a variable.") - assert prompts.find_variables(prompt) == [] \ No newline at end of file + assert prompts.find_variables(prompt) == [] + +def find_missing_variables(variables: List[str], values: Dict[str, Any]) -> List[str]: + missing_vars = [] + for var in variables: + if var not in values or values[var] is None: + missing_vars.append(var) + return missing_vars + +def test_find_missing_variables_all_present(): + variables: List[str] = ["var1", "var2", "var3"] + values: Dict[str, Any] = { + "var1": "value1", + "var2": "value2", + "var3": "value3" + } + assert find_missing_variables(variables, values) == [] + +def test_find_missing_variables_some_missing(): + variables: List[str] = ["var1", "var2", "var3"] + values: Dict[str, Any] = { + "var1": "value1", + "var2": "value2" + } + assert find_missing_variables(variables, values) == ["var3"] + +def test_find_missing_variables_some_none(): + variables: List[str] = ["var1", "var2", "var3"] + values: Dict[str, Any] = { + "var1": "value1", + "var2": None, + "var3": "value3" + } + assert find_missing_variables(variables, values) == ["var2"] + +def 
test_find_missing_variables_empty_variables(): + variables: List[str] = [] + values: Dict[str, Any] = { + "var1": "value1", + "var2": "value2", + "var3": "value3" + } + assert find_missing_variables(variables, values) == [] + +def test_find_missing_variables_empty_values(): + variables: List[str] = ["var1", "var2", "var3"] + values: Dict[str, Any] = {} + assert find_missing_variables(variables, values) == ["var1", "var2", "var3"] + +def test_replace_variables(): + template = "Hello, {{name}}. You have {{count}} new messages." + values = {"name": "Alice", "count": 5} + assert prompts.replace_variables(Prompt("", template), values) == "Hello, Alice. You have 5 new messages." + + template = "Hello, {{name}}. You have {{count}} new messages and your ID is {{id}}." + values = {"name": "Bob", "count": 10, "id": 12345} + assert prompts.replace_variables(Prompt("", template), values) == "Hello, Bob. You have 10 new messages and your ID is 12345." + + template = "Hello, {{name}}." + values = {} + assert prompts.replace_variables(Prompt("", template), values) == "Hello, {{name}}." \ No newline at end of file