From 23d24625b425b5bd95d213dd844272ef7b8a09d0 Mon Sep 17 00:00:00 2001
From: joelhulen
Date: Sat, 13 Jan 2024 23:59:59 -0500
Subject: [PATCH 1/3] Rename to full_prompt

---
 .../foundationallm/langchain/agents/blob_storage_agent.py   | 2 +-
 .../PythonSDK/foundationallm/langchain/agents/csv_agent.py  | 2 +-
 .../foundationallm/langchain/agents/search_service_agent.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/python/PythonSDK/foundationallm/langchain/agents/blob_storage_agent.py b/src/python/PythonSDK/foundationallm/langchain/agents/blob_storage_agent.py
index e3145f1d2b..d96992e429 100644
--- a/src/python/PythonSDK/foundationallm/langchain/agents/blob_storage_agent.py
+++ b/src/python/PythonSDK/foundationallm/langchain/agents/blob_storage_agent.py
@@ -136,7 +136,7 @@ def run(self, prompt: str) -> CompletionResponse:
             return CompletionResponse(
                 completion = rag_chain.invoke(prompt),
                 user_prompt = prompt,
-                final_prompt = self.full_prompt.text,
+                full_prompt = self.full_prompt.text,
                 completion_tokens = cb.completion_tokens,
                 prompt_tokens = cb.prompt_tokens,
                 total_tokens = cb.total_tokens,
diff --git a/src/python/PythonSDK/foundationallm/langchain/agents/csv_agent.py b/src/python/PythonSDK/foundationallm/langchain/agents/csv_agent.py
index 5857efbd77..b39d88e94c 100644
--- a/src/python/PythonSDK/foundationallm/langchain/agents/csv_agent.py
+++ b/src/python/PythonSDK/foundationallm/langchain/agents/csv_agent.py
@@ -165,7 +165,7 @@ def run(self, prompt: str) -> CompletionResponse:
             return CompletionResponse(
                 completion = self.agent.run(prompt),
                 user_prompt = prompt,
-                final_prompt = self.prompt_template,
+                full_prompt = self.prompt_template,
                 completion_tokens = cb.completion_tokens,
                 prompt_tokens = cb.prompt_tokens,
                 total_tokens = cb.total_tokens,
diff --git a/src/python/PythonSDK/foundationallm/langchain/agents/search_service_agent.py b/src/python/PythonSDK/foundationallm/langchain/agents/search_service_agent.py
index b3f131061a..f7b27ddeb5 100644
--- a/src/python/PythonSDK/foundationallm/langchain/agents/search_service_agent.py
+++ b/src/python/PythonSDK/foundationallm/langchain/agents/search_service_agent.py
@@ -114,7 +114,7 @@ def run(self, prompt: str) -> CompletionResponse:
             return CompletionResponse(
                 completion = completion,
                 user_prompt = prompt,
-                final_prompt = self.full_prompt.text,
+                full_prompt = self.full_prompt.text,
                 completion_tokens = cb.completion_tokens,
                 prompt_tokens = cb.prompt_tokens,
                 total_tokens = cb.total_tokens,

From 0ed5083bbbbdfb9fc1e7513fac3fa767e804e543 Mon Sep 17 00:00:00 2001
From: joelhulen
Date: Sun, 14 Jan 2024 00:00:28 -0500
Subject: [PATCH 2/3] Update the AgentFactory to capture the FullPrompt value

---
 src/dotnet/AgentFactory/Agents/DefaultAgent.cs            | 1 +
 .../Orchestration/LLMOrchestrationCompletionResponse.cs   | 6 ++++++
 src/dotnet/AgentFactory/Services/LangChainService.cs      | 1 +
 3 files changed, 8 insertions(+)

diff --git a/src/dotnet/AgentFactory/Agents/DefaultAgent.cs b/src/dotnet/AgentFactory/Agents/DefaultAgent.cs
index a4d845b2ab..083e25339a 100644
--- a/src/dotnet/AgentFactory/Agents/DefaultAgent.cs
+++ b/src/dotnet/AgentFactory/Agents/DefaultAgent.cs
@@ -154,6 +154,7 @@ public override async Task GetCompletion(CompletionRequest c
             {
                 Completion = result.Completion!,
                 UserPrompt = completionRequest.UserPrompt!,
+                FullPrompt = result.FullPrompt,
                 PromptTemplate = result.PromptTemplate,
                 AgentName = result.AgentName,
                 PromptTokens = result.PromptTokens,
diff --git a/src/dotnet/AgentFactory/Models/Orchestration/LLMOrchestrationCompletionResponse.cs b/src/dotnet/AgentFactory/Models/Orchestration/LLMOrchestrationCompletionResponse.cs
index 3b4e7503ca..334e4f6665 100644
--- a/src/dotnet/AgentFactory/Models/Orchestration/LLMOrchestrationCompletionResponse.cs
+++ b/src/dotnet/AgentFactory/Models/Orchestration/LLMOrchestrationCompletionResponse.cs
@@ -19,6 +19,12 @@ public class LLMOrchestrationCompletionResponse
         [JsonProperty("user_prompt")]
         public string? UserPrompt { get; set; }
 
+        /// <summary>
+        /// The full prompt composed by the LLM.
+        /// </summary>
+        [JsonProperty("full_prompt")]
+        public string? FullPrompt { get; set; }
+
         /// <summary>
         /// The prompt template used by the LLM.
         /// </summary>
diff --git a/src/dotnet/AgentFactory/Services/LangChainService.cs b/src/dotnet/AgentFactory/Services/LangChainService.cs
index dff872b947..fdbb59ec8e 100644
--- a/src/dotnet/AgentFactory/Services/LangChainService.cs
+++ b/src/dotnet/AgentFactory/Services/LangChainService.cs
@@ -66,6 +66,7 @@ public async Task GetCompletion(LLMOrchestra
             {
                 Completion = completionResponse!.Completion,
                 UserPrompt = completionResponse.UserPrompt,
+                FullPrompt = completionResponse.FullPrompt,
                 PromptTemplate = request.Agent?.PromptPrefix,
                 AgentName = request.Agent?.Name,
                 PromptTokens = completionResponse.PromptTokens,

From a7170f3db50622f82deaab13d5ba908a453be13b Mon Sep 17 00:00:00 2001
From: joelhulen
Date: Sun, 14 Jan 2024 00:00:57 -0500
Subject: [PATCH 3/3] Handle and store the full prompt

---
 .../Common/Models/Orchestration/CompletionResponse.cs | 6 ++++++
 src/dotnet/Core/Services/CoreService.cs               | 2 +-
 .../models/orchestration/completion_response.py       | 2 +-
 3 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/dotnet/Common/Models/Orchestration/CompletionResponse.cs b/src/dotnet/Common/Models/Orchestration/CompletionResponse.cs
index 977ebd93d5..0a76236c41 100644
--- a/src/dotnet/Common/Models/Orchestration/CompletionResponse.cs
+++ b/src/dotnet/Common/Models/Orchestration/CompletionResponse.cs
@@ -20,6 +20,12 @@ public class CompletionResponse
         [JsonProperty("user_prompt")]
         public string UserPrompt { get; set; }
 
+        /// <summary>
+        /// The full prompt composed by the LLM.
+        /// </summary>
+        [JsonProperty("full_prompt")]
+        public string? FullPrompt { get; set; }
+
         /// <summary>
         /// The prompt template used by the LLM.
         /// </summary>
diff --git a/src/dotnet/Core/Services/CoreService.cs b/src/dotnet/Core/Services/CoreService.cs
index febebdcb0c..a621ca5861 100644
--- a/src/dotnet/Core/Services/CoreService.cs
+++ b/src/dotnet/Core/Services/CoreService.cs
@@ -130,7 +130,7 @@ public async Task GetChatCompletionAsync(string? sessionId, string u
             var promptMessage = new Message(sessionId, nameof(Participants.User), result.PromptTokens, userPrompt, result.UserPromptEmbedding, null, upn, _callContext.CurrentUserIdentity?.Name);
             var completionMessage = new Message(sessionId, nameof(Participants.Assistant), result.CompletionTokens, result.Completion, null, null, upn, result.AgentName);
             var completionPromptText =
-                $"User prompt: {result.UserPrompt}{Environment.NewLine}Agent: {result.AgentName}{Environment.NewLine}Prompt template: {result.PromptTemplate}";
+                $"User prompt: {result.UserPrompt}{Environment.NewLine}Agent: {result.AgentName}{Environment.NewLine}Prompt template: {(!string.IsNullOrWhiteSpace(result.FullPrompt) ? result.FullPrompt : result.PromptTemplate)}";
             var completionPrompt = new CompletionPrompt(sessionId, completionMessage.Id, completionPromptText);
             completionMessage.CompletionPromptId = completionPrompt.Id;
 
diff --git a/src/python/PythonSDK/foundationallm/models/orchestration/completion_response.py b/src/python/PythonSDK/foundationallm/models/orchestration/completion_response.py
index 74890284fe..88c65283ee 100644
--- a/src/python/PythonSDK/foundationallm/models/orchestration/completion_response.py
+++ b/src/python/PythonSDK/foundationallm/models/orchestration/completion_response.py
@@ -6,7 +6,7 @@ class CompletionResponse(BaseModel):
     Response from a language model.
     """
     user_prompt: str
-    final_prompt: Optional[str] = None
+    full_prompt: Optional[str] = None
     completion: Union[str, set, List[str]]
     user_prompt_embedding: Optional[List[float]] = []
     prompt_tokens: int = 0
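
Note: the following is a minimal, hypothetical sketch of the rename on the Python side, not part of the patches above. It re-declares only the slice of CompletionResponse visible in the completion_response.py hunk (the remaining token-count fields are omitted) and shows an agent-style construction that populates full_prompt instead of final_prompt; the example values are invented for illustration.

```python
from typing import List, Optional, Union

from pydantic import BaseModel


class CompletionResponse(BaseModel):
    """
    Response from a language model (only the fields shown in the hunk above).
    """
    user_prompt: str
    full_prompt: Optional[str] = None   # renamed from final_prompt
    completion: Union[str, set, List[str]]
    user_prompt_embedding: Optional[List[float]] = []
    prompt_tokens: int = 0


# Hypothetical usage mirroring the agents' run() methods after the rename:
response = CompletionResponse(
    completion="FoundationaLLM lets you deploy LLM agents.",
    user_prompt="What is FoundationaLLM?",
    full_prompt="System: You are an analytic agent.\nUser: What is FoundationaLLM?",
)
print(response.full_prompt)
```

On the .NET side, CoreService now prefers FullPrompt over PromptTemplate when building the stored completion prompt text, falling back to the template when the full prompt is blank (see the CoreService.cs hunk in PATCH 3/3).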