Skip to content

Commit

Permalink
Merge pull request #463 from solliancenet/jdh-full-prompt
Browse files Browse the repository at this point in the history
Capture and persist the full prompt with context
  • Loading branch information
kylebunting authored Jan 14, 2024
2 parents 100fc0e + a7170f3 commit e60ad0e
Show file tree
Hide file tree
Showing 9 changed files with 19 additions and 5 deletions.
1 change: 1 addition & 0 deletions src/dotnet/AgentFactory/Agents/DefaultAgent.cs
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,7 @@ public override async Task<CompletionResponse> GetCompletion(CompletionRequest c
{
Completion = result.Completion!,
UserPrompt = completionRequest.UserPrompt!,
FullPrompt = result.FullPrompt,
PromptTemplate = result.PromptTemplate,
AgentName = result.AgentName,
PromptTokens = result.PromptTokens,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,12 @@ public class LLMOrchestrationCompletionResponse
[JsonProperty("user_prompt")]
public string? UserPrompt { get; set; }

/// <summary>
/// The full prompt (user prompt plus any injected context) that was sent to
/// the language model, as captured by the orchestration layer; <c>null</c>
/// when the orchestrator did not provide it.
/// </summary>
[JsonProperty("full_prompt")]
public string? FullPrompt { get; set; }

/// <summary>
/// The prompt template used by the LLM.
/// </summary>
Expand Down
1 change: 1 addition & 0 deletions src/dotnet/AgentFactory/Services/LangChainService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ public async Task<LLMOrchestrationCompletionResponse> GetCompletion(LLMOrchestra
{
Completion = completionResponse!.Completion,
UserPrompt = completionResponse.UserPrompt,
FullPrompt = completionResponse.FullPrompt,
PromptTemplate = request.Agent?.PromptPrefix,
AgentName = request.Agent?.Name,
PromptTokens = completionResponse.PromptTokens,
Expand Down
6 changes: 6 additions & 0 deletions src/dotnet/Common/Models/Orchestration/CompletionResponse.cs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,12 @@ public class CompletionResponse
[JsonProperty("user_prompt")]
// NOTE(review): non-nullable string with no initializer — would raise CS8618 if the
// project enables nullable reference types; presumably always populated by the
// orchestrator before serialization. Confirm, or mark `required` / add a default.
public string UserPrompt { get; set; }

/// <summary>
/// The full prompt (user prompt plus any injected context) that was sent to
/// the language model, as captured by the orchestration layer; <c>null</c>
/// when the orchestrator did not provide it.
/// </summary>
[JsonProperty("full_prompt")]
public string? FullPrompt { get; set; }

/// <summary>
/// The prompt template used by the LLM.
/// </summary>
Expand Down
2 changes: 1 addition & 1 deletion src/dotnet/Core/Services/CoreService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ public async Task<Completion> GetChatCompletionAsync(string? sessionId, string u
var promptMessage = new Message(sessionId, nameof(Participants.User), result.PromptTokens, userPrompt, result.UserPromptEmbedding, null, upn, _callContext.CurrentUserIdentity?.Name);
var completionMessage = new Message(sessionId, nameof(Participants.Assistant), result.CompletionTokens, result.Completion, null, null, upn, result.AgentName);
var completionPromptText =
$"User prompt: {result.UserPrompt}{Environment.NewLine}Agent: {result.AgentName}{Environment.NewLine}Prompt template: {result.PromptTemplate}";
$"User prompt: {result.UserPrompt}{Environment.NewLine}Agent: {result.AgentName}{Environment.NewLine}Prompt template: {(!string.IsNullOrWhiteSpace(result.FullPrompt) ? result.FullPrompt : result.PromptTemplate)}";
var completionPrompt = new CompletionPrompt(sessionId, completionMessage.Id, completionPromptText);
completionMessage.CompletionPromptId = completionPrompt.Id;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def run(self, prompt: str) -> CompletionResponse:
return CompletionResponse(
completion = rag_chain.invoke(prompt),
user_prompt = prompt,
final_prompt = self.full_prompt.text,
full_prompt = self.full_prompt.text,
completion_tokens = cb.completion_tokens,
prompt_tokens = cb.prompt_tokens,
total_tokens = cb.total_tokens,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ def run(self, prompt: str) -> CompletionResponse:
return CompletionResponse(
completion = self.agent.run(prompt),
user_prompt = prompt,
final_prompt = self.prompt_template,
full_prompt = self.prompt_template,
completion_tokens = cb.completion_tokens,
prompt_tokens = cb.prompt_tokens,
total_tokens = cb.total_tokens,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ def run(self, prompt: str) -> CompletionResponse:
return CompletionResponse(
completion = completion,
user_prompt = prompt,
final_prompt = self.full_prompt.text,
full_prompt = self.full_prompt.text,
completion_tokens = cb.completion_tokens,
prompt_tokens = cb.prompt_tokens,
total_tokens = cb.total_tokens,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ class CompletionResponse(BaseModel):
Response from a language model.
"""
user_prompt: str
final_prompt: Optional[str] = None
full_prompt: Optional[str] = None
completion: Union[str, set, List[str]]
user_prompt_embedding: Optional[List[float]] = []
prompt_tokens: int = 0
Expand Down

0 comments on commit e60ad0e

Please sign in to comment.