Skip to content

Commit

Permalink
Handle and store the full prompt
Browse files Browse the repository at this point in the history
  • Loading branch information
joelhulen committed Jan 14, 2024
1 parent 0ed5083 commit a7170f3
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 2 deletions.
6 changes: 6 additions & 0 deletions src/dotnet/Common/Models/Orchestration/CompletionResponse.cs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,12 @@ public class CompletionResponse
[JsonProperty("user_prompt")]
public string UserPrompt { get; set; }

/// <summary>
/// The full prompt composed by the orchestrator and sent to the LLM.
/// </summary>
[JsonProperty("full_prompt")]
public string? FullPrompt { get; set; }

/// <summary>
/// The prompt template used by the LLM.
/// </summary>
Expand Down
2 changes: 1 addition & 1 deletion src/dotnet/Core/Services/CoreService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ public async Task<Completion> GetChatCompletionAsync(string? sessionId, string u
var promptMessage = new Message(sessionId, nameof(Participants.User), result.PromptTokens, userPrompt, result.UserPromptEmbedding, null, upn, _callContext.CurrentUserIdentity?.Name);
var completionMessage = new Message(sessionId, nameof(Participants.Assistant), result.CompletionTokens, result.Completion, null, null, upn, result.AgentName);
var completionPromptText =
$"User prompt: {result.UserPrompt}{Environment.NewLine}Agent: {result.AgentName}{Environment.NewLine}Prompt template: {result.PromptTemplate}";
$"User prompt: {result.UserPrompt}{Environment.NewLine}Agent: {result.AgentName}{Environment.NewLine}Prompt template: {(!string.IsNullOrWhiteSpace(result.FullPrompt) ? result.FullPrompt : result.PromptTemplate)}";
var completionPrompt = new CompletionPrompt(sessionId, completionMessage.Id, completionPromptText);
completionMessage.CompletionPromptId = completionPrompt.Id;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ class CompletionResponse(BaseModel):
Response from a language model.
"""
user_prompt: str
final_prompt: Optional[str] = None
full_prompt: Optional[str] = None
completion: Union[str, set, List[str]]
user_prompt_embedding: Optional[List[float]] = []
prompt_tokens: int = 0
Expand Down

0 comments on commit a7170f3

Please sign in to comment.