// ConsoleChat.cs (86 lines, 3.13 KB) — forked from microsoft/semantic-kernel-starters
using Microsoft.Extensions.Hosting;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.AI.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AI.OpenAI;
/// <summary>
/// This is the main application service.
/// This takes console input, then sends it to the configured AI service, and then prints the response.
/// All conversation history is maintained in the chat history.
/// </summary>
/// <remarks>
/// NOTE(review): the file's using directives reference pre-1.0 Semantic Kernel namespaces
/// (Microsoft.SemanticKernel.AI.ChatCompletion / Connectors.AI.OpenAI) while the body uses
/// 1.0-era types — confirm the targeted SK package version and update the usings together.
/// </remarks>
internal class ConsoleChat : IHostedService
{
    private readonly Kernel _kernel;
    private readonly IHostApplicationLifetime _lifeTime;

    public ConsoleChat(Kernel kernel, IHostApplicationLifetime lifeTime)
    {
        this._kernel = kernel;
        this._lifeTime = lifeTime;
    }

    /// <summary>
    /// Start the service. The chat loop is launched on a background task so this
    /// returns immediately and does not block host startup.
    /// </summary>
    public Task StartAsync(CancellationToken cancellationToken)
    {
        Task.Run(() => this.ExecuteAsync(cancellationToken), cancellationToken);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Stop a running service.
    /// </summary>
    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;

    /// <summary>
    /// The main execution loop. It will use any of the available plugins to perform actions.
    /// Reads one line of user input per turn, streams the assistant reply to the console,
    /// and appends both sides of the exchange to the chat history.
    /// </summary>
    private async Task ExecuteAsync(CancellationToken cancellationToken)
    {
        ChatHistory chatMessages = [];
        IChatCompletionService chatCompletionService = this._kernel.GetRequiredService<IChatCompletionService>();

        // Loop-invariant: the execution settings never change, so build them once
        // instead of once per turn.
        OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new()
        {
            FunctionCallBehavior = FunctionCallBehavior.AutoInvokeKernelFunctions
        };

        try
        {
            // Loop till we are cancelled
            while (!cancellationToken.IsCancellationRequested)
            {
                // Get user input. ReadLine returns null on EOF (e.g. redirected or
                // closed stdin) — treat that as a request to exit rather than
                // null-forgiving it into the chat history.
                System.Console.Write("User > ");
                string? userInput = Console.ReadLine();
                if (userInput is null)
                {
                    break;
                }
                chatMessages.AddUserMessage(userInput);

                // Get the chat completions as a stream of partial chunks.
                IAsyncEnumerable<StreamingChatMessageContent> result =
                    chatCompletionService.GetStreamingChatMessageContentsAsync(
                        chatMessages,
                        executionSettings: openAIPromptExecutionSettings,
                        kernel: this._kernel,
                        cancellationToken: cancellationToken);

                // Print the chat completions while accumulating the full reply.
                // The first chunk seeds the aggregate message (its text must not be
                // appended a second time); later chunks are concatenated onto it.
                ChatMessageContent? chatMessageContent = null;
                await foreach (var content in result)
                {
                    if (chatMessageContent is null)
                    {
                        // Print the prompt exactly once, before the first chunk.
                        System.Console.Write("Assistant > ");
                        chatMessageContent = new(
                            content.Role ?? AuthorRole.Assistant,
                            content.ModelId!,
                            content.Content ?? string.Empty,
                            content.InnerContent,
                            content.Encoding,
                            content.Metadata
                        );
                    }
                    else
                    {
                        chatMessageContent.Content += content.Content;
                    }
                    System.Console.Write(content.Content);
                }
                System.Console.WriteLine();

                // The stream can legitimately be empty (e.g. cancelled mid-request);
                // only record a message when we actually received one.
                if (chatMessageContent is not null)
                {
                    chatMessages.AddMessage(chatMessageContent);
                }
            }
        }
        catch (OperationCanceledException)
        {
            // Normal shutdown path: the loop was cancelled mid-await. Swallowing it
            // here keeps the fire-and-forget Task.Run from holding an unobserved
            // exception.
        }

        // The chat loop has ended (EOF or cancellation) — ask the host to shut down
        // so the process does not linger with no active service.
        this._lifeTime.StopApplication();
    }
}