
Commit 1269d3c

.Net: Update GettingStarted to use M.E.AI.ChatClient (#12740)
### Motivation and Context

This pull request updates the `GettingStarted` samples to replace the use of `AddOpenAIChatCompletion` with `AddOpenAIChatClient` for creating kernels, aligning the code with the newer `ChatClient` API. Additionally, it updates documentation and examples to reflect this change and introduces minor improvements to the dependency injection setup.
1 parent d2b22ef commit 1269d3c

9 files changed: +36 -24 lines changed
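At a glance, each sample swaps one kernel-builder extension for the other. A minimal before/after sketch (configuration values follow the samples' TestConfiguration pattern; the comments reflect the commit title's framing, not additional changes in this diff):

// Before: chat completion registered via the AddOpenAIChatCompletion extension.
Kernel before = Kernel.CreateBuilder()
    .AddOpenAIChatCompletion(
        modelId: TestConfiguration.OpenAI.ChatModelId,
        apiKey: TestConfiguration.OpenAI.ApiKey)
    .Build();

// After: the same kernel backed by the Microsoft.Extensions.AI (M.E.AI) ChatClient abstraction.
Kernel after = Kernel.CreateBuilder()
    .AddOpenAIChatClient(
        modelId: TestConfiguration.OpenAI.ChatModelId,
        apiKey: TestConfiguration.OpenAI.ApiKey)
    .Build();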

dotnet/samples/GettingStarted/Step1_Create_Kernel.cs

Lines changed: 4 additions & 4 deletions
@@ -6,19 +6,19 @@
 namespace GettingStarted;

 /// <summary>
-/// This example shows how to create and use a <see cref="Kernel"/>.
+/// This example shows how to create and use a <see cref="Kernel"/> with ChatClient.
 /// </summary>
 public sealed class Step1_Create_Kernel(ITestOutputHelper output) : BaseTest(output)
 {
     /// <summary>
-    /// Show how to create a <see cref="Kernel"/> and use it to execute prompts.
+    /// Show how to create a <see cref="Kernel"/> using ChatClient and use it to execute prompts.
     /// </summary>
     [Fact]
     public async Task CreateKernel()
     {
-        // Create a kernel with OpenAI chat completion
+        // Create a kernel with OpenAI chat completion using ChatClient
         Kernel kernel = Kernel.CreateBuilder()
-            .AddOpenAIChatCompletion(
+            .AddOpenAIChatClient(
                 modelId: TestConfiguration.OpenAI.ChatModelId,
                 apiKey: TestConfiguration.OpenAI.ApiKey)
             .Build();
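Once built this way, the kernel is used exactly as before; a minimal usage sketch (the prompt text is illustrative, not part of this diff):

// Execute a prompt through the ChatClient-backed kernel; the prompt is illustrative.
Console.WriteLine(await kernel.InvokePromptAsync("Why is the sky blue? Answer in one sentence."));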

dotnet/samples/GettingStarted/Step2_Add_Plugins.cs

Lines changed: 13 additions & 9 deletions
@@ -9,36 +9,40 @@
 namespace GettingStarted;

 /// <summary>
-/// This example shows how to load a <see cref="KernelPlugin"/> instances.
+/// This example shows how to load a <see cref="KernelPlugin"/> instances with ChatClient.
 /// </summary>
 public sealed class Step2_Add_Plugins(ITestOutputHelper output) : BaseTest(output)
 {
     /// <summary>
-    /// Shows different ways to load a <see cref="KernelPlugin"/> instances.
+    /// Shows different ways to load a <see cref="KernelPlugin"/> instances with ChatClient.
     /// </summary>
     [Fact]
     public async Task AddPlugins()
     {
-        // Create a kernel with OpenAI chat completion
+        // Create a kernel with ChatClient and plugins
         IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddOpenAIChatCompletion(
-            modelId: TestConfiguration.OpenAI.ChatModelId,
-            apiKey: TestConfiguration.OpenAI.ApiKey);
+        kernelBuilder.AddOpenAIChatClient(
+            modelId: TestConfiguration.OpenAI.ChatModelId,
+            apiKey: TestConfiguration.OpenAI.ApiKey);
         kernelBuilder.Plugins.AddFromType<TimeInformation>();
         kernelBuilder.Plugins.AddFromType<WidgetFactory>();
         Kernel kernel = kernelBuilder.Build();

         // Example 1. Invoke the kernel with a prompt that asks the AI for information it cannot provide and may hallucinate
+        Console.WriteLine("Example 1: Asking the AI for information it cannot provide:");
         Console.WriteLine(await kernel.InvokePromptAsync("How many days until Christmas?"));

-        // Example 2. Invoke the kernel with a templated prompt that invokes a plugin and display the result
+        // Example 2. Use kernel for templated prompts that invoke plugins directly
+        Console.WriteLine("\nExample 2: Using templated prompts that invoke plugins directly:");
         Console.WriteLine(await kernel.InvokePromptAsync("The current time is {{TimeInformation.GetCurrentUtcTime}}. How many days until Christmas?"));

-        // Example 3. Invoke the kernel with a prompt and allow the AI to automatically invoke functions
+        // Example 3. Use kernel with function calling for automatic plugin invocation
         OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+        Console.WriteLine("\nExample 3: Using function calling for automatic plugin invocation:");
         Console.WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings)));

-        // Example 4. Invoke the kernel with a prompt and allow the AI to automatically invoke functions that use enumerations
+        // Example 4. Use kernel with function calling for complex scenarios with enumerations
+        Console.WriteLine("\nExample 4: Using function calling for complex scenarios with enumerations:");
         Console.WriteLine(await kernel.InvokePromptAsync("Create a handy lime colored widget for me.", new(settings)));
         Console.WriteLine(await kernel.InvokePromptAsync("Create a beautiful scarlet colored widget for me.", new(settings)));
         Console.WriteLine(await kernel.InvokePromptAsync("Create an attractive maroon and navy colored widget for me.", new(settings)));
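For readers who have not seen the plugin types referenced above, a minimal sketch of a TimeInformation-style plugin, following the standard attribute-based Semantic Kernel pattern (the method body here is illustrative; the samples ship their own implementation):

using System;
using System.ComponentModel;
using Microsoft.SemanticKernel;

// Methods marked [KernelFunction] become callable from prompt templates
// (e.g. {{TimeInformation.GetCurrentUtcTime}}) and from automatic function calling.
public sealed class TimeInformation
{
    [KernelFunction]
    [Description("Retrieves the current time in UTC.")]
    public string GetCurrentUtcTime() => DateTime.UtcNow.ToString("R");
}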

dotnet/samples/GettingStarted/Step3_Yaml_Prompt.cs

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ public async Task CreatePromptFromYaml()
     {
         // Create a kernel with OpenAI chat completion
         Kernel kernel = Kernel.CreateBuilder()
-            .AddOpenAIChatCompletion(
+            .AddOpenAIChatClient(
                 modelId: TestConfiguration.OpenAI.ChatModelId,
                 apiKey: TestConfiguration.OpenAI.ApiKey)
             .Build();
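The rest of this step builds a function from a YAML prompt; a rough sketch, assuming the CreateFunctionFromPromptYaml extension from the Microsoft.SemanticKernel.Yaml package, with illustrative YAML rather than the sample's actual resource file:

// Illustrative YAML prompt definition (keys follow the prompt-template schema as assumed here).
string generateStoryYaml = """
    name: GenerateStory
    template: Tell a {{$length}} sentence story about {{$topic}}.
    input_variables:
      - name: topic
      - name: length
    """;

// CreateFunctionFromPromptYaml is assumed to come from Microsoft.SemanticKernel.Yaml.
KernelFunction function = kernel.CreateFunctionFromPromptYaml(generateStoryYaml);
Console.WriteLine(await kernel.InvokeAsync(function, new KernelArguments
{
    ["topic"] = "dogs",
    ["length"] = "3",
}));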

dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs

Lines changed: 6 additions & 2 deletions
@@ -38,7 +38,7 @@ public async Task GetKernelUsingDependencyInjection()
     [Fact]
     public async Task PluginUsingDependencyInjection()
     {
-        // If an application follows DI guidelines, the following line is unnecessary because DI will inject an instance of the KernelClient class to a class that references it.
+        // If an application follows DI guidelines, the following line is unnecessary because DI will inject an instance of the Kernel class to a class that references it.
         // DI container guidelines - https://learn.microsoft.com/en-us/dotnet/core/extensions/dependency-injection-guidelines#recommendations
         var serviceProvider = BuildServiceProvider();
         var kernel = serviceProvider.GetRequiredService<Kernel>();
@@ -57,8 +57,12 @@ private ServiceProvider BuildServiceProvider()
         collection.AddSingleton<ILoggerFactory>(new XunitLogger(this.Output));
         collection.AddSingleton<IUserService>(new FakeUserService());

+        // Add ChatClient using OpenAI
+        collection.AddOpenAIChatClient(
+            modelId: TestConfiguration.OpenAI.ChatModelId,
+            apiKey: TestConfiguration.OpenAI.ApiKey);
+
         var kernelBuilder = collection.AddKernel();
-        kernelBuilder.Services.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
         kernelBuilder.Plugins.AddFromType<TimeInformation>();
         kernelBuilder.Plugins.AddFromType<UserInformation>();

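The point of registering the ChatClient on the ServiceCollection before AddKernel() is that the DI-built Kernel picks it up automatically, so consuming classes can take Kernel by constructor injection instead of resolving it themselves. A minimal sketch; the service class and its prompt are illustrative, not part of this commit:

// Illustrative consumer: the container injects the Kernel that AddKernel() registered.
public sealed class ChristmasCountdownService(Kernel kernel)
{
    public Task<FunctionResult> AskAsync() =>
        kernel.InvokePromptAsync("How many days until Christmas?");
}

// Registration sketch: collection.AddSingleton<ChristmasCountdownService>();
// then resolve it with serviceProvider.GetRequiredService<ChristmasCountdownService>().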
dotnet/samples/GettingStarted/Step5_Chat_Prompt.cs

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ public async Task InvokeChatPrompt()
     {
         // Create a kernel with OpenAI chat completion
         Kernel kernel = Kernel.CreateBuilder()
-            .AddOpenAIChatCompletion(
+            .AddOpenAIChatClient(
                 modelId: TestConfiguration.OpenAI.ChatModelId,
                 apiKey: TestConfiguration.OpenAI.ApiKey)
             .Build();
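For context, the chat-prompt format this step invokes assigns roles with <message> tags; a minimal sketch with illustrative content (not the sample's actual prompt):

// Chat prompts assign roles via <message> tags and are invoked like any other prompt.
string chatPrompt = """
    <message role="system">You are a librarian and an expert on books.</message>
    <message role="user">Recommend one classic science-fiction novel.</message>
    """;

Console.WriteLine(await kernel.InvokePromptAsync(chatPrompt));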

dotnet/samples/GettingStarted/Step6_Responsible_AI.cs

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ public async Task AddPromptFilter()
     {
         // Create a kernel with OpenAI chat completion
         var builder = Kernel.CreateBuilder()
-            .AddOpenAIChatCompletion(
+            .AddOpenAIChatClient(
                 modelId: TestConfiguration.OpenAI.ChatModelId,
                 apiKey: TestConfiguration.OpenAI.ApiKey);

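This step goes on to register a prompt filter on the builder above; a rough sketch of such a filter, assuming Semantic Kernel's IPromptRenderFilter interface (the redaction logic and registration line are illustrative):

// Illustrative prompt-render filter: let the prompt render, then scrub sensitive text.
// Registration sketch: builder.Services.AddSingleton<IPromptRenderFilter, RedactingPromptFilter>();
public sealed class RedactingPromptFilter : IPromptRenderFilter
{
    public async Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
    {
        await next(context);
        context.RenderedPrompt = context.RenderedPrompt?.Replace("123-456-7890", "***-***-****");
    }
}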
dotnet/samples/GettingStarted/Step7_Observability.cs

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ public async Task ObservabilityWithFilters()
     {
         // Create a kernel with OpenAI chat completion
         IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddOpenAIChatCompletion(
+        kernelBuilder.AddOpenAIChatClient(
             modelId: TestConfiguration.OpenAI.ChatModelId,
             apiKey: TestConfiguration.OpenAI.ApiKey);

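The observability in this step comes from filters wrapped around function invocation; a rough sketch, assuming Semantic Kernel's IFunctionInvocationFilter interface (the console logging is illustrative):

// Illustrative invocation filter: log before and after every kernel function call.
// Registration sketch: kernelBuilder.Services.AddSingleton<IFunctionInvocationFilter, ConsoleInvocationFilter>();
public sealed class ConsoleInvocationFilter : IFunctionInvocationFilter
{
    public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func<FunctionInvocationContext, Task> next)
    {
        Console.WriteLine($"Invoking {context.Function.PluginName}.{context.Function.Name}");
        await next(context);
        Console.WriteLine($"Completed {context.Function.Name}: {context.Result}");
    }
}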
dotnet/samples/GettingStarted/Step8_Pipelining.cs

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ public sealed class Step8_Pipelining(ITestOutputHelper output) : BaseTest(output
     public async Task CreateFunctionPipeline()
     {
         IKernelBuilder builder = Kernel.CreateBuilder();
-        builder.AddOpenAIChatCompletion(
+        builder.AddOpenAIChatClient(
             TestConfiguration.OpenAI.ChatModelId,
             TestConfiguration.OpenAI.ApiKey);
         builder.Services.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Trace));
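The pipelining idea itself is feeding one function's output into the next; a minimal hand-rolled sketch using the builder above (function names and values are illustrative, and the sample's own combinator helper is not shown here):

Kernel kernel = builder.Build();

// Wrap two plain methods as kernel functions and chain them manually.
KernelFunction toUpper = KernelFunctionFactory.CreateFromMethod((string input) => input.ToUpperInvariant(), "ToUpper");
KernelFunction exclaim = KernelFunctionFactory.CreateFromMethod((string text) => text + "!", "Exclaim");

var shouted = await kernel.InvokeAsync<string>(toUpper, new() { ["input"] = "hello pipeline" });
var result = await kernel.InvokeAsync<string>(exclaim, new() { ["text"] = shouted });
Console.WriteLine(result); // HELLO PIPELINE!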

dotnet/samples/GettingStarted/Step9_OpenAPI_Plugins.cs

Lines changed: 8 additions & 4 deletions
@@ -19,7 +19,7 @@ public async Task AddOpenAPIPlugins()
     {
         // Create a kernel with OpenAI chat completion
         IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddOpenAIChatCompletion(
+        kernelBuilder.AddOpenAIChatClient(
             modelId: TestConfiguration.OpenAI.ChatModelId,
             apiKey: TestConfiguration.OpenAI.ApiKey);
         Kernel kernel = kernelBuilder.Build();
@@ -33,12 +33,12 @@ public async Task AddOpenAPIPlugins()
     }

     /// <summary>
-    /// Shows how to transform an Open API <see cref="KernelPlugin"/> instance to support dependency injection.
+    /// Shows how to transform an Open API <see cref="KernelPlugin"/> instance to support dependency injection with ChatClient.
     /// </summary>
     [Fact]
     public async Task TransformOpenAPIPlugins()
     {
-        // Create a kernel with OpenAI chat completion
+        // Create a kernel with ChatClient and dependency injection
         var serviceProvider = BuildServiceProvider();
         var kernel = serviceProvider.GetRequiredService<Kernel>();

@@ -61,8 +61,12 @@ private ServiceProvider BuildServiceProvider()
         var collection = new ServiceCollection();
         collection.AddSingleton<IMechanicService>(new FakeMechanicService());

+        // Add ChatClient using OpenAI
+        collection.AddOpenAIChatClient(
+            modelId: TestConfiguration.OpenAI.ChatModelId,
+            apiKey: TestConfiguration.OpenAI.ApiKey);
+
         var kernelBuilder = collection.AddKernel();
-        kernelBuilder.Services.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);

         return collection.BuildServiceProvider();
     }
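The OpenAPI steps themselves import a plugin from a spec; a rough sketch, assuming the ImportPluginFromOpenApiAsync extension from the Microsoft.SemanticKernel.Plugins.OpenApi package (the URL and plugin name are placeholders):

using System;
using System.Linq;
using Microsoft.SemanticKernel.Plugins.OpenApi;

// Import an OpenAPI description as a plugin; each operation becomes a kernel function
// that can be invoked by name or exposed to automatic function calling.
KernelPlugin plugin = await kernel.ImportPluginFromOpenApiAsync(
    pluginName: "RepairService",
    uri: new Uri("https://example.com/openapi.json"));

Console.WriteLine(string.Join(", ", plugin.Select(f => f.Name)));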

0 commit comments
