From 76f7da9cd4667979c04159f5f82a5ce892b83a1b Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Wed, 25 Jun 2025 07:05:57 +0100
Subject: [PATCH 01/61] .Net: Add ONNX ChatClient Extensions + UT (#12477)
# Add IChatClient Extensions for ONNX Connector
## Summary
This PR implements missing AddOnnxChatClient extension methods for the
ONNX Connector, providing support for the new IChatClient interface
alongside the existing IChatCompletionService extensions.
## Changes Made
- New Extension Methods
- IServiceCollection Extensions: Added AddOnnxRuntimeGenAIChatClient
method in a dedicated ServiceCollectionExtensions.DependencyInjection.cs
file following the same pattern as other connectors like OpenAI
- IKernelBuilder Extensions: Added AddOnnxRuntimeGenAIChatClient method
in OnnxKernelBuilderExtensions.ChatClient.cs for seamless kernel
configuration
---------
Co-authored-by: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com>
---
.../OnnxChatClientExtensionsTests.cs | 77 ++++++++++
.../OnnxKernelBuilderExtensions.ChatClient.cs | 39 +++++
...ollectionExtensions.DependencyInjection.cs | 56 ++++++++
.../Onnx/OnnxRuntimeGenAIChatClientTests.cs | 136 ++++++++++++++++++
4 files changed, 308 insertions(+)
create mode 100644 dotnet/src/Connectors/Connectors.Onnx.UnitTests/OnnxChatClientExtensionsTests.cs
create mode 100644 dotnet/src/Connectors/Connectors.Onnx/OnnxKernelBuilderExtensions.ChatClient.cs
create mode 100644 dotnet/src/IntegrationTests/Connectors/Onnx/OnnxRuntimeGenAIChatClientTests.cs
diff --git a/dotnet/src/Connectors/Connectors.Onnx.UnitTests/OnnxChatClientExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Onnx.UnitTests/OnnxChatClientExtensionsTests.cs
new file mode 100644
index 000000000000..238ee839c324
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Onnx.UnitTests/OnnxChatClientExtensionsTests.cs
@@ -0,0 +1,77 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Linq;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Xunit;
+
+namespace SemanticKernel.Connectors.Onnx.UnitTests;
+
+///
+/// Unit tests for and Onnx IChatClient service collection extensions.
+///
+public class OnnxChatClientExtensionsTests
+{
+ [Fact]
+ public void AddOnnxRuntimeGenAIChatClientToServiceCollection()
+ {
+ // Arrange
+ var collection = new ServiceCollection();
+
+ // Act
+ collection.AddOnnxRuntimeGenAIChatClient("modelId");
+
+ // Assert
+ var serviceDescriptor = collection.FirstOrDefault(x => x.ServiceType == typeof(IChatClient));
+ Assert.NotNull(serviceDescriptor);
+ Assert.Equal(ServiceLifetime.Singleton, serviceDescriptor.Lifetime);
+ }
+
+ [Fact]
+ public void AddOnnxRuntimeGenAIChatClientToKernelBuilder()
+ {
+ // Arrange
+ var collection = new ServiceCollection();
+ var kernelBuilder = collection.AddKernel();
+
+ // Act
+ kernelBuilder.AddOnnxRuntimeGenAIChatClient("modelPath");
+
+ // Assert
+ var serviceDescriptor = collection.FirstOrDefault(x => x.ServiceType == typeof(IChatClient));
+ Assert.NotNull(serviceDescriptor);
+ Assert.Equal(ServiceLifetime.Singleton, serviceDescriptor.Lifetime);
+ }
+
+ [Fact]
+ public void AddOnnxRuntimeGenAIChatClientWithServiceId()
+ {
+ // Arrange
+ var collection = new ServiceCollection();
+
+ // Act
+ collection.AddOnnxRuntimeGenAIChatClient("modelPath", serviceId: "test-service");
+
+ // Assert
+ var serviceDescriptor = collection.FirstOrDefault(x => x.ServiceType == typeof(IChatClient) && x.ServiceKey?.ToString() == "test-service");
+ Assert.NotNull(serviceDescriptor);
+ Assert.Equal(ServiceLifetime.Singleton, serviceDescriptor.Lifetime);
+ }
+
+ [Fact]
+ public void AddOnnxRuntimeGenAIChatClientToKernelBuilderWithServiceId()
+ {
+ // Arrange
+ var collection = new ServiceCollection();
+ var kernelBuilder = collection.AddKernel();
+
+ // Act
+ kernelBuilder.AddOnnxRuntimeGenAIChatClient("modelPath", serviceId: "test-service");
+
+ // Assert
+ var serviceDescriptor = collection.FirstOrDefault(x => x.ServiceType == typeof(IChatClient) && x.ServiceKey?.ToString() == "test-service");
+ Assert.NotNull(serviceDescriptor);
+ Assert.Equal(ServiceLifetime.Singleton, serviceDescriptor.Lifetime);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxKernelBuilderExtensions.ChatClient.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxKernelBuilderExtensions.ChatClient.cs
new file mode 100644
index 000000000000..7e9329d94903
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxKernelBuilderExtensions.ChatClient.cs
@@ -0,0 +1,39 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.ML.OnnxRuntimeGenAI;
+
+namespace Microsoft.SemanticKernel;
+
+/// Extension methods for .
+public static class OnnxChatClientKernelBuilderExtensions
+{
+ #region Chat Client
+
+ ///
+ /// Adds an OnnxRuntimeGenAI to the .
+ ///
+ /// The instance to augment.
+ /// The generative AI ONNX model path.
+ /// The optional options for the chat client.
+ /// A local identifier for the given AI service
+ /// The same instance as .
+ public static IKernelBuilder AddOnnxRuntimeGenAIChatClient(
+ this IKernelBuilder builder,
+ string modelPath,
+ OnnxRuntimeGenAIChatClientOptions? chatClientOptions = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddOnnxRuntimeGenAIChatClient(
+ modelPath,
+ chatClientOptions,
+ serviceId);
+
+ return builder;
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxServiceCollectionExtensions.DependencyInjection.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxServiceCollectionExtensions.DependencyInjection.cs
index 0ea95328d89c..a8dda516b338 100644
--- a/dotnet/src/Connectors/Connectors.Onnx/OnnxServiceCollectionExtensions.DependencyInjection.cs
+++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxServiceCollectionExtensions.DependencyInjection.cs
@@ -1,7 +1,11 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.IO;
+using System.Text;
using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Logging;
+using Microsoft.ML.OnnxRuntimeGenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Onnx;
using Microsoft.SemanticKernel.Embeddings;
@@ -57,4 +61,56 @@ public static IServiceCollection AddBertOnnxEmbeddingGenerator(
serviceId,
BertOnnxTextEmbeddingGenerationService.Create(onnxModelStream, vocabStream, options).AsEmbeddingGenerator());
}
+
+ ///
+ /// Add OnnxRuntimeGenAI Chat Client to the service collection.
+ ///
+ /// The service collection.
+ /// The generative AI ONNX model path.
+ /// The options for the chat client.
+ /// The optional service ID.
+ /// The updated service collection.
+ public static IServiceCollection AddOnnxRuntimeGenAIChatClient(
+ this IServiceCollection services,
+ string modelPath,
+ OnnxRuntimeGenAIChatClientOptions? chatClientOptions = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(services);
+ Verify.NotNullOrWhiteSpace(modelPath);
+
+ IChatClient Factory(IServiceProvider serviceProvider, object? _)
+ {
+ var loggerFactory = serviceProvider.GetService();
+
+ var chatClient = new OnnxRuntimeGenAIChatClient(modelPath, chatClientOptions ?? new OnnxRuntimeGenAIChatClientOptions()
+ {
+ PromptFormatter = static (messages, _) =>
+ {
+ StringBuilder promptBuilder = new();
+ foreach (var message in messages)
+ {
+ promptBuilder.Append($"<|{message.Role}|>\n{message.Text}");
+ }
+ promptBuilder.Append("<|end|>\n<|assistant|>");
+
+ return promptBuilder.ToString();
+ }
+ });
+
+ var builder = chatClient.AsBuilder()
+ .UseKernelFunctionInvocation(loggerFactory);
+
+ if (loggerFactory is not null)
+ {
+ builder.UseLogging(loggerFactory);
+ }
+
+ return builder.Build();
+ }
+
+ services.AddKeyedSingleton(serviceId, (Func)Factory);
+
+ return services;
+ }
}
diff --git a/dotnet/src/IntegrationTests/Connectors/Onnx/OnnxRuntimeGenAIChatClientTests.cs b/dotnet/src/IntegrationTests/Connectors/Onnx/OnnxRuntimeGenAIChatClientTests.cs
new file mode 100644
index 000000000000..9b5b374fe4bf
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Connectors/Onnx/OnnxRuntimeGenAIChatClientTests.cs
@@ -0,0 +1,136 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+#pragma warning disable SKEXP0010
+
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.Onnx;
+
+public class OnnxRuntimeGenAIChatClientTests : BaseIntegrationTest
+{
+ [Fact(Skip = "For manual verification only")]
+ public async Task ItCanUseKernelInvokeAsyncWithChatClientAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernelWithChatClient();
+
+ var func = kernel.CreateFunctionFromPrompt("List the two planets after '{{$input}}', excluding moons, using bullet points.");
+
+ // Act
+ var result = await func.InvokeAsync(kernel, new() { ["input"] = "Jupiter" });
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase);
+ Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase);
+ }
+
+ [Fact(Skip = "For manual verification only")]
+ public async Task ItCanUseKernelInvokeStreamingAsyncWithChatClientAsync()
+ {
+ // Arrange
+ var kernel = this.CreateAndInitializeKernelWithChatClient();
+
+ var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin");
+
+ StringBuilder fullResult = new();
+
+ var prompt = "Where is the most famous fish market in Seattle, Washington, USA?";
+
+ // Act
+ await foreach (var content in kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { ["input"] = prompt }))
+ {
+ fullResult.Append(content);
+ }
+
+ // Assert
+ Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact(Skip = "For manual verification only")]
+ public async Task ItCanUseServiceGetResponseAsync()
+ {
+ using var chatClient = CreateChatClient();
+
+ var messages = new List
+ {
+ new(ChatRole.User, "Where is the most famous fish market in Seattle, Washington, USA?")
+ };
+
+ var response = await chatClient.GetResponseAsync(messages);
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.Contains("Pike Place", response.Text, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [Fact(Skip = "For manual verification only")]
+ public async Task ItCanUseServiceGetStreamingResponseAsync()
+ {
+ using var chatClient = CreateChatClient();
+
+ var messages = new List
+ {
+ new(ChatRole.User, "Where is the most famous fish market in Seattle, Washington, USA?")
+ };
+
+ StringBuilder fullResult = new();
+
+ await foreach (var update in chatClient.GetStreamingResponseAsync(messages))
+ {
+ fullResult.Append(update.Text);
+ }
+
+ // Assert
+ Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
+ }
+
+ private static IChatClient CreateChatClient()
+ {
+ Assert.NotNull(Configuration.ModelPath);
+ Assert.NotNull(Configuration.ModelId);
+
+ var services = new ServiceCollection();
+ services.AddOnnxRuntimeGenAIChatClient(Configuration.ModelId);
+
+ var serviceProvider = services.BuildServiceProvider();
+ return serviceProvider.GetRequiredService();
+ }
+
+ #region internals
+
+ private Kernel CreateAndInitializeKernelWithChatClient(HttpClient? httpClient = null)
+ {
+ Assert.NotNull(Configuration.ModelPath);
+ Assert.NotNull(Configuration.ModelId);
+
+ var kernelBuilder = base.CreateKernelBuilder();
+
+ kernelBuilder.AddOnnxRuntimeGenAIChatClient(
+ modelPath: Configuration.ModelPath,
+ serviceId: Configuration.ServiceId);
+
+ return kernelBuilder.Build();
+ }
+
+ private static OnnxConfiguration Configuration => new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets()
+ .Build()
+ .GetRequiredSection("Onnx")
+ .Get()!;
+
+ #endregion
+}
From de7470c7d2ad6aaf36dfd2dddae8373ea5eda5a3 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Wed, 25 Jun 2025 08:42:30 +0100
Subject: [PATCH 02/61] .Net: Making Gemini MaxTokens Optional when not
provided (#12539)
### Motivation and Context
- Fixes #12334
---------
Co-authored-by: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com>
---
.../GeminiPromptExecutionSettingsTests.cs | 2 +-
.../Connectors.Google/GeminiPromptExecutionSettings.cs | 7 +------
2 files changed, 2 insertions(+), 7 deletions(-)
diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/GeminiPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/GeminiPromptExecutionSettingsTests.cs
index 5ba6895da18b..f368ac054f88 100644
--- a/dotnet/src/Connectors/Connectors.Google.UnitTests/GeminiPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/GeminiPromptExecutionSettingsTests.cs
@@ -29,7 +29,7 @@ public void ItCreatesGeminiExecutionSettingsWithCorrectDefaults()
Assert.Null(executionSettings.AudioTimestamp);
Assert.Null(executionSettings.ResponseMimeType);
Assert.Null(executionSettings.ResponseSchema);
- Assert.Equal(GeminiPromptExecutionSettings.DefaultTextMaxTokens, executionSettings.MaxTokens);
+ Assert.Null(executionSettings.MaxTokens);
}
[Fact]
diff --git a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
index c4d4514feb5f..82f21bdd99f9 100644
--- a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs
@@ -33,11 +33,6 @@ public sealed class GeminiPromptExecutionSettings : PromptExecutionSettings
private GeminiToolCallBehavior? _toolCallBehavior;
private GeminiThinkingConfig? _thinkingConfig;
- ///
- /// Default max tokens for a text generation.
- ///
- public static int DefaultTextMaxTokens { get; } = 256;
-
///
/// Temperature controls the randomness of the completion.
/// The higher the temperature, the more random the completion.
@@ -358,7 +353,7 @@ public static GeminiPromptExecutionSettings FromExecutionSettings(PromptExecutio
switch (executionSettings)
{
case null:
- return new GeminiPromptExecutionSettings() { MaxTokens = DefaultTextMaxTokens };
+ return new GeminiPromptExecutionSettings();
case GeminiPromptExecutionSettings settings:
return settings;
}
From 3d21f1798469faf01ec74a02335a8a493ea289e1 Mon Sep 17 00:00:00 2001
From: westey <164392973+westey-m@users.noreply.github.com>
Date: Wed, 25 Jun 2025 10:26:00 +0100
Subject: [PATCH 03/61] .Net: Change ChatCompletionAgent to notify intermediate
messages as soon as they are available. (#12575)
### Motivation and Context
Addresses #12521
### Contribution Checklist
- [ ] The code builds clean without any errors or warnings
- [ ] The PR follows the [SK Contribution
Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md)
and the [pre-submission formatting
script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts)
raises no violations
- [ ] All unit tests pass, and I have added new tests where possible
- [ ] I didn't break anyone :smile:
---
dotnet/src/Agents/Core/ChatCompletionAgent.cs | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index 56c8712ab50f..1975027630ba 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -391,6 +391,7 @@ private async IAsyncEnumerable InternalInvokeStream
this.Logger.LogAgentChatServiceInvokedStreamingAgent(nameof(InvokeAsync), this.Id, agentName, serviceType);
+ int messageIndex = messageCount;
AuthorRole? role = null;
StringBuilder builder = new();
await foreach (StreamingChatMessageContent message in messages.ConfigureAwait(false))
@@ -401,18 +402,18 @@ private async IAsyncEnumerable InternalInvokeStream
builder.Append(message.ToString());
- yield return message;
- }
+ // Capture mutated messages related function calling / tools
+ for (; messageIndex < chat.Count; messageIndex++)
+ {
+ ChatMessageContent chatMessage = chat[messageIndex];
- // Capture mutated messages related function calling / tools
- for (int messageIndex = messageCount; messageIndex < chat.Count; messageIndex++)
- {
- ChatMessageContent message = chat[messageIndex];
+ chatMessage.AuthorName = this.Name;
- message.AuthorName = this.Name;
+ await onNewMessage(chatMessage).ConfigureAwait(false);
+ history.Add(chatMessage);
+ }
- await onNewMessage(message).ConfigureAwait(false);
- history.Add(message);
+ yield return message;
}
// Do not duplicate terminated function result to history
From cb74902375827bb2b23640e777a6b13e52342db9 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+rogerbarreto@users.noreply.github.com>
Date: Wed, 25 Jun 2025 11:39:22 +0100
Subject: [PATCH 04/61] .Net: Allow Kernel to be mutable by
AgentChatCompletions (#12538)
### Motivation and Context
- Fixes #12534
- This regression seems to be part of #11689, where the `kernel` instance
was cloned prior to agent iterations; I didn't capture any failing unit
tests for the added `AIContext` providers, so I consider this a valid
fix.
- Added UT covering the expected `Kernel` mutability.
---------
Co-authored-by: westey <164392973+westey-m@users.noreply.github.com>
---
dotnet/src/Agents/Abstractions/Agent.cs | 12 +
dotnet/src/Agents/AzureAI/AzureAIAgent.cs | 31 +-
dotnet/src/Agents/Core/ChatCompletionAgent.cs | 30 +-
.../src/Agents/OpenAI/OpenAIAssistantAgent.cs | 31 +-
.../src/Agents/OpenAI/OpenAIResponseAgent.cs | 16 +-
.../Core/ChatCompletionAgentTests.cs | 544 +++++++++++++++++-
.../OpenAI/OpenAIAssistantAgentTests.cs | 414 ++++++++++++-
.../OpenAI/OpenAIResponseAgentTests.cs | 307 ++++++++++
.../ContextualFunctionProviderTests.cs | 2 +
9 files changed, 1364 insertions(+), 23 deletions(-)
diff --git a/dotnet/src/Agents/Abstractions/Agent.cs b/dotnet/src/Agents/Abstractions/Agent.cs
index 5f21f83bb6c0..e35fbc5738a6 100644
--- a/dotnet/src/Agents/Abstractions/Agent.cs
+++ b/dotnet/src/Agents/Abstractions/Agent.cs
@@ -4,6 +4,7 @@
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
+using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.SemanticKernel.Arguments.Extensions;
@@ -66,6 +67,17 @@ public abstract class Agent
///
public Kernel Kernel { get; init; } = new();
+ ///
+ /// This option forces the agent to clone the original kernel instance during invocation if true. Default is false.
+ ///
+ ///
+ /// implementations that provide instances require the
+ /// kernel to be cloned during agent invocation, but cloning has the side effect of causing modifications to Kernel
+ /// Data by plugins to be lost. Cloning is therefore opt-in.
+ ///
+ [Experimental("SKEXP0130")]
+ public bool UseImmutableKernel { get; set; } = false;
+
///
/// Gets or sets a prompt template based on the agent instructions.
///
diff --git a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
index a942eae8ba73..1b252b84eddc 100644
--- a/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
+++ b/dotnet/src/Agents/AzureAI/AzureAIAgent.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
@@ -136,11 +137,22 @@ public async IAsyncEnumerable> InvokeAsync
() => new AzureAIAgentThread(this.Client),
cancellationToken).ConfigureAwait(false);
- Kernel kernel = (options?.Kernel ?? this.Kernel).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
// Get the context contributions from the AIContextProviders.
-#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AIContext providersContext = await azureAIAgentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
@@ -228,11 +240,22 @@ public async IAsyncEnumerable> In
() => new AzureAIAgentThread(this.Client),
cancellationToken).ConfigureAwait(false);
- var kernel = (options?.Kernel ?? this.Kernel).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
// Get the context contributions from the AIContextProviders.
-#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AIContext providersContext = await azureAIAgentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index 1975027630ba..307009fe5099 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -73,11 +73,22 @@ public override async IAsyncEnumerable> In
() => new ChatHistoryAgentThread(),
cancellationToken).ConfigureAwait(false);
- Kernel kernel = (options?.Kernel ?? this.Kernel).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
// Get the context contributions from the AIContextProviders.
-#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AIContext providersContext = await chatHistoryAgentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
@@ -168,11 +179,22 @@ public override async IAsyncEnumerable new ChatHistoryAgentThread(),
cancellationToken).ConfigureAwait(false);
- Kernel kernel = (options?.Kernel ?? this.Kernel).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
// Get the context contributions from the AIContextProviders.
-#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AIContext providersContext = await chatHistoryAgentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index 1971b8bc9058..11f813fd3267 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
@@ -138,11 +139,22 @@ public async IAsyncEnumerable> InvokeAsync
AdditionalInstructions = options?.AdditionalInstructions,
});
- Kernel kernel = (options?.Kernel ?? this.Kernel).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
// Get the context contributions from the AIContextProviders.
-#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AIContext providersContext = await openAIAssistantAgentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
@@ -226,11 +238,22 @@ public async IAsyncEnumerable> In
() => new OpenAIAssistantAgentThread(this.Client),
cancellationToken).ConfigureAwait(false);
- Kernel kernel = (options?.Kernel ?? this.Kernel).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
// Get the context contributions from the AIContextProviders.
-#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
AIContext providersContext = await openAIAssistantAgentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs
index 87b0912d01ef..67a71be16955 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs
@@ -140,10 +140,22 @@ private async Task EnsureThreadExistsWithMessagesAsync(ICollection<
private async Task FinalizeInvokeOptionsAsync(ICollection messages, AgentInvokeOptions? options, AgentThread agentThread, CancellationToken cancellationToken)
{
- Kernel kernel = this.GetKernel(options).Clone();
+ Kernel kernel = this.GetKernel(options);
+#pragma warning disable SKEXP0110, SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ if (this.UseImmutableKernel)
+ {
+ kernel = kernel.Clone();
+ }
-#pragma warning disable SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ // Get the AIContextProviders contributions to the kernel.
AIContext providersContext = await agentThread.AIContextProviders.ModelInvokingAsync(messages, cancellationToken).ConfigureAwait(false);
+
+ // Check for compatibility between AIContextProviders and the UseImmutableKernel setting.
+ if (providersContext.AIFunctions is { Count: > 0 } && !this.UseImmutableKernel)
+ {
+ throw new InvalidOperationException("AIContextProviders with AIFunctions are not supported when Agent UseImmutableKernel setting is false.");
+ }
+
kernel.Plugins.AddFromAIContext(providersContext, "Tools");
#pragma warning restore SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
index 1f7d2d1e0fb2..def132c60102 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
@@ -176,6 +176,46 @@ public async Task VerifyChatCompletionAgentInvocationAsync()
Times.Once);
}
+ ///
+ /// Verify the invocation and response of .
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentInvocationsCanMutateProvidedKernelAsync()
+ {
+ // Arrange
+ Mock mockService = new();
+ mockService.Setup(
+ s => s.GetChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny())).ReturnsAsync([new(AuthorRole.Assistant, "what?")]);
+
+ var kernel = CreateKernel(mockService.Object);
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = kernel,
+ Arguments = [],
+ };
+
+ // Act
+ AgentResponseItem[] result = await agent.InvokeAsync(Array.Empty() as ICollection).ToArrayAsync();
+
+ // Assert
+ Assert.Single(result);
+
+ mockService.Verify(
+ x =>
+ x.GetChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ kernel, // Use the same kernel instance
+ It.IsAny()),
+ Times.Once);
+ }
+
///
/// Verify the invocation and response of using .
///
@@ -195,7 +235,7 @@ public async Task VerifyChatClientAgentInvocationAsync()
{
Instructions = "test instructions",
Kernel = CreateKernel(mockService.Object),
- Arguments = [],
+ Arguments = new(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
};
// Act
@@ -208,7 +248,7 @@ public async Task VerifyChatClientAgentInvocationAsync()
x =>
x.GetResponseAsync(
It.IsAny<IEnumerable<ChatMessage>>(),
- It.IsAny<ChatOptions>(),
+ It.Is<ChatOptions>(o => GetKernelFromChatOptions(o) == agent.Kernel),
It.IsAny<CancellationToken>()),
Times.Once);
}
@@ -258,6 +298,52 @@ public async Task VerifyChatCompletionAgentStreamingAsync()
Times.Once);
}
+ ///
+ /// Verify the streaming invocation and response of .
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentStreamingCanMutateProvidedKernelAsync()
+ {
+ // Arrange
+ StreamingChatMessageContent[] returnContent =
+ [
+ new(AuthorRole.Assistant, "wh"),
+ new(AuthorRole.Assistant, "at?"),
+ ];
+
+ Mock<IChatCompletionService> mockService = new();
+ mockService.Setup(
+ s => s.GetStreamingChatMessageContentsAsync(
+ It.IsAny<ChatHistory>(),
+ It.IsAny<PromptExecutionSettings>(),
+ It.IsAny<Kernel>(),
+ It.IsAny<CancellationToken>())).Returns(returnContent.ToAsyncEnumerable());
+
+ var kernel = CreateKernel(mockService.Object);
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = kernel,
+ Arguments = [],
+ };
+
+ // Act
+ AgentResponseItem<StreamingChatMessageContent>[] result = await agent.InvokeStreamingAsync(Array.Empty<ChatMessageContent>() as ICollection<ChatMessageContent>).ToArrayAsync();
+
+ // Assert
+ Assert.Equal(2, result.Length);
+
+ mockService.Verify(
+ x =>
+ x.GetStreamingChatMessageContentsAsync(
+ It.IsAny<ChatHistory>(),
+ It.IsAny<PromptExecutionSettings>(),
+ kernel, // Use the same kernel instance
+ It.IsAny<CancellationToken>()),
+ Times.Once);
+ }
+
///
/// Verify the streaming invocation and response of using .
///
@@ -283,7 +369,7 @@ public async Task VerifyChatClientAgentStreamingAsync()
{
Instructions = "test instructions",
Kernel = CreateKernel(mockService.Object),
- Arguments = [],
+ Arguments = new(new PromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
};
// Act
@@ -296,7 +382,7 @@ public async Task VerifyChatClientAgentStreamingAsync()
x =>
x.GetStreamingResponseAsync(
It.IsAny<IEnumerable<ChatMessage>>(),
- It.IsAny<ChatOptions>(),
+ It.Is<ChatOptions>(o => GetKernelFromChatOptions(o) == agent.Kernel),
It.IsAny<CancellationToken>()),
Times.Once);
}
@@ -373,6 +459,414 @@ public void VerifyChatCompletionChannelKeys()
Assert.NotEqual(agent3.GetChannelKeys(), agent5.GetChannelKeys());
}
+ /// <summary>
+ /// Verify that InvalidOperationException is thrown when UseImmutableKernel is false and AIFunctions exist.
+ /// </summary>
+ [Fact]
+ public async Task VerifyChatCompletionAgentThrowsWhenUseImmutableKernelFalseWithAIFunctionsAsync()
+ {
+ // Arrange
+ Mock<IChatCompletionService> mockService = new();
+ mockService.Setup(
+ s => s.GetChatMessageContentsAsync(
+ It.IsAny<ChatHistory>(),
+ It.IsAny<PromptExecutionSettings>(),
+ It.IsAny<Kernel>(),
+ It.IsAny<CancellationToken>())).ReturnsAsync([new(AuthorRole.Assistant, "what?")]);
+
+ var mockAIContextProvider = new Mock<AIContextProvider>();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [new TestAIFunction("TestFunction", "Test function description")]
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny<ICollection<ChatMessage>>(), It.IsAny<CancellationToken>()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = CreateKernel(mockService.Object),
+ Arguments = [],
+ UseImmutableKernel = false // Explicitly set to false
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync<InvalidOperationException>(
+ async () => await agent.InvokeAsync(Array.Empty<ChatMessageContent>() as ICollection<ChatMessageContent>, thread: thread).ToArrayAsync());
+
+ Assert.NotNull(exception);
+ }
+
+ ///
+ /// Verify that InvalidOperationException is thrown when UseImmutableKernel is default (false) and AIFunctions exist.
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentThrowsWhenUseImmutableKernelDefaultWithAIFunctionsAsync()
+ {
+ // Arrange
+ Mock mockService = new();
+ mockService.Setup(
+ s => s.GetChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny())).ReturnsAsync([new(AuthorRole.Assistant, "what?")]);
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [new TestAIFunction("TestFunction", "Test function description")]
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = CreateKernel(mockService.Object),
+ Arguments = []
+ // UseImmutableKernel not set, should default to false
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync(
+ async () => await agent.InvokeAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync());
+
+ Assert.NotNull(exception);
+ }
+
+ ///
+ /// Verify that kernel remains immutable when UseImmutableKernel is true.
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentKernelImmutabilityWhenUseImmutableKernelTrueAsync()
+ {
+ // Arrange
+ Mock mockService = new();
+ Kernel capturedKernel = null!;
+ mockService.Setup(
+ s => s.GetChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny()))
+ .Callback((_, _, kernel, _) => capturedKernel = kernel)
+ .ReturnsAsync([new(AuthorRole.Assistant, "what?")]);
+
+ var originalKernel = CreateKernel(mockService.Object);
+ var originalPluginCount = originalKernel.Plugins.Count;
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [new TestAIFunction("TestFunction", "Test function description")]
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = originalKernel,
+ Arguments = [],
+ UseImmutableKernel = true
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act
+ AgentResponseItem[] result = await agent.InvokeAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync();
+
+ // Assert
+ Assert.Single(result);
+
+ // Verify original kernel was not modified
+ Assert.Equal(originalPluginCount, originalKernel.Plugins.Count);
+
+ // Verify a different kernel instance was used for the service call
+ Assert.NotSame(originalKernel, capturedKernel);
+
+ // Verify the captured kernel has the additional plugin from AIContext
+ Assert.True(capturedKernel.Plugins.Count > originalPluginCount);
+ Assert.Contains(capturedKernel.Plugins, p => p.Name == "Tools");
+ }
+
+ ///
+ /// Verify that mutable kernel behavior works when UseImmutableKernel is false and no AIFunctions exist.
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentMutableKernelWhenUseImmutableKernelFalseNoAIFunctionsAsync()
+ {
+ // Arrange
+ Mock mockService = new();
+ Kernel capturedKernel = null!;
+ mockService.Setup(
+ s => s.GetChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny()))
+ .Callback((_, _, kernel, _) => capturedKernel = kernel)
+ .ReturnsAsync([new(AuthorRole.Assistant, "what?")]);
+
+ var originalKernel = CreateKernel(mockService.Object);
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [] // Empty AIFunctions list
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = originalKernel,
+ Arguments = [],
+ UseImmutableKernel = false
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act
+ AgentResponseItem[] result = await agent.InvokeAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync();
+
+ // Assert
+ Assert.Single(result);
+
+ // Verify the same kernel instance was used (mutable behavior)
+ Assert.Same(originalKernel, capturedKernel);
+ }
+
+ ///
+ /// Verify that InvalidOperationException is thrown when UseImmutableKernel is false and AIFunctions exist (streaming).
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentStreamingThrowsWhenUseImmutableKernelFalseWithAIFunctionsAsync()
+ {
+ // Arrange
+ StreamingChatMessageContent[] returnContent =
+ [
+ new(AuthorRole.Assistant, "wh"),
+ new(AuthorRole.Assistant, "at?"),
+ ];
+
+ Mock mockService = new();
+ mockService.Setup(
+ s => s.GetStreamingChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny())).Returns(returnContent.ToAsyncEnumerable());
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [new TestAIFunction("TestFunction", "Test function description")]
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = CreateKernel(mockService.Object),
+ Arguments = [],
+ UseImmutableKernel = false // Explicitly set to false
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync(
+ async () => await agent.InvokeStreamingAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync());
+
+ Assert.NotNull(exception);
+ }
+
+ ///
+ /// Verify that InvalidOperationException is thrown when UseImmutableKernel is default (false) and AIFunctions exist (streaming).
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentStreamingThrowsWhenUseImmutableKernelDefaultWithAIFunctionsAsync()
+ {
+ // Arrange
+ StreamingChatMessageContent[] returnContent =
+ [
+ new(AuthorRole.Assistant, "wh"),
+ new(AuthorRole.Assistant, "at?"),
+ ];
+
+ Mock mockService = new();
+ mockService.Setup(
+ s => s.GetStreamingChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny())).Returns(returnContent.ToAsyncEnumerable());
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [new TestAIFunction("TestFunction", "Test function description")]
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = CreateKernel(mockService.Object),
+ Arguments = []
+ // UseImmutableKernel not set, should default to false
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync(
+ async () => await agent.InvokeStreamingAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync());
+
+ Assert.NotNull(exception);
+ }
+
+ ///
+ /// Verify that kernel remains immutable when UseImmutableKernel is true (streaming).
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentStreamingKernelImmutabilityWhenUseImmutableKernelTrueAsync()
+ {
+ // Arrange
+ StreamingChatMessageContent[] returnContent =
+ [
+ new(AuthorRole.Assistant, "wh"),
+ new(AuthorRole.Assistant, "at?"),
+ ];
+
+ Mock mockService = new();
+ Kernel capturedKernel = null!;
+ mockService.Setup(
+ s => s.GetStreamingChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny()))
+ .Callback((_, _, kernel, _) => capturedKernel = kernel)
+ .Returns(returnContent.ToAsyncEnumerable());
+
+ var originalKernel = CreateKernel(mockService.Object);
+ var originalPluginCount = originalKernel.Plugins.Count;
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [new TestAIFunction("TestFunction", "Test function description")]
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = originalKernel,
+ Arguments = [],
+ UseImmutableKernel = true
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act
+ AgentResponseItem[] result = await agent.InvokeStreamingAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync();
+
+ // Assert
+ Assert.Equal(2, result.Length);
+
+ // Verify original kernel was not modified
+ Assert.Equal(originalPluginCount, originalKernel.Plugins.Count);
+
+ // Verify a different kernel instance was used for the service call
+ Assert.NotSame(originalKernel, capturedKernel);
+
+ // Verify the captured kernel has the additional plugin from AIContext
+ Assert.True(capturedKernel.Plugins.Count > originalPluginCount);
+ Assert.Contains(capturedKernel.Plugins, p => p.Name == "Tools");
+ }
+
+ ///
+ /// Verify that mutable kernel behavior works when UseImmutableKernel is false and no AIFunctions exist (streaming).
+ ///
+ [Fact]
+ public async Task VerifyChatCompletionAgentStreamingMutableKernelWhenUseImmutableKernelFalseNoAIFunctionsAsync()
+ {
+ // Arrange
+ StreamingChatMessageContent[] returnContent =
+ [
+ new(AuthorRole.Assistant, "wh"),
+ new(AuthorRole.Assistant, "at?"),
+ ];
+
+ Mock mockService = new();
+ Kernel capturedKernel = null!;
+ mockService.Setup(
+ s => s.GetStreamingChatMessageContentsAsync(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny()))
+ .Callback((_, _, kernel, _) => capturedKernel = kernel)
+ .Returns(returnContent.ToAsyncEnumerable());
+
+ var originalKernel = CreateKernel(mockService.Object);
+
+ var mockAIContextProvider = new Mock();
+ var aiContext = new AIContext
+ {
+ AIFunctions = [] // Empty AIFunctions list
+ };
+ mockAIContextProvider.Setup(p => p.ModelInvokingAsync(It.IsAny>(), It.IsAny()))
+ .ReturnsAsync(aiContext);
+
+ ChatCompletionAgent agent =
+ new()
+ {
+ Instructions = "test instructions",
+ Kernel = originalKernel,
+ Arguments = [],
+ UseImmutableKernel = false
+ };
+
+ var thread = new ChatHistoryAgentThread();
+ thread.AIContextProviders.Add(mockAIContextProvider.Object);
+
+ // Act
+ AgentResponseItem[] result = await agent.InvokeStreamingAsync(Array.Empty() as ICollection, thread: thread).ToArrayAsync();
+
+ // Assert
+ Assert.Equal(2, result.Length);
+
+ // Verify the same kernel instance was used (mutable behavior)
+ Assert.Same(originalKernel, capturedKernel);
+ }
+
private static Kernel CreateKernel(IChatCompletionService chatCompletionService)
{
var builder = Kernel.CreateBuilder();
@@ -386,4 +880,46 @@ private static Kernel CreateKernel(IChatClient chatClient)
builder.Services.AddSingleton(chatClient);
return builder.Build();
}
+
+ /// <summary>
+ /// Gets the Kernel property from ChatOptions using reflection.
+ /// </summary>
+ /// <param name="options">The ChatOptions instance to extract Kernel from.</param>
+ /// <returns>The Kernel instance if found; otherwise, null.</returns>
+ private static Kernel? GetKernelFromChatOptions(ChatOptions options)
+ {
+ // Use reflection to try to get the Kernel property
+ var kernelProperty = options.GetType().GetProperty("Kernel",
+ System.Reflection.BindingFlags.Public |
+ System.Reflection.BindingFlags.NonPublic |
+ System.Reflection.BindingFlags.Instance);
+
+ if (kernelProperty != null)
+ {
+ return kernelProperty.GetValue(options) as Kernel;
+ }
+
+ return null;
+ }
+
+ /// <summary>
+ /// Helper class for testing AIFunction behavior.
+ /// </summary>
+ private sealed class TestAIFunction : AIFunction
+ {
+ public TestAIFunction(string name, string description = "")
+ {
+ this.Name = name;
+ this.Description = description;
+ }
+
+ public override string Name { get; }
+
+ public override string Description { get; }
+
+ protected override ValueTask