diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index f16128c50b63..ebd494039398 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -13,6 +13,7 @@ + diff --git a/dotnet/SK-dotnet.slnx b/dotnet/SK-dotnet.slnx index ea1e02fd7de6..6be39039bef1 100644 --- a/dotnet/SK-dotnet.slnx +++ b/dotnet/SK-dotnet.slnx @@ -176,6 +176,7 @@ + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Connectors.Anthropic.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Connectors.Anthropic.UnitTests.csproj new file mode 100644 index 000000000000..efb111605635 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Connectors.Anthropic.UnitTests.csproj @@ -0,0 +1,47 @@ + + + + SemanticKernel.Connectors.Anthropic.UnitTests + SemanticKernel.Connectors.Anthropic.UnitTests + net10.0 + true + enable + disable + false + $(NoWarn);CA2000,CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0050,CS1591,RCS1036 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + Always + + + + + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AutoFunctionInvocationFilterTests.cs new file mode 100644 index 000000000000..190fceefc8b2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AutoFunctionInvocationFilterTests.cs @@ -0,0 +1,1130 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core; + +/// +/// Unit tests for auto function invocation filters with Anthropic connector. +/// +public sealed class AutoFunctionInvocationFilterTests : IDisposable +{ + private readonly MultipleHttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + + public AutoFunctionInvocationFilterTests() + { + this._messageHandlerStub = new MultipleHttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + } + + [Fact] + public async Task FiltersAreExecutedCorrectlyAsync() + { + // Arrange + int filterInvocations = 0; + int functionInvocations = 0; + int[] expectedRequestSequenceNumbers = [0, 0, 1, 1]; + int[] expectedFunctionSequenceNumbers = [0, 1, 0, 1]; + List requestSequenceNumbers = []; + List functionSequenceNumbers = []; + Kernel? 
contextKernel = null; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + contextKernel = context.Kernel; + requestSequenceNumbers.Add(context.RequestSequenceIndex); + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + + filterInvocations++; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + var result = await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal(4, filterInvocations); + Assert.Equal(4, functionInvocations); + Assert.Equal(expectedRequestSequenceNumbers, requestSequenceNumbers); + Assert.Equal(expectedFunctionSequenceNumbers, functionSequenceNumbers); + Assert.Same(kernel, contextKernel); + Assert.NotNull(result); + Assert.Contains("Hello", result.ToString()); // Verify actual response content from chat_completion_response.json + } + + [Fact] + public async Task FilterCanTerminateFunctionInvocationAsync() + { + // Arrange + int functionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, (context, next) => + { + // Terminate - don't call next + context.Result = new FunctionResult(context.Function, "Terminated by filter"); + return Task.CompletedTask; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + var result = await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal(0, functionInvocations); // Functions should not be invoked + Assert.NotNull(result); + } + + [Fact] + public async Task FilterCanModifyFunctionResultAsync() + { + // Arrange + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => "original-result", "Function1"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + string? 
modifiedResult = null; + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + await next(context); + context.Result = new FunctionResult(context.Function, "modified-result"); + modifiedResult = context.Result.ToString(); + }); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal("modified-result", modifiedResult); + } + + [Fact] + public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() + { + // Arrange + var executionOrder = new List(); + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var filter1 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter1-Invoking"); + await next(context); + }); + + var filter2 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter2-Invoking"); + await next(context); + }); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + + builder.Services.AddSingleton((_) => + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Add filter to services + builder.Services.AddSingleton(filter1); + + var kernel = builder.Build(); + + // Add filter to kernel + kernel.AutoFunctionInvocationFilters.Add(filter2); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal("Filter1-Invoking", executionOrder[0]); + Assert.Equal("Filter2-Invoking", executionOrder[1]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming) + { + // Arrange + var executionOrder = new List(); + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var filter1 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter1-Invoking"); + await next(context); + executionOrder.Add("Filter1-Invoked"); + }); + + var filter2 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter2-Invoking"); + await next(context); + executionOrder.Add("Filter2-Invoked"); + }); + + var filter3 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter3-Invoking"); + await next(context); + executionOrder.Add("Filter3-Invoked"); + }); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + + builder.Services.AddSingleton((_) => + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + + builder.Services.AddSingleton(filter1); + builder.Services.AddSingleton(filter2); + builder.Services.AddSingleton(filter3); + + var kernel = builder.Build(); + + if (isStreaming) + { + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallStreamingResponses(); + await foreach (var _ in kernel.InvokePromptStreamingAsync("Test prompt", new(new AnthropicPromptExecutionSettings + 
{ + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }))) + { } + } + else + { + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + } + + // Assert - filters should execute in order: Filter1 invoke -> Filter2 invoke -> Filter3 invoke -> Filter3 complete -> Filter2 complete -> Filter1 complete + Assert.Equal("Filter1-Invoking", executionOrder[0]); + Assert.Equal("Filter2-Invoking", executionOrder[1]); + Assert.Equal("Filter3-Invoking", executionOrder[2]); + Assert.Equal("Filter3-Invoked", executionOrder[3]); + Assert.Equal("Filter2-Invoked", executionOrder[4]); + Assert.Equal("Filter1-Invoked", executionOrder[5]); + } + + [Fact] + public async Task FilterReceivesCorrectFunctionContextAsync() + { + // Arrange + string? receivedFunctionName = null; + string? receivedPluginName = null; + KernelArguments? receivedArguments = null; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + receivedFunctionName = context.Function.Name; + receivedPluginName = context.Function.PluginName; + receivedArguments = context.Arguments; + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal("Function1", receivedFunctionName); + Assert.Equal("MyPlugin", receivedPluginName); + Assert.NotNull(receivedArguments); + } + + [Fact] + public async Task FiltersAreExecutedCorrectlyOnStreamingAsync() + { + // Arrange + int filterInvocations = 0; + int functionInvocations = 0; + List requestSequenceNumbers = []; + List functionSequenceNumbers = []; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + requestSequenceNumbers.Add(context.RequestSequenceIndex); + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + + filterInvocations++; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); + + var executionSettings = new AnthropicPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + // Act + await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings))) + { } + + // Assert + Assert.Equal(4, filterInvocations); + Assert.Equal(4, functionInvocations); + Assert.Equal([0, 0, 1, 1], requestSequenceNumbers); + Assert.Equal([0, 1, 0, 1], functionSequenceNumbers); + } + + [Fact] + public async Task FilterCanAccessChatHistoryAsync() + { + // Arrange + ChatHistory? 
capturedHistory = null; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + capturedHistory = context.ChatHistory; + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.NotNull(capturedHistory); + Assert.True(capturedHistory.Count > 0); + } + + [Fact] + public async Task FilterCanAccessChatHistoryWithMessagesAsync() + { + // Arrange + int historyCount = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + // Count messages in history + historyCount = context.ChatHistory.Count; + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert - history should have at least the user message and assistant response + Assert.True(historyCount >= 2); + } + + [Fact] + public async Task FilterTerminationReturnsLastMessageAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, (context, next) => + { + // Terminate on first function without calling next - skips function execution + context.Terminate = true; + context.Result = new FunctionResult(context.Function, "Terminated"); + return Task.CompletedTask; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + var result = await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.NotNull(result); + // Functions should not be invoked since we terminate without calling next + Assert.Equal(0, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + + // The result should reflect the filter-provided result (M.E.AI returns ChatResponse) + var chatResponse = result.GetValue(); + Assert.NotNull(chatResponse); + + var lastFunctionResult = GetLastFunctionResultFromChatResponse(chatResponse); + Assert.NotNull(lastFunctionResult); + Assert.Equal("Terminated", lastFunctionResult.ToString()); + } + + [Fact] + public async Task FilterCanInspectFunctionBeingInvokedAsync() + { + // Arrange + List invokedFunctionNames = []; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, 
"Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + invokedFunctionNames.Add(context.Function.Name); + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert - should have invoked both functions multiple times + Assert.Contains("Function1", invokedFunctionNames); + Assert.Contains("Function2", invokedFunctionNames); + } + + [Fact] + public async Task FilterCanSkipFunctionExecutionAsync() + { + // Arrange + int functionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => + { + functionInvocations++; + return parameter; + }, "Function1"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = this.GetKernelWithFilter(plugin, (context, next) => + { + // Skip function execution by not calling next and setting a result + context.Result = new FunctionResult(context.Function, "Skipped"); + return Task.CompletedTask; + }); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert - function should not have been invoked + Assert.Equal(0, functionInvocations); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } + + #region Private Helper Methods + +#pragma warning disable CA2000 // Dispose objects before losing scope + private static List GetFunctionCallingResponses() + { + return [ + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_multiple_function_calls_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_multiple_function_calls_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_response.json")) } + ]; + } + + private static List GetFunctionCallingStreamingResponses() + { + return [ + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_streaming_multiple_function_calls_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_streaming_multiple_function_calls_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_response.txt")) } + ]; + } + + private static List GetSingleFunctionCallResponses() + { + return [ + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_single_function_call_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_response.json")) } + ]; + } + + private static List GetSingleFunctionCallStreamingResponses() + { + return [ + new HttpResponseMessage(HttpStatusCode.OK) { Content = new 
StreamContent(File.OpenRead("TestData/filters_single_function_call_streaming_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_response.txt")) } + ]; + } +#pragma warning restore CA2000 + + private Kernel GetKernelWithFilter( + KernelPlugin plugin, + Func, Task>? onAutoFunctionInvocation) + { + var builder = Kernel.CreateBuilder(); + var filter = new AutoFunctionInvocationFilter(onAutoFunctionInvocation); + + builder.Plugins.Add(plugin); + builder.Services.AddSingleton(filter); + + // Use M.E.AI ChatClient registration for proper filter integration + builder.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + return builder.Build(); + } + + private sealed class AutoFunctionInvocationFilter( + Func, Task>? onAutoFunctionInvocation) : IAutoFunctionInvocationFilter + { + private readonly Func, Task>? _onAutoFunctionInvocation = onAutoFunctionInvocation; + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) => + this._onAutoFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + + #endregion + + #region Additional Filter Tests + + [Fact] + public async Task FilterCanOverrideArgumentsAsync() + { + // Arrange + const string NewValue = "NewValue"; + string? receivedValue = null; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => + { + receivedValue = parameter; + return parameter; + }, "Function1"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + context.Arguments!["parameter"] = NewValue; + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal(NewValue, receivedValue); + } + + [Fact] + public async Task FilterCanOverrideFunctionResultAsync() + { + // Arrange + const string OverriddenResult = "OverriddenResult"; + string? finalResult = null; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => "OriginalResult", "Function1"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + await next(context); + context.Result = new FunctionResult(context.Function, OverriddenResult); + finalResult = context.Result.GetValue(); + }); + + this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal(OverriddenResult, finalResult); + } + + [Fact] + public async Task FilterCanAccessKernelInstanceAsync() + { + // Arrange + Kernel? 
receivedKernel = null;
+
+        var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
+        var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]);
+
+        var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+        {
+            receivedKernel = context.Kernel;
+            await next(context);
+        });
+
+        this._messageHandlerStub.ResponsesToReturn = GetSingleFunctionCallResponses();
+
+        // Act
+        await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings
+        {
+            FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+        }));
+
+        // Assert
+        Assert.NotNull(receivedKernel);
+        Assert.Same(kernel, receivedKernel);
+    }
+
+    [Fact]
+    public async Task FilterCanHandleExceptionAsync()
+    {
+        // Arrange
+        string? firstFunctionResult = null;
+        string? secondFunctionResult = null;
+        bool firstFunctionCaptured = false;
+        bool secondFunctionCaptured = false;
+
+        var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1");
+        var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2");
+        var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+        var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+        {
+            try
+            {
+                await next(context);
+            }
+            catch (KernelException exception)
+            {
+                Assert.Equal("Exception from Function1", exception.Message);
+                context.Result = new FunctionResult(context.Result, "Result from filter");
+            }
+
+            // Capture the result for the first invocation of each function
+            if (context.Function.Name == "Function1" && !firstFunctionCaptured)
+            {
+                firstFunctionResult = context.Result?.GetValue<string>();
+                firstFunctionCaptured = true;
+            }
+            else if (context.Function.Name == "Function2" && !secondFunctionCaptured)
+            {
+                secondFunctionResult = context.Result?.GetValue<string>();
+                secondFunctionCaptured = true;
+            }
+        });
+
+        this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+        // Act
+        await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings
+        {
+            FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+        }));
+
+        // Assert
+        Assert.Equal("Result from filter", firstFunctionResult);
+        Assert.Equal("Result from Function2", secondFunctionResult);
+    }
+
+    [Fact]
+    public async Task FilterCanHandleExceptionOnStreamingAsync()
+    {
+        // Arrange
+        string? firstFunctionResult = null;
+        string?
secondFunctionResult = null; + bool firstFunctionCaptured = false; + bool secondFunctionCaptured = false; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + try + { + await next(context); + } + catch (KernelException) + { + context.Result = new FunctionResult(context.Result, "Result from filter"); + } + + // Capture the result for the first invocation of each function + if (context.Function.Name == "Function1" && !firstFunctionCaptured) + { + firstFunctionResult = context.Result?.GetValue(); + firstFunctionCaptured = true; + } + else if (context.Function.Name == "Function2" && !secondFunctionCaptured) + { + secondFunctionResult = context.Result?.GetValue(); + secondFunctionCaptured = true; + } + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); + + // Act + await foreach (var _ in kernel.InvokePromptStreamingAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }))) + { } + + // Assert + Assert.Equal("Result from filter", firstFunctionResult); + Assert.Equal("Result from Function2", secondFunctionResult); + } + + [Fact] + public async Task PreFilterCanTerminateOperationAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + // Terminating before first function, so all functions won't be invoked. + context.Terminate = true; + + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal(0, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + } + + [Fact] + public async Task PreFilterCanTerminateOperationOnStreamingAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + // Terminating before first function, so all functions won't be invoked. 
+ context.Terminate = true; + + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); + + var executionSettings = new AnthropicPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + // Act + await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings))) + { } + + // Assert + Assert.Equal(0, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + } + + [Fact] + public async Task PostFilterCanTerminateOperationAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + List requestSequenceNumbers = []; + List functionSequenceNumbers = []; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + requestSequenceNumbers.Add(context.RequestSequenceIndex); + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + + // Terminating after first function, so second function won't be invoked. + context.Terminate = true; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + var result = await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.Equal(1, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + Assert.Equal([0], requestSequenceNumbers); + Assert.Equal([0], functionSequenceNumbers); + + // Results of function invoked before termination should be returned (M.E.AI returns ChatResponse) + var chatResponse = result.GetValue(); + Assert.NotNull(chatResponse); + + var functionResult = GetLastFunctionResultFromChatResponse(chatResponse); + Assert.NotNull(functionResult); + Assert.Equal("function1-value", functionResult.ToString()); + } + + [Fact] + public async Task PostFilterCanTerminateOperationOnStreamingAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + List requestSequenceNumbers = []; + List functionSequenceNumbers = []; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + requestSequenceNumbers.Add(context.RequestSequenceIndex); + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + + // Terminating after first function, so second function won't be invoked. 
+ context.Terminate = true; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); + + var executionSettings = new AnthropicPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + List streamingContent = []; + + // Act + await foreach (var update in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings))) + { + streamingContent.Add(update); + } + + // Assert + Assert.Equal(1, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + Assert.Equal([0], requestSequenceNumbers); + Assert.Equal([0], functionSequenceNumbers); + + // Results of function invoked before termination should be returned (M.E.AI returns ChatResponse) + Assert.True(streamingContent.Count >= 1); + + var chatResponse = streamingContent.ToChatResponse(); + Assert.NotNull(chatResponse); + + var functionResult = GetLastFunctionResultFromChatResponse(chatResponse); + Assert.NotNull(functionResult); + Assert.Equal("function1-value", functionResult.ToString()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task FilterContextHasValidStreamingFlagAsync(bool isStreaming) + { + // Arrange + bool? actualStreamingFlag = null; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var filter = new AutoFunctionInvocationFilter(async (context, next) => + { + actualStreamingFlag = context.IsStreaming; + await next(context); + }); + + var builder = Kernel.CreateBuilder(); + + builder.Plugins.Add(plugin); + + builder.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + builder.Services.AddSingleton(filter); + + var kernel = builder.Build(); + + var arguments = new KernelArguments(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }); + + // Act + if (isStreaming) + { + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); + + await kernel.InvokePromptStreamingAsync("Test prompt", arguments).ToListAsync(); + } + else + { + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + await kernel.InvokePromptAsync("Test prompt", arguments); + } + + // Assert + Assert.Equal(isStreaming, actualStreamingFlag); + } + + [Fact] + public async Task PromptExecutionSettingsArePropagatedFromInvokePromptToFilterContextAsync() + { + // Arrange + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]); + + AutoFunctionInvocationContext? actualContext = null; + + var kernel = this.GetKernelWithFilter(plugin, (context, next) => + { + actualContext = context; + return Task.CompletedTask; + }); + + // Act + await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + Assert.NotNull(actualContext); + Assert.NotNull(actualContext!.ExecutionSettings); + // Note: M.E.AI-based connectors JSON-roundtrip settings through ToChatOptions, so we verify + // value equivalence rather than reference equality (unlike direct IChatCompletionService implementations). 
+ Assert.NotNull(actualContext.ExecutionSettings!.FunctionChoiceBehavior); + } + + [Fact] + public async Task PromptExecutionSettingsArePropagatedFromInvokePromptStreamingToFilterContextAsync() + { + // Arrange + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]); + + AutoFunctionInvocationContext? actualContext = null; + + var kernel = this.GetKernelWithFilter(plugin, (context, next) => + { + actualContext = context; + return Task.CompletedTask; + }); + + // Act + await foreach (var _ in kernel.InvokePromptStreamingAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }))) + { } + + // Assert + Assert.NotNull(actualContext); + Assert.NotNull(actualContext!.ExecutionSettings); + // Note: M.E.AI-based connectors JSON-roundtrip settings through ToChatOptions, so we verify + // value equivalence rather than reference equality (unlike direct IChatCompletionService implementations). + Assert.NotNull(actualContext.ExecutionSettings!.FunctionChoiceBehavior); + } + + [Fact] + public async Task FiltersCanSkipSelectiveFunctionExecutionAsync() + { + // Arrange + int filterInvocations = 0; + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + // Filter delegate is invoked for both functions, but next() is called only for Function2. + // Function1 execution is skipped because next() is not called for it. 
+ if (context.Function.Name == "Function2") + { + await next(context); + } + + filterInvocations++; + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + var result = await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + })); + + // Assert + // GetFunctionCallingResponses() returns 2 rounds of tool calls with 2 functions each + // Filter is invoked 4 times total (2 functions × 2 rounds) + Assert.Equal(4, filterInvocations); + Assert.Equal(0, firstFunctionInvocations); // Function1 is always skipped + Assert.Equal(2, secondFunctionInvocations); // Function2 executes once per round + } + + [Fact] + public async Task FunctionSequenceIndexIsCorrectForConcurrentCallsAsync() + { + // Arrange + List functionSequenceNumbers = []; + List expectedFunctionSequenceNumbers = [0, 1, 0, 1]; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function2"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = this.GetKernelWithFilter(plugin, async (context, next) => + { + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + }); + + this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); + + // Act + var result = await kernel.InvokePromptAsync("Test prompt", new(new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() + { + AllowParallelCalls = true, + AllowConcurrentInvocation = true + }) + })); + + // Assert + Assert.Equal(expectedFunctionSequenceNumbers, functionSequenceNumbers); + } + + private static object? GetLastFunctionResultFromChatResponse(ChatResponse chatResponse) + { + Assert.NotEmpty(chatResponse.Messages); + var chatMessage = chatResponse.Messages.Where(m => m.Role == ChatRole.Tool).Last(); + + Assert.NotEmpty(chatMessage.Contents); + Assert.Contains(chatMessage.Contents, c => c is Microsoft.Extensions.AI.FunctionResultContent); + + var resultContent = (Microsoft.Extensions.AI.FunctionResultContent)chatMessage.Contents.Last(c => c is Microsoft.Extensions.AI.FunctionResultContent); + return resultContent.Result; + } + + #endregion +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Extensions/AnthropicServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Extensions/AnthropicServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..a4c59899ca18 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Extensions/AnthropicServiceCollectionExtensionsTests.cs @@ -0,0 +1,441 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Anthropic; +using Anthropic.Core; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Extensions; + +/// +/// Unit tests for . 
+/// +public sealed class AnthropicServiceCollectionExtensionsTests +{ + #region AddAnthropicChatCompletion Registration Tests + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void AddAnthropicChatCompletionRegistersServices(InitializationType type) + { + // Arrange + var clientOptions = new ClientOptions { APIKey = "test-api-key" }; + var client = new AnthropicClient(clientOptions); + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(client); + + // Act + _ = type switch + { + InitializationType.ApiKey => builder.Services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key"), + InitializationType.ClientInline => builder.Services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", anthropicClient: null), + _ => builder.Services + }; + + var kernel = builder.Build(); + + // Assert + var chatCompletionService = kernel.GetRequiredService(); + Assert.NotNull(chatCompletionService); + Assert.IsType(chatCompletionService); + + var textGenerationService = kernel.GetRequiredService(); + Assert.NotNull(textGenerationService); + Assert.IsType(textGenerationService); + } + + #endregion + + #region AddAnthropicChatClient Registration Tests + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void AddAnthropicChatClientRegistersService(InitializationType type) + { + // Arrange + var clientOptions = new ClientOptions { APIKey = "test-api-key" }; + var client = new AnthropicClient(clientOptions); + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(client); + + // Act + _ = type switch + { + InitializationType.ApiKey => builder.Services.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key"), + InitializationType.ClientInline => builder.Services.AddAnthropicChatClient("claude-sonnet-4-20250514", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAnthropicChatClient("claude-sonnet-4-20250514", anthropicClient: null), + _ => builder.Services + }; + + var kernel = builder.Build(); + + // Assert + var chatClient = kernel.Services.GetRequiredService(); + Assert.NotNull(chatClient); + } + + #endregion + + #region Singleton and Instance Sharing Tests + + [Fact] + public void AnthropicServicesShouldShareSameInstance() + { + // Arrange + var services = new ServiceCollection(); + + // Act + services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key"); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = serviceProvider.GetRequiredService(); + var textGenerationService = serviceProvider.GetRequiredService(); + var concreteService = serviceProvider.GetRequiredService(); + + Assert.Same(chatCompletionService, textGenerationService); + Assert.Same(chatCompletionService, concreteService); + } + + [Fact] + public void AnthropicServicesWithServiceIdShouldBeRegisteredAsKeyed() + { + // Arrange + var services = new ServiceCollection(); + const string serviceId = "anthropic-service"; + + // Act + services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key", serviceId: serviceId); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = 
serviceProvider.GetRequiredKeyedService(serviceId); + var textGenerationService = serviceProvider.GetRequiredKeyedService(serviceId); + + Assert.NotNull(chatCompletionService); + Assert.NotNull(textGenerationService); + Assert.IsType(chatCompletionService); + Assert.Same(chatCompletionService, textGenerationService); + } + + #endregion + + #region Configuration and Parameter Tests + + [Fact] + public void AnthropicServicesWithCustomBaseUrlShouldBeRegistered() + { + // Arrange + var services = new ServiceCollection(); + var customBaseUrl = new System.Uri("https://custom.anthropic.endpoint/"); + + // Act + services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key", baseUrl: customBaseUrl); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = serviceProvider.GetRequiredService(); + Assert.NotNull(chatCompletionService); + Assert.IsType(chatCompletionService); + } + + [Fact] + public void MultipleAnthropicServicesCanBeRegisteredWithDifferentServiceIds() + { + // Arrange + var services = new ServiceCollection(); + const string serviceId1 = "anthropic-claude-sonnet"; + const string serviceId2 = "anthropic-claude-opus"; + + // Act + services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key-1", serviceId: serviceId1); + services.AddAnthropicChatCompletion("claude-opus-4-20250514", "test-api-key-2", serviceId: serviceId2); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var service1 = serviceProvider.GetRequiredKeyedService(serviceId1); + var service2 = serviceProvider.GetRequiredKeyedService(serviceId2); + + Assert.NotNull(service1); + Assert.NotNull(service2); + Assert.NotSame(service1, service2); + } + + [Fact] + public void AnthropicServiceAttributesContainModelId() + { + // Arrange + var services = new ServiceCollection(); + const string modelId = "claude-sonnet-4-20250514"; + + // Act + services.AddAnthropicChatCompletion(modelId, "test-api-key"); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = serviceProvider.GetRequiredService(); + Assert.True(chatCompletionService.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.Equal(modelId, chatCompletionService.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void AnthropicServiceWithAllParametersShouldBeRegistered() + { + // Arrange + var services = new ServiceCollection(); + var customBaseUrl = new System.Uri("https://custom.anthropic.endpoint/"); + const string serviceId = "full-config-service"; + + // Act + services.AddAnthropicChatCompletion( + modelId: "claude-sonnet-4-20250514", + apiKey: "test-api-key", + baseUrl: customBaseUrl, + serviceId: serviceId); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = serviceProvider.GetRequiredKeyedService(serviceId); + Assert.NotNull(chatCompletionService); + Assert.IsType(chatCompletionService); + } + + [Fact] + public void KernelBuilderWithMultipleServicesReturnsCorrectService() + { + // Arrange + var kernelBuilder = Kernel.CreateBuilder(); + const string serviceId1 = "service1"; + const string serviceId2 = "service2"; + + // Act + kernelBuilder.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key-1", serviceId: serviceId1); + kernelBuilder.AddAnthropicChatCompletion("claude-opus-4-20250514", "test-api-key-2", serviceId: serviceId2); + var kernel = kernelBuilder.Build(); + + // Assert + var service1 = kernel.GetRequiredService(serviceId1); + 
        var service2 = kernel.GetRequiredService<IChatCompletionService>(serviceId2);
+
+        Assert.NotNull(service1);
+        Assert.NotNull(service2);
+        Assert.NotSame(service1, service2);
+    }
+
+    [Fact]
+    public void KeyedServicesCanBeResolvedIndependently()
+    {
+        // Arrange
+        var services = new ServiceCollection();
+        const string serviceId1 = "service1";
+        const string serviceId2 = "service2";
+
+        // Act
+        services.AddAnthropicChatCompletion("claude-sonnet-4-20250514", "test-api-key-1", serviceId: serviceId1);
+        services.AddAnthropicChatCompletion("claude-opus-4-20250514", "test-api-key-2", serviceId: serviceId2);
+        var serviceProvider = services.BuildServiceProvider();
+
+        // Assert - keyed services can be resolved independently
+        var service1 = serviceProvider.GetRequiredKeyedService<IChatCompletionService>(serviceId1);
+        var service2 = serviceProvider.GetRequiredKeyedService<IChatCompletionService>(serviceId2);
+
+        Assert.NotNull(service1);
+        Assert.NotNull(service2);
+        Assert.NotSame(service1, service2);
+
+        // Verify they have different model IDs
+        Assert.Equal("claude-sonnet-4-20250514", service1.Attributes[AIServiceExtensions.ModelIdKey]);
+        Assert.Equal("claude-opus-4-20250514", service2.Attributes[AIServiceExtensions.ModelIdKey]);
+    }
+
+    #endregion
+
+    #region IKernelBuilder Extension Tests
+
+    [Fact]
+    public void KernelBuilderAddAnthropicChatCompletionWithClientRegistersServices()
+    {
+        // Arrange
+        var clientOptions = new ClientOptions { APIKey = "test-api-key" };
+        var client = new AnthropicClient(clientOptions);
+        var kernelBuilder = Kernel.CreateBuilder();
+
+        // Act
+        kernelBuilder.AddAnthropicChatCompletion("claude-sonnet-4-20250514", client);
+        var kernel = kernelBuilder.Build();
+
+        // Assert
+        var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
+        Assert.NotNull(chatCompletionService);
+        Assert.IsType<AnthropicChatCompletionService>(chatCompletionService);
+
+        var textGenerationService = kernel.GetRequiredService<ITextGenerationService>();
+        Assert.NotNull(textGenerationService);
+        Assert.IsType<AnthropicChatCompletionService>(textGenerationService);
+    }
+
+    [Fact]
+    public void KernelBuilderAddAnthropicChatClientWithApiKeyRegistersService()
+    {
+        // Arrange
+        var kernelBuilder = Kernel.CreateBuilder();
+
+        // Act
+        kernelBuilder.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key");
+        var kernel = kernelBuilder.Build();
+
+        // Assert
+        var chatClient = kernel.Services.GetRequiredService<IChatClient>();
+        Assert.NotNull(chatClient);
+    }
+
+    [Fact]
+    public void KernelBuilderAddAnthropicChatClientWithClientRegistersService()
+    {
+        // Arrange
+        var clientOptions = new ClientOptions { APIKey = "test-api-key" };
+        var client = new AnthropicClient(clientOptions);
+        var kernelBuilder = Kernel.CreateBuilder();
+
+        // Act
+        kernelBuilder.AddAnthropicChatClient("claude-sonnet-4-20250514", client);
+        var kernel = kernelBuilder.Build();
+
+        // Assert
+        var chatClient = kernel.Services.GetRequiredService<IChatClient>();
+        Assert.NotNull(chatClient);
+    }
+
+    [Fact]
+    public void KernelBuilderAddAnthropicChatClientWithServiceIdRegistersKeyedService()
+    {
+        // Arrange
+        var kernelBuilder = Kernel.CreateBuilder();
+        const string serviceId = "my-chat-client";
+
+        // Act
+        kernelBuilder.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key", serviceId: serviceId);
+        var kernel = kernelBuilder.Build();
+
+        // Assert
+        var chatClient = kernel.Services.GetRequiredKeyedService<IChatClient>(serviceId);
+        Assert.NotNull(chatClient);
+    }
+
+    #endregion
+
+    #region IChatClient Keyed Service Tests
+
+    [Fact]
+    public void AddAnthropicChatClientWithServiceIdShouldBeRegisteredAsKeyed()
+    {
+        // Arrange
+        var services = new ServiceCollection();
+        const string serviceId =
"anthropic-chat-client"; + + // Act + services.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key", serviceId: serviceId); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatClient = serviceProvider.GetRequiredKeyedService(serviceId); + Assert.NotNull(chatClient); + } + + [Fact] + public void MultipleAnthropicChatClientsCanBeRegisteredWithDifferentServiceIds() + { + // Arrange + var services = new ServiceCollection(); + const string serviceId1 = "chat-client-sonnet"; + const string serviceId2 = "chat-client-opus"; + + // Act + services.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key-1", serviceId: serviceId1); + services.AddAnthropicChatClient("claude-opus-4-20250514", "test-api-key-2", serviceId: serviceId2); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var client1 = serviceProvider.GetRequiredKeyedService(serviceId1); + var client2 = serviceProvider.GetRequiredKeyedService(serviceId2); + + Assert.NotNull(client1); + Assert.NotNull(client2); + Assert.NotSame(client1, client2); + } + + #endregion + + #region IChatClient Configuration Tests + + [Fact] + public void AddAnthropicChatClientWithCustomBaseUrlShouldBeRegistered() + { + // Arrange + var services = new ServiceCollection(); + var customBaseUrl = new System.Uri("https://custom.anthropic.endpoint/"); + + // Act + services.AddAnthropicChatClient("claude-sonnet-4-20250514", "test-api-key", baseUrl: customBaseUrl); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatClient = serviceProvider.GetRequiredService(); + Assert.NotNull(chatClient); + } + + [Fact] + public void AddAnthropicChatClientWithAllParametersShouldBeRegistered() + { + // Arrange + var services = new ServiceCollection(); + var customBaseUrl = new System.Uri("https://custom.anthropic.endpoint/"); + const string serviceId = "full-config-chat-client"; + + // Act + services.AddAnthropicChatClient( + modelId: "claude-sonnet-4-20250514", + apiKey: "test-api-key", + baseUrl: customBaseUrl, + serviceId: serviceId); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatClient = serviceProvider.GetRequiredKeyedService(serviceId); + Assert.NotNull(chatClient); + } + + #endregion +} + +/// +/// Specifies the type of initialization used when registering Anthropic services. +/// +public enum InitializationType +{ + /// + /// Initialize with API key string. + /// + ApiKey, + + /// + /// Initialize with an inline AnthropicClient instance. + /// + ClientInline, + + /// + /// Initialize with AnthropicClient resolved from the service provider. + /// + ClientInServiceProvider +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Services/AnthropicChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Services/AnthropicChatCompletionServiceTests.cs new file mode 100644 index 000000000000..6286fd0fd6a8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Services/AnthropicChatCompletionServiceTests.cs @@ -0,0 +1,2137 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Anthropic; +using Anthropic.Exceptions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Services; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Services; + +/// +/// Unit tests for . +/// +public sealed class AnthropicChatCompletionServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AnthropicChatCompletionServiceTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/chat_completion_response.json")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false); + + // Setup mock logger factory to return a proper mock logger + var mockLogger = new Mock>(); + this._mockLoggerFactory = new Mock(); + this._mockLoggerFactory + .Setup(f => f.CreateLogger(It.IsAny())) + .Returns(mockLogger.Object); + } + + #region Test Helpers + + /// + /// Sets up a function calling scenario with multiple HTTP responses. + /// M.E.AI's FunctionInvokingChatClient automatically processes tool calls, which requires + /// multiple HTTP responses: one for the tool call, and one for the final response. + /// + /// Test data file names (without path) to queue as responses. + private void SetupFunctionCallScenario(params string[] responseFiles) + { + foreach (var file in responseFiles) + { + this._messageHandlerStub.ResponseQueue.Enqueue(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText($"./TestData/{file}")) + }); + } + } + + /// + /// Sets up a streaming function calling scenario with multiple HTTP responses. + /// M.E.AI's FunctionInvokingChatClient automatically processes tool calls, which requires + /// multiple HTTP responses: one for the tool call, and one for the final response. + /// Uses ReadAllBytes + MemoryStream to properly support streaming content. + /// + /// Test data file names (without path) to queue as responses. + private void SetupStreamingFunctionCallScenario(params string[] responseFiles) + { + foreach (var file in responseFiles) + { + var fileContent = File.ReadAllBytes($"./TestData/{file}"); + this._messageHandlerStub.ResponseQueue.Enqueue(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }); + } + } + + #endregion + + #region Constructor Tests + + [Fact] + public void ConstructorWithApiKeyWorksCorrectly() + { + // Arrange & Act + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key"); + + // Assert + Assert.NotNull(service); + Assert.Equal("claude-sonnet-4-20250514", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyAndLoggerFactoryWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory + ? 
new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", loggerFactory: this._mockLoggerFactory.Object) + : new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key"); + + // Assert + Assert.NotNull(service); + Assert.Equal("claude-sonnet-4-20250514", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ConstructorWithAnthropicClientShouldWork() + { + // Arrange + string model = "claude-sonnet-4-20250514"; + var anthropicClient = new AnthropicClient(new global::Anthropic.Core.ClientOptions { APIKey = "test-api-key" }); + + // Act + var service = new AnthropicChatCompletionService(model, anthropicClient); + + // Assert + Assert.NotNull(service); + Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithAnthropicClientAndLoggerFactoryWorksCorrectly(bool includeLoggerFactory) + { + // Arrange + var anthropicClient = new AnthropicClient(new global::Anthropic.Core.ClientOptions { APIKey = "test-api-key" }); + + // Act + var service = includeLoggerFactory + ? new AnthropicChatCompletionService("claude-sonnet-4-20250514", anthropicClient, loggerFactory: this._mockLoggerFactory.Object) + : new AnthropicChatCompletionService("claude-sonnet-4-20250514", anthropicClient); + + // Assert + Assert.NotNull(service); + Assert.Equal("claude-sonnet-4-20250514", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public void ConstructorWithInvalidModelIdThrowsArgumentException(string? modelId) + { + // Act & Assert + // ThrowIfNullOrWhiteSpace throws ArgumentNullException for null, ArgumentException for empty/whitespace + Assert.ThrowsAny(() => new AnthropicChatCompletionService(modelId!, "test-api-key")); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData(" ")] + public void ConstructorWithInvalidApiKeyThrowsArgumentException(string? 
apiKey) + { + // Act & Assert + // ThrowIfNullOrWhiteSpace throws ArgumentNullException for null, ArgumentException for empty/whitespace + Assert.ThrowsAny(() => new AnthropicChatCompletionService("claude-sonnet-4-20250514", apiKey!)); + } + + [Fact] + public void ConstructorWithNullAnthropicClientThrowsArgumentNullException() + { + // Act & Assert + Assert.Throws(() => new AnthropicChatCompletionService("claude-sonnet-4-20250514", (AnthropicClient)null!)); + } + + #endregion + + #region Attributes Tests + + [Fact] + public void AttributesShouldContainModelId() + { + // Arrange & Act + string model = "claude-sonnet-4-20250514"; + var service = new AnthropicChatCompletionService(model, "test-api-key"); + + // Assert + Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void AttributesShouldContainEndpoint() + { + // Arrange & Act + string model = "claude-sonnet-4-20250514"; + var service = new AnthropicChatCompletionService(model, "test-api-key"); + + // Assert + Assert.Equal("https://api.anthropic.com/", service.Attributes[AIServiceExtensions.EndpointKey]); + } + + [Fact] + public void AttributesShouldContainCustomEndpoint() + { + // Arrange & Act + string model = "claude-sonnet-4-20250514"; + var customEndpoint = new Uri("https://custom.anthropic.endpoint/"); + var service = new AnthropicChatCompletionService(model, "test-api-key", baseUrl: customEndpoint); + + // Assert + Assert.Equal(customEndpoint.ToString(), service.Attributes[AIServiceExtensions.EndpointKey]); + } + + [Theory] + [InlineData("https://localhost:1234/", "https://localhost:1234/")] + [InlineData("https://localhost:8080/", "https://localhost:8080/")] + [InlineData("https://custom.anthropic.com/", "https://custom.anthropic.com/")] + [InlineData("https://custom.anthropic.com/v1", "https://custom.anthropic.com/v1")] + public void AttributesShouldContainVariousCustomEndpoints(string endpointProvided, string expectedEndpoint) + { + // Arrange & Act + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", baseUrl: new Uri(endpointProvided)); + + // Assert + Assert.Equal(expectedEndpoint, service.Attributes[AIServiceExtensions.EndpointKey]); + } + + #endregion + + #region Endpoint Request Tests + + [Fact] + public async Task ItUsesDefaultEndpointAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert - Anthropic API endpoint should be the default + Assert.NotNull(this._messageHandlerStub.RequestUri); + Assert.StartsWith("https://api.anthropic.com/", this._messageHandlerStub.RequestUri.ToString()); + } + + [Theory] + [InlineData("https://custom.anthropic.com/")] + [InlineData("https://localhost:8080/")] + [InlineData("https://proxy.example.com/v1/")] + public async Task ItUsesCustomEndpointWhenProvidedAsync(string customEndpoint) + { + // Arrange + var service = new AnthropicChatCompletionService( + "claude-sonnet-4-20250514", + "test-api-key", + baseUrl: new Uri(customEndpoint), + httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert - Request should use the custom endpoint + Assert.NotNull(this._messageHandlerStub.RequestUri); + 
Assert.StartsWith(customEndpoint, this._messageHandlerStub.RequestUri.ToString()); + } + + #endregion + + #region GetChatMessageContentsAsync Tests + + [Fact] + public async Task GetChatMessageContentsAsyncReturnsValidResponseAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, how are you?"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + Assert.Equal(AuthorRole.Assistant, result[0].Role); + Assert.NotNull(result[0].Content); + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsCorrectRequestAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, how are you?"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("claude-sonnet-4-20250514", requestBody); + Assert.Contains("Hello, how are you?", requestBody); + } + + // NOTE: Parameter tests (Temperature, MaxTokens, TopP, TopK, StopSequences, SystemMessage, Roles) + // are consolidated in the "Request Parameters Tests" region below. + + [Fact] + public async Task GetChatMessageContentsAsyncShouldHaveModelIdDefinedAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(result[0].ModelId); + Assert.Equal("claude-sonnet-4-20250514", result[0].ModelId); + } + + [Fact] + public async Task GetChatMessageContentsAsyncReturnsMetadataAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + // Note: With M.E.AI architecture, metadata structure changed. + // Usage information is now in a "Usage" key as a UsageDetails object. 
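As a usage aside, the metadata shape asserted below can be consumed from application code roughly like this sketch; only the "Usage" key and the UsageDetails property names exercised in these tests are assumed.

    // Sketch: a helper that pulls aggregated token usage out of a chat completion result,
    // matching the metadata shape asserted in this test.
    static Microsoft.Extensions.AI.UsageDetails? GetUsage(ChatMessageContent message) =>
        message.Metadata is not null && message.Metadata.TryGetValue("Usage", out object? value)
            ? value as Microsoft.Extensions.AI.UsageDetails
            : null;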
+ Assert.NotNull(result[0].Metadata); + Assert.True(result[0].Metadata!.ContainsKey("Usage")); + var usage = result[0].Metadata!["Usage"] as Microsoft.Extensions.AI.UsageDetails; + Assert.NotNull(usage); + Assert.NotNull(usage!.InputTokenCount); + Assert.NotNull(usage.OutputTokenCount); + } + + // NOTE: GetChatMessageContentsAsyncWithMultipleMessagesAsync moved to Request Parameters Tests region + // as GetChatMessageContentsAsyncSendsMultipleMessagesAsync + + [Fact] + public async Task GetChatMessageContentsAsyncWithMultipleSystemMessagesAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("You are a helpful assistant."); + chatHistory.AddSystemMessage("Always be polite."); + chatHistory.AddUserMessage("Hello"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + // Both system messages should be concatenated + Assert.Contains("You are a helpful assistant.", requestBody); + Assert.Contains("Always be polite.", requestBody); + } + + #endregion + + #region Function Calling Tests + + [Fact] + public async Task FunctionCallsShouldBePropagatedViaChatMessageItemsAsync() + { + // Arrange + this.SetupFunctionCallScenario( + "chat_completion_tool_call_response.json", + "final_response_after_tool_call.json"); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather in Seattle?"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + // With M.E.AI architecture, the final response is returned after tool call processing. + // The chat history will contain the intermediate messages including the tool call. 
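For orientation, the history-inspection pattern the assertions below rely on can be factored into a small helper; a sketch that reuses only types already referenced in this file.

    // Sketch: recovering the tool calls that were appended to the chat history
    // while the connector auto-processed the model's tool_use response.
    static IReadOnlyList<FunctionCallContent> GetFunctionCalls(ChatHistory history) =>
        history.SelectMany(message => message.Items.OfType<FunctionCallContent>()).ToList();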
+ Assert.NotNull(result); + Assert.Single(result); + + // The final response should be text content (after the tool call was processed) + Assert.Contains("weather", result[0].Content, StringComparison.OrdinalIgnoreCase); + + // Verify the chat history contains the function call from the first response + var functionCalls = chatHistory.SelectMany(m => m.Items.OfType()).ToList(); + Assert.Single(functionCalls); + + var functionCall = functionCalls[0]; + Assert.Equal("GetWeather", functionCall.FunctionName); + Assert.Equal("toolu_01A09q90qw90lq917835lq", functionCall.Id); + Assert.NotNull(functionCall.Arguments); + Assert.Equal("Seattle, WA", functionCall.Arguments["location"]?.ToString()); + } + + [Fact] + public async Task MultipleFunctionCallsShouldBePropagatedViaChatMessageItemsAsync() + { + // Arrange + this.SetupFunctionCallScenario( + "chat_completion_multiple_tool_calls_response.json", + "final_response_after_tool_call.json"); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather in Seattle and New York?"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + + // Verify the chat history contains the function calls from the first response + var functionCalls = chatHistory.SelectMany(m => m.Items.OfType()).ToList(); + Assert.Equal(2, functionCalls.Count); + + Assert.Equal("GetWeather", functionCalls[0].FunctionName); + Assert.Equal("Seattle, WA", functionCalls[0].Arguments?["location"]?.ToString()); + + Assert.Equal("GetWeather", functionCalls[1].FunctionName); + Assert.Equal("New York, NY", functionCalls[1].Arguments?["location"]?.ToString()); + } + + [Fact] + public async Task FunctionCallResponseShouldHaveUsageMetadataAsync() + { + // Arrange + this.SetupFunctionCallScenario( + "chat_completion_tool_call_response.json", + "final_response_after_tool_call.json"); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather?"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + // The final response should have metadata with Usage information + Assert.NotNull(result[0].Metadata); + Assert.True(result[0].Metadata!.ContainsKey("Usage")); + + // Verify that the chat history contains the tool call from the first response + var functionCalls = chatHistory.SelectMany(m => m.Items.OfType()).ToList(); + Assert.Single(functionCalls); + } + + [Fact] + public async Task FunctionCallResponseShouldAggregateTokenUsageAsync() + { + // Arrange + // Response 1 (tool call): input_tokens=50, output_tokens=45 + // Response 2 (final): input_tokens=150, output_tokens=30 + // Expected aggregated: input=200, output=75 + this.SetupFunctionCallScenario( + "chat_completion_tool_call_response.json", + "final_response_after_tool_call.json"); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather in Seattle?"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + Assert.NotNull(result[0].Metadata); + 
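The totals asserted below follow directly from the two stubbed responses described in the Arrange comment; spelled out:

    // Aggregation performed across the two function-calling iterations:
    long expectedInput  = 50 + 150;  // tool-call response + final response = 200
    long expectedOutput = 45 + 30;   // tool-call response + final response =  75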
Assert.True(result[0].Metadata!.ContainsKey("Usage")); + + var usage = result[0].Metadata!["Usage"] as Microsoft.Extensions.AI.UsageDetails; + Assert.NotNull(usage); + + // M.E.AI FunctionInvokingChatClient aggregates token usage across all function calling iterations + // Response 1: 50 input + 45 output, Response 2: 150 input + 30 output + // Aggregated: 200 input, 75 output + Assert.Equal(200, usage!.InputTokenCount); + Assert.Equal(75, usage.OutputTokenCount); + } + + [Fact] + public async Task FunctionResultContentShouldBeSerializedToToolResultBlockAsync() + { + // Arrange - Create a chat history with a function call and its result + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather in Seattle?"); + + // Add assistant message with function call + var assistantMessage = new ChatMessageContent(AuthorRole.Assistant, string.Empty); + assistantMessage.Items.Add(new FunctionCallContent("GetWeather", "WeatherPlugin", "call_123", new KernelArguments { ["location"] = "Seattle" })); + chatHistory.Add(assistantMessage); + + // Add tool result message + var toolResultMessage = new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent( + new FunctionCallContent("GetWeather", "WeatherPlugin", "call_123"), + "Sunny, 72°F") + ]); + chatHistory.Add(toolResultMessage); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert - Verify the request contains the tool result in Anthropic format + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + + // Anthropic expects tool results in a user message with tool_result content blocks + var requestJson = JsonDocument.Parse(requestBody); + var messages = requestJson.RootElement.GetProperty("messages"); + + // Find the message containing the tool result + var hasToolResult = false; + foreach (var message in messages.EnumerateArray()) + { + if (message.TryGetProperty("content", out var content) && content.ValueKind == JsonValueKind.Array) + { + foreach (var contentBlock in content.EnumerateArray()) + { + if (contentBlock.TryGetProperty("type", out var typeElement) && + typeElement.GetString() == "tool_result") + { + hasToolResult = true; + Assert.True(contentBlock.TryGetProperty("tool_use_id", out var toolUseId)); + Assert.Equal("call_123", toolUseId.GetString()); + break; + } + } + } + if (hasToolResult) + { + break; + } + } + + Assert.True(hasToolResult, "Request should contain a tool_result content block"); + } + + #endregion + + #region Streaming Tests + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncReturnsContentAsync() + { + // Arrange - Use ReadAllBytesAsync + MemoryStream to avoid stream disposal issues + // (the stream must remain open until the async enumerable is fully consumed) + var fileContent = await File.ReadAllBytesAsync("./TestData/chat_completion_streaming_response.txt"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }; + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var chunks = new List(); + await foreach (var chunk in 
service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotEmpty(chunks); + Assert.All(chunks, c => Assert.Equal(AuthorRole.Assistant, c.Role)); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncWithSettingsAsync() + { + // Arrange - Use ReadAllBytesAsync + MemoryStream to avoid stream disposal issues + var fileContent = await File.ReadAllBytesAsync("./TestData/chat_completion_streaming_response.txt"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }; + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.5, + MaxTokens = 500 + }; + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync(chatHistory, settings)) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotEmpty(chunks); + } + + /// + /// Tests streaming with tool calls returns content including function call information. + /// M.E.AI's FunctionInvokingChatClient automatically processes tool calls, which requires + /// multiple HTTP responses: one for the tool call, and one for the final response. + /// + [Fact] + public async Task GetStreamingChatMessageContentsAsyncWithToolCallsReturnsContentAsync() + { + // Arrange - Queue multiple responses for FunctionInvokingChatClient's auto-continuation + this.SetupStreamingFunctionCallScenario( + "chat_completion_streaming_tool_call_response.txt", + "final_streaming_response_after_tool_call.txt"); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather?"); + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + chunks.Add(chunk); + } + + // Assert - final response content + Assert.NotEmpty(chunks); + var combinedContent = string.Join("", chunks.Select(c => c.Content ?? 
"")); + Assert.Contains("Seattle", combinedContent, StringComparison.OrdinalIgnoreCase); + + // Verify the chat history contains the function call from the first response + var functionCalls = chatHistory.SelectMany(m => m.Items.OfType()).ToList(); + Assert.Single(functionCalls); + Assert.Equal("GetWeather", functionCalls[0].FunctionName); + Assert.Equal("toolu_01A09q90qw90lq917835lq", functionCalls[0].Id); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncReturnsModelIdAsync() + { + // Arrange - Use ReadAllBytesAsync + MemoryStream to avoid stream disposal issues + var fileContent = await File.ReadAllBytesAsync("./TestData/chat_completion_streaming_response.txt"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }; + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotEmpty(chunks); + Assert.All(chunks, c => Assert.Equal("claude-sonnet-4-20250514", c.ModelId)); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncReturnsChunksWithContentAsync() + { + // Arrange - Use ReadAllBytesAsync + MemoryStream to avoid stream disposal issues + var fileContent = await File.ReadAllBytesAsync("./TestData/chat_completion_streaming_response.txt"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }; + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotEmpty(chunks); + // At least some chunks should have text content + var combinedContent = string.Join("", chunks.Select(c => c.Content ?? 
"")); + Assert.NotEmpty(combinedContent); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncReturnsUsageMetadataAsync() + { + // Arrange - Use ReadAllBytesAsync + MemoryStream to avoid stream disposal issues + var fileContent = await File.ReadAllBytesAsync("./TestData/chat_completion_streaming_response.txt"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }; + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotEmpty(chunks); + + // The final chunk(s) should contain usage metadata + // Anthropic sends usage info in message_start and message_delta events + var chunksWithMetadata = chunks.Where(c => c.Metadata is not null && c.Metadata.Count > 0).ToList(); + Assert.NotEmpty(chunksWithMetadata); + + // Verify at least one chunk has Usage information + var hasUsage = chunks.Any(c => + c.Metadata is not null && + c.Metadata.TryGetValue("Usage", out var usage) && + usage is not null); + Assert.True(hasUsage, "At least one streaming chunk should contain Usage metadata"); + } + + #endregion + + #region Text Generation Tests + + [Fact] + public async Task GetTextContentsAsyncReturnsValidResponseAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + // Act + var result = await service.GetTextContentsAsync("Hello, how are you?"); + + // Assert + Assert.NotNull(result); + Assert.NotEmpty(result); + } + + [Fact] + public async Task GetTextContentsAsyncShouldHaveModelIdDefinedAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + // Act + var result = await service.GetTextContentsAsync("Hello"); + + // Assert + Assert.NotNull(result[0].ModelId); + Assert.Equal("claude-sonnet-4-20250514", result[0].ModelId); + } + + [Fact] + public async Task GetStreamingTextContentsAsyncReturnsContentAsync() + { + // Arrange - Use ReadAllBytesAsync + MemoryStream to avoid stream disposal issues + var fileContent = await File.ReadAllBytesAsync("./TestData/chat_completion_streaming_response.txt"); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(new MemoryStream(fileContent)) + }; + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingTextContentsAsync("Hello")) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotEmpty(chunks); + } + + #endregion + + #region Error Handling Tests + + [Fact] + public async Task GetChatMessageContentsAsyncWithEmptyChatHistoryThrowsBadRequestAsync() + { + // Arrange - M.E.AI handles empty chat history by sending an empty messages array + // The Anthropic API will return an error, but the SDK handles this gracefully + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.BadRequest) + { + Content = new StringContent("{\"error\": {\"type\": 
\"invalid_request_error\", \"message\": \"messages: at least one message is required\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + + // Act & Assert - Anthropic SDK throws AnthropicBadRequestException for empty messages + await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncWithEmptyChatHistoryThrowsAsync() + { + // Arrange - M.E.AI handles empty chat history by sending an empty messages array + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.BadRequest) + { + Content = new StringContent("{\"error\": {\"type\": \"invalid_request_error\", \"message\": \"messages: at least one message is required\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + + // Act & Assert - Anthropic SDK throws AnthropicBadRequestException for empty messages + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + } + }); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnUnauthorizedAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.Unauthorized) + { + Content = new StringContent("{\"error\": {\"type\": \"authentication_error\", \"message\": \"Invalid API key\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "invalid-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws AnthropicUnauthorizedException for auth errors + await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnBadRequestAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.BadRequest) + { + Content = new StringContent("{\"error\": {\"type\": \"invalid_request_error\", \"message\": \"Invalid request\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws AnthropicBadRequestException for bad requests + await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsAnthropicApiExceptionWithStatusCodeAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.BadRequest) + { + Content = new StringContent("{\"error\": {\"type\": \"invalid_request_error\", \"message\": \"Invalid request\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", 
httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK includes status code in AnthropicApiException + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + Assert.Equal(HttpStatusCode.BadRequest, exception.StatusCode); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnRateLimitAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage((HttpStatusCode)429) + { + Content = new StringContent("{\"error\": {\"type\": \"rate_limit_error\", \"message\": \"Rate limit exceeded. Please slow down.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws AnthropicRateLimitException for rate limit errors + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + Assert.Equal((HttpStatusCode)429, exception.StatusCode); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnServerErrorAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.InternalServerError) + { + Content = new StringContent("{\"error\": {\"type\": \"api_error\", \"message\": \"An internal server error occurred.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws Anthropic5xxException for server errors + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + Assert.Equal(HttpStatusCode.InternalServerError, exception.StatusCode); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnServiceUnavailableAsync() + { + // Arrange - HTTP 503 Service Unavailable (Anthropic overloaded) + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.ServiceUnavailable) + { + Content = new StringContent("{\"error\": {\"type\": \"overloaded_error\", \"message\": \"Anthropic's API is temporarily overloaded.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws Anthropic5xxException for 503 errors + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + Assert.Equal(HttpStatusCode.ServiceUnavailable, exception.StatusCode); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncThrowsOnRateLimitAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage((HttpStatusCode)429) + { + Content = new StringContent("{\"error\": {\"type\": \"rate_limit_error\", \"message\": \"Rate limit exceeded.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new 
AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Streaming should also throw on rate limit + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + } + }); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncThrowsOnServerErrorAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.InternalServerError) + { + Content = new StringContent("{\"error\": {\"type\": \"api_error\", \"message\": \"Internal server error.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Streaming should also throw on server errors + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + } + }); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncThrowsOnUnauthorizedAsync() + { + // Arrange + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.Unauthorized) + { + Content = new StringContent("{\"error\": {\"type\": \"authentication_error\", \"message\": \"Invalid API key\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "invalid-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Streaming should also throw on authentication errors + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + } + }); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncThrowsOnServiceUnavailableAsync() + { + // Arrange - HTTP 503 Service Unavailable (Anthropic overloaded) + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.ServiceUnavailable) + { + Content = new StringContent("{\"error\": {\"type\": \"overloaded_error\", \"message\": \"Anthropic's API is temporarily overloaded.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Streaming should also throw on 503 errors + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + } + }); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnForbiddenAsync() + { + // Arrange - HTTP 403 Forbidden (permission denied) + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.Forbidden) + { + Content = new StringContent("{\"error\": {\"type\": \"permission_error\", \"message\": \"Your API key does not have permission to use the specified resource.\"}}") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new 
AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws AnthropicForbiddenException for 403 errors + var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + Assert.Equal(HttpStatusCode.Forbidden, exception.StatusCode); + } + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsOnMalformedJsonResponseAsync() + { + // Arrange - API returns invalid JSON + using var handler = new HttpMessageHandlerStub + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent("This is not valid JSON {{{") + } + }; + using var httpClient = new HttpClient(handler, false); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Anthropic SDK throws JsonException when parsing invalid JSON + await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + } + + #endregion + + #region IChatCompletionService Interface Tests + + [Fact] + public void ServiceImplementsIChatCompletionService() + { + // Arrange & Act + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key"); + + // Assert + Assert.IsAssignableFrom(service); + } + + [Fact] + public void ServiceImplementsITextGenerationService() + { + // Arrange & Act + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key"); + + // Assert + Assert.IsAssignableFrom(service); + } + + // Note: ModelId attribute test is already covered by AttributesShouldContainModelId in Attributes Tests region + + [Fact] + public void ServiceAttributesAreReadOnly() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key"); + + // Act & Assert + Assert.IsAssignableFrom>(service.Attributes); + } + + #endregion + + #region Kernel Integration Tests + + [Fact] + public async Task ServiceCanBeUsedWithKernelAsync() + { + // Arrange + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var chatService = kernel.GetRequiredService(); + var result = await chatService.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + } + + [Fact] + public async Task ServiceCanBeUsedWithInvokePromptAsync() + { + // Arrange + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + // Act + var result = await kernel.InvokePromptAsync("Hello"); + + // Assert + Assert.NotNull(result); + } + + [Fact] + public async Task ServiceCanBeUsedWithPromptExecutionSettingsAsync() + { + // Arrange + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.7, + MaxTokens 
= 1024 + }; + + // Act + var result = await kernel.InvokePromptAsync("Hello", new(settings)); + + // Assert + Assert.NotNull(result); + } + + #endregion + + #region FunctionChoiceBehavior Tests + + [Fact] + public async Task FunctionChoiceBehaviorAutoSendsToolsWithAutoChoiceAsync() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "result", "TestFunction"); + var plugin = KernelPluginFactory.CreateFromFunctions("TestPlugin", [function]); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }; + + // Act + var chatService = kernel.GetRequiredService(); + await chatService.GetChatMessageContentsAsync(chatHistory, settings, kernel); + + // Assert - Parse JSON and verify structure + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + // Verify tools array exists and contains our function + Assert.True(requestJson.RootElement.TryGetProperty("tools", out var tools)); + Assert.Equal(JsonValueKind.Array, tools.ValueKind); + Assert.True(tools.GetArrayLength() > 0); + + var toolNames = tools.EnumerateArray() + .Where(t => t.TryGetProperty("name", out _)) + .Select(t => t.GetProperty("name").GetString()) + .ToList(); + // M.E.AI uses underscore separator for function names (TestPlugin_TestFunction) + Assert.Contains("TestPlugin_TestFunction", toolNames); + + // Verify tool_choice is "auto" + Assert.True(requestJson.RootElement.TryGetProperty("tool_choice", out var toolChoice)); + Assert.Equal("auto", toolChoice.GetProperty("type").GetString()); + } + + [Fact] + public async Task FunctionChoiceBehaviorNoneSendsToolsWithNoneChoiceAsync() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "result", "TestFunction"); + var plugin = KernelPluginFactory.CreateFromFunctions("TestPlugin", [function]); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.None() + }; + + // Act + var chatService = kernel.GetRequiredService(); + await chatService.GetChatMessageContentsAsync(chatHistory, settings, kernel); + + // Assert - Parse JSON and verify structure + // FunctionChoiceBehavior.None() sends available functions to the model with tool_choice: "none". + // The model receives the tool definitions but is instructed NOT to call any of them. 
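Taken together, the FunctionChoiceBehavior tests in this region pin down the mapping to Anthropic's tool_choice types; a compact summary, where the JSON fragments in the comments are paraphrased from the assertions rather than captured wire traffic.

    // Behavior on the SK settings object            => expected "tool_choice" in the request body
    var auto     = new AnthropicPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };     // { "type": "auto" } - tools sent, model may call them
    var none     = new AnthropicPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.None() };     // { "type": "none" } - tools sent, calls suppressed
    var required = new AnthropicPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required() }; // { "type": "any" }  - model must call a tool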
+ Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + // Verify tools array exists and contains our function + Assert.True(requestJson.RootElement.TryGetProperty("tools", out var tools)); + Assert.Equal(JsonValueKind.Array, tools.ValueKind); + + var toolNames = tools.EnumerateArray() + .Where(t => t.TryGetProperty("name", out _)) + .Select(t => t.GetProperty("name").GetString()) + .ToList(); + // M.E.AI uses underscore separator for function names (TestPlugin_TestFunction) + Assert.Contains("TestPlugin_TestFunction", toolNames); + + // Verify tool_choice is "none" + Assert.True(requestJson.RootElement.TryGetProperty("tool_choice", out var toolChoice)); + Assert.Equal("none", toolChoice.GetProperty("type").GetString()); + } + + [Fact] + public async Task FunctionChoiceBehaviorRequiredSendsToolChoiceAnyAsync() + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "result", "TestFunction"); + var plugin = KernelPluginFactory.CreateFromFunctions("TestPlugin", [function]); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Required() + }; + + // Act + var chatService = kernel.GetRequiredService(); + await chatService.GetChatMessageContentsAsync(chatHistory, settings, kernel); + + // Assert - Parse JSON and verify structure + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + // Verify tools array exists + Assert.True(requestJson.RootElement.TryGetProperty("tools", out var tools)); + Assert.Equal(JsonValueKind.Array, tools.ValueKind); + + // Verify tool_choice is set (Anthropic uses "any" for required, meaning model must call a tool) + Assert.True(requestJson.RootElement.TryGetProperty("tool_choice", out var toolChoice)); + Assert.Equal("any", toolChoice.GetProperty("type").GetString()); + } + + [Fact] + public async Task NoFunctionChoiceBehaviorDoesNotSendToolsAsync() + { + // Arrange - No plugins, no FunctionChoiceBehavior + var builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // No settings with FunctionChoiceBehavior + + // Act + var chatService = kernel.GetRequiredService(); + await chatService.GetChatMessageContentsAsync(chatHistory, kernel: kernel); + + // Assert - No tools or tool_choice should be in the request + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + Assert.False(requestJson.RootElement.TryGetProperty("tools", out _), + "Request should not contain 'tools' when no FunctionChoiceBehavior is set"); + Assert.False(requestJson.RootElement.TryGetProperty("tool_choice", out _), + "Request should not contain 'tool_choice' when no FunctionChoiceBehavior is set"); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task 
FunctionChoiceBehaviorPassesAllowParallelCallsOptionAsync(bool allowParallelCalls) + { + // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "result", "TestFunction"); + var plugin = KernelPluginFactory.CreateFromFunctions("TestPlugin", [function]); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var options = new FunctionChoiceBehaviorOptions { AllowParallelCalls = allowParallelCalls }; + var settings = new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: options) + }; + + // Act + var chatService = kernel.GetRequiredService(); + await chatService.GetChatMessageContentsAsync(chatHistory, settings, kernel); + + // Assert - Anthropic uses "disable_parallel_tool_use" (inverted logic) + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + Assert.True(requestJson.RootElement.TryGetProperty("tool_choice", out var toolChoice)); + + // Anthropic uses disable_parallel_tool_use which is the inverse of AllowParallelCalls + if (toolChoice.TryGetProperty("disable_parallel_tool_use", out var disableParallel)) + { + Assert.Equal(!allowParallelCalls, disableParallel.GetBoolean()); + } + else if (!allowParallelCalls) + { + // If AllowParallelCalls is false, disable_parallel_tool_use should be present and true + Assert.Fail("Expected 'disable_parallel_tool_use' to be present when AllowParallelCalls is false"); + } + // If AllowParallelCalls is true and disable_parallel_tool_use is not present, that's correct (default behavior) + } + + [Fact] + public async Task FunctionChoiceBehaviorAutoInvokesKernelFunctionAsync() + { + // Arrange - Set up multi-response scenario for function calling flow + // Response 1: Model returns a tool call with M.E.AI-style function name (Plugin_Function) + // Response 2: Model returns final text response after tool result + // Note: Must use auto_invoke_tool_call_response.json which has "WeatherPlugin_GetWeather" + // as the function name (M.E.AI uses underscore-separated plugin_function names) + this.SetupFunctionCallScenario( + "auto_invoke_tool_call_response.json", + "final_response_after_tool_call.json"); + + // Create a real kernel function that will be invoked + var functionWasInvoked = false; + var function = KernelFunctionFactory.CreateFromMethod( + (string location) => + { + functionWasInvoked = true; + return $"The weather in {location} is sunny and 72°F"; + }, + "GetWeather", + "Gets the current weather for a location"); + var plugin = KernelPluginFactory.CreateFromFunctions("WeatherPlugin", [function]); + + var builder = Kernel.CreateBuilder(); + builder.Plugins.Add(plugin); + builder.Services.AddSingleton( + new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient)); + var kernel = builder.Build(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What's the weather in Seattle?"); + + var settings = new AnthropicPromptExecutionSettings + { + // Auto with autoInvoke: true means the FunctionInvokingChatClient will + // automatically call the kernel function and send the result back + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true) + }; 
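Outside the stubbed test harness, the same auto-invoke flow reads roughly as follows. This is a sketch against the live API rather than the mocked HttpClient; the IChatCompletionService generic argument on AddSingleton and the placeholder API key are assumptions.

    // Sketch: end-to-end auto function invocation with the Anthropic connector.
    var weatherPlugin = KernelPluginFactory.CreateFromFunctions("WeatherPlugin",
    [
        KernelFunctionFactory.CreateFromMethod(
            (string location) => $"The weather in {location} is sunny and 72°F",
            "GetWeather",
            "Gets the current weather for a location"),
    ]);

    var builder = Kernel.CreateBuilder();
    builder.Plugins.Add(weatherPlugin);
    builder.Services.AddSingleton<IChatCompletionService>(
        new AnthropicChatCompletionService("claude-sonnet-4-20250514", "<your-api-key>"));
    var kernel = builder.Build();

    var settings = new AnthropicPromptExecutionSettings
    {
        FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true)
    };

    // The connector calls GetWeather automatically and returns the model's final answer.
    var answer = await kernel.InvokePromptAsync("What's the weather in Seattle?", new(settings));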
+ + // Act + var chatService = kernel.GetRequiredService(); + var result = await chatService.GetChatMessageContentsAsync(chatHistory, settings, kernel); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + + // Verify the function was actually invoked by the kernel + Assert.True(functionWasInvoked, "The kernel function should have been invoked during auto-invoke"); + + // Verify the final response contains text (after tool call processing) + Assert.NotNull(result[0].Content); + Assert.NotEmpty(result[0].Content!); + } + + #endregion + + #region Multimodal Tests + + [Fact] + public async Task GetChatMessageContentsAsyncWithImageContentSendsImageAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + + // Create a message with image content + var imageBytes = new byte[] { 0x89, 0x50, 0x4E, 0x47 }; // PNG header bytes + var imageContent = new ImageContent(imageBytes, "image/png"); + var textContent = new TextContent("What's in this image?"); + + var message = new ChatMessageContent(AuthorRole.User, [textContent, imageContent]); + chatHistory.Add(message); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert - Verify Anthropic image schema structure + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + Assert.True(requestJson.RootElement.TryGetProperty("messages", out var messages)); + Assert.True(messages.GetArrayLength() > 0); + + var firstMessage = messages[0]; + Assert.True(firstMessage.TryGetProperty("content", out var content)); + Assert.True(content.ValueKind == JsonValueKind.Array); + + // Find the image content block + var hasImageBlock = false; + foreach (var block in content.EnumerateArray()) + { + if (block.TryGetProperty("type", out var type) && type.GetString() == "image") + { + hasImageBlock = true; + // Anthropic uses "source" with "type": "base64" for inline images + Assert.True(block.TryGetProperty("source", out var source)); + Assert.Equal("base64", source.GetProperty("type").GetString()); + Assert.Equal("image/png", source.GetProperty("media_type").GetString()); + Assert.True(source.TryGetProperty("data", out _)); + break; + } + } + Assert.True(hasImageBlock, "Request should contain an image content block"); + } + + [Fact] + public async Task GetChatMessageContentsAsyncWithImageUrlSendsImageAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + + // Create a message with image URL content + var imageContent = new ImageContent(new Uri("https://example.com/image.png")); + var textContent = new TextContent("What's in this image?"); + + var message = new ChatMessageContent(AuthorRole.User, [textContent, imageContent]); + chatHistory.Add(message); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert - Verify Anthropic image URL schema structure + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + Assert.True(requestJson.RootElement.TryGetProperty("messages", out var messages)); + Assert.True(messages.GetArrayLength() > 0); + + var firstMessage = messages[0]; + Assert.True(firstMessage.TryGetProperty("content", out var content)); + Assert.True(content.ValueKind 
== JsonValueKind.Array); + + // Find the image content block + var hasImageBlock = false; + foreach (var block in content.EnumerateArray()) + { + if (block.TryGetProperty("type", out var type) && type.GetString() == "image") + { + hasImageBlock = true; + // Anthropic uses "source" with "type": "url" for URL-based images + Assert.True(block.TryGetProperty("source", out var source)); + Assert.Equal("url", source.GetProperty("type").GetString()); + Assert.Equal("https://example.com/image.png", source.GetProperty("url").GetString()); + break; + } + } + Assert.True(hasImageBlock, "Request should contain an image content block"); + } + + [Fact] + public async Task GetChatMessageContentsAsyncWithBinaryContentSendsPdfAsync() + { + // Arrange - Anthropic supports PDF documents via BinaryContent + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + + // Create a minimal PDF-like byte array (just for testing serialization) + // Real PDFs start with %PDF- header + var pdfBytes = new byte[] { 0x25, 0x50, 0x44, 0x46, 0x2D }; // "%PDF-" + var binaryContent = new BinaryContent(pdfBytes, "application/pdf"); + var textContent = new TextContent("Summarize this document."); + + var message = new ChatMessageContent(AuthorRole.User, [textContent, binaryContent]); + chatHistory.Add(message); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert - Verify Anthropic document schema structure + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestJson = JsonDocument.Parse(this._messageHandlerStub.RequestContent); + + Assert.True(requestJson.RootElement.TryGetProperty("messages", out var messages)); + Assert.True(messages.GetArrayLength() > 0); + + var firstMessage = messages[0]; + Assert.True(firstMessage.TryGetProperty("content", out var content)); + Assert.True(content.ValueKind == JsonValueKind.Array); + + // Find the document content block + var hasDocumentBlock = false; + foreach (var block in content.EnumerateArray()) + { + if (block.TryGetProperty("type", out var type) && type.GetString() == "document") + { + hasDocumentBlock = true; + // Anthropic uses "source" with "type": "base64" for inline documents + Assert.True(block.TryGetProperty("source", out var source)); + Assert.Equal("base64", source.GetProperty("type").GetString()); + Assert.Equal("application/pdf", source.GetProperty("media_type").GetString()); + Assert.True(source.TryGetProperty("data", out _)); + break; + } + } + Assert.True(hasDocumentBlock, "Request should contain a document content block for PDF"); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task GetChatMessageContentsAsyncWithBinaryContentSendsDataCorrectlyAsync(bool useDataUri) + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + + var pdfBytes = new byte[] { 0x25, 0x50, 0x44, 0x46, 0x2D, 0x31, 0x2E, 0x34 }; // "%PDF-1.4" + var base64Data = Convert.ToBase64String(pdfBytes); + + BinaryContent binaryContent = useDataUri + ? 
new BinaryContent($"data:application/pdf;base64,{base64Data}") + : new BinaryContent(pdfBytes, "application/pdf"); + + var message = new ChatMessageContent(AuthorRole.User, [new TextContent("Analyze this."), binaryContent]); + chatHistory.Add(message); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + + // Should contain base64-encoded PDF data + Assert.Contains("application/pdf", requestBody); + Assert.Contains(base64Data, requestBody); + } + + #endregion + + #region Model Tests + + [Fact] + public async Task GetChatMessageContentsAsyncUsesConstructorModelIdAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("claude-sonnet-4-20250514", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncWithDifferentModelUsesConstructorModelAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-3-haiku-20240307", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("claude-3-haiku-20240307", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncReturnsCorrectModelIdInResponseAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + var result = await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.Single(result); + Assert.Equal("claude-sonnet-4-20250514", result[0].ModelId); + } + + #endregion + + #region Request Parameters Tests + + [Fact] + public async Task GetChatMessageContentsAsyncSendsTemperatureAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.7 + }; + + // Act + await service.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + // Note: Temperature is converted from double to float in ChatOptions, causing precision loss + // (0.7 double becomes 0.699999988079071 float). We verify the value is approximately correct. 
+ Assert.Contains("\"temperature\":", requestBody); + var doc = System.Text.Json.JsonDocument.Parse(requestBody); + var temperature = doc.RootElement.GetProperty("temperature").GetDouble(); + Assert.Equal(0.7, temperature, 5); // 5 decimal places precision + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsMaxTokensAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + MaxTokens = 2048 + }; + + // Act + await service.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("\"max_tokens\":2048", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsTopPAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + TopP = 0.9 + }; + + // Act + await service.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + // Note: TopP is converted from double to float in ChatOptions, causing precision loss. + // We verify the value is approximately correct. + Assert.Contains("\"top_p\":", requestBody); + var doc = System.Text.Json.JsonDocument.Parse(requestBody); + var topP = doc.RootElement.GetProperty("top_p").GetDouble(); + Assert.Equal(0.9, topP, 5); // 5 decimal places precision + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsTopKAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + TopK = 50 + }; + + // Act + await service.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("\"top_k\":50", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncWithBothTemperatureAndTopPClearsTopPAsync() + { + // Arrange - Anthropic API does not allow both temperature and top_p simultaneously. + // The connector should clear top_p when temperature is set. 
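+ // Illustrative request sketch (values assumed, not asserted verbatim): the serialized body is expected to
+ // resemble {"model":"claude-sonnet-4-20250514","messages":[...],"temperature":0.7} with no "top_p" property present.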
+ var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.7, + TopP = 0.9 + }; + + // Act + await service.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("\"temperature\":", requestBody); + Assert.DoesNotContain("\"top_p\"", requestBody); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncWithBothTemperatureAndTopPClearsTopPAsync() + { + // Arrange - Anthropic API does not allow both temperature and top_p simultaneously. + // The connector should clear top_p when temperature is set. + this._messageHandlerStub.ResponseQueue.Enqueue(new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/chat_completion_streaming_response.txt")) + }); + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.5, + TopP = 0.8 + }; + + // Act + var chunks = new List(); + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync(chatHistory, settings)) + { + chunks.Add(chunk); + } + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("\"temperature\":", requestBody); + Assert.DoesNotContain("\"top_p\"", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsStopSequencesAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + var settings = new AnthropicPromptExecutionSettings + { + StopSequences = ["END", "STOP"] + }; + + // Act + await service.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("stop_sequences", requestBody); + Assert.Contains("END", requestBody); + Assert.Contains("STOP", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsSystemPromptAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("You are a helpful assistant."); + chatHistory.AddUserMessage("Hello"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("system", requestBody); + Assert.Contains("You are a helpful assistant.", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsMultipleMessagesAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: 
this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("First message"); + chatHistory.AddAssistantMessage("First response"); + chatHistory.AddUserMessage("Second message"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("First message", requestBody); + Assert.Contains("First response", requestBody); + Assert.Contains("Second message", requestBody); + } + + [Fact] + public async Task GetChatMessageContentsAsyncSendsCorrectRolesAsync() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("User message"); + chatHistory.AddAssistantMessage("Assistant message"); + chatHistory.AddUserMessage("Another user message"); + + // Act + await service.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + Assert.Contains("\"role\":\"user\"", requestBody); + Assert.Contains("\"role\":\"assistant\"", requestBody); + } + + #endregion + + // NOTE: Text Generation Tests are in an earlier region (see GetTextContentsAsyncReturnsValidResponseAsync, etc.) + // Duplicate region removed during code review. + + #region Logging Tests + + [Fact] + public void ServiceCanBeCreatedWithLoggerFactory() + { + // Arrange - Must setup CreateLogger to return a valid logger + var mockLogger = new Mock>(); + var loggerFactory = new Mock(); + loggerFactory + .Setup(f => f.CreateLogger(It.IsAny())) + .Returns(mockLogger.Object); + + // Act + var service = new AnthropicChatCompletionService( + "claude-sonnet-4-20250514", + "test-api-key", + loggerFactory: loggerFactory.Object); + + // Assert + Assert.NotNull(service); + } + + [Fact] + public void ServiceCanBeCreatedWithNullLoggerFactory() + { + // Arrange & Act - Service should handle null logger factory gracefully + var service = new AnthropicChatCompletionService( + "claude-sonnet-4-20250514", + "test-api-key", + loggerFactory: null); + + // Assert + Assert.NotNull(service); + } + + #endregion + + #region Dispose Tests + + [Fact] + public async Task GetChatMessageContentsAsyncThrowsAfterDispose() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + service.Dispose(); + + // Assert + await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync(chatHistory)); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncThrowsAfterDispose() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act + service.Dispose(); + + // Assert + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory)) + { + } + }); + } + + [Fact] + public async Task GetTextContentsAsyncThrowsAfterDispose() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + 
// Act + service.Dispose(); + + // Assert + await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Hello")); + } + + [Fact] + public async Task GetStreamingTextContentsAsyncThrowsAfterDispose() + { + // Arrange + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + + // Act + service.Dispose(); + + // Assert + await Assert.ThrowsAsync(async () => + { + await foreach (var _ in service.GetStreamingTextContentsAsync("Hello")) + { + } + }); + } + + #endregion + + #region Cancellation Token Tests + + [Fact] + public async Task GetChatMessageContentsAsyncRespectsCancellationToken() + { + // Arrange + using var cts = new CancellationTokenSource(); + cts.Cancel(); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Should throw OperationCanceledException when token is already cancelled + await Assert.ThrowsAnyAsync( + () => service.GetChatMessageContentsAsync(chatHistory, cancellationToken: cts.Token)); + } + + [Fact] + public async Task GetStreamingChatMessageContentsAsyncRespectsCancellationToken() + { + // Arrange + using var cts = new CancellationTokenSource(); + cts.Cancel(); + + var service = new AnthropicChatCompletionService("claude-sonnet-4-20250514", "test-api-key", httpClient: this._httpClient); + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello"); + + // Act & Assert - Should throw OperationCanceledException when token is already cancelled + await Assert.ThrowsAnyAsync(async () => + { + await foreach (var _ in service.GetStreamingChatMessageContentsAsync(chatHistory, cancellationToken: cts.Token)) + { + } + }); + } + + #endregion + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Settings/AnthropicPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Settings/AnthropicPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..527a80bdcff7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Settings/AnthropicPromptExecutionSettingsTests.cs @@ -0,0 +1,608 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Settings; + +/// +/// Unit tests for . 
+/// +public sealed class AnthropicPromptExecutionSettingsTests +{ + #region Default Values Tests + + [Fact] + public void ItCreatesAnthropicExecutionSettingsWithCorrectDefaults() + { + // Arrange & Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(null); + + // Assert + Assert.NotNull(executionSettings); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); + Assert.Null(executionSettings.TopK); + Assert.Null(executionSettings.MaxTokens); + Assert.Null(executionSettings.StopSequences); + Assert.Null(executionSettings.FunctionChoiceBehavior); + } + + [Fact] + public void ItCreatesNewInstanceWithDefaultConstructor() + { + // Arrange & Act + var settings = new AnthropicPromptExecutionSettings(); + + // Assert + Assert.NotNull(settings); + Assert.Null(settings.Temperature); + Assert.Null(settings.TopP); + Assert.Null(settings.TopK); + Assert.Null(settings.MaxTokens); + Assert.Null(settings.StopSequences); + Assert.Null(settings.ModelId); + Assert.Null(settings.ServiceId); + Assert.False(settings.IsFrozen); + } + + #endregion + + #region FromExecutionSettings Tests + + [Fact] + public void ItUsesExistingAnthropicExecutionSettings() + { + // Arrange + AnthropicPromptExecutionSettings actualSettings = new() + { + Temperature = 0.7, + TopP = 0.9, + TopK = 40, + MaxTokens = 1024, + StopSequences = ["stop1", "stop2"] + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Same(actualSettings, executionSettings); + } + + [Fact] + public void ItCreatesAnthropicExecutionSettingsFromExtensionDataSnakeCase() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary + { + { "max_tokens", 2000 }, + { "temperature", 0.5 }, + { "top_p", 0.8 }, + { "top_k", 50 } + } + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(2000, executionSettings.MaxTokens); + Assert.Equal(0.5, executionSettings.Temperature); + Assert.Equal(0.8, executionSettings.TopP); + Assert.Equal(50, executionSettings.TopK); + } + + [Fact] + public void ItCreatesAnthropicExecutionSettingsFromExtensionDataWithStrings() + { + // Arrange - numbers as strings in ExtensionData (tests JsonNumberHandling.AllowReadingFromString through FromExecutionSettings path) + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary + { + { "max_tokens", "2000" }, + { "temperature", "0.5" }, + { "top_p", "0.8" }, + { "top_k", "50" }, + { "stop_sequences", new List { "stop1", "stop2" } } + } + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(2000, executionSettings.MaxTokens); + Assert.Equal(0.5, executionSettings.Temperature); + Assert.Equal(0.8, executionSettings.TopP); + Assert.Equal(50, executionSettings.TopK); + Assert.Equal(["stop1", "stop2"], executionSettings.StopSequences); + } + + [Fact] + public void ItCreatesAnthropicExecutionSettingsFromJsonSnakeCase() + { + // Arrange + string json = """ + { + "temperature": 0.7, + "top_p": 0.9, + "top_k": 40, + "max_tokens": 1024, + "stop_sequences": ["stop1", "stop2"] + } + """; + var 
actualSettings = JsonSerializer.Deserialize(json); + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(0.7, executionSettings.Temperature); + Assert.Equal(0.9, executionSettings.TopP); + Assert.Equal(40, executionSettings.TopK); + Assert.Equal(1024, executionSettings.MaxTokens); + Assert.Equal(["stop1", "stop2"], executionSettings.StopSequences); + } + + [Fact] + public void ItPreservesFunctionChoiceBehaviorFromBaseSettings() + { + // Arrange + var functionChoiceBehavior = FunctionChoiceBehavior.Auto(); + PromptExecutionSettings baseSettings = new() + { + FunctionChoiceBehavior = functionChoiceBehavior + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(baseSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Same(functionChoiceBehavior, executionSettings.FunctionChoiceBehavior); + } + + [Fact] + public void ItPreservesModelIdAndServiceIdFromBaseSettings() + { + // Arrange + PromptExecutionSettings baseSettings = new() + { + ModelId = "claude-sonnet-4-20250514", + ServiceId = "my-anthropic-service" + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(baseSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal("claude-sonnet-4-20250514", executionSettings.ModelId); + Assert.Equal("my-anthropic-service", executionSettings.ServiceId); + } + + [Fact] + public void ItHandlesEmptyExtensionData() + { + // Arrange + PromptExecutionSettings baseSettings = new() + { + ExtensionData = new Dictionary() + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(baseSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.MaxTokens); + } + + #endregion + + #region JSON Deserialization Tests + + [Fact] + public void ItDeserializesFromJsonWithAllProperties() + { + // Arrange + string json = """ + { + "model_id": "claude-sonnet-4-20250514", + "service_id": "my-service", + "temperature": 0.8, + "top_p": 0.95, + "top_k": 50, + "max_tokens": 2048, + "stop_sequences": ["END", "STOP", "DONE"] + } + """; + + // Act + var settings = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(settings); + Assert.Equal("claude-sonnet-4-20250514", settings.ModelId); + Assert.Equal("my-service", settings.ServiceId); + Assert.Equal(0.8, settings.Temperature); + Assert.Equal(0.95, settings.TopP); + Assert.Equal(50, settings.TopK); + Assert.Equal(2048, settings.MaxTokens); + Assert.Equal(3, settings.StopSequences!.Count); + Assert.Contains("END", settings.StopSequences); + Assert.Contains("STOP", settings.StopSequences); + Assert.Contains("DONE", settings.StopSequences); + } + + [Fact] + public void ItDeserializesFromJsonWithPartialProperties() + { + // Arrange + string json = """ + { + "temperature": 0.5, + "max_tokens": 1000 + } + """; + + // Act + var settings = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(settings); + Assert.Equal(0.5, settings.Temperature); + Assert.Equal(1000, settings.MaxTokens); + Assert.Null(settings.TopP); + Assert.Null(settings.TopK); + Assert.Null(settings.StopSequences); + } + + [Fact] + public void ItDeserializesFromJsonWithNumbersAsStrings() + { + // Arrange - 
JsonNumberHandling.AllowReadingFromString should handle this + string json = """ + { + "temperature": "0.7", + "max_tokens": "1024", + "top_k": "40" + } + """; + + // Act + var settings = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(settings); + Assert.Equal(0.7, settings.Temperature); + Assert.Equal(1024, settings.MaxTokens); + Assert.Equal(40, settings.TopK); + } + + [Fact] + public void ItDeserializesEmptyStopSequencesArray() + { + // Arrange + string json = """ + { + "stop_sequences": [] + } + """; + + // Act + var settings = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(settings); + Assert.NotNull(settings.StopSequences); + Assert.Empty(settings.StopSequences); + } + + #endregion + + #region Clone Tests + + [Fact] + public void PromptExecutionSettingsCloneWorksAsExpected() + { + // Arrange + string json = """ + { + "model_id": "claude-sonnet-4-20250514", + "temperature": 0.7, + "top_p": 0.9, + "top_k": 40, + "max_tokens": 1024, + "stop_sequences": ["stop1", "stop2"] + } + """; + var executionSettings = JsonSerializer.Deserialize(json); + + // Act + var clone = executionSettings!.Clone() as AnthropicPromptExecutionSettings; + + // Assert + Assert.NotNull(clone); + Assert.NotSame(executionSettings, clone); + Assert.Equal(executionSettings.ModelId, clone.ModelId); + Assert.Equal(executionSettings.Temperature, clone.Temperature); + Assert.Equal(executionSettings.TopP, clone.TopP); + Assert.Equal(executionSettings.TopK, clone.TopK); + Assert.Equal(executionSettings.MaxTokens, clone.MaxTokens); + Assert.Equivalent(executionSettings.StopSequences, clone.StopSequences); + } + + [Fact] + public void CloneCreatesDeepCopyOfStopSequences() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings + { + StopSequences = ["stop1", "stop2"] + }; + + // Act + var clone = settings.Clone() as AnthropicPromptExecutionSettings; + clone!.StopSequences!.Add("stop3"); + + // Assert + Assert.Equal(2, settings.StopSequences.Count); + Assert.Equal(3, clone.StopSequences.Count); + } + + [Fact] + public void CloneCreatesDeepCopyOfExtensionData() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings + { + ExtensionData = new Dictionary + { + { "custom_key", "custom_value" } + } + }; + + // Act + var clone = settings.Clone() as AnthropicPromptExecutionSettings; + clone!.ExtensionData!["new_key"] = "new_value"; + + // Assert + Assert.Single(settings.ExtensionData); + Assert.Equal(2, clone.ExtensionData.Count); + } + + [Fact] + public void ClonePreservesFunctionChoiceBehavior() + { + // Arrange + var functionChoiceBehavior = FunctionChoiceBehavior.Required(); + var settings = new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = functionChoiceBehavior + }; + + // Act + var clone = settings.Clone() as AnthropicPromptExecutionSettings; + + // Assert + Assert.Same(functionChoiceBehavior, clone!.FunctionChoiceBehavior); + } + + [Fact] + public void CloneOfFrozenSettingsIsNotFrozen() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.7 + }; + settings.Freeze(); + + // Act + var clone = settings.Clone() as AnthropicPromptExecutionSettings; + + // Assert + Assert.True(settings.IsFrozen); + Assert.False(clone!.IsFrozen); + clone.Temperature = 0.5; // Should not throw + Assert.Equal(0.5, clone.Temperature); + } + + #endregion + + #region Freeze Tests + + [Fact] + public void PromptExecutionSettingsFreezeWorksAsExpected() + { + // Arrange + string json = """ + { + "model_id": "claude-sonnet-4-20250514", 
+ "temperature": 0.7, + "top_p": 0.9, + "top_k": 40, + "max_tokens": 1024, + "stop_sequences": ["stop1", "stop2"] + } + """; + var executionSettings = JsonSerializer.Deserialize(json); + + // Act + executionSettings!.Freeze(); + + // Assert + Assert.True(executionSettings.IsFrozen); + Assert.Throws(() => executionSettings.ModelId = "claude-opus-4-20250514"); + Assert.Throws(() => executionSettings.Temperature = 0.5); + Assert.Throws(() => executionSettings.TopP = 0.5); + Assert.Throws(() => executionSettings.TopK = 20); + Assert.Throws(() => executionSettings.MaxTokens = 2048); + Assert.Throws(() => executionSettings.StopSequences!.Add("stop3")); + } + + [Fact] + public void FreezeIsIdempotent() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.7 + }; + + // Act + settings.Freeze(); + settings.Freeze(); // Should not throw + + // Assert + Assert.True(settings.IsFrozen); + } + + [Fact] + public void FreezePreventsFunctionChoiceBehaviorModification() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings + { + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() + }; + settings.Freeze(); + + // Act & Assert + Assert.Throws(() => settings.FunctionChoiceBehavior = FunctionChoiceBehavior.Required()); + } + + [Fact] + public void FreezeWithNullStopSequencesDoesNotThrow() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings + { + Temperature = 0.7, + StopSequences = null + }; + + // Act + settings.Freeze(); + + // Assert + Assert.True(settings.IsFrozen); + Assert.Null(settings.StopSequences); + } + + #endregion + + #region Property Setter Tests + + [Fact] + public void SettingTemperatureWorks() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings(); + + // Act + settings.Temperature = 0.5; + + // Assert + Assert.Equal(0.5, settings.Temperature); + } + + [Fact] + public void SettingTopPWorks() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings(); + + // Act + settings.TopP = 0.9; + + // Assert + Assert.Equal(0.9, settings.TopP); + } + + [Fact] + public void SettingTopKWorks() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings(); + + // Act + settings.TopK = 50; + + // Assert + Assert.Equal(50, settings.TopK); + } + + [Fact] + public void SettingMaxTokensWorks() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings(); + + // Act + settings.MaxTokens = 4096; + + // Assert + Assert.Equal(4096, settings.MaxTokens); + } + + [Fact] + public void SettingStopSequencesWorks() + { + // Arrange + var settings = new AnthropicPromptExecutionSettings(); + + // Act + settings.StopSequences = ["END", "STOP"]; + + // Assert + Assert.Equal(2, settings.StopSequences!.Count); + Assert.Contains("END", settings.StopSequences); + Assert.Contains("STOP", settings.StopSequences); + } + + #endregion + + #region ToChatOptions Tests + + [Fact] + public void ItPropagatesValuesToChatOptions() + { + // Arrange + AnthropicPromptExecutionSettings actualSettings = new() + { + Temperature = 0.7, + TopP = 0.9, + TopK = 40, + MaxTokens = 1024, + StopSequences = ["foo", "bar"] + }; + + // Act + Microsoft.Extensions.AI.ChatOptions? 
actualOptions = actualSettings.ToChatOptions(null); + + // Assert + Assert.NotNull(actualOptions); + Assert.Equal((float)actualSettings.Temperature, (float)actualOptions.Temperature!, 3); + Assert.Equal((float)actualSettings.TopP, (float)actualOptions.TopP!, 3); + Assert.Equal(actualSettings.TopK, actualOptions.TopK); + Assert.Equal(actualSettings.MaxTokens, actualOptions.MaxOutputTokens); + Assert.Equal(actualSettings.StopSequences, actualOptions.StopSequences); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/auto_invoke_tool_call_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/auto_invoke_tool_call_response.json new file mode 100644 index 000000000000..cb5bb51bb566 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/auto_invoke_tool_call_response.json @@ -0,0 +1,27 @@ +{ + "id": "msg_01AutoInvoke", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "text", + "text": "I'll check the weather for you." + }, + { + "type": "tool_use", + "id": "toolu_auto_invoke_test", + "name": "WeatherPlugin_GetWeather", + "input": { + "location": "Seattle, WA" + } + } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "tool_use", + "stop_sequence": null, + "usage": { + "input_tokens": 50, + "output_tokens": 45 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_multiple_tool_calls_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_multiple_tool_calls_response.json new file mode 100644 index 000000000000..406e92abb351 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_multiple_tool_calls_response.json @@ -0,0 +1,31 @@ +{ + "id": "msg_01Bq9w938a90dw8q", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "tool_use", + "id": "toolu_01A09q90qw90lq917835l1", + "name": "GetWeather", + "input": { + "location": "Seattle, WA" + } + }, + { + "type": "tool_use", + "id": "toolu_01A09q90qw90lq917835l2", + "name": "GetWeather", + "input": { + "location": "New York, NY" + } + } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "tool_use", + "stop_sequence": null, + "usage": { + "input_tokens": 60, + "output_tokens": 55 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_response.json new file mode 100644 index 000000000000..40ed54362c34 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_response.json @@ -0,0 +1,19 @@ +{ + "id": "msg_01XFDUDYJgAACzvnptvVoYEL", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "text", + "text": "Hello! I'm doing well, thank you for asking. How can I assist you today?" 
+ } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "end_turn", + "stop_sequence": null, + "usage": { + "input_tokens": 25, + "output_tokens": 20 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_streaming_response.txt b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_streaming_response.txt new file mode 100644 index 000000000000..01e35e433af6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_streaming_response.txt @@ -0,0 +1,43 @@ +event: message_start +data: {"type":"message_start","message":{"id":"msg_01XFDUDYJgAACzvnptvVoYEL","type":"message","role":"assistant","content":[],"model":"claude-sonnet-4-20250514","stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":25,"output_tokens":1}}} + +event: content_block_start +data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"!"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" How"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" can"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" I"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" help"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" you"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":" today"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"?"}} + +event: content_block_stop +data: {"type":"content_block_stop","index":0} + +event: message_delta +data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":12}} + +event: message_stop +data: {"type":"message_stop"} + + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_streaming_tool_call_response.txt b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_streaming_tool_call_response.txt new file mode 100644 index 000000000000..4e78ecb2b0c7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_streaming_tool_call_response.txt @@ -0,0 +1,31 @@ +event: message_start +data: {"type":"message_start","message":{"id":"msg_01Aq9w938a90dw8q","type":"message","role":"assistant","content":[],"model":"claude-sonnet-4-20250514","stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":50,"output_tokens":1}}} + +event: content_block_start +data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"I'll check the weather for you."}} + +event: content_block_stop +data: {"type":"content_block_stop","index":0} + +event: content_block_start +data: 
{"type":"content_block_start","index":1,"content_block":{"type":"tool_use","id":"toolu_01A09q90qw90lq917835lq","name":"GetWeather","input":{}}} + +event: content_block_delta +data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"location\":"}} + +event: content_block_delta +data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"\"Seattle, WA\"}"}} + +event: content_block_stop +data: {"type":"content_block_stop","index":1} + +event: message_delta +data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":45}} + +event: message_stop +data: {"type":"message_stop"} + + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_tool_call_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_tool_call_response.json new file mode 100644 index 000000000000..b6c59834f92a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_completion_tool_call_response.json @@ -0,0 +1,27 @@ +{ + "id": "msg_01Aq9w938a90dw8q", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "text", + "text": "I'll check the weather for you." + }, + { + "type": "tool_use", + "id": "toolu_01A09q90qw90lq917835lq", + "name": "GetWeather", + "input": { + "location": "Seattle, WA" + } + } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "tool_use", + "stop_sequence": null, + "usage": { + "input_tokens": 50, + "output_tokens": 45 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_multiple_function_calls_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_multiple_function_calls_response.json new file mode 100644 index 000000000000..50f6890a1f4b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_multiple_function_calls_response.json @@ -0,0 +1,31 @@ +{ + "id": "msg_01Bq9w938a90dw8q", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "tool_use", + "id": "toolu_01A09q90qw90lq917835l1", + "name": "MyPlugin_Function1", + "input": { + "parameter": "function1-value" + } + }, + { + "type": "tool_use", + "id": "toolu_01A09q90qw90lq917835l2", + "name": "MyPlugin_Function2", + "input": { + "parameter": "function2-value" + } + } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "tool_use", + "stop_sequence": null, + "usage": { + "input_tokens": 60, + "output_tokens": 55 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_single_function_call_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_single_function_call_response.json new file mode 100644 index 000000000000..f8c57e14a403 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_single_function_call_response.json @@ -0,0 +1,23 @@ +{ + "id": "msg_01Aq9w938a90dw8q", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "tool_use", + "id": "toolu_01A09q90qw90lq917835lq", + "name": "MyPlugin_Function1", + "input": { + "parameter": "function1-value" + } + } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "tool_use", + "stop_sequence": null, + "usage": { + "input_tokens": 50, + "output_tokens": 45 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_single_function_call_streaming_response.txt 
b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_single_function_call_streaming_response.txt new file mode 100644 index 000000000000..e6f40c35d056 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_single_function_call_streaming_response.txt @@ -0,0 +1,18 @@ +event: message_start +data: {"type":"message_start","message":{"id":"msg_01Aq9w938a90dw8q","type":"message","role":"assistant","content":[],"model":"claude-sonnet-4-20250514","stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":50,"output_tokens":0}}} + +event: content_block_start +data: {"type":"content_block_start","index":0,"content_block":{"type":"tool_use","id":"toolu_01A09q90qw90lq917835lq","name":"MyPlugin_Function1","input":{}}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":"{\"parameter\":\"function1-value\"}"}} + +event: content_block_stop +data: {"type":"content_block_stop","index":0} + +event: message_delta +data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":45}} + +event: message_stop +data: {"type":"message_stop"} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_streaming_multiple_function_calls_response.txt b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_streaming_multiple_function_calls_response.txt new file mode 100644 index 000000000000..9521c85f5c95 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/filters_streaming_multiple_function_calls_response.txt @@ -0,0 +1,27 @@ +event: message_start +data: {"type":"message_start","message":{"id":"msg_01Bq9w938a90dw8q","type":"message","role":"assistant","content":[],"model":"claude-sonnet-4-20250514","stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":60,"output_tokens":0}}} + +event: content_block_start +data: {"type":"content_block_start","index":0,"content_block":{"type":"tool_use","id":"toolu_01A09q90qw90lq917835l1","name":"MyPlugin_Function1","input":{}}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":"{\"parameter\":\"function1-value\"}"}} + +event: content_block_stop +data: {"type":"content_block_stop","index":0} + +event: content_block_start +data: {"type":"content_block_start","index":1,"content_block":{"type":"tool_use","id":"toolu_01A09q90qw90lq917835l2","name":"MyPlugin_Function2","input":{}}} + +event: content_block_delta +data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"parameter\":\"function2-value\"}"}} + +event: content_block_stop +data: {"type":"content_block_stop","index":1} + +event: message_delta +data: {"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"output_tokens":55}} + +event: message_stop +data: {"type":"message_stop"} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/final_response_after_tool_call.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/final_response_after_tool_call.json new file mode 100644 index 000000000000..f682c268f067 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/final_response_after_tool_call.json @@ -0,0 +1,19 @@ +{ + "id": "msg_01FinalResponse", + "type": "message", + "role": "assistant", + "content": [ + { + "type": "text", + "text": "The weather in Seattle is sunny 
and 72°F." + } + ], + "model": "claude-sonnet-4-20250514", + "stop_reason": "end_turn", + "stop_sequence": null, + "usage": { + "input_tokens": 150, + "output_tokens": 30 + } +} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/final_streaming_response_after_tool_call.txt b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/final_streaming_response_after_tool_call.txt new file mode 100644 index 000000000000..c9beef530bf8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/final_streaming_response_after_tool_call.txt @@ -0,0 +1,22 @@ +event: message_start +data: {"type":"message_start","message":{"id":"msg_01FinalResponse","type":"message","role":"assistant","content":[],"model":"claude-sonnet-4-20250514","stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":150,"output_tokens":1}}} + +event: content_block_start +data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Based on the weather data, "}} + +event: content_block_delta +data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"it's currently sunny and 72°F in Seattle."}} + +event: content_block_stop +data: {"type":"content_block_stop","index":0} + +event: message_delta +data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":30}} + +event: message_stop +data: {"type":"message_stop"} + + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/test_serialization.csx b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/test_serialization.csx new file mode 100644 index 000000000000..a615529c026d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/test_serialization.csx @@ -0,0 +1,7 @@ +using System; +using System.Text.Json; +using Microsoft.SemanticKernel.Connectors.Anthropic; + +var settings = new AnthropicPromptExecutionSettings { Temperature = 0.7 }; +var json = JsonSerializer.Serialize(settings); +Console.WriteLine(\"JSON: \" + json); diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Connectors.Anthropic.csproj b/dotnet/src/Connectors/Connectors.Anthropic/Connectors.Anthropic.csproj new file mode 100644 index 000000000000..d78d18f67935 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Connectors.Anthropic.csproj @@ -0,0 +1,38 @@ + + + + + Microsoft.SemanticKernel.Connectors.Anthropic + $(AssemblyName) + net10.0;net8.0;netstandard2.0 + $(NoWarn);SKEXP0001 + true + + true + + + + + + + + + + Semantic Kernel - Anthropic Claude Connector + Semantic Kernel connector for Anthropic Claude. Contains chat completion service with function calling support. + + + + + + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicPipelineHelpers.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicPipelineHelpers.cs new file mode 100644 index 000000000000..20f152a7459c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicPipelineHelpers.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using Anthropic; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Logging; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Internal helper methods for building Anthropic M.E.AI chat client pipelines. 
+/// Provides consistent pipeline configuration across DI extensions and service classes. +/// +internal static class AnthropicPipelineHelpers +{ + /// + /// Default OpenTelemetry source name for Anthropic connector telemetry. + /// + internal const string DefaultOpenTelemetrySourceName = "Microsoft.SemanticKernel.Connectors.Anthropic"; + + /// + /// Builds the M.E.AI chat client pipeline with SK integration and Anthropic-specific middleware. + /// + /// The Anthropic SDK client. + /// The model identifier. + /// The logger factory (can be null for no logging). + /// Optional custom OpenTelemetry source name. + /// Optional OpenTelemetry configuration callback. + /// The configured IChatClient pipeline. + /// + /// + /// The pipeline includes: + /// + /// Temperature/TopP mutual exclusion middleware (Anthropic API requirement) + /// SK function invocation filter integration via UseKernelFunctionInvocation() + /// OpenTelemetry instrumentation via UseOpenTelemetry() + /// Logging via UseLogging() (when loggerFactory is provided) + /// + /// + /// + internal static IChatClient BuildChatClientPipeline( + AnthropicClient anthropicClient, + string modelId, + ILoggerFactory? loggerFactory, + string? openTelemetrySourceName = null, + Action? openTelemetryConfig = null) + { + var logger = loggerFactory?.CreateLogger(typeof(AnthropicPipelineHelpers)); + + var builder = anthropicClient + .AsIChatClient(modelId) + .AsBuilder() + // Anthropic API does not allow both temperature and top_p to be set simultaneously. + // If both are set, clear top_p since temperature is typically the more commonly specified option. + // Note: The Use(sharedFunc) overload returns Task (not Task). The response is + // captured internally by AnonymousDelegatingChatClient when next() invokes the inner client. + .Use(async (messages, options, next, cancellationToken) => + { + if (options?.Temperature is not null && options.TopP is not null) + { + logger?.LogWarning( + "Anthropic API does not support both Temperature and TopP simultaneously. " + + "TopP value ({TopP}) will be ignored; Temperature ({Temperature}) will be used.", + options.TopP, + options.Temperature); + + options = options.Clone(); + options.TopP = null; + } + + await next(messages, options, cancellationToken).ConfigureAwait(false); + }) + .UseKernelFunctionInvocation(loggerFactory) // SK Filter-Integration for IAutoFunctionInvocationFilter + .UseOpenTelemetry(loggerFactory, openTelemetrySourceName ?? DefaultOpenTelemetrySourceName, openTelemetryConfig); + + if (loggerFactory is not null) + { + builder.UseLogging(loggerFactory); + } + + return builder.Build(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicKernelBuilderExtensions.cs new file mode 100644 index 000000000000..5390102201af --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicKernelBuilderExtensions.cs @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Anthropic; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.Connectors.Anthropic; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods for . +/// +[Experimental("SKEXP0001")] +public static class AnthropicKernelBuilderExtensions +{ + /// + /// Adds Anthropic chat completion service to the kernel builder. 
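+ /// Illustrative usage (apiKey is assumed to hold a valid Anthropic key):
+ /// var kernel = Kernel.CreateBuilder()
+ ///     .AddAnthropicChatCompletion("claude-sonnet-4-20250514", apiKey)
+ ///     .Build();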
+ /// + /// The kernel builder to add the service to. + /// The Anthropic model ID (e.g., claude-sonnet-4-20250514). + /// The API key for authentication. + /// The base URL for the API endpoint. Defaults to https://api.anthropic.com. + /// Optional service identifier for keyed registration. + /// The HttpClient to use with this service. + /// The kernel builder for chaining. + /// + /// Anthropic-specific options are configured via . + /// + public static IKernelBuilder AddAnthropicChatCompletion( + this IKernelBuilder builder, + string modelId, + string apiKey, + Uri? baseUrl = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + + builder.Services.AddAnthropicChatCompletion(modelId, apiKey, baseUrl, serviceId, httpClient); + return builder; + } + + /// + /// Adds Anthropic chat completion service to the kernel builder using an existing AnthropicClient. + /// + /// The kernel builder to add the service to. + /// The Anthropic model ID (e.g., claude-sonnet-4-20250514). + /// Pre-configured . If null, will be resolved from the service provider. + /// Optional service identifier for keyed registration. + /// The kernel builder for chaining. + /// + /// Anthropic-specific options are configured via . + /// + public static IKernelBuilder AddAnthropicChatCompletion( + this IKernelBuilder builder, + string modelId, + AnthropicClient? anthropicClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddAnthropicChatCompletion(modelId, anthropicClient, serviceId); + return builder; + } + + /// + /// Adds the Anthropic chat client to the kernel builder. + /// + /// The kernel builder to add the service to. + /// Anthropic model name (e.g., claude-sonnet-4-20250514). + /// Anthropic API key. + /// Base URL for the API endpoint. Defaults to https://api.anthropic.com. + /// A local identifier for the given AI service. + /// The HttpClient to use with this service. + /// An optional name for the OpenTelemetry source. + /// An optional callback that can be used to configure the instance. + /// The kernel builder for chaining. + /// + /// Anthropic-specific options are configured via . + /// + public static IKernelBuilder AddAnthropicChatClient( + this IKernelBuilder builder, + string modelId, + string apiKey, + Uri? baseUrl = null, + string? serviceId = null, + HttpClient? httpClient = null, + string? openTelemetrySourceName = null, + Action? openTelemetryConfig = null) + { + Verify.NotNull(builder); + + builder.Services.AddAnthropicChatClient(modelId, apiKey, baseUrl, serviceId, httpClient, openTelemetrySourceName, openTelemetryConfig); + return builder; + } + + /// + /// Adds the Anthropic chat client to the kernel builder using an existing AnthropicClient. + /// + /// The kernel builder to add the service to. + /// Anthropic model name (e.g., claude-sonnet-4-20250514). + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service. + /// An optional name for the OpenTelemetry source. + /// An optional callback that can be used to configure the instance. + /// The kernel builder for chaining. + /// + /// Anthropic-specific options are configured via . + /// + public static IKernelBuilder AddAnthropicChatClient( + this IKernelBuilder builder, + string modelId, + AnthropicClient? anthropicClient = null, + string? serviceId = null, + string? openTelemetrySourceName = null, + Action? 
openTelemetryConfig = null) + { + Verify.NotNull(builder); + + builder.Services.AddAnthropicChatClient(modelId, anthropicClient, serviceId, openTelemetrySourceName, openTelemetryConfig); + return builder; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicServiceCollectionExtensions.DependencyInjection.cs b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicServiceCollectionExtensions.DependencyInjection.cs new file mode 100644 index 000000000000..d0aef9d880f9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicServiceCollectionExtensions.DependencyInjection.cs @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using Anthropic; +using Anthropic.Core; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.Extensions.DependencyInjection; + +/// +/// Extension methods for registering Anthropic services in . +/// +[Experimental("SKEXP0001")] +public static class AnthropicServiceCollectionExtensions +{ + #region IChatClient Extensions (M.E.AI) + + /// + /// Adds the Anthropic chat client to the service collection. + /// + /// The instance to augment. + /// Anthropic model name (e.g., claude-sonnet-4-20250514). + /// Anthropic API key. + /// Base URL for the API endpoint. Defaults to https://api.anthropic.com. + /// A local identifier for the given AI service. + /// + /// The HttpClient to use with this service. + /// If not provided, one is resolved from the service provider or created with default 100-second timeout. + /// + /// An optional name for the OpenTelemetry source. + /// An optional callback that can be used to configure the instance. + /// The same instance as . + /// + /// + /// Retry and Timeout Policy: This method follows the Semantic Kernel pattern of delegating + /// retry and timeout handling to the layer rather than the SDK layer. + /// This prevents conflicting retry/timeout behavior when both layers attempt to handle failures. + /// + /// + /// Configure your (via IHttpClientFactory) with appropriate timeout and retry policies. + /// + /// + public static IServiceCollection AddAnthropicChatClient( + this IServiceCollection services, + string modelId, + string apiKey, + Uri? baseUrl = null, + string? serviceId = null, + HttpClient? httpClient = null, + string? openTelemetrySourceName = null, + Action? openTelemetryConfig = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + IChatClient Factory(IServiceProvider serviceProvider, object? _) + { + var loggerFactory = serviceProvider.GetService(); + + // Retry and timeout are intentionally disabled at SDK level. + // The HttpClient layer handles these concerns (see method remarks). 
+ // - Default HttpClient has 100-second timeout + // - Use IHttpClientFactory + Polly for retry policies + var clientOptions = new ClientOptions + { + APIKey = apiKey, + MaxRetries = 0, // Disabled: HttpClient/Polly handles retries + Timeout = Timeout.InfiniteTimeSpan // Disabled: HttpClient.Timeout applies + }; + + if (baseUrl is not null) + { + clientOptions.BaseUrl = baseUrl; + } + + clientOptions.HttpClient = HttpClientProvider.GetHttpClient(httpClient, serviceProvider); + + var anthropicClient = new AnthropicClient(clientOptions); + + // Use shared pipeline helper for consistent behavior across Service and DI + return AnthropicPipelineHelpers.BuildChatClientPipeline( + anthropicClient, + modelId, + loggerFactory, + openTelemetrySourceName, + openTelemetryConfig); + } + + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the Anthropic chat client to the service collection using an existing AnthropicClient. + /// + /// The instance to augment. + /// Anthropic model name (e.g., claude-sonnet-4-20250514). + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service. + /// An optional name for the OpenTelemetry source. + /// An optional callback that can be used to configure the instance. + /// The same instance as . + /// + /// Retry and timeout policies are determined by the configuration and its + /// underlying . Anthropic-specific options are supplied via + /// when invoking chat or text operations. + /// + public static IServiceCollection AddAnthropicChatClient( + this IServiceCollection services, + string modelId, + AnthropicClient? anthropicClient = null, + string? serviceId = null, + string? openTelemetrySourceName = null, + Action? openTelemetryConfig = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + IChatClient Factory(IServiceProvider serviceProvider, object? _) + { + var loggerFactory = serviceProvider.GetService(); + var client = anthropicClient ?? serviceProvider.GetRequiredService(); + + // Use shared pipeline helper for consistent behavior across Service and DI + return AnthropicPipelineHelpers.BuildChatClientPipeline( + client, + modelId, + loggerFactory, + openTelemetrySourceName, + openTelemetryConfig); + } + + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + #endregion + + #region IChatCompletionService Extensions (SK Legacy) + + /// + /// Adds Anthropic chat completion service to the service collection. + /// + /// The service collection to add the service to. + /// The Anthropic model ID (e.g., claude-sonnet-4-20250514). + /// The API key for authentication. + /// The base URL for the API endpoint. Defaults to https://api.anthropic.com. + /// Optional service identifier for keyed registration. + /// The HttpClient to use with this service. + /// The service collection for chaining. + public static IServiceCollection AddAnthropicChatCompletion( + this IServiceCollection services, + string modelId, + string apiKey, + Uri? baseUrl = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + // Register the concrete service as a keyed singleton, then alias the interfaces to it. + // This ensures a single instance is shared across IChatCompletionService and ITextGenerationService. 
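+ // Consumers can then resolve the same instance through either interface, e.g. (illustrative):
+ //   serviceProvider.GetRequiredKeyedService<IChatCompletionService>(serviceId);
+ //   serviceProvider.GetRequiredKeyedService<ITextGenerationService>(serviceId);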
+ services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AnthropicChatCompletionService( + modelId, + apiKey, + baseUrl, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + services.AddKeyedSingleton(serviceId, + (serviceProvider, key) => serviceProvider.GetRequiredKeyedService(key)); + services.AddKeyedSingleton(serviceId, + (serviceProvider, key) => serviceProvider.GetRequiredKeyedService(key)); + + return services; + } + + /// + /// Adds Anthropic chat completion service to the service collection using an existing AnthropicClient. + /// + /// The service collection to add the service to. + /// The Anthropic model ID (e.g., claude-sonnet-4-20250514). + /// Pre-configured . If null, will be resolved from the service provider. + /// Optional service identifier for keyed registration. + /// The service collection for chaining. + public static IServiceCollection AddAnthropicChatCompletion( + this IServiceCollection services, + string modelId, + AnthropicClient? anthropicClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + // Register the concrete service as a keyed singleton, then alias the interfaces to it. + // This ensures a single instance is shared across IChatCompletionService and ITextGenerationService. + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AnthropicChatCompletionService( + modelId, + anthropicClient ?? serviceProvider.GetRequiredService(), + serviceProvider.GetService())); + + services.AddKeyedSingleton(serviceId, + (serviceProvider, key) => serviceProvider.GetRequiredKeyedService(key)); + services.AddKeyedSingleton(serviceId, + (serviceProvider, key) => serviceProvider.GetRequiredKeyedService(key)); + + return services; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Services/AnthropicChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Anthropic/Services/AnthropicChatCompletionService.cs new file mode 100644 index 000000000000..8b14278c4d71 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Services/AnthropicChatCompletionService.cs @@ -0,0 +1,309 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Anthropic; +using Anthropic.Core; +using Microsoft.Extensions.AI; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Anthropic chat completion service using Microsoft.Extensions.AI (M.E.AI) architecture. +/// +/// +/// +/// Leverages the Anthropic SDK's native implementation for all LLM communication. +/// All Anthropic-specific features (Extended Thinking, PDFs, Citations, etc.) +/// are handled by the SDK's implementation. +/// +/// +/// To configure Anthropic-specific generation options (temperature, max tokens, stop sequences, etc.), +/// use as the argument. 
+/// +/// +[Experimental("SKEXP0001")] +public sealed class AnthropicChatCompletionService : IChatCompletionService, ITextGenerationService, IDisposable +{ + // Implementation notes (M.E.AI pattern): + // - Uses SDK's native AsIChatClient() for M.E.AI integration + // - Uses UseKernelFunctionInvocation() for SK filter pipeline (IAutoFunctionInvocationFilter) + // - Uses UseOpenTelemetry() for standardized telemetry + // - Uses AsChatCompletionService() for SK integration + + #region Private Fields + + /// The M.E.AI chat client for direct access. + private readonly IChatClient _chatClient; + + /// The SK wrapper for IChatCompletionService. + private readonly IChatCompletionService _innerService; + + /// Storage for AI service attributes. + private readonly Dictionary _attributes = new(); + + /// Logger instance. + private readonly ILogger _logger; + + /// Default base URL for the Anthropic API. + private static readonly Uri s_defaultBaseUrl = new("https://api.anthropic.com"); + + /// Disposed flag. + private bool _disposed; + + #endregion + + #region Private Methods + + /// + /// Throws if the service has been disposed. + /// + private void ThrowIfDisposed() + { +#if NET8_0_OR_GREATER + ObjectDisposedException.ThrowIf(this._disposed, this); +#else + if (this._disposed) + { + throw new ObjectDisposedException(nameof(AnthropicChatCompletionService)); + } +#endif + } + + #endregion + + #region Constructors + + /// + /// Create an instance of the Anthropic chat completion connector. + /// + /// Model name (e.g., claude-sonnet-4-20250514). + /// API Key for authentication. + /// Base URL for the API endpoint. Defaults to https://api.anthropic.com. + /// + /// Custom for HTTP requests. + /// If not provided, a new HttpClient is created with the default 100-second timeout. + /// + /// The to use for logging. If null, no logging will be performed. + /// + /// + /// Retry and Timeout Policy: This connector follows the Semantic Kernel pattern of delegating + /// retry and timeout handling to the layer rather than the SDK layer. + /// This prevents conflicting retry/timeout behavior when both layers attempt to handle failures. + /// + /// + /// When providing a custom , ensure it is configured with appropriate: + /// + /// Timeout: Set (default is 100 seconds) + /// Retry policy: Use IHttpClientFactory with Polly for transient failure handling + /// + /// + /// + public AnthropicChatCompletionService( + string modelId, + string apiKey, + Uri? baseUrl = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + loggerFactory ??= NullLoggerFactory.Instance; + this._logger = loggerFactory.CreateLogger(); + + // 1. Build ClientOptions for Anthropic SDK + // Note: Retry and timeout are intentionally disabled at SDK level. + // The HttpClient layer handles these concerns (see constructor remarks). + // - Default HttpClient has 100-second timeout + // - Use IHttpClientFactory + Polly for retry policies + var clientOptions = new ClientOptions + { + APIKey = apiKey, + BaseUrl = baseUrl ?? s_defaultBaseUrl, + MaxRetries = 0, // Disabled: HttpClient/Polly handles retries + Timeout = Timeout.InfiniteTimeSpan // Disabled: HttpClient.Timeout applies + }; + + // Only set HttpClient if provided; otherwise SDK creates its own (SK pattern) + if (httpClient is not null) + { + clientOptions.HttpClient = httpClient; + } + + // 2. 
Create Anthropic SDK Client + var anthropicClient = new AnthropicClient(clientOptions); + + // 3. Build M.E.AI Pipeline (using shared helper for consistent pipeline across Service and DI) + this._chatClient = AnthropicPipelineHelpers.BuildChatClientPipeline(anthropicClient, modelId, loggerFactory); + + // 4. SK Wrapper + this._innerService = this._chatClient.AsChatCompletionService(); + + // 5. Attributes + this._attributes[AIServiceExtensions.ModelIdKey] = modelId; + this._attributes[AIServiceExtensions.EndpointKey] = (baseUrl ?? s_defaultBaseUrl).ToString(); + + this._logger.LogDebug( + "AnthropicChatCompletionService created: ModelId={ModelId}, BaseUrl={BaseUrl}", + modelId, baseUrl ?? s_defaultBaseUrl); + } + + /// + /// Create an instance of the Anthropic chat completion connector using an existing AnthropicClient. + /// + /// Model name (e.g., claude-sonnet-4-20250514). + /// Pre-configured . + /// The to use for logging. If null, no logging will be performed. + /// + /// Use this constructor when you need full control over the AnthropicClient configuration. + /// Note: HttpClient injection and retry settings are the responsibility of the caller. + /// + public AnthropicChatCompletionService( + string modelId, + AnthropicClient anthropicClient, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNull(anthropicClient); + + loggerFactory ??= NullLoggerFactory.Instance; + this._logger = loggerFactory.CreateLogger(); + + // Build M.E.AI Pipeline from existing client (using shared helper for consistent pipeline across Service and DI) + this._chatClient = AnthropicPipelineHelpers.BuildChatClientPipeline(anthropicClient, modelId, loggerFactory); + + // SK Wrapper + this._innerService = this._chatClient.AsChatCompletionService(); + + // Attributes - use the actual BaseUrl from the client + this._attributes[AIServiceExtensions.ModelIdKey] = modelId; + this._attributes[AIServiceExtensions.EndpointKey] = anthropicClient.BaseUrl.ToString(); + + this._logger.LogDebug( + "AnthropicChatCompletionService created with existing client: ModelId={ModelId}, BaseUrl={BaseUrl}", + modelId, anthropicClient.BaseUrl); + } + + #endregion + + #region IChatCompletionService Implementation + + /// + public IReadOnlyDictionary Attributes => this._attributes; + + /// + public Task> GetChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + this.ThrowIfDisposed(); + this._logger.LogDebug("GetChatMessageContentsAsync called with {MessageCount} messages", chatHistory.Count); + return this._innerService.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + } + + /// + public IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + this.ThrowIfDisposed(); + this._logger.LogDebug("GetStreamingChatMessageContentsAsync called with {MessageCount} messages", chatHistory.Count); + return this._innerService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + } + + #endregion + + #region ITextGenerationService Implementation + + /// + public async Task> GetTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? 
kernel = null, + CancellationToken cancellationToken = default) + { + this.ThrowIfDisposed(); + + // Delegate to chat completion (same pattern as OpenAI/Gemini connectors) + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage(prompt); + + var results = await this.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken) + .ConfigureAwait(false); + + return results + .Select(m => new TextContent(m.Content, m.ModelId, m.InnerContent, Encoding.UTF8, m.Metadata)) + .ToList(); + } + + /// + public async IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.ThrowIfDisposed(); + + // Delegate to chat completion (same pattern as OpenAI/Gemini connectors) + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage(prompt); + + await foreach (var chunk in this.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken) + .ConfigureAwait(false)) + { + yield return new StreamingTextContent( + chunk.Content, + chunk.ChoiceIndex, + chunk.ModelId, + chunk.InnerContent, + Encoding.UTF8, + chunk.Metadata); + } + } + + #endregion + + #region IDisposable + + /// + /// Disposes the service and releases the underlying resources. + /// + /// + /// Disposes the M.E.AI chat client pipeline. Note: The underlying HttpClient is NOT disposed + /// by this service or the Anthropic SDK. When no HttpClient is provided, the SDK creates a + /// long-lived instance. When an HttpClient is injected, its lifetime is the caller's responsibility. + /// This is the recommended pattern for HttpClient (long-lived instances avoid socket exhaustion). + /// + public void Dispose() + { + if (this._disposed) + { + return; + } + + this._disposed = true; + + // Dispose the M.E.AI chat client pipeline + this._chatClient.Dispose(); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Settings/AnthropicPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Anthropic/Settings/AnthropicPromptExecutionSettings.cs new file mode 100644 index 000000000000..a17e36c9298c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Settings/AnthropicPromptExecutionSettings.cs @@ -0,0 +1,231 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Anthropic prompt execution settings. +/// Extends the base PromptExecutionSettings with Anthropic-specific options. +/// +/// +/// FunctionChoiceBehavior mapping to Anthropic tool_choice: +/// +/// : Model decides whether to call functions (maps to tool_choice=auto) +/// : Model must call a function (maps to tool_choice=any) +/// : Tools are sent but model is instructed not to call any (maps to tool_choice=none). +/// This matches OpenAI semantics where the model is aware of available functions but will not invoke them. +/// +/// +[Experimental("SKEXP0001")] +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class AnthropicPromptExecutionSettings : PromptExecutionSettings +{ + /// + /// Temperature controls randomness in the response. 
+ /// Range: 0.0 to 1.0. Defaults to 1.0. Higher values make output more random. + /// Use lower values for analytical/multiple choice, higher for creative tasks. + /// + [JsonPropertyName("temperature")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? Temperature + { + get => this._temperature; + set + { + this.ThrowIfFrozen(); + this._temperature = value; + } + } + + /// + /// Maximum number of tokens to generate in the response. + /// Anthropic requires this parameter. The connector applies a default of 32000 if not specified. + /// + [JsonPropertyName("max_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxTokens + { + get => this._maxTokens; + set + { + this.ThrowIfFrozen(); + this._maxTokens = value; + } + } + + /// + /// Top-p sampling parameter. Alternative to temperature. + /// Range: 0.0 to 1.0. Lower values make output more focused. + /// + [JsonPropertyName("top_p")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? TopP + { + get => this._topP; + set + { + this.ThrowIfFrozen(); + this._topP = value; + } + } + + /// + /// Top-K sampling parameter. Only sample from the top K options for each token. + /// + [JsonPropertyName("top_k")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TopK + { + get => this._topK; + set + { + this.ThrowIfFrozen(); + this._topK = value; + } + } + + /// + /// Custom stop sequences where the API will stop generating further tokens. + /// + [JsonPropertyName("stop_sequences")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IList? StopSequences + { + get => this._stopSequences; + set + { + this.ThrowIfFrozen(); + this._stopSequences = value; + } + } + + /// + /// The system prompt to use when generating text using a chat model. + /// When set, this prompt is automatically inserted at the beginning of the chat history + /// if no system message is already present. + /// + [JsonPropertyName("chat_system_prompt")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ChatSystemPrompt + { + get => this._chatSystemPrompt; + set + { + this.ThrowIfFrozen(); + this._chatSystemPrompt = value; + } + } + + // Note: FunctionChoiceBehavior is inherited from PromptExecutionSettings. + // We do NOT shadow it with 'new' because that breaks polymorphism - + // when ToChatOptions() reads settings.FunctionChoiceBehavior via a base class reference, + // it would get null instead of the actual value. + + // Private backing fields + private double? _temperature; + private int? _maxTokens; + private double? _topP; + private int? _topK; + private IList? _stopSequences; + private string? _chatSystemPrompt; + + /// + /// Initializes a new instance of the class. + /// + public AnthropicPromptExecutionSettings() + { + } + + /// + public override void Freeze() + { + if (this.IsFrozen) + { + return; + } + + base.Freeze(); + + if (this._stopSequences is not null) + { + this._stopSequences = new ReadOnlyCollection(this._stopSequences is List list ? list : new List(this._stopSequences)); + } + } + + /// + public override PromptExecutionSettings Clone() + { + return new AnthropicPromptExecutionSettings + { + ModelId = this.ModelId, + ServiceId = this.ServiceId, + ExtensionData = this.ExtensionData is not null ? 
new Dictionary(this.ExtensionData) : null, + Temperature = this._temperature, + MaxTokens = this._maxTokens, + TopP = this._topP, + TopK = this._topK, + StopSequences = this._stopSequences is not null ? new List(this._stopSequences) : null, + ChatSystemPrompt = this._chatSystemPrompt, + FunctionChoiceBehavior = this.FunctionChoiceBehavior + }; + } + + /// + protected override ChatHistory PrepareChatHistoryForRequest(ChatHistory chatHistory) + { + // Insert system prompt at the beginning of the chat history if set and not already present. + if (!string.IsNullOrWhiteSpace(this.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) + { + chatHistory.Insert(0, new ChatMessageContent(AuthorRole.System, this.ChatSystemPrompt)); + } + + return chatHistory; + } + + /// + /// Creates a new instance of from an existing . + /// + /// + /// This method uses JSON serialization to convert settings from other providers (e.g., OpenAI) to Anthropic settings. + /// Properties with matching JSON names (temperature, max_tokens, top_p, top_k, stop_sequences) are automatically mapped. + /// FunctionChoiceBehavior is explicitly preserved as it cannot be serialized. + /// + /// The existing execution settings to convert. + /// Default max tokens to use when not specified in settings. Anthropic requires this parameter. + /// An instance. + public static AnthropicPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + { + switch (executionSettings) + { + case null: + return new AnthropicPromptExecutionSettings + { + MaxTokens = defaultMaxTokens + }; + case AnthropicPromptExecutionSettings settings: + return settings; + } + + // Use JSON serialization to convert from other settings types (e.g., OpenAIPromptExecutionSettings). + // This automatically maps properties with matching JSON names. + // Important: Serialize as object to ensure derived type properties are included, not just base class. + var json = JsonSerializer.Serialize(executionSettings); + var anthropicSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive)!; + + // Apply default max tokens if not set in source settings + anthropicSettings.MaxTokens ??= defaultMaxTokens; + + // Restore FunctionChoiceBehavior that loses internal state during serialization/deserialization process. 
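+        // For example, FunctionChoiceBehavior.Auto() configured with specific functions would lose that
+        // function list in the JSON round-trip above, so the original instance is carried over by reference.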
+ anthropicSettings.FunctionChoiceBehavior = executionSettings.FunctionChoiceBehavior; + + return anthropicSettings; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettingsExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettingsExtensions.cs index 55213682d173..57340b919e54 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettingsExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettingsExtensions.cs @@ -164,12 +164,12 @@ public static class PromptExecutionSettingsExtensions options.AllowMultipleToolCalls = autoChoiceBehavior.Options?.AllowParallelCalls; } else - if (settings.FunctionChoiceBehavior is NoneFunctionChoiceBehavior noneFunctionChoiceBehavior) + if (settings.FunctionChoiceBehavior is NoneFunctionChoiceBehavior noneFunctionChoiceBehavior) { options.ToolMode = ChatToolMode.None; } else - if (settings.FunctionChoiceBehavior is RequiredFunctionChoiceBehavior requiredFunctionChoiceBehavior) + if (settings.FunctionChoiceBehavior is RequiredFunctionChoiceBehavior requiredFunctionChoiceBehavior) { options.ToolMode = ChatToolMode.RequireAny; options.AllowMultipleToolCalls = requiredFunctionChoiceBehavior.Options?.AllowParallelCalls;
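A minimal end-to-end sketch of how the pieces added in this PR fit together, assuming only the registrations shown above; the model name, environment variable, and prompt are placeholders, not part of the change itself.

using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Anthropic;

var builder = Kernel.CreateBuilder();

// Either registration path added in this PR works; the keyed DI route is shown here.
builder.Services.AddAnthropicChatCompletion(
    modelId: "claude-sonnet-4-20250514",                                  // placeholder model name
    apiKey: Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY")!);    // placeholder key source

var kernel = builder.Build();

// Anthropic-specific knobs travel via AnthropicPromptExecutionSettings.
var settings = new AnthropicPromptExecutionSettings
{
    Temperature = 0.7,
    MaxTokens = 1024,                                  // Anthropic requires max_tokens; the connector applies a default otherwise
    FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
};

var chat = kernel.GetRequiredService<IChatCompletionService>();
var history = new ChatHistory();
history.AddUserMessage("Summarize the retry and timeout policy of this connector in one sentence.");

var reply = await chat.GetChatMessageContentsAsync(history, settings, kernel);
Console.WriteLine(reply[0].Content);

// Settings from another provider can also be converted: properties with matching JSON names are mapped
// and FunctionChoiceBehavior is preserved explicitly, as shown in FromExecutionSettings above.
var converted = AnthropicPromptExecutionSettings.FromExecutionSettings(settings, defaultMaxTokens: 32000);

Because retries and timeouts are disabled at the SDK level throughout these registrations, any production use of this sketch would pair it with an IHttpClientFactory/Polly-configured HttpClient, as the remarks in the new extension methods recommend.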