diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index f92f0595a9c9..92ac92591101 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -321,6 +321,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Anthropic", "src\Connectors\Connectors.Anthropic\Connectors.Anthropic.csproj", "{A77031AC-5A71-4061-9451-923D3A5541E4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.Anthropic.UnitTests", "src\Connectors\Connectors.Anthropic.UnitTests\Connectors.Anthropic.UnitTests.csproj", "{3186E348-3558-42E6-B1DE-D24B816F46C5}" +EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Ollama", "src\Connectors\Connectors.Ollama\Connectors.Ollama.csproj", "{E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureCosmosDBMongoDB.UnitTests", "src\Connectors\Connectors.AzureCosmosDBMongoDB.UnitTests\Connectors.AzureCosmosDBMongoDB.UnitTests.csproj", "{2918478E-BC86-4D53-9D01-9C318F80C14F}" @@ -859,6 +863,18 @@ Global {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Publish|Any CPU.Build.0 = Debug|Any CPU {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Release|Any CPU.ActiveCfg = Release|Any CPU {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Release|Any CPU.Build.0 = Release|Any CPU + {A77031AC-5A71-4061-9451-923D3A5541E4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A77031AC-5A71-4061-9451-923D3A5541E4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A77031AC-5A71-4061-9451-923D3A5541E4}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {A77031AC-5A71-4061-9451-923D3A5541E4}.Publish|Any CPU.Build.0 = Debug|Any CPU + {A77031AC-5A71-4061-9451-923D3A5541E4}.Release|Any CPU.ActiveCfg 
= Release|Any CPU + {A77031AC-5A71-4061-9451-923D3A5541E4}.Release|Any CPU.Build.0 = Release|Any CPU + {3186E348-3558-42E6-B1DE-D24B816F46C5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3186E348-3558-42E6-B1DE-D24B816F46C5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3186E348-3558-42E6-B1DE-D24B816F46C5}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {3186E348-3558-42E6-B1DE-D24B816F46C5}.Publish|Any CPU.Build.0 = Debug|Any CPU + {3186E348-3558-42E6-B1DE-D24B816F46C5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3186E348-3558-42E6-B1DE-D24B816F46C5}.Release|Any CPU.Build.0 = Release|Any CPU {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Debug|Any CPU.Build.0 = Debug|Any CPU {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU @@ -1151,6 +1167,10 @@ Global {1D3EEB5B-0E06-4700-80D5-164956E43D0A} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {F312FCE1-12D7-4DEF-BC29-2FF6618509F3} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} {B0B3901E-AF56-432B-8FAA-858468E5D0DF} = {24503383-A8C4-4255-9998-28D70FE8E99A} + {1D4667B9-9381-4E32-895F-123B94253EE8} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {A77031AC-5A71-4061-9451-923D3A5541E4} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {3186E348-3558-42E6-B1DE-D24B816F46C5} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} {1D4667B9-9381-4E32-895F-123B94253EE8} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {5A7028A7-4DDF-4E4F-84A9-37CE8F8D7E89} {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} diff --git a/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletion.cs new file mode 100644 index 000000000000..c3bf9a0a19d8 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletion.cs @@ -0,0 +1,68 @@ +// Copyright (c) 
Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace ChatCompletion; + +public sealed class Anthropic_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task SampleAsync() + { + Console.WriteLine("============= Anthropic - Claude Chat Completion ============="); + + string apiKey = TestConfiguration.AnthropicAI.ApiKey; + string modelId = TestConfiguration.AnthropicAI.ModelId; + + Assert.NotNull(apiKey); + Assert.NotNull(modelId); + + Kernel kernel = Kernel.CreateBuilder() + .AddAnthropicChatCompletion( + modelId: modelId, + apiKey: apiKey) + .Build(); + + await SimpleChatAsync(kernel); + } + + private async Task SimpleChatAsync(Kernel kernel) + { + Console.WriteLine("======== Simple Chat ========"); + + var chatHistory = new ChatHistory("You are an expert in the tool shop."); + var chat = kernel.GetRequiredService(); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for new power tools, any suggestion?"); + await MessageOutputAsync(chatHistory); + + // First bot assistant message + var reply = await chat.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + await MessageOutputAsync(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I'm looking for a drill, a screwdriver and a hammer."); + await MessageOutputAsync(chatHistory); + + // Second bot assistant message + reply = await chat.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + await MessageOutputAsync(chatHistory); + } + + /// + /// Outputs the last message of the chat history + /// + private Task MessageOutputAsync(ChatHistory chatHistory) + { + var message = chatHistory.Last(); + + Console.WriteLine($"{message.Role}: {message.Content}"); + Console.WriteLine("------------------------"); + + return Task.CompletedTask; + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletionStreaming.cs 
b/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..471107d8281b --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletionStreaming.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace ChatCompletion; + +public sealed class Anthropic_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task SampleAsync() + { + Console.WriteLine("============= Anthropic - Claude Chat Streaming ============="); + + string apiKey = TestConfiguration.AnthropicAI.ApiKey; + string modelId = TestConfiguration.AnthropicAI.ModelId; + + Assert.NotNull(apiKey); + Assert.NotNull(modelId); + + Kernel kernel = Kernel.CreateBuilder() + .AddAnthropicChatCompletion( + modelId: modelId, + apiKey: apiKey) + .Build(); + + await this.StreamingChatAsync(kernel); + } + + private async Task StreamingChatAsync(Kernel kernel) + { + Console.WriteLine("======== Streaming Chat ========"); + + var chatHistory = new ChatHistory("You are an expert in the tool shop."); + var chat = kernel.GetRequiredService(); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for alternative coffee brew methods, can you help me?"); + await MessageOutputAsync(chatHistory); + + // First bot assistant message + var streamingChat = chat.GetStreamingChatMessageContentsAsync(chatHistory); + var reply = await MessageOutputAsync(streamingChat); + chatHistory.Add(reply); + + // Second user message + chatHistory.AddUserMessage("Give me the best speciality coffee roasters."); + await MessageOutputAsync(chatHistory); + + // Second bot assistant message + streamingChat = chat.GetStreamingChatMessageContentsAsync(chatHistory); + reply = await MessageOutputAsync(streamingChat); + chatHistory.Add(reply); + } + + /// + /// Outputs the last message of the chat 
history + /// + private Task MessageOutputAsync(ChatHistory chatHistory) + { + var message = chatHistory.Last(); + + Console.WriteLine($"{message.Role}: {message.Content}"); + Console.WriteLine("------------------------"); + + return Task.CompletedTask; + } + + private async Task MessageOutputAsync(IAsyncEnumerable streamingChat) + { + bool first = true; + StringBuilder messageBuilder = new(); + await foreach (var chatMessage in streamingChat) + { + if (first) + { + Console.Write($"{chatMessage.Role}: "); + first = false; + } + + Console.Write(chatMessage.Content); + messageBuilder.Append(chatMessage.Content); + } + + Console.WriteLine(); + Console.WriteLine("------------------------"); + return new ChatMessageContent(AuthorRole.Assistant, messageBuilder.ToString()); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/Anthropic_ProvidersSetup.cs b/dotnet/samples/Concepts/ChatCompletion/Anthropic_ProvidersSetup.cs new file mode 100644 index 000000000000..753d7af61c79 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Anthropic_ProvidersSetup.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; + +namespace ChatCompletion; + +/// +/// This sample shows how to setup different providers for anthropic. +/// +public sealed class Anthropic_ProvidersSetup(ITestOutputHelper output) : BaseTest(output) +{ + public void AnthropicProvider() + { + var kernel = Kernel.CreateBuilder() + .AddAnthropicChatCompletion( + modelId: "modelId", + apiKey: "apiKey") + .Build(); + } + + /// + /// For more information on how to setup the Vertex AI provider, go to sample. 
+ /// + public void VertexAiProvider() + { + var kernel = Kernel.CreateBuilder() + .AddAnthropicVertextAIChatCompletion( + modelId: "modelId", + bearerTokenProvider: () => ValueTask.FromResult("bearer"), + endpoint: new Uri("https://your-endpoint")) + .Build(); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/Anthropic_Vision.cs b/dotnet/samples/Concepts/ChatCompletion/Anthropic_Vision.cs new file mode 100644 index 000000000000..324992ed17df --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Anthropic_Vision.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; + +namespace ChatCompletion; + +public sealed class Anthropic_Vision(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task SampleAsync() + { + Console.WriteLine("============= Anthropic - Claude Chat Completion ============="); + + string apiKey = TestConfiguration.AnthropicAI.ApiKey; + string modelId = TestConfiguration.AnthropicAI.ModelId; + + Assert.NotNull(apiKey); + Assert.NotNull(modelId); + + Kernel kernel = Kernel.CreateBuilder() + .AddAnthropicChatCompletion( + modelId: modelId, + apiKey: apiKey) + .Build(); + + var chatHistory = new ChatHistory("Your job is describing images."); + var chatCompletionService = kernel.GetRequiredService(); + + // Load the image from the resources + await using var stream = EmbeddedResource.ReadStream("sample_image.jpg")!; + using var binaryReader = new BinaryReader(stream); + var bytes = binaryReader.ReadBytes((int)stream.Length); + + chatHistory.AddUserMessage( + [ + new TextContent("What’s in this image?"), + // The Anthropic API expects images as base64-encoded data + // You have to always provide the mimeType for the image + new ImageContent(bytes, "image/jpeg"), + // The Cloud Storage URI of the image to include in the prompt. 
+ // The bucket that stores the file must be in the same Google Cloud project that's sending the request. + // new ImageContent(new Uri("gs://generativeai-downloads/images/scones.jpg"), + // metadata: new Dictionary { { "mimeType", "image/jpeg" } }) + ]); + + var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory); + + Console.WriteLine(reply.Content); + } +} diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index e35c2bef0dca..fc05b04fd60d 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -55,6 +55,7 @@ + diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 15584b88685c..e727a8d51592 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -65,6 +65,10 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom - [Google_GeminiChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs) - [Google_GeminiGetModelResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiGetModelResult.cs) - [Google_GeminiVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs) +- [Anthropic_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletion.cs) +- [Anthropic_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletionStreaming.cs) +- [Anthropic_Vision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_Vision.cs) +- 
[Anthropic_ProvidersSetup](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_ProvidersSetup.cs) - [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs) - [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs) - [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs) diff --git a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj index 4ce04e354cc8..e53aa55a2a79 100644 --- a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj +++ b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj @@ -15,6 +15,7 @@ + diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs index 9d3631dbcb90..612c13b251c3 100644 --- a/dotnet/samples/Demos/AIModelRouter/Program.cs +++ b/dotnet/samples/Demos/AIModelRouter/Program.cs @@ -74,6 +74,16 @@ private static async Task Main(string[] args) Console.WriteLine("• Azure AI Inference Added - Use \"azureai\" in the prompt."); } + if (config["Anthropic:ApiKey"] is not null) + { + services.AddAnthropicChatCompletion( + serviceId: "anthropic", + modelId: config["Anthropic:ModelId"] ?? 
"claude-3-5-sonnet-20240620", + apiKey: config["Anthropic:ApiKey"]!); + + Console.WriteLine("• Anthropic Added - Use \"anthropic\" in the prompt."); + } + // Adding a custom filter to capture router selected service id services.AddSingleton(new SelectedServiceFilter()); @@ -92,7 +102,7 @@ private static async Task Main(string[] args) // Find the best service to use based on the user's input KernelArguments arguments = new(new PromptExecutionSettings() { - ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai", "onnx", "azureai"]) + ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai", "onnx", "azureai", "anthropic"]) }); // Invoke the prompt and print the response diff --git a/dotnet/samples/Demos/AIModelRouter/README.md b/dotnet/samples/Demos/AIModelRouter/README.md index afb061ced3c2..7d0269977a3a 100644 --- a/dotnet/samples/Demos/AIModelRouter/README.md +++ b/dotnet/samples/Demos/AIModelRouter/README.md @@ -22,11 +22,17 @@ The sample can be configured by using the command line with .NET [Secret Manager ```powershell dotnet user-secrets set "OpenAI:ApiKey" "... your api key ... " -dotnet user-secrets set "OpenAI:ModelId" ".. Openai model .. " (default: gpt-4o) +dotnet user-secrets set "OpenAI:ModelId" ".. Openai model id .. " (default: gpt-4o) + +dotnet user-secrets set "Anthropic:ApiKey" "... your api key ... " +dotnet user-secrets set "Anthropic:ModelId" "... Anthropic model id .. " (default: claude-3-5-sonnet-20240620) + dotnet user-secrets set "Ollama:ModelId" ".. Ollama model id .. " dotnet user-secrets set "Ollama:Endpoint" ".. Ollama endpoint .. " (default: http://localhost:11434) + dotnet user-secrets set "LMStudio:Endpoint" ".. LM Studio endpoint .. " (default: http://localhost:1234) -dotnet user-secrets set "Onnx:ModelId" ".. Onnx model id" + +dotnet user-secrets set "Onnx:ModelId" ".. Onnx model id .. " dotnet user-secrets set "Onnx:ModelPath" ".. your Onnx model folder path .." 
``` @@ -53,4 +59,16 @@ dotnet run > **User** > LMStudio, what is Jupiter? Keep it simple. -> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years. \ No newline at end of file +> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years. + +> **User** > AzureAI, what is Jupiter? Keep it simple. + +> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years. + +> **User** > Anthropic, what is Jupiter? Keep it simple. + +> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years. + +> **User** > ONNX, what is Jupiter? Keep it simple. + +> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years. 
diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/.editorconfig new file mode 100644 index 000000000000..900bb5a52a52 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/.editorconfig @@ -0,0 +1,8 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations + +resharper_convert_constructor_to_member_initializers_highlighting = false # Disable highlighting for "Convert constructor to member initializers" quick-fix \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/AnthropicPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/AnthropicPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..1e9a24afd0c1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/AnthropicPromptExecutionSettingsTests.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests; + +public sealed class AnthropicPromptExecutionSettingsTests +{ + [Fact] + public void ItCreatesExecutionSettingsWithCorrectDefaults() + { + // Arrange + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(null); + + // Assert + Assert.NotNull(executionSettings); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); + Assert.Null(executionSettings.TopK); + Assert.Null(executionSettings.StopSequences); + Assert.Equal(AnthropicPromptExecutionSettings.DefaultTextMaxTokens, executionSettings.MaxTokens); + } + + [Fact] + public void ItUsesExistingExecutionSettings() + { + // Arrange + AnthropicPromptExecutionSettings actualSettings = new() + { + Temperature = 0.7, + TopP = 0.7f, + TopK = 20, + StopSequences = new[] { "foo", "bar" }, + MaxTokens = 128, + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(actualSettings, executionSettings); + } + + [Fact] + public void ItCreatesExecutionSettingsFromExtensionDataSnakeCase() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary + { + { "max_tokens", 1000 }, + { "temperature", 0 } + } + }; + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(1000, executionSettings.MaxTokens); + Assert.Equal(0, executionSettings.Temperature); + } + + [Fact] + public void ItCreatesExecutionSettingsFromJsonSnakeCase() + { + // Arrange + string json = """ + { + 
"temperature": 0.7, + "top_p": 0.7, + "top_k": 25, + "stop_sequences": [ "foo", "bar" ], + "max_tokens": 128 + } + """; + var actualSettings = JsonSerializer.Deserialize(json); + + // Act + AnthropicPromptExecutionSettings executionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(0.7, executionSettings.Temperature); + Assert.Equal(0.7f, executionSettings.TopP); + Assert.Equal(25, executionSettings.TopK); + Assert.Equal(new[] { "foo", "bar" }, executionSettings.StopSequences); + Assert.Equal(128, executionSettings.MaxTokens); + } + + [Fact] + public void PromptExecutionSettingsCloneWorksAsExpected() + { + // Arrange + string json = """ + { + "model_id": "claude-pro", + "temperature": 0.7, + "top_p": 0.7, + "top_k": 25, + "stop_sequences": [ "foo", "bar" ], + "max_tokens": 128 + } + """; + var executionSettings = JsonSerializer.Deserialize(json); + + // Act + var clone = executionSettings!.Clone() as AnthropicPromptExecutionSettings; + + // Assert + Assert.NotNull(clone); + Assert.Equal(executionSettings.ModelId, clone.ModelId); + Assert.Equal(executionSettings.Temperature, clone.Temperature); + Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData); + Assert.Equivalent(executionSettings.StopSequences, clone.StopSequences); + } + + [Fact] + public void PromptExecutionSettingsFreezeWorksAsExpected() + { + // Arrange + string json = """ + { + "model_id": "claude-pro", + "temperature": 0.7, + "top_p": 0.7, + "top_k": 25, + "stop_sequences": [ "foo", "bar" ], + "max_tokens": 128 + } + """; + var executionSettings = JsonSerializer.Deserialize(json); + + // Act + executionSettings!.Freeze(); + + // Assert + Assert.True(executionSettings.IsFrozen); + Assert.Throws(() => executionSettings.ModelId = "claude"); + Assert.Throws(() => executionSettings.Temperature = 0.5); + Assert.Throws(() => executionSettings.StopSequences!.Add("baz")); + } +} diff --git 
a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Connectors.Anthropic.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Connectors.Anthropic.UnitTests.csproj new file mode 100644 index 000000000000..a8a891daec84 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Connectors.Anthropic.UnitTests.csproj @@ -0,0 +1,48 @@ + + + + SemanticKernel.Connectors.Anthropic.UnitTests + SemanticKernel.Connectors.Anthropic.UnitTests + net8.0 + true + enable + disable + false + CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050,SKEXP0070 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + + + Always + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs new file mode 100644 index 000000000000..f77f4b3a9a3a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs @@ -0,0 +1,445 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; +using Microsoft.SemanticKernel.Http; +using SemanticKernel.Connectors.Anthropic.UnitTests.Utils; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core; + +/// +/// Test for +/// +public sealed class AnthropicChatGenerationTests : IDisposable +{ + private readonly HttpClient _httpClient; + private readonly HttpMessageHandlerStub _messageHandlerStub; + private const string ChatTestDataFilePath = "./TestData/chat_one_response.json"; + + public AnthropicChatGenerationTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent( + File.ReadAllText(ChatTestDataFilePath)); + + this._httpClient = new HttpClient(this._messageHandlerStub, false); + } + + [Fact] + public async Task ShouldPassModelIdToRequestContentAsync() + { + // Arrange + string modelId = "fake-model234"; + var client = this.CreateChatCompletionClient(modelId: modelId); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicRequest? request = Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Contains(modelId, request.ModelId, StringComparison.Ordinal); + } + + [Fact] + public async Task ShouldContainRolesInRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicRequest? 
request = Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Collection(request.Messages, + item => Assert.Equal(chatHistory[1].Role, item.Role), + item => Assert.Equal(chatHistory[2].Role, item.Role), + item => Assert.Equal(chatHistory[3].Role, item.Role)); + } + + [Fact] + public async Task ShouldContainMessagesInRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicRequest? request = Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Collection(request.Messages, + item => Assert.Equal(chatHistory[1].Content, GetTextFrom(item.Contents[0])), + item => Assert.Equal(chatHistory[2].Content, GetTextFrom(item.Contents[0])), + item => Assert.Equal(chatHistory[3].Content, GetTextFrom(item.Contents[0]))); + + string? GetTextFrom(AnthropicContent content) => ((AnthropicContent)content).Text; + } + + [Fact] + public async Task ShouldReturnValidChatResponseAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var response = await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.NotNull(response); + Assert.Equal("Hi! 
My name is Claude.", response[0].Content); + Assert.Equal(AuthorRole.Assistant, response[0].Role); + } + + [Fact] + public async Task ShouldReturnValidAnthropicMetadataAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var chatMessageContents = await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicResponse response = Deserialize( + await File.ReadAllTextAsync(ChatTestDataFilePath))!; + var textContent = chatMessageContents.SingleOrDefault(); + Assert.NotNull(textContent); + var metadata = textContent.Metadata as AnthropicMetadata; + Assert.NotNull(metadata); + Assert.Equal(response.StopReason, metadata.FinishReason); + Assert.Equal(response.Id, metadata.MessageId); + Assert.Equal(response.StopSequence, metadata.StopSequence); + Assert.Equal(response.Usage.InputTokens, metadata.InputTokenCount); + Assert.Equal(response.Usage.OutputTokens, metadata.OutputTokenCount); + } + + [Fact] + public async Task ShouldReturnValidDictionaryMetadataAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var chatMessageContents = await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicResponse response = Deserialize( + await File.ReadAllTextAsync(ChatTestDataFilePath))!; + var textContent = chatMessageContents.SingleOrDefault(); + Assert.NotNull(textContent); + var metadata = textContent.Metadata; + Assert.NotNull(metadata); + Assert.Equal(response.StopReason, metadata[nameof(AnthropicMetadata.FinishReason)]); + Assert.Equal(response.Id, metadata[nameof(AnthropicMetadata.MessageId)]); + Assert.Equal(response.StopSequence, metadata[nameof(AnthropicMetadata.StopSequence)]); + Assert.Equal(response.Usage.InputTokens, metadata[nameof(AnthropicMetadata.InputTokenCount)]); + Assert.Equal(response.Usage.OutputTokens, metadata[nameof(AnthropicMetadata.OutputTokenCount)]); + } + + 
[Fact] + public async Task ShouldReturnResponseWithModelIdAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var chatMessageContents = await client.GenerateChatMessageAsync(chatHistory); + + // Assert + var response = Deserialize( + await File.ReadAllTextAsync(ChatTestDataFilePath))!; + var chatMessageContent = chatMessageContents.SingleOrDefault(); + Assert.NotNull(chatMessageContent); + Assert.Equal(response.ModelId, chatMessageContent.ModelId); + } + + [Fact] + public async Task ShouldUsePromptExecutionSettingsAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + var executionSettings = new AnthropicPromptExecutionSettings() + { + MaxTokens = 102, + Temperature = 0.45, + TopP = 0.6f + }; + + // Act + await client.GenerateChatMessageAsync(chatHistory, executionSettings: executionSettings); + + // Assert + var request = Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Equal(executionSettings.MaxTokens, request.MaxTokens); + Assert.Equal(executionSettings.Temperature, request.Temperature); + Assert.Equal(executionSettings.TopP, request.TopP); + } + + [Fact] + public async Task ShouldThrowInvalidOperationExceptionIfChatHistoryContainsOnlySystemMessageAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = new ChatHistory("System message"); + + // Act & Assert + await Assert.ThrowsAsync( + () => client.GenerateChatMessageAsync(chatHistory)); + } + + [Fact] + public async Task ShouldThrowInvalidOperationExceptionIfChatHistoryContainsOnlyManySystemMessagesAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = new ChatHistory("System message"); + chatHistory.AddSystemMessage("System message 2"); + chatHistory.AddSystemMessage("System message 3"); + + // Act & Assert + await Assert.ThrowsAsync( + () => 
client.GenerateChatMessageAsync(chatHistory)); + } + + [Fact] + public async Task ShouldPassSystemMessageToRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + string[] messages = ["System message", "System message 2"]; + var chatHistory = new ChatHistory(messages[0]); + chatHistory.AddSystemMessage(messages[1]); + chatHistory.AddUserMessage("Hello"); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicRequest? request = Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.NotNull(request.SystemPrompt); + Assert.All(messages, msg => Assert.Contains(msg, request.SystemPrompt, StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task ShouldPassVersionToRequestBodyIfThirdVendorIsUsedAsync() + { + // Arrange + var options = new AmazonBedrockAnthropicClientOptions(); + var client = new AnthropicClient("fake-model", new Uri("https://fake-uri.com"), + bearerTokenProvider: () => ValueTask.FromResult("fake-token"), + options: options, httpClient: this._httpClient); + + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + AnthropicRequest? request = Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Equal(options.Version, request.Version); + } + + [Fact] + public async Task ShouldThrowArgumentExceptionIfChatHistoryIsEmptyAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = new ChatHistory(); + + // Act & Assert + await Assert.ThrowsAsync( + () => client.GenerateChatMessageAsync(chatHistory)); + } + + [Theory] + [InlineData(0)] + [InlineData(-15)] + public async Task ShouldThrowArgumentExceptionIfExecutionSettingMaxTokensIsLessThanOneAsync(int? 
maxTokens) + { + // Arrange + var client = this.CreateChatCompletionClient(); + AnthropicPromptExecutionSettings executionSettings = new() + { + MaxTokens = maxTokens + }; + + // Act & Assert + await Assert.ThrowsAsync( + () => client.GenerateChatMessageAsync(CreateSampleChatHistory(), executionSettings: executionSettings)); + } + + [Fact] + public async Task ItCreatesPostRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.Equal(HttpMethod.Post, this._messageHandlerStub.Method); + } + + [Fact] + public async Task ItCreatesRequestWithValidUserAgentAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + Assert.Equal(HttpHeaderConstant.Values.UserAgent, this._messageHandlerStub.RequestHeaders.UserAgent.ToString()); + } + + [Fact] + public async Task ItCreatesRequestWithSemanticKernelVersionHeaderAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + var expectedVersion = HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AnthropicClient)); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + var header = this._messageHandlerStub.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).SingleOrDefault(); + Assert.NotNull(header); + Assert.Equal(expectedVersion, header); + } + + [Fact] + public async Task ItCreatesRequestWithValidAnthropicVersionAsync() + { + // Arrange + var options = new AnthropicClientOptions(); + var client = this.CreateChatCompletionClient(options: options); + var chatHistory = CreateSampleChatHistory(); + + // Act 
+ await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + Assert.Equal(options.Version, this._messageHandlerStub.RequestHeaders.GetValues("anthropic-version").SingleOrDefault()); + } + + [Fact] + public async Task ItCreatesRequestWithValidApiKeyAsync() + { + // Arrange + string apiKey = "fake-claude-key"; + var client = this.CreateChatCompletionClient(apiKey: apiKey); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + Assert.Equal(apiKey, this._messageHandlerStub.RequestHeaders.GetValues("x-api-key").SingleOrDefault()); + } + + [Fact] + public async Task ItCreatesRequestWithJsonContentTypeAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.NotNull(this._messageHandlerStub.ContentHeaders); + Assert.NotNull(this._messageHandlerStub.ContentHeaders.ContentType); + Assert.Contains("application/json", this._messageHandlerStub.ContentHeaders.ContentType.ToString()); + } + + [Theory] + [InlineData("custom-header", "custom-value")] + public async Task ItCreatesRequestWithCustomUriAndCustomHeadersAsync(string headerName, string headerValue) + { + // Arrange + Uri uri = new("https://fake-uri.com"); + using var httpHandler = new CustomHeadersHandler(headerName, headerValue, ChatTestDataFilePath); + using var httpClient = new HttpClient(httpHandler); + httpClient.BaseAddress = uri; + var client = new AnthropicClient("fake-model", "api-key", options: new(), httpClient: httpClient); + + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.Equal(uri, httpHandler.RequestUri); + Assert.NotNull(httpHandler.RequestHeaders); + 
Assert.Equal(headerValue, httpHandler.RequestHeaders.GetValues(headerName).SingleOrDefault()); + } + + private static ChatHistory CreateSampleChatHistory() + { + var chatHistory = new ChatHistory("You are a chatbot"); + chatHistory.AddUserMessage("Hello"); + chatHistory.AddAssistantMessage("Hi"); + chatHistory.AddUserMessage("How are you?"); + return chatHistory; + } + + private AnthropicClient CreateChatCompletionClient( + string modelId = "fake-model", + string? apiKey = null, + AnthropicClientOptions? options = null, + HttpClient? httpClient = null) + { + return new AnthropicClient(modelId, apiKey ?? "fake-key", options: new(), httpClient: this._httpClient); + } + + private static T? Deserialize(string json) + { + return JsonSerializer.Deserialize(json); + } + + private static T? Deserialize(ReadOnlySpan json) + { + return JsonSerializer.Deserialize(json); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatStreamingTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatStreamingTests.cs new file mode 100644 index 000000000000..d8d5b04a0d05 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatStreamingTests.cs @@ -0,0 +1,467 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; +using Microsoft.SemanticKernel.Http; +using SemanticKernel.Connectors.Anthropic.UnitTests.Utils; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core; + +/// +/// Test for +/// +public sealed class AnthropicChatStreamingTests : IDisposable +{ + private readonly HttpClient _httpClient; + private readonly HttpMessageHandlerStub _messageHandlerStub; + private const string ChatTestDataFilePath = "./TestData/chat_stream_response.txt"; + + public AnthropicChatStreamingTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent( + File.ReadAllText(ChatTestDataFilePath)); + + this._httpClient = new HttpClient(this._messageHandlerStub, false); + } + + [Fact] + public async Task ShouldSetStreamTrueInRequestContentAsync() + { + // Arrange + string modelId = "fake-model234"; + var client = this.CreateChatCompletionClient(modelId: modelId); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + AnthropicRequest? request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.True(request.Stream); + } + + [Fact] + public async Task ShouldPassModelIdToRequestContentAsync() + { + // Arrange + string modelId = "fake-model234"; + var client = this.CreateChatCompletionClient(modelId: modelId); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + AnthropicRequest? 
request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Contains(modelId, request.ModelId, StringComparison.Ordinal); + } + + [Fact] + public async Task ShouldContainRolesInRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + AnthropicRequest? request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Collection(request.Messages, + item => Assert.Equal(chatHistory[1].Role, item.Role), + item => Assert.Equal(chatHistory[2].Role, item.Role), + item => Assert.Equal(chatHistory[3].Role, item.Role)); + } + + [Fact] + public async Task ShouldContainMessagesInRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + AnthropicRequest? request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Collection(request.Messages, + item => Assert.Equal(chatHistory[1].Content, GetTextFrom(item.Contents[0])), + item => Assert.Equal(chatHistory[2].Content, GetTextFrom(item.Contents[0])), + item => Assert.Equal(chatHistory[3].Content, GetTextFrom(item.Contents[0]))); + + string? 
GetTextFrom(AnthropicContent content) => content.Text; + } + + [Fact] + public async Task ShouldReturnValidChatResponseAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var responses = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(responses); + Assert.NotEmpty(responses); + string content = string.Concat(responses.Select(streamingContent => streamingContent.Content)); + Assert.Equal("Hi! My name is Claude.", content); + Assert.All(responses, response => Assert.Equal(AuthorRole.Assistant, response.Role)); + } + + [Fact] + public async Task ShouldReturnValidAnthropicMetadataStartMessageAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(streamingChatMessageContents); + Assert.NotEmpty(streamingChatMessageContents); + var messageContent = streamingChatMessageContents.First(); + var metadata = messageContent.Metadata as AnthropicMetadata; + Assert.NotNull(metadata); + Assert.Null(metadata.FinishReason); + Assert.Equal("msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", metadata.MessageId); + Assert.Null(metadata.StopSequence); + Assert.Equal(25, metadata.InputTokenCount); + Assert.Equal(1, metadata.OutputTokenCount); + } + + [Fact] + public async Task ShouldReturnNullAnthropicMetadataDeltaMessagesAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(streamingChatMessageContents); + Assert.NotEmpty(streamingChatMessageContents); + var deltaMessages = streamingChatMessageContents[1..^1]; + 
Assert.All(deltaMessages, messageContent => Assert.Null(messageContent.Metadata)); + } + + [Fact] + public async Task ShouldReturnValidAnthropicMetadataEndMessageAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(streamingChatMessageContents); + Assert.NotEmpty(streamingChatMessageContents); + var messageContent = streamingChatMessageContents.Last(); + var metadata = messageContent.Metadata as AnthropicMetadata; + Assert.NotNull(metadata); + Assert.Equal(AnthropicFinishReason.StopSequence, metadata.FinishReason); + Assert.Equal("msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", metadata.MessageId); + Assert.Equal("claude", metadata.StopSequence); + Assert.Equal(0, metadata.InputTokenCount); + Assert.Equal(15, metadata.OutputTokenCount); + } + + [Fact] + public async Task ShouldReturnResponseWithModelIdAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(streamingChatMessageContents); + Assert.NotEmpty(streamingChatMessageContents); + Assert.All(streamingChatMessageContents, chatMessageContent => Assert.Equal("claude-3-5-sonnet-20240620", chatMessageContent.ModelId)); + } + + [Fact] + public async Task ShouldUsePromptExecutionSettingsAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + var executionSettings = new AnthropicPromptExecutionSettings() + { + MaxTokens = 102, + Temperature = 0.45, + TopP = 0.6f + }; + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory, executionSettings: executionSettings).ToListAsync(); + + // Assert + var request = 
JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Equal(executionSettings.MaxTokens, request.MaxTokens); + Assert.Equal(executionSettings.Temperature, request.Temperature); + Assert.Equal(executionSettings.TopP, request.TopP); + } + + [Fact] + public async Task ShouldThrowInvalidOperationExceptionIfChatHistoryContainsOnlySystemMessageAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = new ChatHistory("System message"); + + // Act & Assert + await Assert.ThrowsAsync( + () => client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync().AsTask()); + } + + [Fact] + public async Task ShouldThrowInvalidOperationExceptionIfChatHistoryContainsOnlyManySystemMessagesAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = new ChatHistory("System message"); + chatHistory.AddSystemMessage("System message 2"); + chatHistory.AddSystemMessage("System message 3"); + + // Act & Assert + await Assert.ThrowsAsync( + () => client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync().AsTask()); + } + + [Fact] + public async Task ShouldPassSystemMessageToRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + string[] messages = ["System message", "System message 2"]; + var chatHistory = new ChatHistory(messages[0]); + chatHistory.AddSystemMessage(messages[1]); + chatHistory.AddUserMessage("Hello"); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + AnthropicRequest? 
request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.NotNull(request.SystemPrompt); + Assert.All(messages, msg => Assert.Contains(msg, request.SystemPrompt, StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task ShouldPassVersionToRequestBodyIfCustomHandlerUsedAsync() + { + // Arrange + var options = new AmazonBedrockAnthropicClientOptions(); + var client = new AnthropicClient("fake-model", new Uri("https://fake-uri.com"), + bearerTokenProvider: () => ValueTask.FromResult("fake-token"), + options: options, httpClient: this._httpClient); + + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + AnthropicRequest? request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(request); + Assert.Equal(options.Version, request.Version); + } + + [Fact] + public async Task ShouldThrowArgumentExceptionIfChatHistoryIsEmptyAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = new ChatHistory(); + + // Act & Assert + await Assert.ThrowsAsync( + () => client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync().AsTask()); + } + + [Theory] + [InlineData(0)] + [InlineData(-15)] + public async Task ShouldThrowArgumentExceptionIfExecutionSettingMaxTokensIsLessThanOneAsync(int? 
maxTokens) + { + // Arrange + var client = this.CreateChatCompletionClient(); + AnthropicPromptExecutionSettings executionSettings = new() + { + MaxTokens = maxTokens + }; + + // Act & Assert + await Assert.ThrowsAsync( + () => client.StreamGenerateChatMessageAsync(CreateSampleChatHistory(), executionSettings: executionSettings).ToListAsync().AsTask()); + } + + [Fact] + public async Task ItCreatesPostRequestAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.Equal(HttpMethod.Post, this._messageHandlerStub.Method); + } + + [Fact] + public async Task ItCreatesRequestWithValidUserAgentAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + Assert.Equal(HttpHeaderConstant.Values.UserAgent, this._messageHandlerStub.RequestHeaders.UserAgent.ToString()); + } + + [Fact] + public async Task ItCreatesRequestWithSemanticKernelVersionHeaderAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + var expectedVersion = HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AnthropicClient)); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + var header = this._messageHandlerStub.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).SingleOrDefault(); + Assert.NotNull(header); + Assert.Equal(expectedVersion, header); + } + + [Fact] + public async Task ItCreatesRequestWithValidAnthropicVersionAsync() + { + // Arrange + var options = new AnthropicClientOptions(); + var client = 
this.CreateChatCompletionClient(options: options); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + Assert.Equal(options.Version, this._messageHandlerStub.RequestHeaders.GetValues("anthropic-version").SingleOrDefault()); + } + + [Fact] + public async Task ItCreatesRequestWithValidApiKeyAsync() + { + // Arrange + string apiKey = "fake-claude-key"; + var client = this.CreateChatCompletionClient(apiKey: apiKey); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestHeaders); + Assert.Equal(apiKey, this._messageHandlerStub.RequestHeaders.GetValues("x-api-key").SingleOrDefault()); + } + + [Fact] + public async Task ItCreatesRequestWithJsonContentTypeAsync() + { + // Arrange + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotNull(this._messageHandlerStub.ContentHeaders); + Assert.NotNull(this._messageHandlerStub.ContentHeaders.ContentType); + Assert.Contains("application/json", this._messageHandlerStub.ContentHeaders.ContentType.ToString()); + } + + [Theory] + [InlineData("custom-header", "custom-value")] + public async Task ItCreatesRequestWithCustomUriAndCustomHeadersAsync(string headerName, string headerValue) + { + // Arrange + Uri uri = new("https://fake-uri.com"); + using var httpHandler = new CustomHeadersHandler(headerName, headerValue, ChatTestDataFilePath); + using var httpClient = new HttpClient(httpHandler); + httpClient.BaseAddress = uri; + var client = new AnthropicClient("fake-model", "api-key", options: new(), httpClient: httpClient); + + var chatHistory = CreateSampleChatHistory(); + + // Act + await 
client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.Equal(uri, httpHandler.RequestUri); + Assert.NotNull(httpHandler.RequestHeaders); + Assert.Equal(headerValue, httpHandler.RequestHeaders.GetValues(headerName).SingleOrDefault()); + } + + private static ChatHistory CreateSampleChatHistory() + { + var chatHistory = new ChatHistory("You are a chatbot"); + chatHistory.AddUserMessage("Hello"); + chatHistory.AddAssistantMessage("Hi"); + chatHistory.AddUserMessage("How are you?"); + return chatHistory; + } + + private AnthropicClient CreateChatCompletionClient( + string modelId = "fake-model", + string? apiKey = null, + AnthropicClientOptions? options = null, + HttpClient? httpClient = null) + { + return new AnthropicClient(modelId, apiKey ?? "fake-key", options: new(), httpClient: this._httpClient); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs new file mode 100644 index 000000000000..e741764c90cb --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs @@ -0,0 +1,230 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core; + +public sealed class AnthropicRequestTests +{ + [Fact] + public void FromChatHistoryItReturnsWithConfiguration() + { + // Arrange + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("user-message"); + chatHistory.AddAssistantMessage("assist-message"); + chatHistory.AddUserMessage("user-message2"); + var executionSettings = new AnthropicPromptExecutionSettings + { + Temperature = 1.5, + MaxTokens = 10, + TopP = 0.9f, + ModelId = "claude" + }; + + // Act + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings); + + // Assert + Assert.Equal(executionSettings.Temperature, request.Temperature); + Assert.Equal(executionSettings.MaxTokens, request.MaxTokens); + Assert.Equal(executionSettings.TopP, request.TopP); + } + + [Theory] + [InlineData(false)] + [InlineData(true)] + public void FromChatHistoryItReturnsWithValidStreamingMode(bool streamMode) + { + // Arrange + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("user-message"); + chatHistory.AddAssistantMessage("assist-message"); + chatHistory.AddUserMessage("user-message2"); + var executionSettings = new AnthropicPromptExecutionSettings + { + Temperature = 1.5, + MaxTokens = 10, + TopP = 0.9f, + ModelId = "claude" + }; + + // Act + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings, streamMode); + + // Assert + Assert.Equal(streamMode, request.Stream); + } + + [Fact] + public void FromChatHistoryItReturnsWithChatHistory() + { + // Arrange + ChatHistory chatHistory = []; + 
chatHistory.AddUserMessage("user-message"); + chatHistory.AddAssistantMessage("assist-message"); + chatHistory.AddUserMessage("user-message2"); + var executionSettings = new AnthropicPromptExecutionSettings + { + ModelId = "claude", + MaxTokens = 128, + }; + + // Act + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings); + + // Assert + Assert.All(request.Messages, c => Assert.IsType(c.Contents[0])); + Assert.Collection(request.Messages, + c => Assert.Equal(chatHistory[0].Content, ((AnthropicContent)c.Contents[0]).Text), + c => Assert.Equal(chatHistory[1].Content, ((AnthropicContent)c.Contents[0]).Text), + c => Assert.Equal(chatHistory[2].Content, ((AnthropicContent)c.Contents[0]).Text)); + Assert.Collection(request.Messages, + c => Assert.Equal(chatHistory[0].Role, c.Role), + c => Assert.Equal(chatHistory[1].Role, c.Role), + c => Assert.Equal(chatHistory[2].Role, c.Role)); + } + + [Fact] + public void FromChatHistoryTextAsTextContentItReturnsWithChatHistory() + { + // Arrange + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("user-message"); + chatHistory.AddAssistantMessage("assist-message"); + chatHistory.AddUserMessage(contentItems: [new TextContent("user-message2")]); + var executionSettings = new AnthropicPromptExecutionSettings + { + ModelId = "claude", + MaxTokens = 128, + }; + + // Act + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings); + + // Assert + Assert.All(request.Messages, c => Assert.IsType(c.Contents[0])); + Assert.Collection(request.Messages, + c => Assert.Equal(chatHistory[0].Content, ((AnthropicContent)c.Contents[0]).Text), + c => Assert.Equal(chatHistory[1].Content, ((AnthropicContent)c.Contents[0]).Text), + c => Assert.Equal(chatHistory[2].Items.Cast().Single().Text, ((AnthropicContent)c.Contents[0]).Text)); + } + + [Fact] + public void FromChatHistoryImageAsImageContentItReturnsWithChatHistory() + { + // Arrange + ReadOnlyMemory 
imageAsBytes = new byte[] { 0x00, 0x01, 0x02, 0x03 }; + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("user-message"); + chatHistory.AddAssistantMessage("assist-message"); + chatHistory.AddUserMessage(contentItems: + [new ImageContent(imageAsBytes, "image/png")]); + var executionSettings = new AnthropicPromptExecutionSettings + { + ModelId = "claude", + MaxTokens = 128, + }; + + // Act + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings); + + // Assert + Assert.Collection(request.Messages, + c => Assert.IsType(c.Contents[0]), + c => Assert.IsType(c.Contents[0]), + c => Assert.IsType(c.Contents[0])); + Assert.Collection(request.Messages, + c => Assert.Equal(chatHistory[0].Content, ((AnthropicContent)c.Contents[0]).Text), + c => Assert.Equal(chatHistory[1].Content, ((AnthropicContent)c.Contents[0]).Text), + c => + { + Assert.Equal(chatHistory[2].Items.Cast().Single().MimeType, ((AnthropicContent)c.Contents[0]).Source!.MediaType); + Assert.True(imageAsBytes.ToArray().SequenceEqual(Convert.FromBase64String(((AnthropicContent)c.Contents[0]).Source!.Data!))); + }); + } + + [Fact] + public void FromChatHistoryUnsupportedContentItThrowsNotSupportedException() + { + // Arrange + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("user-message"); + chatHistory.AddAssistantMessage("assist-message"); + chatHistory.AddUserMessage(contentItems: [new DummyContent("unsupported-content")]); + var executionSettings = new AnthropicPromptExecutionSettings + { + ModelId = "claude", + MaxTokens = 128, + }; + + // Act + void Act() => AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings); + + // Assert + Assert.Throws(Act); + } + + [Fact] + public void FromChatHistoryItReturnsWithSystemMessages() + { + // Arrange + string[] systemMessages = ["system-message1", "system-message2", "system-message3", "system-message4"]; + ChatHistory chatHistory = new(systemMessages[0]); + 
chatHistory.AddSystemMessage(systemMessages[1]); + chatHistory.Add(new ChatMessageContent(AuthorRole.System, + items: [new TextContent(systemMessages[2]), new TextContent(systemMessages[3])])); + chatHistory.AddUserMessage("user-message"); + var executionSettings = new AnthropicPromptExecutionSettings + { + ModelId = "claude", + MaxTokens = 128, + }; + + // Act + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chatHistory, executionSettings); + + // Assert + Assert.NotNull(request.SystemPrompt); + Assert.All(systemMessages, msg => Assert.Contains(msg, request.SystemPrompt, StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public void AddChatMessageToRequestItAddsChatMessage() + { + // Arrange + ChatHistory chat = []; + var request = AnthropicRequest.FromChatHistoryAndExecutionSettings(chat, new AnthropicPromptExecutionSettings { ModelId = "model-id", MaxTokens = 128 }); + var message = new AnthropicChatMessageContent + { + Role = AuthorRole.User, + Items = [new TextContent("user-message")], + ModelId = "model-id", + Encoding = Encoding.UTF8 + }; + + // Act + request.AddChatMessage(message); + + // Assert + Assert.Single(request.Messages, + c => c.Contents[0] is AnthropicContent content && string.Equals(message.Content, content.Text, StringComparison.Ordinal)); + Assert.Single(request.Messages, + c => Equals(message.Role, c.Role)); + } + + private sealed class DummyContent( + object? innerContent, + string? modelId = null, + IReadOnlyDictionary? metadata = null) + : KernelContent(innerContent, modelId, metadata); +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AuthorRoleConverterTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AuthorRoleConverterTests.cs new file mode 100644 index 000000000000..0bbb80d03cd6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AuthorRoleConverterTests.cs @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Buffers; +using System.Text.Json; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core; + +public sealed class AuthorRoleConverterTests +{ + [Fact] + public void ReadWhenRoleIsUserReturnsUser() + { + // Arrange + var converter = new AuthorRoleConverter(); + var reader = new Utf8JsonReader("\"user\""u8); + + // Act + reader.Read(); + var result = converter.Read(ref reader, typeof(AuthorRole?), JsonSerializerOptions.Default); + + // Assert + Assert.Equal(AuthorRole.User, result); + } + + [Fact] + public void ReadWhenRoleIsModelReturnsAssistant() + { + // Arrange + var converter = new AuthorRoleConverter(); + var reader = new Utf8JsonReader("\"assistant\""u8); + + // Act + reader.Read(); + var result = converter.Read(ref reader, typeof(AuthorRole?), JsonSerializerOptions.Default); + + // Assert + Assert.Equal(AuthorRole.Assistant, result); + } + + [Fact] + public void ReadWhenRoleIsUnknownThrows() + { + // Arrange + var converter = new AuthorRoleConverter(); + + // Act + void Act() + { + var reader = new Utf8JsonReader("\"unknown\""u8); + reader.Read(); + converter.Read(ref reader, typeof(AuthorRole?), JsonSerializerOptions.Default); + } + + // Assert + Assert.Throws(Act); + } + + [Fact] + public void WriteWhenRoleIsUserReturnsUser() + { + // Arrange + var converter = new AuthorRoleConverter(); + var bufferWriter = new ArrayBufferWriter(); + using var writer = new Utf8JsonWriter(bufferWriter); + + // Act + converter.Write(writer, AuthorRole.User, JsonSerializerOptions.Default); + + // Assert + Assert.Equal("\"user\""u8, bufferWriter.GetSpan().Trim((byte)'\0')); + } + + [Fact] + public void WriteWhenRoleIsAssistantReturnsModel() + { + // Arrange + var converter = new AuthorRoleConverter(); + var bufferWriter = new ArrayBufferWriter(); + using var writer = new Utf8JsonWriter(bufferWriter); + + // Act + 
converter.Write(writer, AuthorRole.Assistant, JsonSerializerOptions.Default); + + // Assert + Assert.Equal("\"assistant\""u8, bufferWriter.GetSpan().Trim((byte)'\0')); + } + + [Fact] + public void WriteWhenRoleIsNotUserOrAssistantThrows() + { + // Arrange + var converter = new AuthorRoleConverter(); + using var writer = new Utf8JsonWriter(new ArrayBufferWriter()); + + // Act + void Act() + { + converter.Write(writer, AuthorRole.System, JsonSerializerOptions.Default); + } + + // Assert + Assert.Throws(Act); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Extensions/AnthropicServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Extensions/AnthropicServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..06622e2371dc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Extensions/AnthropicServiceCollectionExtensionsTests.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Extensions; + +/// +/// Unit tests for and classes. 
+/// +public sealed class AnthropicServiceCollectionExtensionsTests +{ + [Fact] + public void AnthropicChatCompletionServiceShouldBeRegisteredInKernelServices() + { + // Arrange + var kernelBuilder = Kernel.CreateBuilder(); + + // Act + kernelBuilder.AddAnthropicChatCompletion("modelId", "apiKey"); + + var kernel = kernelBuilder.Build(); + + // Assert + var chatCompletionService = kernel.GetRequiredService(); + Assert.NotNull(chatCompletionService); + Assert.IsType(chatCompletionService); + } + + [Fact] + public void AnthropicChatCompletionServiceShouldBeRegisteredInServiceCollection() + { + // Arrange + var services = new ServiceCollection(); + + // Act + services.AddAnthropicChatCompletion("modelId", "apiKey"); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = serviceProvider.GetRequiredService(); + Assert.NotNull(chatCompletionService); + Assert.IsType(chatCompletionService); + } + + [Fact] + public void AnthropicChatCompletionServiceCustomEndpointShouldBeRegisteredInKernelServices() + { + // Arrange + var kernelBuilder = Kernel.CreateBuilder(); + + // Act + kernelBuilder.AddAnthropicVertextAIChatCompletion("modelId", bearerTokenProvider: () => ValueTask.FromResult("token"), endpoint: new Uri("https://example.com")); + + var kernel = kernelBuilder.Build(); + + // Assert + var chatCompletionService = kernel.GetRequiredService(); + Assert.NotNull(chatCompletionService); + Assert.IsType(chatCompletionService); + } + + [Fact] + public void AnthropicChatCompletionServiceCustomEndpointShouldBeRegisteredInServiceCollection() + { + // Arrange + var services = new ServiceCollection(); + + // Act + services.AddAnthropicVertexAIChatCompletion("modelId", () => ValueTask.FromResult("token"), endpoint: new Uri("https://example.com")); + var serviceProvider = services.BuildServiceProvider(); + + // Assert + var chatCompletionService = serviceProvider.GetRequiredService(); + Assert.NotNull(chatCompletionService); + 
Assert.IsType(chatCompletionService); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Services/AnthropicChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Services/AnthropicChatCompletionServiceTests.cs new file mode 100644 index 000000000000..94e8dca76b4f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Services/AnthropicChatCompletionServiceTests.cs @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Services; +using Xunit; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Services; + +public sealed class AnthropicChatCompletionServiceTests +{ + [Fact] + public void AttributesShouldContainModelId() + { + // Arrange & Act + string model = "fake-model"; + var service = new AnthropicChatCompletionService(model, "key"); + + // Assert + Assert.Equal(model, service.Attributes[AIServiceExtensions.ModelIdKey]); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_one_response.json b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_one_response.json new file mode 100644 index 000000000000..ac0e04ce73a8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_one_response.json @@ -0,0 +1,18 @@ +{ + "content": [ + { + "text": "Hi! 
My name is Claude.", + "type": "text" + } + ], + "id": "msg_013Zva2CMHLNnXjNJJKqJ2EF", + "model": "claude-3-5-sonnet-20240620", + "role": "assistant", + "stop_reason": "end_turn", + "stop_sequence": null, + "type": "message", + "usage": { + "input_tokens": 10, + "output_tokens": 25 + } +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_stream_response.txt b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_stream_response.txt new file mode 100644 index 000000000000..61bfd832c304 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_stream_response.txt @@ -0,0 +1,24 @@ +event: message_start +data: {"type": "message_start", "message": {"id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", "type": "message", "role": "assistant", "content": [], "model": "claude-3-5-sonnet-20240620", "stop_reason": null, "stop_sequence": null, "usage": {"input_tokens": 25, "output_tokens": 1}}} + +event: content_block_start +data: {"type": "content_block_start", "index": 0, "content_block": {"type": "text", "text": ""}} + +event: ping +data: {"type": "ping"} + +event: content_block_delta +data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "Hi! 
"}} + +event: content_block_delta +data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "My name is Claude."}} + +event: content_block_stop +data: {"type": "content_block_stop", "index": 0} + +event: message_delta +data: {"type": "message_delta", "delta": {"stop_reason": "stop_sequence", "stop_sequence": "claude"}, "usage": {"output_tokens": 15}} + +event: message_stop +data: {"type": "message_stop"} + diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Utils/CustomHeadersHandler.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Utils/CustomHeadersHandler.cs new file mode 100644 index 000000000000..67fea752a1df --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Utils/CustomHeadersHandler.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading.Tasks; + +namespace SemanticKernel.Connectors.Anthropic.UnitTests.Utils; + +internal sealed class CustomHeadersHandler : DelegatingHandler +{ + private readonly string _headerName; + private readonly string _headerValue; + public HttpRequestHeaders? RequestHeaders { get; private set; } + + public HttpContentHeaders? ContentHeaders { get; private set; } + + public byte[]? RequestContent { get; private set; } + + public Uri? RequestUri { get; private set; } + + public HttpMethod? 
Method { get; private set; } + + public CustomHeadersHandler(string headerName, string headerValue, string testDataFilePath) + { + this.InnerHandler = new HttpMessageHandlerStub + { + ResponseToReturn = { Content = new StringContent(File.ReadAllText(testDataFilePath)) } + }; + this._headerName = headerName; + this._headerValue = headerValue; + } + + protected override Task SendAsync(HttpRequestMessage request, System.Threading.CancellationToken cancellationToken) + { + request.Headers.Add(this._headerName, this._headerValue); + this.Method = request.Method; + this.RequestUri = request.RequestUri; + this.RequestHeaders = request.Headers; + this.RequestContent = request.Content is null ? null : request.Content.ReadAsByteArrayAsync(cancellationToken).Result; + + return base.SendAsync(request, cancellationToken); + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Anthropic/AssemblyInfo.cs new file mode 100644 index 000000000000..fe66371dbc58 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0070")] diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Connectors.Anthropic.csproj b/dotnet/src/Connectors/Connectors.Anthropic/Connectors.Anthropic.csproj new file mode 100644 index 000000000000..392a9844d8d4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Connectors.Anthropic.csproj @@ -0,0 +1,32 @@ + + + + + Microsoft.SemanticKernel.Connectors.Anthropic + $(AssemblyName) + netstandard2.0 + alpha + CA1707,SKEXP0001,SKEXP0070 + + + + + + + + + Semantic Kernel - Anthropic Connectors + Semantic Kernel connectors for Anthropic generation platforms. Contains generation services. 
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs new file mode 100644 index 000000000000..456eadbda68a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs @@ -0,0 +1,517 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; +using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core; + +/// +/// Represents a client for interacting with the Anthropic chat completion models. +/// +internal sealed class AnthropicClient +{ + private const string ModelProvider = "anthropic"; + private const string AnthropicUrl = "https://api.anthropic.com/v1/messages"; + private readonly Func>? _bearerTokenProvider; + private readonly Dictionary _attributesInternal = new(); + + private readonly HttpClient _httpClient; + private readonly ILogger _logger; + private readonly string _modelId; + private readonly string? _apiKey; + private readonly Uri _endpoint; + private readonly string? _version; + + private static readonly string s_namespace = typeof(AnthropicChatCompletionService).Namespace!; + + /// + /// Instance of for metrics. 
+ /// + private static readonly Meter s_meter = new(s_namespace); + + /// + /// Instance of to keep track of the number of prompt tokens used. + /// + private static readonly Counter s_promptTokensCounter = + s_meter.CreateCounter( + name: $"{s_namespace}.tokens.prompt", + unit: "{token}", + description: "Number of prompt tokens used"); + + /// + /// Instance of to keep track of the number of completion tokens used. + /// + private static readonly Counter s_completionTokensCounter = + s_meter.CreateCounter( + name: $"{s_namespace}.tokens.completion", + unit: "{token}", + description: "Number of completion tokens used"); + + /// + /// Instance of to keep track of the total number of tokens used. + /// + private static readonly Counter s_totalTokensCounter = + s_meter.CreateCounter( + name: $"{s_namespace}.tokens.total", + unit: "{token}", + description: "Number of tokens used"); + + internal IReadOnlyDictionary Attributes => this._attributesInternal; + + /// + /// Represents a client for interacting with the Anthropic chat completion models. + /// + /// Model identifier + /// ApiKey for the client + /// Options for the client + /// HttpClient instance used to send HTTP requests + /// Logger instance used for logging (optional) + internal AnthropicClient( + string modelId, + string apiKey, + AnthropicClientOptions options, + HttpClient httpClient, + ILogger? logger = null) + { + Verify.NotNullOrWhiteSpace(modelId); + + Verify.NotNull(options); + Verify.NotNull(httpClient); + + Uri targetUri = httpClient.BaseAddress; + if (httpClient.BaseAddress is null) + { + // If a custom endpoint is not provided, the ApiKey is required + Verify.NotNullOrWhiteSpace(apiKey); + this._apiKey = apiKey; + targetUri = new Uri(AnthropicUrl); + } + + this._httpClient = httpClient; + this._logger = logger ?? 
NullLogger.Instance; + this._modelId = modelId; + this._version = options.Version; + this._endpoint = targetUri; + + this._attributesInternal.Add(AIServiceExtensions.ModelIdKey, modelId); + } + + /// + /// Represents a client for interacting with the Anthropic chat completion models. + /// + /// Model identifier + /// Endpoint for the client + /// Bearer token provider + /// Options for the client + /// HttpClient instance used to send HTTP requests + /// Logger instance used for logging (optional) + internal AnthropicClient( + string modelId, + Uri? endpoint, + Func> bearerTokenProvider, + ClientOptions options, + HttpClient httpClient, + ILogger? logger = null) + { + this._version = options.Version; + + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNull(bearerTokenProvider); + Verify.NotNull(options); + Verify.NotNull(httpClient); + + Uri targetUri = endpoint ?? httpClient.BaseAddress + ?? throw new ArgumentException("Endpoint is required if HttpClient.BaseAddress is not set."); + + this._httpClient = httpClient; + this._logger = logger ?? NullLogger.Instance; + this._bearerTokenProvider = bearerTokenProvider; + this._modelId = modelId; + this._version = options?.Version; + this._endpoint = targetUri; + } + + /// + /// Generates a chat message asynchronously. + /// + /// The chat history containing the conversation data. + /// Optional settings for prompt execution. + /// A kernel instance. + /// A cancellation token to cancel the operation. + /// Returns a list of chat message contents. + internal async Task> GenerateChatMessageAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? 
kernel = null, + CancellationToken cancellationToken = default) + { + var state = this.ValidateInputAndCreateChatCompletionState(chatHistory, executionSettings); + + using var activity = ModelDiagnostics.StartCompletionActivity( + this._endpoint, this._modelId, ModelProvider, chatHistory, state.ExecutionSettings); + + List chatResponses; + AnthropicResponse anthropicResponse; + try + { + anthropicResponse = await this.SendRequestAndReturnValidResponseAsync( + this._endpoint, + state.AnthropicRequest, + cancellationToken) + .ConfigureAwait(false); + + chatResponses = this.GetChatResponseFrom(anthropicResponse); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + activity?.SetCompletionResponse( + chatResponses, + anthropicResponse.Usage?.InputTokens, + anthropicResponse.Usage?.OutputTokens); + + return chatResponses; + } + + /// + /// Generates a stream of chat messages asynchronously. + /// + /// The chat history containing the conversation data. + /// Optional settings for prompt execution. + /// A kernel instance. + /// A cancellation token to cancel the operation. + /// An asynchronous enumerable of streaming chat contents. + internal async IAsyncEnumerable StreamGenerateChatMessageAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var state = this.ValidateInputAndCreateChatCompletionState(chatHistory, executionSettings); + state.AnthropicRequest.Stream = true; + + using var activity = ModelDiagnostics.StartCompletionActivity( + this._endpoint, this._modelId, ModelProvider, chatHistory, state.ExecutionSettings); + + List chatResponses = []; + + HttpRequestMessage? httpRequestMessage = null; + HttpResponseMessage? httpResponseMessage = null; + Stream? 
responseStream = null; + try + { + try + { + httpRequestMessage = await this.CreateHttpRequestAsync(state.AnthropicRequest, this._endpoint).ConfigureAwait(false); + httpResponseMessage = await this.SendRequestAndGetResponseImmediatelyAfterHeadersReadAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); + responseStream = await httpResponseMessage.Content.ReadAsStreamAndTranslateExceptionAsync().ConfigureAwait(false); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + AnthropicResponse? lastAnthropicResponse = null; + await foreach (var streamingResponse in SseJsonParser.ParseAsync(responseStream, cancellationToken).ConfigureAwait(false)) + { + string? content = null; + AnthropicMetadata? metadata = null; + switch (streamingResponse.Type) + { + case "message_start": + Verify.NotNull(streamingResponse.Response); + lastAnthropicResponse = streamingResponse.Response; + metadata = GetResponseMetadata(lastAnthropicResponse); + content = string.Empty; + break; + case "content_block_start" or "content_block_delta": + content = streamingResponse.ContentDelta?.Text ?? string.Empty; + break; + case "message_delta": + Verify.NotNull(lastAnthropicResponse); + metadata = GetResponseMetadata(streamingResponse, lastAnthropicResponse); + content = string.Empty; + break; + case "message_stop": + lastAnthropicResponse = null; + break; + } + + if (lastAnthropicResponse is null || content is null) + { + continue; + } + + var streamingChatMessageContent = new AnthropicStreamingChatMessageContent( + role: lastAnthropicResponse.Role, + content: content, + innerContent: lastAnthropicResponse, + modelId: lastAnthropicResponse.ModelId ?? 
this._modelId, + choiceIndex: streamingResponse.Index, + metadata: metadata); + chatResponses.Add(streamingChatMessageContent); + yield return streamingChatMessageContent; + } + + activity?.EndStreaming(chatResponses); + } + finally + { + httpRequestMessage?.Dispose(); + httpResponseMessage?.Dispose(); + responseStream?.Dispose(); + } + } + + private List GetChatResponseFrom(AnthropicResponse response) + { + var chatMessageContents = this.GetChatMessageContentsFromResponse(response); + this.LogUsage(chatMessageContents); + return chatMessageContents; + } + + private void LogUsage(List chatMessageContents) + { + if (chatMessageContents[0]?.Metadata is not { TotalTokenCount: > 0 } metadata) + { + this.Log(LogLevel.Debug, "Token usage information unavailable."); + return; + } + + this.Log(LogLevel.Information, + "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}.", + metadata.InputTokenCount, + metadata.OutputTokenCount, + metadata.TotalTokenCount); + + if (metadata.InputTokenCount.HasValue) + { + s_promptTokensCounter.Add(metadata.InputTokenCount.Value); + } + + if (metadata.OutputTokenCount.HasValue) + { + s_completionTokensCounter.Add(metadata.OutputTokenCount.Value); + } + + if (metadata.TotalTokenCount.HasValue) + { + s_totalTokensCounter.Add(metadata.TotalTokenCount.Value); + } + } + + private List GetChatMessageContentsFromResponse(AnthropicResponse response) + => response.Contents is null ? 
[] : response.Contents.Select(content => this.GetChatMessageContentFromAnthropicContent(response, content)).ToList(); + + private AnthropicChatMessageContent GetChatMessageContentFromAnthropicContent(AnthropicResponse response, AnthropicContent content) + { + if (!string.Equals(content.Type, "text", StringComparison.OrdinalIgnoreCase)) + { + throw new NotSupportedException($"Content type {content.Type} is not supported yet."); + } + + return new AnthropicChatMessageContent + { + Role = response.Role, + Items = [new TextContent(content.Text ?? string.Empty)], + ModelId = response.ModelId ?? this._modelId, + InnerContent = response, + Metadata = GetResponseMetadata(response) + }; + } + + private static AnthropicMetadata GetResponseMetadata(AnthropicResponse response) + => new() + { + MessageId = response.Id, + FinishReason = response.StopReason, + StopSequence = response.StopSequence, + InputTokenCount = response.Usage?.InputTokens ?? 0, + OutputTokenCount = response.Usage?.OutputTokens ?? 0 + }; + + private static AnthropicMetadata GetResponseMetadata(AnthropicStreamingResponse deltaResponse, AnthropicResponse rootResponse) + => new() + { + MessageId = rootResponse.Id, + FinishReason = deltaResponse.StopMetadata?.StopReason, + StopSequence = deltaResponse.StopMetadata?.StopSequence, + InputTokenCount = deltaResponse.Usage?.InputTokens ?? 0, + OutputTokenCount = deltaResponse.Usage?.OutputTokens ?? 
0 + }; + + private async Task SendRequestAndReturnValidResponseAsync( + Uri endpoint, + AnthropicRequest anthropicRequest, + CancellationToken cancellationToken) + { + using var httpRequestMessage = await this.CreateHttpRequestAsync(anthropicRequest, endpoint).ConfigureAwait(false); + var body = await this.SendRequestAndGetStringBodyAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false); + var response = DeserializeResponse(body); + return response; + } + + private ChatCompletionState ValidateInputAndCreateChatCompletionState( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings) + { + ValidateChatHistory(chatHistory); + + var anthropicExecutionSettings = AnthropicPromptExecutionSettings.FromExecutionSettings(executionSettings); + ValidateMaxTokens(anthropicExecutionSettings.MaxTokens); + anthropicExecutionSettings.ModelId ??= this._modelId; + + this.Log(LogLevel.Trace, "ChatHistory: {ChatHistory}, Settings: {Settings}", + JsonSerializer.Serialize(chatHistory), + JsonSerializer.Serialize(anthropicExecutionSettings)); + + var filteredChatHistory = new ChatHistory(chatHistory.Where(IsAssistantOrUserOrSystem)); + var anthropicRequest = AnthropicRequest.FromChatHistoryAndExecutionSettings(filteredChatHistory, anthropicExecutionSettings); + if (this._endpoint.OriginalString.Equals(AnthropicUrl, StringComparison.Ordinal)) + { + anthropicRequest.Version = null; + anthropicRequest.ModelId = anthropicExecutionSettings.ModelId ?? 
throw new InvalidOperationException("Model ID must be provided."); + } + else + { + // Vertex and Bedrock require the model ID to be null and version to be set + anthropicRequest.Version = this._version; + anthropicRequest.ModelId = null; + } + + return new ChatCompletionState + { + ChatHistory = chatHistory, + ExecutionSettings = anthropicExecutionSettings, + AnthropicRequest = anthropicRequest + }; + + static bool IsAssistantOrUserOrSystem(ChatMessageContent msg) + => msg.Role == AuthorRole.Assistant || msg.Role == AuthorRole.User || msg.Role == AuthorRole.System; + } + + private static void ValidateMaxTokens(int? maxTokens) + { + // If maxTokens is null, it means that the user wants to use the default model value + if (maxTokens is < 1) + { + throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); + } + } + + private static void ValidateChatHistory(ChatHistory chatHistory) + { + Verify.NotNullOrEmpty(chatHistory); + if (chatHistory.All(msg => msg.Role == AuthorRole.System)) + { + throw new InvalidOperationException("Chat history can't contain only system messages."); + } + } + + private async Task SendRequestAndGetStringBodyAsync( + HttpRequestMessage httpRequestMessage, + CancellationToken cancellationToken) + { + using var response = await this._httpClient.SendWithSuccessCheckAsync(httpRequestMessage, cancellationToken) + .ConfigureAwait(false); + var body = await response.Content.ReadAsStringWithExceptionMappingAsync() + .ConfigureAwait(false); + return body; + } + + private async Task SendRequestAndGetResponseImmediatelyAfterHeadersReadAsync( + HttpRequestMessage httpRequestMessage, + CancellationToken cancellationToken) + { + var response = await this._httpClient.SendWithSuccessCheckAsync(httpRequestMessage, HttpCompletionOption.ResponseHeadersRead, cancellationToken) + .ConfigureAwait(false); + return response; + } + + private static T DeserializeResponse(string body) + { + try + { + return 
JsonSerializer.Deserialize(body) ?? throw new JsonException("Response is null"); + } + catch (JsonException exc) + { + throw new KernelException("Unexpected response from model", exc) + { + Data = { { "ResponseData", body } }, + }; + } + } + + private async Task CreateHttpRequestAsync(object requestData, Uri endpoint) + { + var httpRequestMessage = HttpRequest.CreatePostRequest(endpoint, requestData); + if (!httpRequestMessage.Headers.Contains("User-Agent")) + { + httpRequestMessage.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); + } + + if (!httpRequestMessage.Headers.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)) + { + httpRequestMessage.Headers.Add( + HttpHeaderConstant.Names.SemanticKernelVersion, + HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AnthropicClient))); + } + + if (!httpRequestMessage.Headers.Contains("anthropic-version")) + { + httpRequestMessage.Headers.Add("anthropic-version", this._version); + } + + if (this._apiKey is not null && !httpRequestMessage.Headers.Contains("x-api-key")) + { + httpRequestMessage.Headers.Add("x-api-key", this._apiKey); + } + else if (this._bearerTokenProvider is not null + && !httpRequestMessage.Headers.Contains("Authentication") + && await this._bearerTokenProvider().ConfigureAwait(false) is { } bearerKey) + { + httpRequestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", bearerKey); + } + + return httpRequestMessage; + } + + private void Log(LogLevel logLevel, string? message, params object?[] args) + { + if (this._logger.IsEnabled(logLevel)) + { +#pragma warning disable CA2254 // Template should be a constant string. 
+ this._logger.Log(logLevel, message, args); +#pragma warning restore CA2254 + } + } + + private sealed class ChatCompletionState + { + internal ChatHistory ChatHistory { get; set; } = null!; + internal AnthropicRequest AnthropicRequest { get; set; } = null!; + internal AnthropicPromptExecutionSettings ExecutionSettings { get; set; } = null!; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/AuthorRoleConverter.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/AuthorRoleConverter.cs new file mode 100644 index 000000000000..d0f5d51f6a76 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/AuthorRoleConverter.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core; + +internal sealed class AuthorRoleConverter : JsonConverter +{ + public override AuthorRole Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + string? 
role = reader.GetString(); + if (role == null) + { + throw new InvalidOperationException("Unexpected null value for author role"); + } + + if (role.Equals("user", StringComparison.OrdinalIgnoreCase)) + { + return AuthorRole.User; + } + + if (role.Equals("assistant", StringComparison.OrdinalIgnoreCase)) + { + return AuthorRole.Assistant; + } + + throw new JsonException($"Unexpected author role: {role}"); + } + + public override void Write(Utf8JsonWriter writer, AuthorRole value, JsonSerializerOptions options) + { + if (value == AuthorRole.Assistant) + { + writer.WriteStringValue("assistant"); + } + else if (value == AuthorRole.User) + { + writer.WriteStringValue("user"); + } + else + { + throw new JsonException($"Anthropic API doesn't support author role: {value}"); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicContent.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicContent.cs new file mode 100644 index 000000000000..845f81fc366f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicContent.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; + +internal sealed class AnthropicContent +{ + /// + /// Currently supported only base64. + /// + [JsonPropertyName("type")] + public string Type { get; set; } + + /// + /// When type is "text", the text content. + /// + [JsonPropertyName("text")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Text { get; set; } + + /// + /// When type is "image", the source of the image. + /// + [JsonPropertyName("source")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public SourceEntity? 
Source { get; set; } + + [JsonConstructor] + public AnthropicContent(string type) + { + this.Type = type; + } + + internal sealed class SourceEntity + { + /// + /// Currently supported only base64. + /// + [JsonPropertyName("type")] + public string? Type { get; set; } + + /// + /// The media type of the image. + /// + [JsonPropertyName("media_type")] + public string? MediaType { get; set; } + + /// + /// The base64 encoded image data. + /// + [JsonPropertyName("data")] + public string? Data { get; set; } + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs new file mode 100644 index 000000000000..10dc30c74789 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs @@ -0,0 +1,184 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core; + +internal sealed class AnthropicRequest +{ + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonPropertyName("anthropic_version")] + public string? Version { get; set; } + + /// + /// Input messages.
+ /// Our models are trained to operate on alternating user and assistant conversational turns. + /// When creating a new Message, you specify the prior conversational turns with the messages parameter, + /// and the model then generates the next Message in the conversation. + /// Each input message must be an object with a role and content. You can specify a single user-role message, + /// or you can include multiple user and assistant messages. The first message must always use the user role. + /// If the final message uses the assistant role, the response content will continue immediately + /// from the content in that message. This can be used to constrain part of the model's response. + ///
+ [JsonPropertyName("messages")] + public IList Messages { get; set; } = []; + + [JsonPropertyName("model")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ModelId { get; set; } + + [JsonPropertyName("max_tokens")] + public int MaxTokens { get; set; } + + /// + /// A system prompt is a way of providing context and instructions to Anthropic, such as specifying a particular goal or persona. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonPropertyName("system")] + public string? SystemPrompt { get; set; } + + /// + /// Custom text sequences that will cause the model to stop generating. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonPropertyName("stop_sequences")] + public IList? StopSequences { get; set; } + + /// + /// Enables SSE streaming. + /// + [JsonPropertyName("stream")] + public bool Stream { get; set; } + + /// + /// Amount of randomness injected into the response.
+ /// Defaults to 1.0. Ranges from 0.0 to 1.0. Use temperature closer to 0.0 for analytical / multiple choice, and closer to 1.0 for creative and generative tasks.
+ /// Note that even with temperature of 0.0, the results will not be fully deterministic. + ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonPropertyName("temperature")] + public double? Temperature { get; set; } + + /// + /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token + /// in decreasing probability order and cut it off once it reaches a particular probability specified by top_p. + /// You should either alter temperature or top_p, but not both. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonPropertyName("top_p")] + public float? TopP { get; set; } + + /// + /// Only sample from the top K options for each subsequent token. + /// Used to remove "long tail" low probability responses. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonPropertyName("top_k")] + public int? TopK { get; set; } + + [JsonConstructor] + internal AnthropicRequest() { } + + public void AddChatMessage(ChatMessageContent message) + { + Verify.NotNull(this.Messages); + Verify.NotNull(message); + + this.Messages.Add(CreateAnthropicMessageFromChatMessage(message)); + } + + /// + /// Creates a object from the given and . + /// + /// The chat history to be assigned to the . + /// The execution settings to be applied to the . + /// Enables SSE streaming. (optional) + /// A new instance of . 
+ internal static AnthropicRequest FromChatHistoryAndExecutionSettings( + ChatHistory chatHistory, + AnthropicPromptExecutionSettings executionSettings, + bool streamingMode = false) + { + AnthropicRequest request = CreateRequest(chatHistory, executionSettings, streamingMode); + AddMessages(chatHistory.Where(msg => msg.Role != AuthorRole.System), request); + return request; + } + + private static void AddMessages(IEnumerable chatHistory, AnthropicRequest request) + => request.Messages.AddRange(chatHistory.Select(CreateAnthropicMessageFromChatMessage)); + + private static Message CreateAnthropicMessageFromChatMessage(ChatMessageContent message) + { + return new Message + { + Role = message.Role, + Contents = CreateAnthropicMessages(message) + }; + } + + private static AnthropicRequest CreateRequest(ChatHistory chatHistory, AnthropicPromptExecutionSettings executionSettings, bool streamingMode) + { + AnthropicRequest request = new() + { + MaxTokens = executionSettings.MaxTokens ?? throw new InvalidOperationException("Max tokens must be provided."), + SystemPrompt = string.Join("\n", chatHistory + .Where(msg => msg.Role == AuthorRole.System) + .SelectMany(msg => msg.Items) + .OfType() + .Select(content => content.Text)), + StopSequences = executionSettings.StopSequences, + Stream = streamingMode, + Temperature = executionSettings.Temperature, + TopP = executionSettings.TopP, + TopK = executionSettings.TopK + }; + return request; + } + + private static List CreateAnthropicMessages(ChatMessageContent content) + { + return content.Items.Select(GetAnthropicMessageFromKernelContent).ToList(); + } + + private static AnthropicContent GetAnthropicMessageFromKernelContent(KernelContent content) => content switch + { + TextContent textContent => new AnthropicContent("text") { Text = textContent.Text ?? 
string.Empty }, + ImageContent imageContent => CreateAnthropicImageContent(imageContent), + _ => throw new NotSupportedException($"Content type '{content.GetType().Name}' is not supported.") + }; + + private static AnthropicContent CreateAnthropicImageContent(ImageContent imageContent) + { + var dataUri = DataUriParser.Parse(imageContent.DataUri); + if (dataUri.DataFormat?.Equals("base64", StringComparison.OrdinalIgnoreCase) != true) + { + throw new InvalidOperationException("Image content must be base64 encoded."); + } + + return new AnthropicContent("image") + { + Source = new() + { + Type = dataUri.DataFormat, + MediaType = imageContent.MimeType ?? throw new InvalidOperationException("Image content must have a MIME type."), + Data = dataUri.Data ?? throw new InvalidOperationException("Image content must have a data.") + } + }; + } + + internal sealed class Message + { + [JsonConverter(typeof(AuthorRoleConverter))] + [JsonPropertyName("role")] + public AuthorRole Role { get; init; } + + [JsonPropertyName("content")] + public IList Contents { get; init; } = null!; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs new file mode 100644 index 000000000000..9585da07f56a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core; + +/// +/// Represents the response from the Anthropic API. +/// https://docs.anthropic.com/en/api/messages +/// +internal sealed class AnthropicResponse +{ + /// + /// Unique object identifier. 
+ /// + [JsonRequired] + [JsonPropertyName("id")] + public string Id { get; init; } = null!; + + /// + /// Object type. + /// + [JsonRequired] + [JsonPropertyName("type")] + public string Type { get; init; } = null!; + + /// + /// Conversational role of the generated message. + /// + [JsonRequired] + [JsonPropertyName("role")] + [JsonConverter(typeof(AuthorRoleConverter))] + public AuthorRole Role { get; init; } + + /// + /// Content generated by the model. + /// This is an array of content blocks, each of which has a type that determines its shape. + /// + [JsonPropertyName("content")] + public IReadOnlyList? Contents { get; init; } + + /// + /// The model that handled the request. + /// + [JsonRequired] + [JsonPropertyName("model")] + public string ModelId { get; init; } = null!; + + /// + /// The reason that we stopped. + /// + [JsonPropertyName("stop_reason")] + public AnthropicFinishReason? StopReason { get; init; } + + /// + /// Which custom stop sequence was generated, if any. + /// This value will be a non-null string if one of your custom stop sequences was generated. + /// + [JsonPropertyName("stop_sequence")] + public string? StopSequence { get; init; } + + /// + /// Billing and rate-limit usage. + /// + [JsonRequired] + [JsonPropertyName("usage")] + public AnthropicUsage Usage { get; init; } = null!; +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicStreamingResponse.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicStreamingResponse.cs new file mode 100644 index 000000000000..1a41fa3edf91 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicStreamingResponse.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models; + +/// +/// Represents the response from the Anthropic streaming API. 
+/// +/// +internal sealed class AnthropicStreamingResponse +{ + /// + /// SSE data type. + /// + [JsonRequired] + [JsonPropertyName("type")] + public string Type { get; init; } = null!; + + /// + /// Response message, only if the type is "message_start", otherwise null. + /// + [JsonPropertyName("message")] + public AnthropicResponse? Response { get; init; } + + /// + /// Index of a message. + /// + [JsonPropertyName("index")] + public int Index { get; init; } + + // Fields are assigned via reflection +#pragma warning disable CS0649 // Field is never assigned to, and will always have its default value +#pragma warning disable IDE0044 // Add readonly modifier + [JsonPropertyName("content_block")] + [JsonInclude] + private AnthropicContent? _contentBlock; + + [JsonPropertyName("delta")] + [JsonInclude] + private JsonNode? _delta; +#pragma warning restore IDE0044 +#pragma warning restore CS0649 + + /// + /// Delta of anthropic content, only if the type is "content_block_start" or "content_block_delta", otherwise null. + /// + public AnthropicContent? ContentDelta => + this.Type switch + { + "content_block_start" => this._contentBlock, + "content_block_delta" => this._delta?.Deserialize(), + _ => null + }; + + /// + /// Usage metadata, only if the type is "message_delta", otherwise null. + /// + public AnthropicUsage? Usage { get; init; } + + /// + /// Stop reason metadata, only if the type is "message_delta", otherwise null. + /// + public StopDelta? StopMetadata => this.Type == "message_delta" ? this._delta?.Deserialize() : null; + + /// + /// Represents the reason that message streaming stopped. + /// + public sealed class StopDelta + { + /// + /// The reason that we stopped. + /// + [JsonPropertyName("stop_reason")] + public AnthropicFinishReason? StopReason { get; init; } + + /// + /// Which custom stop sequence was generated, if any. + /// This value will be a non-null string if one of your custom stop sequences was generated. 
+ /// + [JsonPropertyName("stop_sequence")] + public string? StopSequence { get; init; } + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicKernelBuilderExtensions.cs new file mode 100644 index 000000000000..dbd70a2ca5db --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicKernelBuilderExtensions.cs @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel; + +/// +/// Extensions for adding Anthropic generation services to the application. +/// +public static class AnthropicKernelBuilderExtensions +{ + /// + /// Add Anthropic Chat Completion and Text Generation services to the kernel builder. + /// + /// The kernel builder. + /// Model identifier. + /// API key. + /// Optional options for the anthropic client + /// The optional custom HttpClient. + /// Service identifier. + /// The updated kernel builder. + public static IKernelBuilder AddAnthropicChatCompletion( + this IKernelBuilder builder, + string modelId, + string apiKey, + AnthropicClientOptions? options = null, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AnthropicChatCompletionService( + modelId: modelId, + apiKey: apiKey, + options: options ?? 
new AnthropicClientOptions(), + httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + loggerFactory: serviceProvider.GetService())); + + return builder; + } + + /// + /// Add Anthropic Chat Completion and Text Generation services to the kernel builder. + /// + /// The kernel builder. + /// Model identifier. + /// Bearer token provider. + /// Vertex AI Anthropic endpoint. + /// Optional options for the anthropic client + /// Service identifier. + /// The updated kernel builder. + public static IKernelBuilder AddAnthropicVertextAIChatCompletion( + this IKernelBuilder builder, + string modelId, + Func> bearerTokenProvider, + Uri? endpoint = null, + VertexAIAnthropicClientOptions? options = null, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AnthropicChatCompletionService( + modelId: modelId, + bearerTokenProvider: bearerTokenProvider, + options: options ?? new VertexAIAnthropicClientOptions(), + endpoint: endpoint, + httpClient: HttpClientProvider.GetHttpClient(serviceProvider), + loggerFactory: serviceProvider.GetService())); + + return builder; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicServiceCollectionExtensions.cs new file mode 100644 index 000000000000..83ed98bfafcf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Extensions/AnthropicServiceCollectionExtensions.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel; + +/// +/// Extensions for adding Anthropic generation services to the application. +/// +public static class AnthropicServiceCollectionExtensions +{ + /// + /// Add Anthropic Chat Completion to the added in service collection. + /// + /// The target service collection. + /// Model identifier. + /// API key. + /// Optional options for the anthropic client + /// Service identifier. + /// The updated service collection. + public static IServiceCollection AddAnthropicChatCompletion( + this IServiceCollection services, + string modelId, + string apiKey, + AnthropicClientOptions? options = null, + string? serviceId = null) + { + Verify.NotNull(services); + + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AnthropicChatCompletionService( + modelId: modelId, + apiKey: apiKey, + options: options, + httpClient: HttpClientProvider.GetHttpClient(serviceProvider), + loggerFactory: serviceProvider.GetService())); + + return services; + } + + /// + /// Add Anthropic Chat Completion to the added in service collection. + /// + /// The target service collection. + /// Model identifier. + /// Bearer token provider. + /// Vertex AI Anthropic endpoint. + /// Optional options for the anthropic client + /// Service identifier. + /// The updated service collection. + public static IServiceCollection AddAnthropicVertexAIChatCompletion( + this IServiceCollection services, + string modelId, + Func> bearerTokenProvider, + Uri? endpoint = null, + VertexAIAnthropicClientOptions? options = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AnthropicChatCompletionService( + modelId: modelId, + bearerTokenProvider: bearerTokenProvider, + endpoint: endpoint, + options: options ?? new VertexAIAnthropicClientOptions(), + httpClient: HttpClientProvider.GetHttpClient(serviceProvider), + loggerFactory: serviceProvider.GetService())); + + return services; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicChatMessageContent.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicChatMessageContent.cs new file mode 100644 index 000000000000..4f70b5879d83 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicChatMessageContent.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Anthropic specialized chat message content +/// +public sealed class AnthropicChatMessageContent : ChatMessageContent +{ + /// + /// Creates a new instance of the class + /// + [JsonConstructor] + internal AnthropicChatMessageContent() { } + + /// + /// The metadata associated with the content. + /// + public new AnthropicMetadata? Metadata + { + get => base.Metadata as AnthropicMetadata; + init => base.Metadata = value; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicFinishReason.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicFinishReason.cs new file mode 100644 index 000000000000..ae1313d95663 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicFinishReason.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents a Anthropic Finish Reason. +/// +[JsonConverter(typeof(AnthropicFinishReasonConverter))] +public readonly struct AnthropicFinishReason : IEquatable +{ + /// + /// Natural stop point of the model or provided stop sequence. + /// + public static AnthropicFinishReason Stop { get; } = new("end_turn"); + + /// + /// The maximum number of tokens as specified in the request was reached. + /// + public static AnthropicFinishReason MaxTokens { get; } = new("max_tokens"); + + /// + /// One of your provided custom stop sequences was generated. + /// + public static AnthropicFinishReason StopSequence { get; } = new("stop_sequence"); + + /// + /// The model invoked one or more tools + /// + public static AnthropicFinishReason ToolUse { get; } = new("tool_use"); + + /// + /// Gets the label of the property. + /// Label is used for serialization. + /// + public string Label { get; } + + /// + /// Represents a Anthropic Finish Reason. + /// + [JsonConstructor] + public AnthropicFinishReason(string label) + { + Verify.NotNullOrWhiteSpace(label, nameof(label)); + this.Label = label; + } + + /// + /// Represents the equality operator for comparing two instances of . + /// + /// The left instance to compare. + /// The right instance to compare. + /// true if the two instances are equal; otherwise, false. + public static bool operator ==(AnthropicFinishReason left, AnthropicFinishReason right) + => left.Equals(right); + + /// + /// Represents the inequality operator for comparing two instances of . + /// + /// The left instance to compare. + /// The right instance to compare. + /// true if the two instances are not equal; otherwise, false. 
+ public static bool operator !=(AnthropicFinishReason left, AnthropicFinishReason right) + => !(left == right); + + /// + public bool Equals(AnthropicFinishReason other) + => string.Equals(this.Label, other.Label, StringComparison.OrdinalIgnoreCase); + + /// + public override bool Equals(object? obj) + => obj is AnthropicFinishReason other && this == other; + + /// + public override int GetHashCode() + => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label ?? string.Empty); + + /// + public override string ToString() => this.Label ?? string.Empty; +} + +internal sealed class AnthropicFinishReasonConverter : JsonConverter +{ + public override AnthropicFinishReason Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + => new(reader.GetString()!); + + public override void Write(Utf8JsonWriter writer, AnthropicFinishReason value, JsonSerializerOptions options) + => writer.WriteStringValue(value.Label); +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicMetadata.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicMetadata.cs new file mode 100644 index 000000000000..c7786537ddd0 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicMetadata.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents the metadata associated with a Anthropic response. +/// +public sealed class AnthropicMetadata : ReadOnlyDictionary +{ + internal AnthropicMetadata() : base(new Dictionary()) { } + + private AnthropicMetadata(IDictionary dictionary) : base(dictionary) { } + + /// + /// Unique message object identifier. + /// + public string MessageId + { + get => this.GetValueFromDictionary(nameof(this.MessageId)) as string ?? 
string.Empty; + internal init => this.SetValueInDictionary(value, nameof(this.MessageId)); + } + + /// + /// The reason generating was stopped. + /// + public AnthropicFinishReason? FinishReason + { + get => (AnthropicFinishReason?)this.GetValueFromDictionary(nameof(this.FinishReason)); + internal init => this.SetValueInDictionary(value, nameof(this.FinishReason)); + } + + /// + /// Which custom stop sequence was generated, if any. + /// + public string? StopSequence + { + get => this.GetValueFromDictionary(nameof(this.StopSequence)) as string; + internal init => this.SetValueInDictionary(value, nameof(this.StopSequence)); + } + + /// + /// The number of input tokens which were used. + /// + public int? InputTokenCount + { + get => this.GetValueFromDictionary(nameof(this.InputTokenCount)) as int?; + internal init => this.SetValueInDictionary(value, nameof(this.InputTokenCount)); + } + + /// + /// The number of output tokens which were used. + /// + public int? OutputTokenCount + { + get => this.GetValueFromDictionary(nameof(this.OutputTokenCount)) as int?; + internal init => this.SetValueInDictionary(value, nameof(this.OutputTokenCount)); + } + + /// + /// Represents the total count of tokens in the Anthropic response, + /// which is calculated by summing the input token count and the output token count. + /// + public int? TotalTokenCount => this.InputTokenCount + this.OutputTokenCount; + + /// + /// Converts a dictionary to a object. + /// + public static AnthropicMetadata FromDictionary(IReadOnlyDictionary dictionary) => dictionary switch + { + null => throw new ArgumentNullException(nameof(dictionary)), + AnthropicMetadata metadata => metadata, + IDictionary metadata => new AnthropicMetadata(metadata), + _ => new AnthropicMetadata(dictionary.ToDictionary(pair => pair.Key, pair => pair.Value)) + }; + + private void SetValueInDictionary(object? value, string propertyName) + => this.Dictionary[propertyName] = value; + + private object? 
GetValueFromDictionary(string propertyName) + => this.Dictionary.TryGetValue(propertyName, out var value) ? value : null; +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicStreamingChatMessageContent.cs new file mode 100644 index 000000000000..37fd28be42cf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicStreamingChatMessageContent.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Anthropic specialized streaming chat message content +/// +public sealed class AnthropicStreamingChatMessageContent : StreamingChatMessageContent +{ + /// + /// Creates a new instance of the class + /// + /// Role of the author of the message + /// Content of the message + /// Inner content object reference + /// Choice index + /// The model ID used to generate the content + /// Encoding of the chat + /// Additional metadata + [JsonConstructor] + public AnthropicStreamingChatMessageContent( + AuthorRole? role, + string? content, + object? innerContent = null, + int choiceIndex = 0, + string? modelId = null, + Encoding? encoding = null, + IReadOnlyDictionary? metadata = null) + : base(role, content, innerContent, choiceIndex, modelId, encoding, metadata) { } + + /// + /// The metadata associated with the content. + /// + public new AnthropicMetadata? 
Metadata + { + get => base.Metadata as AnthropicMetadata; + init => base.Metadata = value; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs new file mode 100644 index 000000000000..e7451046c3dd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Billing and rate-limit usage.
+/// Anthropic's API bills and rate-limits by token counts, as tokens represent the underlying cost to our systems.
+/// Under the hood, the API transforms requests into a format suitable for the model. +/// The model's output then goes through a parsing stage before becoming an API response. +/// As a result, the token counts in usage will not match one-to-one with the exact visible content of an API request or response.
+/// For example, OutputTokens will be non-zero, even for an empty string response from Anthropic. +///
+public sealed class AnthropicUsage +{ + /// + /// The number of input tokens which were used. + /// + [JsonPropertyName("input_tokens")] + public int? InputTokens { get; init; } + + /// + /// The number of output tokens which were used + /// + [JsonRequired] + [JsonPropertyName("output_tokens")] + public int? OutputTokens { get; init; } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/AmazonBedrockAnthropicClientOptions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/AmazonBedrockAnthropicClientOptions.cs new file mode 100644 index 000000000000..e9b4d1c4ea99 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/AmazonBedrockAnthropicClientOptions.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents the options for configuring the Anthropic client with Amazon Bedrock provider. +/// +public sealed class AmazonBedrockAnthropicClientOptions : ClientOptions +{ + private const ServiceVersion LatestVersion = ServiceVersion.V2023_05_31; + + /// The version of the service to use. + public enum ServiceVersion + { + /// Service version "bedrock-2023-05-31". + V2023_05_31, + } + + /// + /// Initializes new instance of + /// + /// + /// This parameter is optional. + /// Default value is .
+ /// + /// Provided version is not supported. + public AmazonBedrockAnthropicClientOptions(ServiceVersion version = LatestVersion) : base(version switch + { + ServiceVersion.V2023_05_31 => "bedrock-2023-05-31", + _ => throw new NotSupportedException("Unsupported service version") + }) + { + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/AnthropicClientOptions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/AnthropicClientOptions.cs new file mode 100644 index 000000000000..ad070b036b1e --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/AnthropicClientOptions.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents the options for configuring the Anthropic client with Anthropic provider. +/// +public sealed class AnthropicClientOptions : ClientOptions +{ + internal const ServiceVersion LatestVersion = ServiceVersion.V2023_06_01; + + /// The version of the service to use. + public enum ServiceVersion + { + /// Service version "2023-01-01". + V2023_01_01, + + /// Service version "2023-06-01". + V2023_06_01, + } + + /// + /// Initializes new instance of + /// + /// + /// This parameter is optional. + /// Default value is .
+ /// + /// Provided version is not supported. + public AnthropicClientOptions(ServiceVersion version = LatestVersion) : base(version switch + { + ServiceVersion.V2023_01_01 => "2023-01-01", + ServiceVersion.V2023_06_01 => "2023-06-01", + _ => throw new NotSupportedException("Unsupported service version") + }) + { + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/ClientOptions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/ClientOptions.cs new file mode 100644 index 000000000000..bd04ee4345e9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/ClientOptions.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents the options for configuring the Anthropic client. +/// +public abstract class ClientOptions +{ + internal string Version { get; init; } + + /// + /// Represents the options for configuring the Anthropic client. + /// + internal protected ClientOptions(string version) + { + this.Version = version; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/VertexAIAnthropicClientOptions.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/VertexAIAnthropicClientOptions.cs new file mode 100644 index 000000000000..4f8075226795 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Options/VertexAIAnthropicClientOptions.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents the options for configuring the Anthropic client with Google VertexAI provider. +/// +public sealed class VertexAIAnthropicClientOptions : ClientOptions +{ + private const ServiceVersion LatestVersion = ServiceVersion.V2023_10_16; + + /// The version of the service to use. + public enum ServiceVersion + { + /// Service version "vertex-2023-10-16". 
+ V2023_10_16, + } + + /// + /// Initializes new instance of + /// + /// + /// This parameter is optional. + /// Default value is .
+ /// + /// Provided version is not supported. + public VertexAIAnthropicClientOptions(ServiceVersion version = LatestVersion) : base(version switch + { + ServiceVersion.V2023_10_16 => "vertex-2023-10-16", + _ => throw new NotSupportedException("Unsupported service version") + }) + { + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Settings/AnthropicPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Settings/AnthropicPromptExecutionSettings.cs new file mode 100644 index 000000000000..e1af01ef5865 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Settings/AnthropicPromptExecutionSettings.cs @@ -0,0 +1,160 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents the settings for executing a prompt with the Anthropic models. +/// +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class AnthropicPromptExecutionSettings : PromptExecutionSettings +{ + private double? _temperature; + private float? _topP; + private int? _topK; + private int? _maxTokens; + private IList? _stopSequences; + + /// + /// Default max tokens for a text generation. + /// + public static int DefaultTextMaxTokens { get; } = 1024; + + /// + /// Temperature controls the randomness of the completion. + /// The higher the temperature, the more random the completion. + /// Range is 0.0 to 1.0. + /// + [JsonPropertyName("temperature")] + public double? Temperature + { + get => this._temperature; + set + { + this.ThrowIfFrozen(); + this._temperature = value; + } + } + + /// + /// TopP controls the diversity of the completion. + /// The higher the TopP, the more diverse the completion. + /// + [JsonPropertyName("top_p")] + public float? 
TopP + { + get => this._topP; + set + { + this.ThrowIfFrozen(); + this._topP = value; + } + } + + /// + /// Gets or sets the value of the TopK property. + /// The TopK property represents the maximum value of a collection or dataset. + /// + [JsonPropertyName("top_k")] + public int? TopK + { + get => this._topK; + set + { + this.ThrowIfFrozen(); + this._topK = value; + } + } + + /// + /// The maximum number of tokens to generate in the completion. + /// + [JsonPropertyName("max_tokens")] + public int? MaxTokens + { + get => this._maxTokens; + set + { + this.ThrowIfFrozen(); + this._maxTokens = value; + } + } + + /// + /// Sequences where the completion will stop generating further tokens. + /// Maximum number of stop sequences is 5. + /// + [JsonPropertyName("stop_sequences")] + public IList? StopSequences + { + get => this._stopSequences; + set + { + this.ThrowIfFrozen(); + this._stopSequences = value; + } + } + + /// + public override void Freeze() + { + if (this.IsFrozen) + { + return; + } + + base.Freeze(); + + if (this._stopSequences is not null) + { + this._stopSequences = new ReadOnlyCollection(this._stopSequences); + } + } + + /// + public override PromptExecutionSettings Clone() + { + return new AnthropicPromptExecutionSettings() + { + ModelId = this.ModelId, + ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, + Temperature = this.Temperature, + TopP = this.TopP, + TopK = this.TopK, + MaxTokens = this.MaxTokens, + StopSequences = this.StopSequences is not null ? new List(this.StopSequences) : null, + }; + } + + /// + /// Converts a object to a object. + /// + /// The object to convert. + /// + /// The converted object. If is null, + /// a new instance of is returned. If + /// is already a object, it is cast and returned. Otherwise, the method + /// tries to deserialize to a object. + /// If deserialization is successful, the converted object is returned. 
If deserialization fails or the converted object + /// is null, an is thrown. + /// + public static AnthropicPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings) + { + switch (executionSettings) + { + case null: + return new AnthropicPromptExecutionSettings { MaxTokens = DefaultTextMaxTokens }; + case AnthropicPromptExecutionSettings settings: + return settings; + } + + var json = JsonSerializer.Serialize(executionSettings); + return JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive)!; + } +} diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Services/AnthropicChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Anthropic/Services/AnthropicChatCompletionService.cs new file mode 100644 index 000000000000..ac52bde8aeaf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Anthropic/Services/AnthropicChatCompletionService.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic.Core; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Connectors.Anthropic; + +/// +/// Represents a chat completion service using Anthropic API. +/// +public sealed class AnthropicChatCompletionService : IChatCompletionService +{ + private readonly AnthropicClient _client; + + /// + public IReadOnlyDictionary Attributes => this._client.Attributes; + + /// + /// Initializes a new instance of the class. + /// + /// Model identifier. + /// API key. + /// Options for the anthropic client + /// Optional HTTP client to be used for communication with the Claude API. + /// Optional logger factory to be used for logging. + public AnthropicChatCompletionService( + string modelId, + string apiKey, + AnthropicClientOptions? 
options = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._client = new AnthropicClient( + modelId: modelId, + apiKey: apiKey, + options: options ?? new AnthropicClientOptions(), + httpClient: HttpClientProvider.GetHttpClient(httpClient), + logger: loggerFactory?.CreateLogger(typeof(AnthropicChatCompletionService))); + } + + /// + /// Initializes a new instance of the class. + /// + /// Model identifier. + /// Bearer token provider. + /// Options for the anthropic client + /// Claude API endpoint. + /// Optional HTTP client to be used for communication with the Claude API. + /// Optional logger factory to be used for logging. + public AnthropicChatCompletionService( + string modelId, + Func> bearerTokenProvider, + ClientOptions options, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._client = new AnthropicClient( + modelId: modelId, + bearerTokenProvider: bearerTokenProvider, + options: options, + endpoint: endpoint, + httpClient: HttpClientProvider.GetHttpClient(httpClient), + logger: loggerFactory?.CreateLogger(typeof(AnthropicChatCompletionService))); + } + + /// + public Task> GetChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + return this._client.GenerateChatMessageAsync(chatHistory, executionSettings, kernel, cancellationToken); + } + + /// + public IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? 
kernel = null, + CancellationToken cancellationToken = default) + { + return this._client.StreamGenerateChatMessageAsync(chatHistory, executionSettings, kernel, cancellationToken); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs new file mode 100644 index 000000000000..aa0a572ea1e9 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs @@ -0,0 +1,381 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Linq; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using xRetry; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.Anthropic; + +public sealed class AnthropicChatCompletionTests(ITestOutputHelper output) : TestBase(output) +{ + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatGenerationReturnsValidResponseAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?"); + chatHistory.AddAssistantMessage("I'm doing well, thanks for asking."); + chatHistory.AddUserMessage("Call me by my name and expand this abbreviation: LLM"); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + Assert.NotNull(response.Content); + this.Output.WriteLine(response.Content); + Assert.Contains("Large Language Model", response.Content, 
StringComparison.OrdinalIgnoreCase); + Assert.Contains("Brandon", response.Content, StringComparison.OrdinalIgnoreCase); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatStreamingReturnsValidResponseAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?"); + chatHistory.AddAssistantMessage("I'm doing well, thanks for asking."); + chatHistory.AddUserMessage("Call me by my name and write a long story about my name."); + + var sut = this.GetChatService(serviceType); + + // Act + var response = + await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotEmpty(response); + Assert.True(response.Count > 1); + var message = string.Concat(response.Select(c => c.Content)); + Assert.False(string.IsNullOrWhiteSpace(message)); + this.Output.WriteLine(message); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatGenerationVisionBinaryDataAsync(ServiceType serviceType) + { + // Arrange + Memory image = await File.ReadAllBytesAsync("./TestData/test_image_001.jpg"); + var chatHistory = new ChatHistory(); + var messageContent = new ChatMessageContent(AuthorRole.User, items: + [ + new TextContent("This is an image with a car. Which color is it? 
You can chose from red, blue, green, and yellow"), + new ImageContent(image, "image/jpeg") + ]); + chatHistory.Add(messageContent); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + Assert.NotNull(response.Content); + this.Output.WriteLine(response.Content); + Assert.Contains("green", response.Content, StringComparison.OrdinalIgnoreCase); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatStreamingVisionBinaryDataAsync(ServiceType serviceType) + { + // Arrange + Memory image = await File.ReadAllBytesAsync("./TestData/test_image_001.jpg"); + var chatHistory = new ChatHistory(); + var messageContent = new ChatMessageContent(AuthorRole.User, items: + [ + new TextContent("This is an image with a car. Which color is it? 
You can chose from red, blue, green, and yellow"), + new ImageContent(image, "image/jpeg") + ]); + chatHistory.Add(messageContent); + + var sut = this.GetChatService(serviceType); + + // Act + var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotEmpty(responses); + var message = string.Concat(responses.Select(c => c.Content)); + Assert.False(string.IsNullOrWhiteSpace(message)); + this.Output.WriteLine(message); + Assert.Contains("green", message, StringComparison.OrdinalIgnoreCase); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test needs setup first.")] + [InlineData(ServiceType.VertexAI, Skip = "This test needs setup first.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test needs setup first.")] + public async Task ChatGenerationVisionUriAsync(ServiceType serviceType) + { + // Arrange + Uri imageUri = new("gs://generativeai-downloads/images/scones.jpg"); // needs setup + var chatHistory = new ChatHistory(); + var messageContent = new ChatMessageContent(AuthorRole.User, items: + [ + new TextContent("This is an image with a car. Which color is it? 
You can chose from red, blue, green, and yellow"), + new ImageContent(imageUri) { MimeType = "image/jpeg" } + ]); + chatHistory.Add(messageContent); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + Assert.NotNull(response.Content); + this.Output.WriteLine(response.Content); + Assert.Contains("green", response.Content, StringComparison.OrdinalIgnoreCase); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test needs setup first.")] + [InlineData(ServiceType.VertexAI, Skip = "This test needs setup first.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test needs setup first.")] + public async Task ChatStreamingVisionUriAsync(ServiceType serviceType) + { + // Arrange + Uri imageUri = new("gs://generativeai-downloads/images/scones.jpg"); // needs setup + var chatHistory = new ChatHistory(); + var messageContent = new ChatMessageContent(AuthorRole.User, items: + [ + new TextContent("This is an image with a car. Which color is it? 
You can chose from red, blue, green, and yellow"), + new ImageContent(imageUri) { MimeType = "image/jpeg" } + ]); + chatHistory.Add(messageContent); + + var sut = this.GetChatService(serviceType); + + // Act + var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + Assert.NotEmpty(responses); + var message = string.Concat(responses.Select(c => c.Content)); + Assert.False(string.IsNullOrWhiteSpace(message)); + this.Output.WriteLine(message); + Assert.Contains("green", message, StringComparison.OrdinalIgnoreCase); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatGenerationReturnsUsedTokensAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?"); + chatHistory.AddAssistantMessage("I'm doing well, thanks for asking."); + chatHistory.AddUserMessage("Call me by my name and expand this abbreviation: LLM"); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + var metadata = response.Metadata as AnthropicMetadata; + Assert.NotNull(metadata); + foreach ((string? key, object? 
value) in metadata) + { + this.Output.WriteLine($"{key}: {JsonSerializer.Serialize(value)}"); + } + + Assert.True(metadata.TotalTokenCount > 0); + Assert.True(metadata.InputTokenCount > 0); + Assert.True(metadata.OutputTokenCount > 0); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatStreamingReturnsUsedTokensAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?"); + chatHistory.AddAssistantMessage("I'm doing well, thanks for asking."); + chatHistory.AddUserMessage("Call me by my name and expand this abbreviation: LLM"); + + var sut = this.GetChatService(serviceType); + + // Act + var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + var metadata = responses + .Where(c => c.Metadata is not null) + .Select(c => c.Metadata) + .Cast().ToList(); + Assert.NotEmpty(metadata); + this.Output.WriteLine($"TotalTokenCount: {metadata.Sum(m => m.TotalTokenCount)}"); + this.Output.WriteLine($"InputTokenCount: {metadata.Sum(m => m.InputTokenCount)}"); + this.Output.WriteLine($"OutputTokenCount: {metadata.Sum(m => m.OutputTokenCount)}"); + Assert.True(metadata.Sum(m => m.TotalTokenCount) > 0); + Assert.True(metadata.Sum(m => m.InputTokenCount) > 0); + Assert.True(metadata.Sum(m => m.OutputTokenCount) > 0); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatGenerationReturnsStopFinishReasonAsync(ServiceType serviceType) + { + // 
Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?"); + chatHistory.AddAssistantMessage("I'm doing well, thanks for asking."); + chatHistory.AddUserMessage("Call me by my name and expand this abbreviation: LLM"); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + var metadata = response.Metadata as AnthropicMetadata; + Assert.NotNull(metadata); + this.Output.WriteLine($"FinishReason: {metadata.FinishReason}"); + Assert.Equal(AnthropicFinishReason.Stop, metadata.FinishReason); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This test is for manual verification.")] + public async Task ChatStreamingReturnsStopFinishReasonAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?"); + chatHistory.AddAssistantMessage("I'm doing well, thanks for asking."); + chatHistory.AddUserMessage("Call me by my name and expand this abbreviation: LLM"); + + var sut = this.GetChatService(serviceType); + + // Act + var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + var metadata = responses.Last().Metadata as AnthropicMetadata; + Assert.NotNull(metadata); + this.Output.WriteLine($"FinishReason: {metadata.FinishReason}"); + Assert.Equal(AnthropicFinishReason.Stop, metadata.FinishReason); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.VertexAI, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This can fail. 
Anthropic does not support this feature yet.")] + public async Task ChatGenerationOnlyAssistantMessagesAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddAssistantMessage("I'm very thirsty."); + chatHistory.AddAssistantMessage("Could you give me a glass of..."); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + string[] words = ["water", "juice", "milk", "soda", "tea", "coffee", "beer", "wine"]; + this.Output.WriteLine(response.Content); + Assert.Contains(words, word => response.Content!.Contains(word, StringComparison.OrdinalIgnoreCase)); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.VertexAI, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This can fail. Anthropic does not support this feature yet.")] + public async Task ChatStreamingOnlyAssistantMessagesAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddAssistantMessage("I'm very thirsty."); + chatHistory.AddAssistantMessage("Could you give me a glass of..."); + + var sut = this.GetChatService(serviceType); + + // Act + var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + string[] words = ["water", "juice", "milk", "soda", "tea", "coffee", "beer", "wine"]; + Assert.NotEmpty(responses); + var message = string.Concat(responses.Select(c => c.Content)); + this.Output.WriteLine(message); + Assert.Contains(words, word => message.Contains(word, StringComparison.OrdinalIgnoreCase)); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.VertexAI, Skip = "This can fail. 
Anthropic does not support this feature yet.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This can fail. Anthropic does not support this feature yet.")] + public async Task ChatGenerationOnlyUserMessagesAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("I'm very thirsty."); + chatHistory.AddUserMessage("Could you give me a glass of..."); + + var sut = this.GetChatService(serviceType); + + // Act + var response = await sut.GetChatMessageContentAsync(chatHistory); + + // Assert + string[] words = ["water", "juice", "milk", "soda", "tea", "coffee", "beer", "wine"]; + this.Output.WriteLine(response.Content); + Assert.Contains(words, word => response.Content!.Contains(word, StringComparison.OrdinalIgnoreCase)); + } + + [RetryTheory] + [InlineData(ServiceType.Anthropic, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.VertexAI, Skip = "This can fail. Anthropic does not support this feature yet.")] + [InlineData(ServiceType.AmazonBedrock, Skip = "This can fail. 
Anthropic does not support this feature yet.")] + public async Task ChatStreamingOnlyUserMessagesAsync(ServiceType serviceType) + { + // Arrange + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("I'm very thirsty."); + chatHistory.AddUserMessage("Could you give me a glass of..."); + + var sut = this.GetChatService(serviceType); + + // Act + var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync(); + + // Assert + string[] words = ["water", "juice", "milk", "soda", "tea", "coffee", "beer", "wine"]; + Assert.NotEmpty(responses); + var message = string.Concat(responses.Select(c => c.Content)); + this.Output.WriteLine(message); + Assert.Contains(words, word => message.Contains(word, StringComparison.OrdinalIgnoreCase)); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Anthropic/TestBase.cs b/dotnet/src/IntegrationTests/Connectors/Anthropic/TestBase.cs new file mode 100644 index 000000000000..963b719503ee --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Anthropic/TestBase.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Anthropic; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.Anthropic; + +public abstract class TestBase(ITestOutputHelper output) +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddUserSecrets() + .AddEnvironmentVariables() + .Build(); + + protected ITestOutputHelper Output { get; } = output; + + protected IChatCompletionService GetChatService(ServiceType serviceType) => serviceType switch + { + ServiceType.Anthropic => new AnthropicChatCompletionService(this.AnthropicGetModel(), this.AnthropicGetApiKey(), new()), + ServiceType.VertexAI => new AnthropicChatCompletionService(this.VertexAIGetModel(), this.VertexAIGetBearerKey(), new VertexAIAnthropicClientOptions(), this.VertexAIGetEndpoint()), + ServiceType.AmazonBedrock => new AnthropicChatCompletionService(this.VertexAIGetModel(), this.AmazonBedrockGetBearerKey(), new AmazonBedrockAnthropicClientOptions(), this.VertexAIGetEndpoint()), + _ => throw new ArgumentOutOfRangeException(nameof(serviceType), serviceType, null) + }; + + public enum ServiceType + { + Anthropic, + VertexAI, + AmazonBedrock + } + + private string AnthropicGetModel() => this._configuration.GetSection("Anthropic:ModelId").Get()!; + private string AnthropicGetApiKey() => this._configuration.GetSection("Anthropic:ApiKey").Get()!; + private string VertexAIGetModel() => this._configuration.GetSection("VertexAI:Anthropic:ModelId").Get()!; + private Uri VertexAIGetEndpoint() => new(this._configuration.GetSection("VertexAI:Anthropic:Endpoint").Get()!); + private Func> VertexAIGetBearerKey() => () => 
ValueTask.FromResult(this._configuration.GetSection("VertexAI:BearerKey").Get()!); + private Func> AmazonBedrockGetBearerKey() => () => ValueTask.FromResult(this._configuration.GetSection("AmazonBedrock:Anthropic:BearerKey").Get()!); + private string AmazonBedrockGetModel() => this._configuration.GetSection("AmazonBedrock:Anthropic:ModelId").Get()!; + private Uri AmazonBedrockGetEndpoint() => new(this._configuration.GetSection("AmazonBedrock:Anthropic:Endpoint").Get()!); +} diff --git a/dotnet/src/IntegrationTests/Connectors/Google/EmbeddingGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Google/EmbeddingGenerationTests.cs index 79fc5db80aff..a3b4716174db 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/EmbeddingGenerationTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/EmbeddingGenerationTests.cs @@ -8,7 +8,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Google; -public sealed class EmbeddingGenerationTests(ITestOutputHelper output) : TestsBase(output) +public sealed class EmbeddingGenerationTests(ITestOutputHelper output) : TestBase(output) { [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] diff --git a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs index 5732a3e4719a..615bb29f0dc8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs @@ -14,7 +14,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Google.Gemini; -public sealed class GeminiChatCompletionTests(ITestOutputHelper output) : TestsBase(output) +public sealed class GeminiChatCompletionTests(ITestOutputHelper output) : TestBase(output) { [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] diff --git 
a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiFunctionCallingTests.cs b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiFunctionCallingTests.cs index 37c48f0842b4..53629fe191da 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiFunctionCallingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiFunctionCallingTests.cs @@ -14,7 +14,7 @@ namespace SemanticKernel.IntegrationTests.Connectors.Google.Gemini; -public sealed class GeminiFunctionCallingTests(ITestOutputHelper output) : TestsBase(output) +public sealed class GeminiFunctionCallingTests(ITestOutputHelper output) : TestBase(output) { [RetryTheory] [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")] diff --git a/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs b/dotnet/src/IntegrationTests/Connectors/Google/TestBase.cs similarity index 97% rename from dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs rename to dotnet/src/IntegrationTests/Connectors/Google/TestBase.cs index 6b932727f4a6..8cf794d473b1 100644 --- a/dotnet/src/IntegrationTests/Connectors/Google/TestsBase.cs +++ b/dotnet/src/IntegrationTests/Connectors/Google/TestBase.cs @@ -9,12 +9,12 @@ namespace SemanticKernel.IntegrationTests.Connectors.Google; -public abstract class TestsBase(ITestOutputHelper output) +public abstract class TestBase(ITestOutputHelper output) { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddUserSecrets() + .AddUserSecrets() .AddEnvironmentVariables() .Build(); diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index ce65e7fe1932..47335653eda8 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ 
b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -67,6 +67,7 @@
+ diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs index 9a8cdb974902..4577b0795d5e 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs @@ -44,6 +44,7 @@ public static void Initialize(IConfigurationRoot configRoot) public static ChatGPTRetrievalPluginConfig ChatGPTRetrievalPlugin => LoadSection(); public static MsGraphConfiguration MSGraph => LoadSection(); public static MistralAIConfig MistralAI => LoadSection(); + public static AnthropicAIConfig AnthropicAI => LoadSection(); public static GoogleAIConfig GoogleAI => LoadSection(); public static VertexAIConfig VertexAI => LoadSection(); public static AzureCosmosDbMongoDbConfig AzureCosmosDbMongoDb => LoadSection(); @@ -214,6 +215,12 @@ public class MistralAIConfig public string EmbeddingModelId { get; set; } } + public class AnthropicAIConfig + { + public string ApiKey { get; set; } + public string ModelId { get; set; } + } + public class GoogleAIConfig { public string ApiKey { get; set; }