diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs
index 7b9ce14ad150..f77f4b3a9a3a 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatGenerationTests.cs
@@ -4,13 +4,14 @@
using System.IO;
using System.Linq;
using System.Net.Http;
-using System.Net.Http.Headers;
using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Anthropic;
using Microsoft.SemanticKernel.Connectors.Anthropic.Core;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
using Microsoft.SemanticKernel.Http;
+using SemanticKernel.Connectors.Anthropic.UnitTests.Utils;
using Xunit;
namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core;
@@ -18,13 +19,13 @@ namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core;
/// <summary>
/// Test for <see cref="AnthropicClient"/>
/// </summary>
-public sealed class AnthropicClientChatGenerationTests : IDisposable
+public sealed class AnthropicChatGenerationTests : IDisposable
{
private readonly HttpClient _httpClient;
private readonly HttpMessageHandlerStub _messageHandlerStub;
private const string ChatTestDataFilePath = "./TestData/chat_one_response.json";
- public AnthropicClientChatGenerationTests()
+ public AnthropicChatGenerationTests()
{
this._messageHandlerStub = new HttpMessageHandlerStub();
this._messageHandlerStub.ResponseToReturn.Content = new StringContent(
@@ -243,11 +244,13 @@ public async Task ShouldPassSystemMessageToRequestAsync()
}
[Fact]
- public async Task ShouldPassVersionToRequestBodyIfCustomHandlerUsedAsync()
+ public async Task ShouldPassVersionToRequestBodyIfThirdVendorIsUsedAsync()
{
// Arrange
- var options = new AnthropicClientOptions();
- var client = new AnthropicClient("fake-model", "api-key", options: new(), httpClient: this._httpClient);
+ var options = new AmazonBedrockAnthropicClientOptions();
+ var client = new AnthropicClient("fake-model", new Uri("https://fake-uri.com"),
+ bearerTokenProvider: () => ValueTask.FromResult("fake-token"),
+ options: options, httpClient: this._httpClient);
var chatHistory = CreateSampleChatHistory();
@@ -390,7 +393,7 @@ public async Task ItCreatesRequestWithCustomUriAndCustomHeadersAsync(string head
{
// Arrange
Uri uri = new("https://fake-uri.com");
- using var httpHandler = new CustomHeadersHandler(headerName, headerValue);
+ using var httpHandler = new CustomHeadersHandler(headerName, headerValue, ChatTestDataFilePath);
using var httpClient = new HttpClient(httpHandler);
httpClient.BaseAddress = uri;
var client = new AnthropicClient("fake-model", "api-key", options: new(), httpClient: httpClient);
@@ -439,40 +442,4 @@ public void Dispose()
this._httpClient.Dispose();
this._messageHandlerStub.Dispose();
}
-
- private sealed class CustomHeadersHandler : DelegatingHandler
- {
- private readonly string _headerName;
- private readonly string _headerValue;
- public HttpRequestHeaders? RequestHeaders { get; private set; }
-
- public HttpContentHeaders? ContentHeaders { get; private set; }
-
- public byte[]? RequestContent { get; private set; }
-
- public Uri? RequestUri { get; private set; }
-
- public HttpMethod? Method { get; private set; }
-
- public CustomHeadersHandler(string headerName, string headerValue)
- {
- this.InnerHandler = new HttpMessageHandlerStub
- {
- ResponseToReturn = { Content = new StringContent(File.ReadAllText(ChatTestDataFilePath)) }
- };
- this._headerName = headerName;
- this._headerValue = headerValue;
- }
-
- protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, System.Threading.CancellationToken cancellationToken)
- {
- request.Headers.Add(this._headerName, this._headerValue);
- this.Method = request.Method;
- this.RequestUri = request.RequestUri;
- this.RequestHeaders = request.Headers;
- this.RequestContent = request.Content is null ? null : request.Content.ReadAsByteArrayAsync(cancellationToken).Result;
-
- return base.SendAsync(request, cancellationToken);
- }
- }
}
diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatStreamingTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatStreamingTests.cs
new file mode 100644
index 000000000000..d8d5b04a0d05
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicChatStreamingTests.cs
@@ -0,0 +1,467 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.IO;
+using System.Linq;
+using System.Net.Http;
+using System.Text.Json;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.Anthropic;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
+using Microsoft.SemanticKernel.Http;
+using SemanticKernel.Connectors.Anthropic.UnitTests.Utils;
+using Xunit;
+
+namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core;
+
+/// <summary>
+/// Test for <see cref="AnthropicClient"/>
+/// </summary>
+public sealed class AnthropicChatStreamingTests : IDisposable
+{
+ private readonly HttpClient _httpClient;
+ private readonly HttpMessageHandlerStub _messageHandlerStub;
+ private const string ChatTestDataFilePath = "./TestData/chat_stream_response.txt";
+
+ public AnthropicChatStreamingTests()
+ {
+ this._messageHandlerStub = new HttpMessageHandlerStub();
+ this._messageHandlerStub.ResponseToReturn.Content = new StringContent(
+ File.ReadAllText(ChatTestDataFilePath));
+
+ this._httpClient = new HttpClient(this._messageHandlerStub, false);
+ }
+
+ [Fact]
+ public async Task ShouldSetStreamTrueInRequestContentAsync()
+ {
+ // Arrange
+ string modelId = "fake-model234";
+ var client = this.CreateChatCompletionClient(modelId: modelId);
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ AnthropicRequest? request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.True(request.Stream);
+ }
+
+ [Fact]
+ public async Task ShouldPassModelIdToRequestContentAsync()
+ {
+ // Arrange
+ string modelId = "fake-model234";
+ var client = this.CreateChatCompletionClient(modelId: modelId);
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ AnthropicRequest? request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.Contains(modelId, request.ModelId, StringComparison.Ordinal);
+ }
+
+ [Fact]
+ public async Task ShouldContainRolesInRequestAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ AnthropicRequest? request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.Collection(request.Messages,
+ item => Assert.Equal(chatHistory[1].Role, item.Role),
+ item => Assert.Equal(chatHistory[2].Role, item.Role),
+ item => Assert.Equal(chatHistory[3].Role, item.Role));
+ }
+
+ [Fact]
+ public async Task ShouldContainMessagesInRequestAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ AnthropicRequest? request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.Collection(request.Messages,
+ item => Assert.Equal(chatHistory[1].Content, GetTextFrom(item.Contents[0])),
+ item => Assert.Equal(chatHistory[2].Content, GetTextFrom(item.Contents[0])),
+ item => Assert.Equal(chatHistory[3].Content, GetTextFrom(item.Contents[0])));
+
+ string? GetTextFrom(AnthropicContent content) => content.Text;
+ }
+
+ [Fact]
+ public async Task ShouldReturnValidChatResponseAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ var responses = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(responses);
+ Assert.NotEmpty(responses);
+ string content = string.Concat(responses.Select(streamingContent => streamingContent.Content));
+ Assert.Equal("Hi! My name is Claude.", content);
+ Assert.All(responses, response => Assert.Equal(AuthorRole.Assistant, response.Role));
+ }
+
+ [Fact]
+ public async Task ShouldReturnValidAnthropicMetadataStartMessageAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(streamingChatMessageContents);
+ Assert.NotEmpty(streamingChatMessageContents);
+ var messageContent = streamingChatMessageContents.First();
+ var metadata = messageContent.Metadata as AnthropicMetadata;
+ Assert.NotNull(metadata);
+ Assert.Null(metadata.FinishReason);
+ Assert.Equal("msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", metadata.MessageId);
+ Assert.Null(metadata.StopSequence);
+ Assert.Equal(25, metadata.InputTokenCount);
+ Assert.Equal(1, metadata.OutputTokenCount);
+ }
+
+ [Fact]
+ public async Task ShouldReturnNullAnthropicMetadataDeltaMessagesAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(streamingChatMessageContents);
+ Assert.NotEmpty(streamingChatMessageContents);
+ var deltaMessages = streamingChatMessageContents[1..^1];
+ Assert.All(deltaMessages, messageContent => Assert.Null(messageContent.Metadata));
+ }
+
+ [Fact]
+ public async Task ShouldReturnValidAnthropicMetadataEndMessageAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(streamingChatMessageContents);
+ Assert.NotEmpty(streamingChatMessageContents);
+ var messageContent = streamingChatMessageContents.Last();
+ var metadata = messageContent.Metadata as AnthropicMetadata;
+ Assert.NotNull(metadata);
+ Assert.Equal(AnthropicFinishReason.StopSequence, metadata.FinishReason);
+ Assert.Equal("msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", metadata.MessageId);
+ Assert.Equal("claude", metadata.StopSequence);
+ Assert.Equal(0, metadata.InputTokenCount);
+ Assert.Equal(15, metadata.OutputTokenCount);
+ }
+
+ [Fact]
+ public async Task ShouldReturnResponseWithModelIdAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ var streamingChatMessageContents = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(streamingChatMessageContents);
+ Assert.NotEmpty(streamingChatMessageContents);
+ Assert.All(streamingChatMessageContents, chatMessageContent => Assert.Equal("claude-3-5-sonnet-20240620", chatMessageContent.ModelId));
+ }
+
+ [Fact]
+ public async Task ShouldUsePromptExecutionSettingsAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+ var executionSettings = new AnthropicPromptExecutionSettings()
+ {
+ MaxTokens = 102,
+ Temperature = 0.45,
+ TopP = 0.6f
+ };
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory, executionSettings: executionSettings).ToListAsync();
+
+ // Assert
+ var request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.Equal(executionSettings.MaxTokens, request.MaxTokens);
+ Assert.Equal(executionSettings.Temperature, request.Temperature);
+ Assert.Equal(executionSettings.TopP, request.TopP);
+ }
+
+ [Fact]
+ public async Task ShouldThrowInvalidOperationExceptionIfChatHistoryContainsOnlySystemMessageAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = new ChatHistory("System message");
+
+ // Act & Assert
+ await Assert.ThrowsAsync<InvalidOperationException>(
+ () => client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync().AsTask());
+ }
+
+ [Fact]
+ public async Task ShouldThrowInvalidOperationExceptionIfChatHistoryContainsOnlyManySystemMessagesAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = new ChatHistory("System message");
+ chatHistory.AddSystemMessage("System message 2");
+ chatHistory.AddSystemMessage("System message 3");
+
+ // Act & Assert
+ await Assert.ThrowsAsync<InvalidOperationException>(
+ () => client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync().AsTask());
+ }
+
+ [Fact]
+ public async Task ShouldPassSystemMessageToRequestAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ string[] messages = ["System message", "System message 2"];
+ var chatHistory = new ChatHistory(messages[0]);
+ chatHistory.AddSystemMessage(messages[1]);
+ chatHistory.AddUserMessage("Hello");
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ AnthropicRequest? request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.NotNull(request.SystemPrompt);
+ Assert.All(messages, msg => Assert.Contains(msg, request.SystemPrompt, StringComparison.OrdinalIgnoreCase));
+ }
+
+ [Fact]
+ public async Task ShouldPassVersionToRequestBodyIfThirdVendorIsUsedAsync()
+ {
+ // Arrange
+ var options = new AmazonBedrockAnthropicClientOptions();
+ var client = new AnthropicClient("fake-model", new Uri("https://fake-uri.com"),
+ bearerTokenProvider: () => ValueTask.FromResult("fake-token"),
+ options: options, httpClient: this._httpClient);
+
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ AnthropicRequest? request = JsonSerializer.Deserialize<AnthropicRequest>(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(request);
+ Assert.Equal(options.Version, request.Version);
+ }
+
+ [Fact]
+ public async Task ShouldThrowArgumentExceptionIfChatHistoryIsEmptyAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = new ChatHistory();
+
+ // Act & Assert
+ await Assert.ThrowsAsync<ArgumentException>(
+ () => client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync().AsTask());
+ }
+
+ [Theory]
+ [InlineData(0)]
+ [InlineData(-15)]
+ public async Task ShouldThrowArgumentExceptionIfExecutionSettingMaxTokensIsLessThanOneAsync(int? maxTokens)
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ AnthropicPromptExecutionSettings executionSettings = new()
+ {
+ MaxTokens = maxTokens
+ };
+
+ // Act & Assert
+ await Assert.ThrowsAsync<ArgumentException>(
+ () => client.StreamGenerateChatMessageAsync(CreateSampleChatHistory(), executionSettings: executionSettings).ToListAsync().AsTask());
+ }
+
+ [Fact]
+ public async Task ItCreatesPostRequestAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.Equal(HttpMethod.Post, this._messageHandlerStub.Method);
+ }
+
+ [Fact]
+ public async Task ItCreatesRequestWithValidUserAgentAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(this._messageHandlerStub.RequestHeaders);
+ Assert.Equal(HttpHeaderConstant.Values.UserAgent, this._messageHandlerStub.RequestHeaders.UserAgent.ToString());
+ }
+
+ [Fact]
+ public async Task ItCreatesRequestWithSemanticKernelVersionHeaderAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+ var expectedVersion = HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AnthropicClient));
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(this._messageHandlerStub.RequestHeaders);
+ var header = this._messageHandlerStub.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).SingleOrDefault();
+ Assert.NotNull(header);
+ Assert.Equal(expectedVersion, header);
+ }
+
+ [Fact]
+ public async Task ItCreatesRequestWithValidAnthropicVersionAsync()
+ {
+ // Arrange
+ var options = new AnthropicClientOptions();
+ var client = this.CreateChatCompletionClient(options: options);
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(this._messageHandlerStub.RequestHeaders);
+ Assert.Equal(options.Version, this._messageHandlerStub.RequestHeaders.GetValues("anthropic-version").SingleOrDefault());
+ }
+
+ [Fact]
+ public async Task ItCreatesRequestWithValidApiKeyAsync()
+ {
+ // Arrange
+ string apiKey = "fake-claude-key";
+ var client = this.CreateChatCompletionClient(apiKey: apiKey);
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(this._messageHandlerStub.RequestHeaders);
+ Assert.Equal(apiKey, this._messageHandlerStub.RequestHeaders.GetValues("x-api-key").SingleOrDefault());
+ }
+
+ [Fact]
+ public async Task ItCreatesRequestWithJsonContentTypeAsync()
+ {
+ // Arrange
+ var client = this.CreateChatCompletionClient();
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotNull(this._messageHandlerStub.ContentHeaders);
+ Assert.NotNull(this._messageHandlerStub.ContentHeaders.ContentType);
+ Assert.Contains("application/json", this._messageHandlerStub.ContentHeaders.ContentType.ToString());
+ }
+
+ [Theory]
+ [InlineData("custom-header", "custom-value")]
+ public async Task ItCreatesRequestWithCustomUriAndCustomHeadersAsync(string headerName, string headerValue)
+ {
+ // Arrange
+ Uri uri = new("https://fake-uri.com");
+ using var httpHandler = new CustomHeadersHandler(headerName, headerValue, ChatTestDataFilePath);
+ using var httpClient = new HttpClient(httpHandler);
+ httpClient.BaseAddress = uri;
+ var client = new AnthropicClient("fake-model", "api-key", options: new(), httpClient: httpClient);
+
+ var chatHistory = CreateSampleChatHistory();
+
+ // Act
+ await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.Equal(uri, httpHandler.RequestUri);
+ Assert.NotNull(httpHandler.RequestHeaders);
+ Assert.Equal(headerValue, httpHandler.RequestHeaders.GetValues(headerName).SingleOrDefault());
+ }
+
+ private static ChatHistory CreateSampleChatHistory()
+ {
+ var chatHistory = new ChatHistory("You are a chatbot");
+ chatHistory.AddUserMessage("Hello");
+ chatHistory.AddAssistantMessage("Hi");
+ chatHistory.AddUserMessage("How are you?");
+ return chatHistory;
+ }
+
+ private AnthropicClient CreateChatCompletionClient(
+ string modelId = "fake-model",
+ string? apiKey = null,
+ AnthropicClientOptions? options = null,
+ HttpClient? httpClient = null)
+ {
+ return new AnthropicClient(modelId, apiKey ?? "fake-key", options: options ?? new(), httpClient: httpClient ?? this._httpClient);
+ }
+
+ public void Dispose()
+ {
+ this._httpClient.Dispose();
+ this._messageHandlerStub.Dispose();
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs
index d7925f4652bd..e741764c90cb 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Core/AnthropicRequestTests.cs
@@ -8,6 +8,7 @@
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Anthropic;
using Microsoft.SemanticKernel.Connectors.Anthropic.Core;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
using Xunit;
namespace SemanticKernel.Connectors.Anthropic.UnitTests.Core;
diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_stream_response.txt b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_stream_response.txt
new file mode 100644
index 000000000000..61bfd832c304
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/TestData/chat_stream_response.txt
@@ -0,0 +1,24 @@
+event: message_start
+data: {"type": "message_start", "message": {"id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", "type": "message", "role": "assistant", "content": [], "model": "claude-3-5-sonnet-20240620", "stop_reason": null, "stop_sequence": null, "usage": {"input_tokens": 25, "output_tokens": 1}}}
+
+event: content_block_start
+data: {"type": "content_block_start", "index": 0, "content_block": {"type": "text", "text": ""}}
+
+event: ping
+data: {"type": "ping"}
+
+event: content_block_delta
+data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "Hi! "}}
+
+event: content_block_delta
+data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "My name is Claude."}}
+
+event: content_block_stop
+data: {"type": "content_block_stop", "index": 0}
+
+event: message_delta
+data: {"type": "message_delta", "delta": {"stop_reason": "stop_sequence", "stop_sequence": "claude"}, "usage": {"output_tokens": 15}}
+
+event: message_stop
+data: {"type": "message_stop"}
+
diff --git a/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Utils/CustomHeadersHandler.cs b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Utils/CustomHeadersHandler.cs
new file mode 100644
index 000000000000..67fea752a1df
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Anthropic.UnitTests/Utils/CustomHeadersHandler.cs
@@ -0,0 +1,45 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.IO;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Threading.Tasks;
+
+namespace SemanticKernel.Connectors.Anthropic.UnitTests.Utils;
+
+internal sealed class CustomHeadersHandler : DelegatingHandler
+{
+ private readonly string _headerName;
+ private readonly string _headerValue;
+ public HttpRequestHeaders? RequestHeaders { get; private set; }
+
+ public HttpContentHeaders? ContentHeaders { get; private set; }
+
+ public byte[]? RequestContent { get; private set; }
+
+ public Uri? RequestUri { get; private set; }
+
+ public HttpMethod? Method { get; private set; }
+
+ public CustomHeadersHandler(string headerName, string headerValue, string testDataFilePath)
+ {
+ this.InnerHandler = new HttpMessageHandlerStub
+ {
+ ResponseToReturn = { Content = new StringContent(File.ReadAllText(testDataFilePath)) }
+ };
+ this._headerName = headerName;
+ this._headerValue = headerValue;
+ }
+
+ protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, System.Threading.CancellationToken cancellationToken)
+ {
+ request.Headers.Add(this._headerName, this._headerValue);
+ this.Method = request.Method;
+ this.RequestUri = request.RequestUri;
+ this.RequestHeaders = request.Headers;
+ this.RequestContent = request.Content is null ? null : request.Content.ReadAsByteArrayAsync(cancellationToken).Result;
+
+ return base.SendAsync(request, cancellationToken);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs
index 7f896389baca..456eadbda68a 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/AnthropicClient.cs
@@ -3,20 +3,22 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
+using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
-using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
using Microsoft.SemanticKernel.Diagnostics;
using Microsoft.SemanticKernel.Http;
using Microsoft.SemanticKernel.Services;
+using Microsoft.SemanticKernel.Text;
namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core;
@@ -26,6 +28,7 @@ namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core;
internal sealed class AnthropicClient
{
private const string ModelProvider = "anthropic";
+ private const string AnthropicUrl = "https://api.anthropic.com/v1/messages";
private readonly Func<ValueTask<string>>? _bearerTokenProvider;
private readonly Dictionary<string, object?> _attributesInternal = new();
@@ -88,6 +91,7 @@ internal AnthropicClient(
ILogger? logger = null)
{
Verify.NotNullOrWhiteSpace(modelId);
+
Verify.NotNull(options);
Verify.NotNull(httpClient);
@@ -97,7 +101,7 @@ internal AnthropicClient(
// If a custom endpoint is not provided, the ApiKey is required
Verify.NotNullOrWhiteSpace(apiKey);
this._apiKey = apiKey;
- targetUri = new Uri("https://api.anthropic.com/v1/messages");
+ targetUri = new Uri(AnthropicUrl);
}
this._httpClient = httpClient;
@@ -189,6 +193,97 @@ internal async Task<IReadOnlyList<ChatMessageContent>> GenerateChatMessageAsync(
return chatResponses;
}
+ /// <summary>
+ /// Generates a stream of chat messages asynchronously.
+ /// </summary>
+ /// <param name="chatHistory">The chat history containing the conversation data.</param>
+ /// <param name="executionSettings">Optional settings for prompt execution.</param>
+ /// <param name="kernel">A kernel instance.</param>
+ /// <param name="cancellationToken">A cancellation token to cancel the operation.</param>
+ /// <returns>An asynchronous enumerable of streaming chat contents.</returns>
+ internal async IAsyncEnumerable<StreamingChatMessageContent> StreamGenerateChatMessageAsync(
+ ChatHistory chatHistory,
+ PromptExecutionSettings? executionSettings = null,
+ Kernel? kernel = null,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ var state = this.ValidateInputAndCreateChatCompletionState(chatHistory, executionSettings);
+ state.AnthropicRequest.Stream = true;
+
+ using var activity = ModelDiagnostics.StartCompletionActivity(
+ this._endpoint, this._modelId, ModelProvider, chatHistory, state.ExecutionSettings);
+
+ List<AnthropicStreamingChatMessageContent> chatResponses = [];
+
+ HttpRequestMessage? httpRequestMessage = null;
+ HttpResponseMessage? httpResponseMessage = null;
+ Stream? responseStream = null;
+ try
+ {
+ try
+ {
+ httpRequestMessage = await this.CreateHttpRequestAsync(state.AnthropicRequest, this._endpoint).ConfigureAwait(false);
+ httpResponseMessage = await this.SendRequestAndGetResponseImmediatelyAfterHeadersReadAsync(httpRequestMessage, cancellationToken).ConfigureAwait(false);
+ responseStream = await httpResponseMessage.Content.ReadAsStreamAndTranslateExceptionAsync().ConfigureAwait(false);
+ }
+ catch (Exception ex) when (activity is not null)
+ {
+ activity.SetError(ex);
+ throw;
+ }
+
+ AnthropicResponse? lastAnthropicResponse = null;
+ await foreach (var streamingResponse in SseJsonParser.ParseAsync<AnthropicStreamingResponse>(responseStream, cancellationToken).ConfigureAwait(false))
+ {
+ string? content = null;
+ AnthropicMetadata? metadata = null;
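+ // Map each SSE event type to streamed content and/or metadata; "message_start" carries the root response, which is reused for later delta events.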
+ switch (streamingResponse.Type)
+ {
+ case "message_start":
+ Verify.NotNull(streamingResponse.Response);
+ lastAnthropicResponse = streamingResponse.Response;
+ metadata = GetResponseMetadata(lastAnthropicResponse);
+ content = string.Empty;
+ break;
+ case "content_block_start" or "content_block_delta":
+ content = streamingResponse.ContentDelta?.Text ?? string.Empty;
+ break;
+ case "message_delta":
+ Verify.NotNull(lastAnthropicResponse);
+ metadata = GetResponseMetadata(streamingResponse, lastAnthropicResponse);
+ content = string.Empty;
+ break;
+ case "message_stop":
+ lastAnthropicResponse = null;
+ break;
+ }
+
+ if (lastAnthropicResponse is null || content is null)
+ {
+ continue;
+ }
+
+ var streamingChatMessageContent = new AnthropicStreamingChatMessageContent(
+ role: lastAnthropicResponse.Role,
+ content: content,
+ innerContent: lastAnthropicResponse,
+ modelId: lastAnthropicResponse.ModelId ?? this._modelId,
+ choiceIndex: streamingResponse.Index,
+ metadata: metadata);
+ chatResponses.Add(streamingChatMessageContent);
+ yield return streamingChatMessageContent;
+ }
+
+ activity?.EndStreaming(chatResponses);
+ }
+ finally
+ {
+ httpRequestMessage?.Dispose();
+ httpResponseMessage?.Dispose();
+ responseStream?.Dispose();
+ }
+ }
+
private List<AnthropicChatMessageContent> GetChatResponseFrom(AnthropicResponse response)
{
var chatMessageContents = this.GetChatMessageContentsFromResponse(response);
@@ -198,7 +293,7 @@ private List<AnthropicChatMessageContent> GetChatResponseFrom(AnthropicResponse
private void LogUsage(List<AnthropicChatMessageContent> chatMessageContents)
{
- if (chatMessageContents[0].Metadata is not { TotalTokenCount: > 0 } metadata)
+ if (chatMessageContents[0]?.Metadata is not { TotalTokenCount: > 0 } metadata)
{
this.Log(LogLevel.Debug, "Token usage information unavailable.");
return;
@@ -227,7 +322,7 @@ private void LogUsage(List<AnthropicChatMessageContent> chatMessageContents)
}
private List<AnthropicChatMessageContent> GetChatMessageContentsFromResponse(AnthropicResponse response)
- => response.Contents.Select(content => this.GetChatMessageContentFromAnthropicContent(response, content)).ToList();
+ => response.Contents is null ? [] : response.Contents.Select(content => this.GetChatMessageContentFromAnthropicContent(response, content)).ToList();
private AnthropicChatMessageContent GetChatMessageContentFromAnthropicContent(AnthropicResponse response, AnthropicContent content)
{
@@ -256,6 +351,16 @@ private static AnthropicMetadata GetResponseMetadata(AnthropicResponse response)
OutputTokenCount = response.Usage?.OutputTokens ?? 0
};
+ private static AnthropicMetadata GetResponseMetadata(AnthropicStreamingResponse deltaResponse, AnthropicResponse rootResponse)
+ => new()
+ {
+ MessageId = rootResponse.Id,
+ FinishReason = deltaResponse.StopMetadata?.StopReason,
+ StopSequence = deltaResponse.StopMetadata?.StopSequence,
+ InputTokenCount = deltaResponse.Usage?.InputTokens ?? 0,
+ OutputTokenCount = deltaResponse.Usage?.OutputTokens ?? 0
+ };
+
private async Task<AnthropicResponse> SendRequestAndReturnValidResponseAsync(
Uri endpoint,
AnthropicRequest anthropicRequest,
@@ -283,7 +388,17 @@ private ChatCompletionState ValidateInputAndCreateChatCompletionState(
var filteredChatHistory = new ChatHistory(chatHistory.Where(IsAssistantOrUserOrSystem));
var anthropicRequest = AnthropicRequest.FromChatHistoryAndExecutionSettings(filteredChatHistory, anthropicExecutionSettings);
- anthropicRequest.Version = this._version;
+ if (this._endpoint.OriginalString.Equals(AnthropicUrl, StringComparison.Ordinal))
+ {
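+ // The native Anthropic endpoint receives the version via the "anthropic-version" header, so it is omitted from the request body.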
+ anthropicRequest.Version = null;
+ anthropicRequest.ModelId = anthropicExecutionSettings.ModelId ?? throw new InvalidOperationException("Model ID must be provided.");
+ }
+ else
+ {
+ // Vertex and Bedrock require the model ID to be null and version to be set
+ anthropicRequest.Version = this._version;
+ anthropicRequest.ModelId = null;
+ }
return new ChatCompletionState
{
@@ -296,25 +411,6 @@ static bool IsAssistantOrUserOrSystem(ChatMessageContent msg)
=> msg.Role == AuthorRole.Assistant || msg.Role == AuthorRole.User || msg.Role == AuthorRole.System;
}
- /// <summary>
- /// Generates a stream of chat messages asynchronously.
- /// </summary>
- /// <param name="chatHistory">The chat history containing the conversation data.</param>
- /// <param name="executionSettings">Optional settings for prompt execution.</param>
- /// <param name="kernel">A kernel instance.</param>
- /// <param name="cancellationToken">A cancellation token to cancel the operation.</param>
- /// <returns>An asynchronous enumerable of streaming chat contents.</returns>
- internal async IAsyncEnumerable<StreamingChatMessageContent> StreamGenerateChatMessageAsync(
- ChatHistory chatHistory,
- PromptExecutionSettings? executionSettings = null,
- Kernel? kernel = null,
- [EnumeratorCancellation] CancellationToken cancellationToken = default)
- {
- await Task.Yield();
- yield return new StreamingChatMessageContent(null, null);
- throw new NotImplementedException("Implement this method in next PR.");
- }
-
private static void ValidateMaxTokens(int? maxTokens)
{
// If maxTokens is null, it means that the user wants to use the default model value
@@ -392,8 +488,9 @@ private async Task<HttpRequestMessage> CreateHttpRequestAsync(object requestData
{
httpRequestMessage.Headers.Add("x-api-key", this._apiKey);
}
- else
- if (this._bearerTokenProvider is not null && !httpRequestMessage.Headers.Contains("Authentication") && await this._bearerTokenProvider().ConfigureAwait(false) is { } bearerKey)
+ else if (this._bearerTokenProvider is not null
+ && !httpRequestMessage.Headers.Contains("Authentication")
+ && await this._bearerTokenProvider().ConfigureAwait(false) is { } bearerKey)
{
httpRequestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", bearerKey);
}
@@ -401,22 +498,6 @@ private async Task<HttpRequestMessage> CreateHttpRequestAsync(object requestData
return httpRequestMessage;
}
- private static HttpContent? CreateJsonContent(object? payload)
- {
- HttpContent? content = null;
- if (payload is not null)
- {
- byte[] utf8Bytes = payload is string s
- ? Encoding.UTF8.GetBytes(s)
- : JsonSerializer.SerializeToUtf8Bytes(payload);
-
- content = new ByteArrayContent(utf8Bytes);
- content.Headers.ContentType = new MediaTypeHeaderValue("application/json") { CharSet = "utf-8" };
- }
-
- return content;
- }
-
private void Log(LogLevel logLevel, string? message, params object?[] args)
{
if (this._logger.IsEnabled(logLevel))
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/Message/AnthropicContent.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicContent.cs
similarity index 95%
rename from dotnet/src/Connectors/Connectors.Anthropic/Core/Models/Message/AnthropicContent.cs
rename to dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicContent.cs
index fab9f2b380f1..845f81fc366f 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/Message/AnthropicContent.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicContent.cs
@@ -2,7 +2,7 @@
using System.Text.Json.Serialization;
-namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core;
+namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
internal sealed class AnthropicContent
{
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs
index cec43a1531b9..10dc30c74789 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicRequest.cs
@@ -5,6 +5,7 @@
using System.Linq;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
using Microsoft.SemanticKernel.Text;
namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core;
@@ -12,6 +13,7 @@ namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core;
internal sealed class AnthropicRequest
{
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ [JsonPropertyName("anthropic_version")]
public string? Version { get; set; }
///
@@ -28,7 +30,8 @@ internal sealed class AnthropicRequest
public IList Messages { get; set; } = [];
[JsonPropertyName("model")]
- public string ModelId { get; set; } = null!;
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? ModelId { get; set; }
[JsonPropertyName("max_tokens")]
public int MaxTokens { get; set; }
@@ -123,7 +126,6 @@ private static AnthropicRequest CreateRequest(ChatHistory chatHistory, Anthropic
{
AnthropicRequest request = new()
{
- ModelId = executionSettings.ModelId ?? throw new InvalidOperationException("Model ID must be provided."),
MaxTokens = executionSettings.MaxTokens ?? throw new InvalidOperationException("Max tokens must be provided."),
SystemPrompt = string.Join("\n", chatHistory
.Where(msg => msg.Role == AuthorRole.System)
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs
index 0c21e18de0cb..9585da07f56a 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicResponse.cs
@@ -3,6 +3,7 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core;
@@ -38,9 +39,8 @@ internal sealed class AnthropicResponse
/// Content generated by the model.
/// This is an array of content blocks, each of which has a type that determines its shape.
/// </summary>
- [JsonRequired]
[JsonPropertyName("content")]
- public IReadOnlyList<AnthropicContent> Contents { get; init; } = null!;
+ public IReadOnlyList<AnthropicContent>? Contents { get; init; }
/// <summary>
/// The model that handled the request.
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicStreamingResponse.cs b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicStreamingResponse.cs
new file mode 100644
index 000000000000..1a41fa3edf91
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Core/Models/AnthropicStreamingResponse.cs
@@ -0,0 +1,86 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Text.Json;
+using System.Text.Json.Nodes;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Connectors.Anthropic.Core.Models;
+
+/// <summary>
+/// Represents the response from the Anthropic streaming API.
+/// </summary>
+///
+internal sealed class AnthropicStreamingResponse
+{
+ /// <summary>
+ /// SSE data type.
+ /// </summary>
+ [JsonRequired]
+ [JsonPropertyName("type")]
+ public string Type { get; init; } = null!;
+
+ /// <summary>
+ /// Response message, only if the type is "message_start", otherwise null.
+ /// </summary>
+ [JsonPropertyName("message")]
+ public AnthropicResponse? Response { get; init; }
+
+ /// <summary>
+ /// Index of a message.
+ /// </summary>
+ [JsonPropertyName("index")]
+ public int Index { get; init; }
+
+ // Fields are assigned via reflection
+#pragma warning disable CS0649 // Field is never assigned to, and will always have its default value
+#pragma warning disable IDE0044 // Add readonly modifier
+ [JsonPropertyName("content_block")]
+ [JsonInclude]
+ private AnthropicContent? _contentBlock;
+
+ [JsonPropertyName("delta")]
+ [JsonInclude]
+ private JsonNode? _delta;
+#pragma warning restore IDE0044
+#pragma warning restore CS0649
+
+ /// <summary>
+ /// Delta of anthropic content, only if the type is "content_block_start" or "content_block_delta", otherwise null.
+ /// </summary>
+ public AnthropicContent? ContentDelta =>
+ this.Type switch
+ {
+ "content_block_start" => this._contentBlock,
+ "content_block_delta" => this._delta?.Deserialize<AnthropicContent>(),
+ _ => null
+ };
+
+ /// <summary>
+ /// Usage metadata, only if the type is "message_delta", otherwise null.
+ /// </summary>
+ public AnthropicUsage? Usage { get; init; }
+
+ /// <summary>
+ /// Stop reason metadata, only if the type is "message_delta", otherwise null.
+ /// </summary>
+ public StopDelta? StopMetadata => this.Type == "message_delta" ? this._delta?.Deserialize<StopDelta>() : null;
+
+ /// <summary>
+ /// Represents the reason that message streaming stopped.
+ /// </summary>
+ public sealed class StopDelta
+ {
+ /// <summary>
+ /// The reason that we stopped.
+ /// </summary>
+ [JsonPropertyName("stop_reason")]
+ public AnthropicFinishReason? StopReason { get; init; }
+
+ /// <summary>
+ /// Which custom stop sequence was generated, if any.
+ /// This value will be a non-null string if one of your custom stop sequences was generated.
+ /// </summary>
+ [JsonPropertyName("stop_sequence")]
+ public string? StopSequence { get; init; }
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicStreamingChatMessageContent.cs
new file mode 100644
index 000000000000..37fd28be42cf
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicStreamingChatMessageContent.cs
@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Generic;
+using System.Text;
+using System.Text.Json.Serialization;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Connectors.Anthropic;
+
+/// <summary>
+/// Anthropic specialized streaming chat message content
+/// </summary>
+public sealed class AnthropicStreamingChatMessageContent : StreamingChatMessageContent
+{
+ /// <summary>
+ /// Creates a new instance of the <see cref="AnthropicStreamingChatMessageContent"/> class
+ /// </summary>
+ /// <param name="role">Role of the author of the message</param>
+ /// <param name="content">Content of the message</param>
+ /// <param name="innerContent">Inner content object reference</param>
+ /// <param name="choiceIndex">Choice index</param>
+ /// <param name="modelId">The model ID used to generate the content</param>
+ /// <param name="encoding">Encoding of the chat</param>
+ /// <param name="metadata">Additional metadata</param>
+ [JsonConstructor]
+ public AnthropicStreamingChatMessageContent(
+ AuthorRole? role,
+ string? content,
+ object? innerContent = null,
+ int choiceIndex = 0,
+ string? modelId = null,
+ Encoding? encoding = null,
+ IReadOnlyDictionary<string, object?>? metadata = null)
+ : base(role, content, innerContent, choiceIndex, modelId, encoding, metadata) { }
+
+ /// <summary>
+ /// The metadata associated with the content.
+ /// </summary>
+ public new AnthropicMetadata? Metadata
+ {
+ get => base.Metadata as AnthropicMetadata;
+ init => base.Metadata = value;
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs
index 54a2f9db3853..e7451046c3dd 100644
--- a/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs
+++ b/dotnet/src/Connectors/Connectors.Anthropic/Models/Contents/AnthropicUsage.cs
@@ -17,7 +17,6 @@ public sealed class AnthropicUsage
/// <summary>
/// The number of input tokens which were used.
/// </summary>
- [JsonRequired]
[JsonPropertyName("input_tokens")]
public int? InputTokens { get; init; }
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs
index e6bc5f1ddddf..12cb1ef033e8 100644
--- a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelPluginFactory.cs
@@ -339,9 +339,9 @@ private static string ConvertOperationIdToValidFunctionName(string operationId,
}
catch (ArgumentException)
{
- // The exception indicates that the operationId is not a valid function name.
- // To comply with the KernelFunction name requirements, it needs to be converted or sanitized.
- // Therefore, it should not be re-thrown, but rather swallowed to allow the conversion below.
+ // The exception indicates that the operationId is not a valid function name.
+ // To comply with the KernelFunction name requirements, it needs to be converted or sanitized.
+ // Therefore, it should not be re-thrown, but rather swallowed to allow the conversion below.
}
// Tokenize operation id on forward and back slashes
@@ -372,5 +372,4 @@ private static string ConvertOperationIdToValidFunctionName(string operationId,
#endif
#endregion
-
}
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiKernelPluginFactoryTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiKernelPluginFactoryTests.cs
index ed4f7fe077b9..38facbe52ccf 100644
--- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiKernelPluginFactoryTests.cs
+++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiKernelPluginFactoryTests.cs
@@ -375,5 +375,4 @@ public void DoFakeAction(string parameter)
}
#endregion
-
}
diff --git a/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs
index 6e791d7aa5f9..aa0a572ea1e9 100644
--- a/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Anthropic/AnthropicChatCompletionTests.cs
@@ -226,14 +226,17 @@ public async Task ChatStreamingReturnsUsedTokensAsync(ServiceType serviceType)
var responses = await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync();
// Assert
- var metadata = responses.Last().Metadata as AnthropicMetadata;
- Assert.NotNull(metadata);
- this.Output.WriteLine($"TotalTokenCount: {metadata.TotalTokenCount}");
- this.Output.WriteLine($"InputTokenCount: {metadata.InputTokenCount}");
- this.Output.WriteLine($"OutputTokenCount: {metadata.OutputTokenCount}");
- Assert.True(metadata.TotalTokenCount > 0);
- Assert.True(metadata.InputTokenCount > 0);
- Assert.True(metadata.OutputTokenCount > 0);
+ var metadata = responses
+ .Where(c => c.Metadata is not null)
+ .Select(c => c.Metadata)
+ .Cast<AnthropicMetadata>().ToList();
+ Assert.NotEmpty(metadata);
+ this.Output.WriteLine($"TotalTokenCount: {metadata.Sum(m => m.TotalTokenCount)}");
+ this.Output.WriteLine($"InputTokenCount: {metadata.Sum(m => m.InputTokenCount)}");
+ this.Output.WriteLine($"OutputTokenCount: {metadata.Sum(m => m.OutputTokenCount)}");
+ Assert.True(metadata.Sum(m => m.TotalTokenCount) > 0);
+ Assert.True(metadata.Sum(m => m.InputTokenCount) > 0);
+ Assert.True(metadata.Sum(m => m.OutputTokenCount) > 0);
}
[RetryTheory]