.Net: Anthropic - samples #8585

Merged
68 changes: 68 additions & 0 deletions dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletion.cs
@@ -0,0 +1,68 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

namespace ChatCompletion;

public sealed class Anthropic_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
{
    [Fact]
    public async Task SampleAsync()
    {
        Console.WriteLine("============= Anthropic - Claude Chat Completion =============");

        string apiKey = TestConfiguration.AnthropicAI.ApiKey;
        string modelId = TestConfiguration.AnthropicAI.ModelId;

        Assert.NotNull(apiKey);
        Assert.NotNull(modelId);

        Kernel kernel = Kernel.CreateBuilder()
            .AddAnthropicChatCompletion(
                modelId: modelId,
                apiKey: apiKey)
            .Build();

        await SimpleChatAsync(kernel);
    }

    private async Task SimpleChatAsync(Kernel kernel)
    {
        Console.WriteLine("======== Simple Chat ========");

        var chatHistory = new ChatHistory("You are an expert in the tool shop.");
        var chat = kernel.GetRequiredService<IChatCompletionService>();

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for new power tools, any suggestions?");
        await MessageOutputAsync(chatHistory);

        // First bot assistant message
        var reply = await chat.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        await MessageOutputAsync(chatHistory);

        // Second user message
        chatHistory.AddUserMessage("I'm looking for a drill, a screwdriver and a hammer.");
        await MessageOutputAsync(chatHistory);

        // Second bot assistant message
        reply = await chat.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        await MessageOutputAsync(chatHistory);
    }

    /// <summary>
    /// Outputs the last message of the chat history.
    /// </summary>
    private Task MessageOutputAsync(ChatHistory chatHistory)
    {
        var message = chatHistory.Last();

        Console.WriteLine($"{message.Role}: {message.Content}");
        Console.WriteLine("------------------------");

        return Task.CompletedTask;
    }
}
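Once the Anthropic connector is registered, the kernel's standard prompt APIs can also be used with it. The snippet below is a minimal sketch, assuming the connector participates in the regular prompt pipeline like the other chat connectors; it reuses the same `TestConfiguration.AnthropicAI` values as the sample above.

```csharp
// Minimal sketch: invoke a prompt directly through the Kernel instead of IChatCompletionService.
// Assumes the Anthropic connector behaves like the other chat connectors in this folder.
Kernel kernel = Kernel.CreateBuilder()
    .AddAnthropicChatCompletion(
        modelId: TestConfiguration.AnthropicAI.ModelId,
        apiKey: TestConfiguration.AnthropicAI.ApiKey)
    .Build();

FunctionResult result = await kernel.InvokePromptAsync("Recommend a cordless drill for light home use.");
Console.WriteLine(result.ToString());
```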
90 changes: 90 additions & 0 deletions dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletionStreaming.cs
@@ -0,0 +1,90 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

namespace ChatCompletion;

public sealed class Anthropic_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output)
{
    [Fact]
    public async Task SampleAsync()
    {
        Console.WriteLine("============= Anthropic - Claude Chat Streaming =============");

        string apiKey = TestConfiguration.AnthropicAI.ApiKey;
        string modelId = TestConfiguration.AnthropicAI.ModelId;

        Assert.NotNull(apiKey);
        Assert.NotNull(modelId);

        Kernel kernel = Kernel.CreateBuilder()
            .AddAnthropicChatCompletion(
                modelId: modelId,
                apiKey: apiKey)
            .Build();

        await this.StreamingChatAsync(kernel);
    }

    private async Task StreamingChatAsync(Kernel kernel)
    {
        Console.WriteLine("======== Streaming Chat ========");

        var chatHistory = new ChatHistory("You are an expert in the tool shop.");
        var chat = kernel.GetRequiredService<IChatCompletionService>();

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for alternative coffee brew methods, can you help me?");
        await MessageOutputAsync(chatHistory);

        // First bot assistant message
        var streamingChat = chat.GetStreamingChatMessageContentsAsync(chatHistory);
        var reply = await MessageOutputAsync(streamingChat);
        chatHistory.Add(reply);

        // Second user message
        chatHistory.AddUserMessage("Give me the best speciality coffee roasters.");
        await MessageOutputAsync(chatHistory);

        // Second bot assistant message
        streamingChat = chat.GetStreamingChatMessageContentsAsync(chatHistory);
        reply = await MessageOutputAsync(streamingChat);
        chatHistory.Add(reply);
    }

    /// <summary>
    /// Outputs the last message of the chat history.
    /// </summary>
    private Task MessageOutputAsync(ChatHistory chatHistory)
    {
        var message = chatHistory.Last();

        Console.WriteLine($"{message.Role}: {message.Content}");
        Console.WriteLine("------------------------");

        return Task.CompletedTask;
    }

    /// <summary>
    /// Streams the assistant reply to the console and returns it as a single message.
    /// </summary>
    private async Task<ChatMessageContent> MessageOutputAsync(IAsyncEnumerable<StreamingChatMessageContent> streamingChat)
    {
        bool first = true;
        StringBuilder messageBuilder = new();
        await foreach (var chatMessage in streamingChat)
        {
            if (first)
            {
                Console.Write($"{chatMessage.Role}: ");
                first = false;
            }

            Console.Write(chatMessage.Content);
            messageBuilder.Append(chatMessage.Content);
        }

        Console.WriteLine();
        Console.WriteLine("------------------------");
        return new ChatMessageContent(AuthorRole.Assistant, messageBuilder.ToString());
    }
}
33 changes: 33 additions & 0 deletions dotnet/samples/Concepts/ChatCompletion/Anthropic_ProvidersSetup.cs
@@ -0,0 +1,33 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;

namespace ChatCompletion;

/// <summary>
/// This sample shows how to set up the different providers for Anthropic.
/// </summary>
public sealed class Anthropic_ProvidersSetup(ITestOutputHelper output) : BaseTest(output)
{
    public void AnthropicProvider()
    {
        var kernel = Kernel.CreateBuilder()
            .AddAnthropicChatCompletion(
                modelId: "modelId",
                apiKey: "apiKey")
            .Build();
    }

    /// <summary>
    /// For more information on how to set up the Vertex AI provider, see the <see cref="Google_GeminiChatCompletion"/> sample.
    /// </summary>
    public void VertexAiProvider()
    {
        var kernel = Kernel.CreateBuilder()
            .AddAnthropicVertexAIChatCompletion(
                modelId: "modelId",
                bearerTokenProvider: () => ValueTask.FromResult("bearer"),
                endpoint: new Uri("https://your-endpoint"))
            .Build();
    }
}
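The Vertex AI setup above uses a hard-coded bearer token for brevity; in practice the token has to come from Google credentials. The sketch below shows one possible provider based on Application Default Credentials, assuming the Google.Apis.Auth package is referenced. The extension method name follows the sample above, and the credential flow should be adapted to however your project obtains Google Cloud access tokens.

```csharp
// Hypothetical sketch: supply a real bearer token via Application Default Credentials.
// Requires the Google.Apis.Auth NuGet package and a configured Google Cloud environment.
using Google.Apis.Auth.OAuth2;
using Microsoft.SemanticKernel;

var kernel = Kernel.CreateBuilder()
    .AddAnthropicVertexAIChatCompletion(
        modelId: "modelId",
        bearerTokenProvider: async () =>
        {
            // Resolve Application Default Credentials and scope them for the Cloud Platform API.
            var credential = (await GoogleCredential.GetApplicationDefaultAsync())
                .CreateScoped("https://www.googleapis.com/auth/cloud-platform");
            return await credential.UnderlyingCredential.GetAccessTokenForRequestAsync();
        },
        endpoint: new Uri("https://your-endpoint"))
    .Build();
```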
52 changes: 52 additions & 0 deletions dotnet/samples/Concepts/ChatCompletion/Anthropic_Vision.cs
@@ -0,0 +1,52 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Resources;

namespace ChatCompletion;

public sealed class Anthropic_Vision(ITestOutputHelper output) : BaseTest(output)
{
    [Fact]
    public async Task SampleAsync()
    {
        Console.WriteLine("============= Anthropic - Claude Vision =============");

        string apiKey = TestConfiguration.AnthropicAI.ApiKey;
        string modelId = TestConfiguration.AnthropicAI.ModelId;

        Assert.NotNull(apiKey);
        Assert.NotNull(modelId);

        Kernel kernel = Kernel.CreateBuilder()
            .AddAnthropicChatCompletion(
                modelId: modelId,
                apiKey: apiKey)
            .Build();

        var chatHistory = new ChatHistory("Your job is to describe images.");
        var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

        // Load the image from the resources
        await using var stream = EmbeddedResource.ReadStream("sample_image.jpg")!;
        using var binaryReader = new BinaryReader(stream);
        var bytes = binaryReader.ReadBytes((int)stream.Length);

        chatHistory.AddUserMessage(
        [
            new TextContent("What’s in this image?"),
            // When using the Vertex AI provider, both base64 data and a URI format are supported;
            // the mimeType must always be provided for the image.
            new ImageContent(bytes, "image/jpeg"),
            // The Cloud Storage URI of the image to include in the prompt.
            // The bucket that stores the file must be in the same Google Cloud project that's sending the request.
            // new ImageContent(new Uri("gs://generativeai-downloads/images/scones.jpg"),
            //     metadata: new Dictionary<string, object?> { { "mimeType", "image/jpeg" } })
        ]);

        var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory);

        Console.WriteLine(reply.Content);
    }
}
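The sample reads the image from an embedded resource, but any byte source works the same way. A small variation, assuming a local JPEG at a hypothetical path:

```csharp
// Variation: load the image bytes from disk instead of an embedded resource.
// "path/to/sample_image.jpg" is a placeholder path.
var bytes = await File.ReadAllBytesAsync("path/to/sample_image.jpg");

chatHistory.AddUserMessage(
[
    new TextContent("What's in this image?"),
    new ImageContent(bytes, "image/jpeg"),
]);
```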
1 change: 1 addition & 0 deletions dotnet/samples/Concepts/Concepts.csproj
@@ -54,6 +54,7 @@
<ProjectReference Include="..\..\src\Agents\Core\Agents.Core.csproj" />
<ProjectReference Include="..\..\src\Agents\OpenAI\Agents.OpenAI.csproj" />
<ProjectReference Include="..\..\src\Connectors\Connectors.Google\Connectors.Google.csproj" />
<ProjectReference Include="..\..\src\Connectors\Connectors.Anthropic\Connectors.Anthropic.csproj" />
<ProjectReference Include="..\..\src\Connectors\Connectors.HuggingFace\Connectors.HuggingFace.csproj" />
<ProjectReference Include="..\..\src\Connectors\Connectors.Memory.AzureAISearch\Connectors.Memory.AzureAISearch.csproj" />
<ProjectReference Include="..\..\src\Connectors\Connectors.Memory.AzureCosmosDBMongoDB\Connectors.Memory.AzureCosmosDBMongoDB.csproj" />
4 changes: 4 additions & 0 deletions dotnet/samples/Concepts/README.md
@@ -43,6 +43,10 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
- [Google_GeminiChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs)
- [Google_GeminiGetModelResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiGetModelResult.cs)
- [Google_GeminiVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs)
- [Anthropic_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletion.cs)
- [Anthropic_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_ChatCompletionStreaming.cs)
- [Anthropic_Vision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_Vision.cs)
- [Anthropic_ProvidersSetup](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Anthropic_ProvidersSetup.cs)
- [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs)
- [OpenAI_ChatCompletionMultipleChoices](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs)
- [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs)
2 changes: 2 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
@@ -6,6 +6,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UserSecretsId>5ee045b0-aea3-4f08-8d31-32d1a6f8fed0</UserSecretsId>
<NoWarn>SKEXP0070</NoWarn>
</PropertyGroup>

<ItemGroup>
@@ -14,6 +15,7 @@
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\..\src\Connectors\Connectors.Anthropic\Connectors.Anthropic.csproj" />
<ProjectReference Include="..\..\..\src\Connectors\Connectors.OpenAI\Connectors.OpenAI.csproj" />
</ItemGroup>

3 changes: 2 additions & 1 deletion dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -25,6 +25,7 @@ private static async Task Main(string[] args)
.AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "N/A", endpoint: new Uri("http://localhost:1234"), apiKey: null)
.AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
.AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!)
.AddAnthropicChatCompletion(serviceId: "claude", modelId: "claude-3-5-sonnet-20240620", apiKey: config["Anthropic:ApiKey"]!)

// Adding a custom filter to capture router selected service id
.Services.AddSingleton<IPromptRenderFilter>(new SelectedServiceFilter());
@@ -43,7 +44,7 @@ private static async Task Main(string[] args)
// Find the best service to use based on the user's input
KernelArguments arguments = new(new PromptExecutionSettings()
{
ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai"])
ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai", "claude"])
});

// Invoke the prompt and print the response
3 changes: 3 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/README.md
@@ -23,6 +23,9 @@ The sample can be configured by using the command line with .NET [Secret Manager
```powershell
# OpenAI (Not required if using Azure OpenAI)
dotnet user-secrets set "OpenAI:ApiKey" "... your api key ... "

# Anthropic
dotnet user-secrets set "Anthropic:ApiKey" "... your api key ... "
```

## Running the sample
TestConfiguration.cs
@@ -41,6 +41,7 @@ public static void Initialize(IConfigurationRoot configRoot)
public static ChatGPTRetrievalPluginConfig ChatGPTRetrievalPlugin => LoadSection<ChatGPTRetrievalPluginConfig>();
public static MsGraphConfiguration MSGraph => LoadSection<MsGraphConfiguration>();
public static MistralAIConfig MistralAI => LoadSection<MistralAIConfig>();
public static AnthropicAIConfig AnthropicAI => LoadSection<AnthropicAIConfig>();
public static GoogleAIConfig GoogleAI => LoadSection<GoogleAIConfig>();
public static VertexAIConfig VertexAI => LoadSection<VertexAIConfig>();
public static AzureCosmosDbMongoDbConfig AzureCosmosDbMongoDb => LoadSection<AzureCosmosDbMongoDbConfig>();
@@ -194,6 +195,12 @@ public class MistralAIConfig
public string EmbeddingModelId { get; set; }
}

public class AnthropicAIConfig
{
public string ApiKey { get; set; }
public string ModelId { get; set; }
}

public class GoogleAIConfig
{
public string ApiKey { get; set; }
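The Concepts samples above read their credentials from `TestConfiguration.AnthropicAI`. Assuming the configuration section is bound by property name, as the other provider sections here are, the values can be supplied through .NET user secrets, for example:

```powershell
# Anthropic configuration for the Concepts samples (section name assumed to be "AnthropicAI")
dotnet user-secrets set "AnthropicAI:ApiKey" "... your api key ... "
dotnet user-secrets set "AnthropicAI:ModelId" "claude-3-5-sonnet-20240620"
```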