From ec81697683d8aff00746303bb5e19e46b93b78eb Mon Sep 17 00:00:00 2001
From: kavin <115390646+singhk97@users.noreply.github.com>
Date: Thu, 31 Oct 2024 14:13:50 -0700
Subject: [PATCH] [C#] bump: dotnet 1.8.0 (#2159)

## Linked issues

closes: #minor

## Details

Changes since 1.7.0:

* o1 model support
* Streaming error suppression bug fix
* Azure AI Content Safety API version update
* UI/UX features: AI Label, Feedback Loop, Citations, Sensitivity Label, etc.
* Streaming

## Attestation Checklist

- [x] My code follows the style guidelines of this project
- I have checked for/fixed spelling, linting, and other errors
- I have commented my code for clarity
- I have made corresponding changes to the documentation (updating the doc strings in the code is sufficient)
- My changes generate no new warnings
- I have added tests that validate my changes and provide sufficient test coverage. I have tested with:
  - Local testing
  - E2E testing in Teams
- New and existing unit tests pass locally with my changes

---------

Co-authored-by: Lily Du
Co-authored-by: lilydu
---
 .../AITests/ActionPlannerTests.cs | 2 +-
 .../AITests/AssistantMessageTests.cs | 2 +-
 .../AzureContentSafetyModeratorTests.cs | 12 +-
 .../AITests/ChatMessageTests.cs | 12 +-
 .../AITests/LLMClientTests.cs | 1 +
 .../Models/ChatCompletionToolCallTests.cs | 2 +-
 .../Models/ChatMessageExtensionsTests.cs | 10 +-
 .../AITests/Models/OpenAIModelTests.cs | 27 +-
 .../AITests/OpenAIEmbeddingsTests.cs | 4 +-
 .../Application/StreamingResponseTests.cs | 61 +++++
 .../Microsoft.Teams.AI.Tests.csproj | 4 +-
 .../TestUtils/OpenAIModelFactory.cs | 74 ++----
 .../TestUtils/TestAssistantsOpenAIClient.cs | 14 +-
 .../Microsoft.TeamsAI/AI/AI.cs | 1 +
 .../Microsoft.TeamsAI/AI/Action/AIEntity.cs | 6 +
 .../Microsoft.TeamsAI/AI/Clients/LLMClient.cs | 17 +-
 .../AI/Clients/LLMClientOptions.cs | 5 +
 .../AI/Embeddings/OpenAIEmbeddings.cs | 8 +-
 .../AI/Models/AssistantsMessage.cs | 11 +-
 .../AI/Models/ChatCompletionToolCall.cs | 6 +-
 .../AI/Models/ChatMessage.cs | 30 +--
 .../AI/Models/MessageContext.cs | 10 +-
 .../AI/Models/OpenAIModel.cs | 105 +++++---
 .../Moderator/AzureContentSafetyModerator.cs | 68 +++--
 .../AzureContentSafetyModeratorOptions.cs | 10 +-
 .../AI/Planners/ActionPlanner.cs | 8 +-
 .../AI/Planners/AssistantsPlanner.cs | 28 +-
 .../AI/Prompts/PromptManager.cs | 2 +-
 .../AI/Prompts/PromptTemplate.cs | 6 +-
 .../AI/Prompts/Sections/LayoutSection.cs | 4 +-
 .../AI/Tokenizers/GPTTokenizer.cs | 4 +-
 .../Application/StreamingChannelData.cs | 7 +
 .../Application/StreamingResponse.cs | 138 ++++++++--
 .../Application/TeamsAttachmentDownloader.cs | 4 +-
 .../Microsoft.Teams.AI.csproj | 119 +++++----
 .../01.messaging.echoBot/EchoBot.csproj | 2 +-
 .../SearchCommand.csproj | 2 +-
 .../TypeAheadBot.csproj | 2 +-
 .../04.ai.a.teamsChefBot/TeamsChefBot.csproj | 2 +-
 .../GPT.csproj | 2 +-
 .../LightBot.csproj | 2 +-
 .../ListBot.csproj | 2 +-
 .../DevOpsBot.csproj | 2 +-
 .../CardGazer.csproj | 2 +-
 .../04.ai.g.teamsChefBot-streaming/Program.cs | 8 +-
 .../TeamsChefBot.csproj | 17 +-
 .../TwentyQuestions.csproj | 2 +-
 .../samples/04.e.twentyQuestions/teamsapp.yml | 1 +
 .../samples/05.chatModeration/.editorconfig | 240 ++++++++++++++++++
 dotnet/samples/05.chatModeration/.gitignore | 25 ++
 .../05.chatModeration/ActionHandlers.cs | 31 +++
 .../AdapterWithErrorHandler.cs | 26 ++
 .../05.chatModeration/ChatModeration.csproj | 47 ++++
 .../05.chatModeration/ChatModeration.sln | 25 ++
 dotnet/samples/05.chatModeration/Config.cs | 29 +++
 .../Controllers/BotController.cs | 32 +++
 dotnet/samples/05.chatModeration/Program.cs | 128 ++++++++++
 .../Prompts/Chat/config.json | 18 ++
 .../Prompts/Chat/skprompt.txt | 3 +
 .../Properties/launchSettings.json | 27 ++
 dotnet/samples/05.chatModeration/README.md | 77 ++++++
 .../appPackage/manifest.json | 48 ++++
 .../appsettings.Development.json | 21 ++
 .../05.chatModeration/appsettings.json | 20 ++
 .../05.chatModeration/assets/moderation.png | 3 +
 dotnet/samples/05.chatModeration/env/.env.dev | 18 ++
 .../samples/05.chatModeration/env/.env.local | 12 +
 .../05.chatModeration/infra/azure.bicep | 113 +++++++++
 .../infra/azure.parameters.json | 36 +++
 .../infra/botRegistration/azurebot.bicep | 37 +++
 .../infra/botRegistration/readme.md | 1 +
 .../05.chatModeration/teamsapp.local.yml | 87 +++++++
 dotnet/samples/05.chatModeration/teamsapp.yml | 97 +++++++
 .../06.assistants.a.mathBot/MathBot.csproj | 4 +-
 .../06.assistants.b.orderBot/OrderBot.csproj | 6 +-
 .../06.assistants.b.orderBot/Program.cs | 14 +-
 .../samples/06.auth.oauth.bot/BotAuth.csproj | 2 +-
 .../MessageExtensionAuth.csproj | 2 +-
 .../06.auth.teamsSSO.bot/BotAuth.csproj | 2 +-
 .../MessageExtensionAuth.csproj | 2 +-
 .../AzureAISearchBot/AzureAISearchBot.csproj | 2 +-
 .../AzureOpenAIBot.csproj | 2 +-
 .../08.datasource.azureopenai/teamsapp.yml | 1 +
 getting-started/CONCEPTS/STREAMING.md | 1 +
 84 files changed, 1780 insertions(+), 327 deletions(-)
 create mode 100644 dotnet/samples/05.chatModeration/.editorconfig
 create mode 100644 dotnet/samples/05.chatModeration/.gitignore
 create mode 100644 dotnet/samples/05.chatModeration/ActionHandlers.cs
 create mode 100644 dotnet/samples/05.chatModeration/AdapterWithErrorHandler.cs
 create mode 100644 dotnet/samples/05.chatModeration/ChatModeration.csproj
 create mode 100644 dotnet/samples/05.chatModeration/ChatModeration.sln
 create mode 100644 dotnet/samples/05.chatModeration/Config.cs
 create mode 100644 dotnet/samples/05.chatModeration/Controllers/BotController.cs
 create mode 100644 dotnet/samples/05.chatModeration/Program.cs
 create mode 100644 dotnet/samples/05.chatModeration/Prompts/Chat/config.json
 create mode 100644 dotnet/samples/05.chatModeration/Prompts/Chat/skprompt.txt
 create mode 100644 dotnet/samples/05.chatModeration/Properties/launchSettings.json
 create mode 100644 dotnet/samples/05.chatModeration/README.md
 create mode 100644 dotnet/samples/05.chatModeration/appPackage/manifest.json
 create mode 100644 dotnet/samples/05.chatModeration/appsettings.Development.json
 create mode 100644 dotnet/samples/05.chatModeration/appsettings.json
 create mode 100644 dotnet/samples/05.chatModeration/assets/moderation.png
 create mode 100644 dotnet/samples/05.chatModeration/env/.env.dev
 create mode 100644 dotnet/samples/05.chatModeration/env/.env.local
 create mode 100644 dotnet/samples/05.chatModeration/infra/azure.bicep
 create mode 100644 dotnet/samples/05.chatModeration/infra/azure.parameters.json
 create mode 100644 dotnet/samples/05.chatModeration/infra/botRegistration/azurebot.bicep
 create mode 100644 dotnet/samples/05.chatModeration/infra/botRegistration/readme.md
 create mode 100644 dotnet/samples/05.chatModeration/teamsapp.local.yml
 create mode 100644 dotnet/samples/05.chatModeration/teamsapp.yml

diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ActionPlannerTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ActionPlannerTests.cs
index 38be75e91..47cdcce5c 100644
--- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ActionPlannerTests.cs
+++ 
b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ActionPlannerTests.cs @@ -328,7 +328,7 @@ public async Task Test_ContinueTaskAsync_Streaming() await state.LoadStateAsync(null, turnContext); state.Temp.Input = "test"; var planner = new ActionPlanner(options, new TestLoggerFactory()); - var ai = new AI(new(planner)); + var ai = new AI(new(planner) { EnableFeedbackLoop = true }); // Act var result = await planner.ContinueTaskAsync(turnContext, state, ai); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs index 9d2cb6595..31725bc8a 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AssistantMessageTests.cs @@ -15,7 +15,7 @@ public void Test_Constructor() { // Arrange MessageContent content = OpenAIModelFactory.CreateMessageContent("message", "fileId"); - Mock fileClientMock = new Mock(); + Mock fileClientMock = new Mock(); fileClientMock.Setup(fileClient => fileClient.DownloadFileAsync("fileId", It.IsAny())).Returns(() => { return Task.FromResult(ClientResult.FromValue(BinaryData.FromString("test"), new Mock().Object)); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AzureContentSafetyModeratorTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AzureContentSafetyModeratorTests.cs index ed9b794f9..a3752d604 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AzureContentSafetyModeratorTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/AzureContentSafetyModeratorTests.cs @@ -97,7 +97,8 @@ public async Task Test_ReviewPrompt_Flagged(ModerationType moderate) }; var clientMock = new Mock(new Uri(endpoint), new AzureKeyCredential(apiKey)); - AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(hateResult: ContentSafetyModelFactory.TextAnalyzeSeverityResult(TextCategory.Hate, 2)); + var analyses = new List() { ContentSafetyModelFactory.TextCategoriesAnalysis(TextCategory.Hate, 2) }; + AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(null, analyses); Response? response = null; clientMock.Setup(client => client.AnalyzeTextAsync(It.IsAny(), It.IsAny())).ReturnsAsync(Response.FromValue(analyzeTextResult, response)); @@ -173,7 +174,8 @@ public async Task Test_ReviewPrompt_NotFlagged(ModerationType moderate) }; var clientMock = new Mock(new Uri(endpoint), new AzureKeyCredential(apiKey)); - AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(hateResult: ContentSafetyModelFactory.TextAnalyzeSeverityResult(TextCategory.Hate, 0)); + var analyses = new List() { ContentSafetyModelFactory.TextCategoriesAnalysis(TextCategory.Hate, 0) }; + AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(null, analyses); Response? 
response = null; clientMock.Setup(client => client.AnalyzeTextAsync(It.IsAny(), It.IsAny())).ReturnsAsync(Response.FromValue(analyzeTextResult, response)); @@ -237,7 +239,8 @@ public async Task Test_ReviewPlan_Flagged(ModerationType moderate) }); var clientMock = new Mock(new Uri(endpoint), new AzureKeyCredential(apiKey)); - AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(hateResult: ContentSafetyModelFactory.TextAnalyzeSeverityResult(TextCategory.Hate, 2)); + var analyses = new List() { ContentSafetyModelFactory.TextCategoriesAnalysis(TextCategory.Hate, 2) }; + AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(null, analyses); Response? response = null; clientMock.Setup(client => client.AnalyzeTextAsync(It.IsAny(), It.IsAny())).ReturnsAsync(Response.FromValue(analyzeTextResult, response)); @@ -298,7 +301,8 @@ public async Task Test_ReviewPlan_NotFlagged(ModerationType moderate) }); var clientMock = new Mock(new Uri(endpoint), new AzureKeyCredential(apiKey)); - AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(hateResult: ContentSafetyModelFactory.TextAnalyzeSeverityResult(TextCategory.Hate, 0)); + var analyses = new List() { ContentSafetyModelFactory.TextCategoriesAnalysis(TextCategory.Hate, 0) }; + AnalyzeTextResult analyzeTextResult = ContentSafetyModelFactory.AnalyzeTextResult(null, analyses); Response? response = null; clientMock.Setup(client => client.AnalyzeTextAsync(It.IsAny(), It.IsAny())).ReturnsAsync(Response.FromValue(analyzeTextResult, response)); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs index 4c7b9a0fa..ab37f6541 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/ChatMessageTests.cs @@ -48,7 +48,7 @@ public void Test_Initialization_From_OpenAISdk_ChatMessage() ""citations"": [ {{ ""title"": ""test-title"", - ""url"": ""test-url"", + ""url"": ""https://www.test-uri.com/"", ""content"": ""test-content"" }} ] @@ -69,7 +69,7 @@ public void Test_Initialization_From_OpenAISdk_ChatMessage() Assert.NotNull(context); Assert.Single(context.Citations); Assert.Equal("test-title", context.Citations[0].Title); - Assert.Equal("test-url", context.Citations[0].Url); + Assert.Equal("https://www.test-uri.com/", context.Citations[0].Url); Assert.Equal("test-content", context.Citations[0].Content); } @@ -179,10 +179,10 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_FunctionCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); // TODO: Uncomment when participant name issue is resolved. 
//Assert.Equal("test-name", assistantMessage.ParticipantName); - Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments); + Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments.ToString()); Assert.Equal("test-name", assistantMessage.FunctionCall.FunctionName); } @@ -206,7 +206,7 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_ActionCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); // TODO: Uncomment when participant name issue is resolved. //Assert.Equal("test-name", assistantMessage.ParticipantName); @@ -215,7 +215,7 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_ActionCall() Assert.NotNull(toolCall); Assert.Equal("test-id", toolCall.Id); Assert.Equal("test-tool-name", toolCall.FunctionName); - Assert.Equal("test-tool-arg1", toolCall.FunctionArguments); + Assert.Equal("test-tool-arg1", toolCall.FunctionArguments.ToString()); } [Fact] diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs index 7c1a51de6..4db309d8f 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs @@ -176,6 +176,7 @@ public async Task Test_CompletePromptAsync_Streaming_Success() { StartStreamingMessage = "Begin streaming", EndStreamHandler = handler, + EnableFeedbackLoop = true, }; LLMClient client = new(options, null); TestMemory memory = new(); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs index 0455a759e..7105c8693 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatCompletionToolCallTests.cs @@ -11,7 +11,7 @@ public sealed class ChatCompletionToolCallTests public void Test_ChatCompletionsToolCall_ToFunctionToolCall() { // Arrange - var functionToolCall = ChatToolCall.CreateFunctionToolCall("test-id", "test-name", "test-arg1"); + var functionToolCall = ChatToolCall.CreateFunctionToolCall("test-id", "test-name", BinaryData.FromString("test-arg1")); // Act var azureSdkFunctionToolCall = ChatCompletionsToolCall.FromChatToolCall(functionToolCall); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs index ac7618e2c..a05156a69 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/ChatMessageExtensionsTests.cs @@ -86,10 +86,10 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_FunctionCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); // TODO: Uncomment when participant name issue is resolved. 
//Assert.Equal("test-name", assistantMessage.ParticipantName); - Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments); + Assert.Equal("test-arg1", assistantMessage.FunctionCall.FunctionArguments.ToString()); Assert.Equal("test-name", assistantMessage.FunctionCall.FunctionName); } @@ -113,14 +113,14 @@ public void Test_AssistantRole_ToOpenAISdkChatMessage_ToolCall() // Assert var assistantMessage = result as AssistantChatMessage; Assert.NotNull(assistantMessage); - Assert.Equal("test-content", assistantMessage.Content[0].Text); + Assert.Empty(assistantMessage.Content); Assert.Single(assistantMessage.ToolCalls); ChatToolCall toolCall = assistantMessage.ToolCalls[0]; Assert.NotNull(toolCall); Assert.Equal("test-id", toolCall.Id); Assert.Equal("test-tool-name", toolCall.FunctionName); - Assert.Equal("test-tool-arg1", toolCall.FunctionArguments); + Assert.Equal("test-tool-arg1", toolCall.FunctionArguments.ToString()); } [Fact] @@ -198,7 +198,7 @@ public void Test_ChatCompletionsToolCall_ToFunctionToolCall() Assert.NotNull(chatToolCall); Assert.Equal("test-id", chatToolCall.Id); Assert.Equal("test-name", chatToolCall.FunctionName); - Assert.Equal("test-arg1", chatToolCall.FunctionArguments); + Assert.Equal("test-arg1", chatToolCall.FunctionArguments.ToString()); } [Fact] diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs index 1e9e1bb47..93e63eeda 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/Models/OpenAIModelTests.cs @@ -33,6 +33,25 @@ public void Test_Constructor_OpenAI() new OpenAIModel(options); } + [Fact] + public void Test_SetMaxTokens() + { + // Arrange + var options = new OpenAIModelOptions("test-key", "test-model"); + var chatCompletionOptions = new ChatCompletionOptions(); + var model = new OpenAIModel(options); + var testTokens = 100; + + // Act + model.SetMaxTokens(testTokens, chatCompletionOptions); + + // Assert + MethodInfo info = chatCompletionOptions.GetType().GetMethod("get__deprecatedMaxTokens", BindingFlags.NonPublic | BindingFlags.Instance)!; + int maxTokens = (int)info.Invoke(chatCompletionOptions, null)!; + Assert.Equal(testTokens, maxTokens); + } + + [Fact] public void Test_Constructor_AzureOpenAI_InvalidAzureApiVersion() { @@ -40,7 +59,7 @@ public void Test_Constructor_AzureOpenAI_InvalidAzureApiVersion() var options = new AzureOpenAIModelOptions("test-key", "test-deployment", "https://test.openai.azure.com/"); var versions = new List { - "2024-04-01-preview", "2024-05-01-preview", "2024-06-01" + "2024-06-01", "2024-08-01-preview", "2024-10-01-preview" }; // Act @@ -279,8 +298,8 @@ public async Task Test_CompletePromptAsync_AzureOpenAI_Chat_WithTools() Assert.NotNull(result.Message.ActionCalls); Assert.Single(result.Message.ActionCalls); - Assert.Equal("testAction", result.Message.ActionCalls[0].Function.Name); - + Assert.Equal("testAction", result.Message.ActionCalls[0].Function!.Name); + Assert.Null(result.Error); Assert.Equal(ChatRole.Assistant, result.Message.Role); Assert.Null(result.Message.Content); @@ -326,7 +345,7 @@ public async Task Test_CompletePromptAsync_AzureOpenAI_Streaming() ] }}")); - TestAsyncResultCollection updates = new(update!, Mock.Of()); + TestAsyncCollectionResult updates = new(update!, Mock.Of()); var response = new TestResponse(200, 
string.Empty); clientMock.Setup((client) => diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs index b7d84bc51..46a0a8ddc 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/OpenAIEmbeddingsTests.cs @@ -37,7 +37,7 @@ public async Task Test_OpenAI_CreateEmbeddings_ReturnEmbeddings() IList inputs = new List { "test" }; var clientMock = new Mock(new ApiKeyCredential(apiKey), It.IsAny()); var response = new TestResponse(200, string.Empty); - var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ + var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ ""data"": [ { ""object"": ""embedding"", @@ -76,7 +76,7 @@ public async Task Test_AzureOpenAI_CreateEmbeddings_ReturnEmbeddings() IList inputs = new List { "test" }; var clientMock = new Mock(new ApiKeyCredential(apiKey), It.IsAny()); var response = new TestResponse(200, string.Empty); - var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ + var embeddingCollection = ModelReaderWriter.Read(BinaryData.FromString(@"{ ""data"": [ { ""object"": ""embedding"", diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Application/StreamingResponseTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Application/StreamingResponseTests.cs index 5e8119b87..d75139e13 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Application/StreamingResponseTests.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Application/StreamingResponseTests.cs @@ -2,9 +2,13 @@ using AdaptiveCards; using Microsoft.Bot.Builder; using Microsoft.Bot.Schema; +using Microsoft.Teams.AI.AI.Action; +using Microsoft.Teams.AI.AI.Models; using Microsoft.Teams.AI.Application; using Microsoft.Teams.AI.Exceptions; using Microsoft.Teams.AI.Tests.TestUtils; +using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities; +using Moq; namespace Microsoft.Teams.AI.Tests.Application { @@ -196,6 +200,37 @@ void CaptureSend(Activity[] arg) Assert.Equal(2, streamer.UpdatesSent()); } + [Fact] + public async Task Test_SendTextChunk_SendsFinalMessageWithPoweredByAIFeatures() + { + // Arrange + Activity[]? 
activitiesToSend = null; + void CaptureSend(Activity[] arg) + { + activitiesToSend = arg; + } + var adapter = new SimpleAdapter(CaptureSend); + ITurnContext turnContext = new TurnContext(adapter, new Activity( + text: "hello", + channelId: "channelId", + recipient: new() { Id = "recipientId" }, + conversation: new() { Id = "conversationId" }, + from: new() { Id = "fromId" } + )); + StreamingResponse streamer = new(turnContext); + List citations = new List(); + citations.Add(new Citation(content: "test-content", title: "test", url: "https://example.com")); + streamer.QueueTextChunk("first", citations); + await streamer.WaitForQueue(); + streamer.QueueTextChunk("second"); + await streamer.WaitForQueue(); + streamer.EnableFeedbackLoop = true; + streamer.EnableGeneratedByAILabel = true; + streamer.SensitivityLabel = new SensitivityUsageInfo() { Name= "Sensitivity"}; + await streamer.EndStream(); + Assert.Equal(2, streamer.UpdatesSent()); + } + [Fact] public async Task Test_SendTextChunk_SendsFinalMessageWithAttachments() { @@ -233,6 +268,32 @@ void CaptureSend(Activity[] arg) await streamer.EndStream(); Assert.Equal(2, streamer.UpdatesSent()); Assert.Single(streamer.Attachments); + if (streamer.Citations != null) + { + Assert.Empty(streamer.Citations); + } + } + + [Fact] + public async Task Test_SendActivityThrowsException_AssertThrows() + { + // Arrange + Activity[]? activitiesToSend = null; + void CaptureSend(Activity[] arg) + { + activitiesToSend = arg; + } + var adapter = new SimpleAdapter(CaptureSend); + var turnContextMock = new Mock(); + turnContextMock.Setup((tc) => tc.SendActivityAsync(It.IsAny(), It.IsAny())).ThrowsAsync(new Exception("Forbidden operation")); + + // Act + StreamingResponse streamer = new(turnContextMock.Object); + Exception ex = await Assert.ThrowsAsync(() => streamer.EndStream()); + + + // Assert + Assert.Equal("Error occurred when sending activity while streaming", ex.Message); } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj index e5e48327f..422955daf 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj @@ -11,13 +11,13 @@ - + - + diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs index fdff163f4..644611a5e 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/OpenAIModelFactory.cs @@ -1,7 +1,7 @@ using OpenAI.Assistants; -using OpenAI.Files; using System.ClientModel; using System.ClientModel.Primitives; +using OAI = OpenAI; namespace Microsoft.Teams.AI.Tests.TestUtils { @@ -89,7 +89,7 @@ public static MessageContent CreateMessageContent(string message, string fileId) return threadMessage.Content[0]; } - public static OpenAIFileInfo CreateOpenAIFileInfo(string fileId) + public static OAI.Files.OpenAIFile CreateOpenAIFileInfo(string fileId) { var json = @$"{{ ""id"": ""{fileId}"", @@ -100,7 +100,7 @@ public static OpenAIFileInfo CreateOpenAIFileInfo(string fileId) ""purpose"": ""assistants"" }}"; - var fileInfo = ModelReaderWriter.Read(BinaryData.FromString(json))!; + var fileInfo = 
ModelReaderWriter.Read(BinaryData.FromString(json))!; return fileInfo; } @@ -160,82 +160,40 @@ public TestRequiredAction(string toolCallId, string functionName, string functio } } - internal sealed class TestAsyncPageCollection : AsyncPageCollection where T : class + internal sealed class TestAsyncCollectionResult : AsyncCollectionResult where T : class { public List Items; internal PipelineResponse _pipelineResponse; - private IAsyncEnumerator> _enumerator; - public TestAsyncPageCollection(List items, PipelineResponse response) + public TestAsyncCollectionResult(List items, PipelineResponse response) { Items = items; _pipelineResponse = response; - _enumerator = new TestAsyncEnumerator(items, response); } - protected override IAsyncEnumerator> GetAsyncEnumeratorCore(CancellationToken cancellationToken = default) + public TestAsyncCollectionResult(T item, PipelineResponse response) { - return _enumerator; - } - - protected override Task> GetCurrentPageAsyncCore() - { - return Task.FromResult(_enumerator.Current); - } - } - - internal sealed class TestAsyncEnumerator : IAsyncEnumerator> where T : class - { - private readonly List _items; - private readonly PipelineResponse _pipelineResponse; - private bool _movedOnToNext; - - public TestAsyncEnumerator(List items, PipelineResponse response) - { - _items = items; + Items = new() { item }; _pipelineResponse = response; - _movedOnToNext = false; } - public PageResult Current => PageResult.Create(_items, ContinuationToken.FromBytes(BinaryData.FromString("")), null, _pipelineResponse); - - public ValueTask DisposeAsync() + public override ContinuationToken? GetContinuationToken(ClientResult page) { - return new ValueTask(); + return ContinuationToken.FromBytes(BinaryData.FromString("")); } - public ValueTask MoveNextAsync() + public async override IAsyncEnumerable GetRawPagesAsync() { - if (!_movedOnToNext) - { - return new ValueTask(true); - } - else - { - _movedOnToNext = true; - return new ValueTask(false); - } - + yield return await Task.FromResult(ClientResult.FromValue(Items, _pipelineResponse)); } - } - - internal sealed class TestAsyncResultCollection : AsyncCollectionResult where T : class - { - public List Items = new(); - internal PipelineResponse _pipelineResponse; - - public TestAsyncResultCollection(T item, PipelineResponse response) + protected async override IAsyncEnumerable GetValuesFromPageAsync(ClientResult page) { - Items.Add(item); - _pipelineResponse = response; - } + foreach (T item in Items) + { + yield return await Task.FromResult(item); + } -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - public override async IAsyncEnumerator GetAsyncEnumerator(CancellationToken cancellationToken = default) -#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously - { - yield return FromValue(Items[0], _pipelineResponse); } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs index 19d53a52e..bf908cc2c 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/TestUtils/TestAssistantsOpenAIClient.cs @@ -76,7 +76,7 @@ private ThreadMessage _CreateMessage(string threadId, string message) return newMessage; } - public override AsyncPageCollection GetMessagesAsync(string 
threadId, MessageCollectionOptions options, CancellationToken cancellationToken = default) + public override AsyncCollectionResult GetMessagesAsync(string threadId, MessageCollectionOptions options, CancellationToken cancellationToken = default) { while (RemainingMessages.Count > 0) { @@ -86,12 +86,12 @@ public override AsyncPageCollection GetMessagesAsync(string threa // Sorted by oldest first List messages = Messages[threadId].ToList(); - if (options != null && options.Order != null && options.Order.Value == ListOrder.NewestFirst) + if (options != null && options.Order != null && options.Order.Value == MessageCollectionOrder.Descending) { messages.Reverse(); } - return new TestAsyncPageCollection(messages, Mock.Of()); + return new TestAsyncCollectionResult(messages, Mock.Of()); } public override Task> CreateRunAsync(string threadId, string assistantId, RunCreationOptions createRunOptions, CancellationToken cancellationToken = default) @@ -152,14 +152,14 @@ public override Task> GetRunAsync(string threadId, strin return runWithUpdatedStatus; } - public override AsyncPageCollection GetRunsAsync(string threadId, RunCollectionOptions? options = null, CancellationToken cancellationToken = default) + public override AsyncCollectionResult GetRunsAsync(string threadId, RunCollectionOptions? options = null, CancellationToken cancellationToken = default) { - AsyncPageCollection response; + AsyncCollectionResult response; // AssistantsPlanner only needs the get the latest. if (Runs[threadId].Count() == 0) { - response = new TestAsyncPageCollection(new List(), Mock.Of()); + response = new TestAsyncCollectionResult(new List(), Mock.Of()); return response; } @@ -167,7 +167,7 @@ public override AsyncPageCollection GetRunsAsync(string threadId, Run ThreadRun run = Runs[threadId][lastIndex]; ThreadRun runWithUpdatedStatus = _GetRun(threadId, run.Id)!; - response = new TestAsyncPageCollection(new List() { runWithUpdatedStatus }, Mock.Of()); + response = new TestAsyncCollectionResult(new List() { runWithUpdatedStatus }, Mock.Of()); return response; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs index ddd224933..6a3057a77 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/AI.cs @@ -36,6 +36,7 @@ public AI(AIOptions options, ILoggerFactory? loggerFactory = null) MaxSteps = options.MaxSteps ?? 25, MaxTime = options.MaxTime ?? TimeSpan.FromMilliseconds(300000), AllowLooping = options.AllowLooping ?? true, + EnableFeedbackLoop = options.EnableFeedbackLoop, }; _actions = new ActionCollection(); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/AIEntity.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/AIEntity.cs index e0280b45b..79ebe7139 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/AIEntity.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Action/AIEntity.cs @@ -43,6 +43,12 @@ public class AIEntity : Entity /// [JsonProperty(PropertyName = "citation")] public List Citation { get; set; } = new(); + + /// + /// Optional sensitivity content information. + /// + [JsonProperty(PropertyName = "usageInfo")] + public SensitivityUsageInfo? 
UsageInfo { get; set; } } /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs index e0097d935..bee3b26ab 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClient.cs @@ -66,6 +66,7 @@ public class LLMClient private readonly string? _startStreamingMessage; private ResponseReceivedHandler? _endStreamHandler; + private bool? _enableFeedbackLoop; /// /// Creates a new `LLMClient` instance. @@ -84,6 +85,7 @@ public LLMClient(LLMClientOptions options, ILoggerFactory? loggerFacto this._startStreamingMessage = Options.StartStreamingMessage; this._endStreamHandler = Options.EndStreamHandler; + this._enableFeedbackLoop = Options.EnableFeedbackLoop; } /// @@ -171,6 +173,14 @@ public async Task CompletePromptAsync( // Create streamer and send initial message streamer = new StreamingResponse(context); memory.SetValue("temp.streamer", streamer); + + if (this._enableFeedbackLoop != null) + { + streamer.EnableFeedbackLoop = this._enableFeedbackLoop; + } + + streamer.EnableGeneratedByAILabel = true; + if (!string.IsNullOrEmpty(this._startStreamingMessage)) { streamer.QueueInformativeUpdate(this._startStreamingMessage!); @@ -187,9 +197,12 @@ public async Task CompletePromptAsync( // Send chunk to client string text = args.Chunk.delta?.GetContent() ?? ""; + IList? citations = args.Chunk.delta?.Context?.Citations ?? null; + + if (text.Length > 0) { - streamer.QueueTextChunk(text); + streamer.QueueTextChunk(text, citations); } }); @@ -368,7 +381,7 @@ CancellationToken cancellationToken repairTemplate.Prompt = new(new() { this.Options.Template.Prompt, - new ConversationHistorySection($"{this.Options.HistoryVariable}-repair") + new ConversationHistorySection($"{this.Options.HistoryVariable}-repair", -1) }); if (this.Options.LogRepairs) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClientOptions.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClientOptions.cs index e9e47050b..e5e7d7d6a 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClientOptions.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Clients/LLMClientOptions.cs @@ -73,6 +73,11 @@ public class LLMClientOptions /// public ResponseReceivedHandler? EndStreamHandler; + /// + /// Optional, controls the feedback loop for streaming responses. + /// + public bool? 
EnableFeedbackLoop { get; set; } + /// /// Creates an instance of `LLMClientOptions` /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs index 63e2b7b93..fb2c3a2ea 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Embeddings/OpenAIEmbeddings.cs @@ -129,8 +129,8 @@ public async Task CreateEmbeddingsAsync(IList inputs try { DateTime startTime = DateTime.Now; - ClientResult response = await embeddingsClient.GenerateEmbeddingsAsync(inputs); - List> embeddingItems = response.Value.OrderBy(item => item.Index).Select(item => item.Vector).ToList(); + ClientResult response = await embeddingsClient.GenerateEmbeddingsAsync(inputs); + List> embeddingItems = response.Value.OrderBy(item => item.Index).Select(item => item.ToFloats()).ToList(); if (_options.LogRequests!.Value) { @@ -170,9 +170,9 @@ public async Task CreateEmbeddingsAsync(IList inputs { return apiVersion switch { - "2024-04-01-preview" => ServiceVersion.V2024_04_01_Preview, - "2024-05-01-preview" => ServiceVersion.V2024_05_01_Preview, "2024-06-01" => ServiceVersion.V2024_06_01, + "2024-08-01-preview" => ServiceVersion.V2024_08_01_Preview, + "2024-10-01-preview" => ServiceVersion.V2024_10_01_Preview, _ => null, }; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs index 13a52336e..0d09229da 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/AssistantsMessage.cs @@ -2,6 +2,7 @@ using Microsoft.Bot.Schema; using OpenAI.Assistants; using OpenAI.Files; +using OAI = OpenAI; namespace Microsoft.Teams.AI.AI.Models @@ -26,7 +27,7 @@ public class AssistantsMessage : ChatMessage /// /// The Assistants API thread message. /// The OpenAI File client. - public AssistantsMessage(MessageContent content, FileClient? fileClient = null) : base(ChatRole.Assistant) + public AssistantsMessage(MessageContent content, OpenAIFileClient? fileClient = null) : base(ChatRole.Assistant) { this.MessageContent = content; @@ -39,7 +40,7 @@ public AssistantsMessage(MessageContent content, FileClient? fileClient = null) MessageContext context = new(); List>> fileContentDownloadTasks = new(); - List>> fileInfoDownloadTasks = new(); + List>> fileInfoDownloadTasks = new(); for (int i = 0; i < content.TextAnnotations.Count; i++) { @@ -73,7 +74,7 @@ public AssistantsMessage(MessageContent content, FileClient? 
fileClient = null) // Create attachments out of these downloaded files // Wait for tasks to complete ClientResult[] downloadedFileContent = fileContentDownloadTasks.Select((task) => task.Result).ToArray(); - ClientResult[] downloadedFileInfo = fileInfoDownloadTasks.Select((task) => task.Result).ToArray(); + ClientResult[] downloadedFileInfo = fileInfoDownloadTasks.Select((task) => task.Result).ToArray(); for (int i = 0; i < downloadedFileContent.Length; i++) { @@ -128,7 +129,7 @@ public class OpenAIFile /// /// Represents an OpenAI File information /// - public OpenAIFileInfo FileInfo; + public OAI.Files.OpenAIFile FileInfo; /// /// Represents the contents of an OpenAI File @@ -173,7 +174,7 @@ public class OpenAIFile /// /// The OpenAI File /// The OpenAI File contents - public OpenAIFile(OpenAIFileInfo fileInfo, BinaryData fileContent) + public OpenAIFile(OAI.Files.OpenAIFile fileInfo, BinaryData fileContent) { FileInfo = fileInfo; FileContent = fileContent; diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs index d3a484d8c..1532893fc 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatCompletionToolCall.cs @@ -44,7 +44,7 @@ internal ChatToolCall ToChatToolCall() if (this.Type == ToolType.Function) { ChatCompletionsFunctionToolCall functionToolCall = (ChatCompletionsFunctionToolCall)this; - return ChatToolCall.CreateFunctionToolCall(functionToolCall.Id, functionToolCall.Name, functionToolCall.Arguments); + return ChatToolCall.CreateFunctionToolCall(functionToolCall.Id, functionToolCall.Name, BinaryData.FromString(functionToolCall.Arguments)); } throw new TeamsAIException($"Invalid tool type: {this.Type}"); @@ -60,7 +60,7 @@ internal static ChatCompletionsToolCall FromChatToolCall(ChatToolCall toolCall) { if (toolCall.Kind == ChatToolCallKind.Function) { - return new ChatCompletionsFunctionToolCall(toolCall.Id, toolCall.FunctionName, toolCall.FunctionArguments); + return new ChatCompletionsFunctionToolCall(toolCall.Id, toolCall.FunctionName, toolCall.FunctionArguments.ToString()); } throw new TeamsAIException($"Invalid ChatCompletionsToolCall type: {toolCall.GetType().Name}"); @@ -70,7 +70,7 @@ internal static ChatCompletionsToolCall FromStreamingChatToolCall(StreamingChatT { if (toolCall.Kind == ChatToolCallKind.Function) { - return new ChatCompletionsFunctionToolCall(toolCall.Id, toolCall.FunctionName, toolCall.FunctionArgumentsUpdate); + return new ChatCompletionsFunctionToolCall(toolCall.ToolCallId, toolCall.FunctionName, toolCall.FunctionArgumentsUpdate.ToString()); } throw new TeamsAIException($"Invalid ChatCompletionsToolCall type: {toolCall.GetType().Name}"); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs index 871ddfc49..1f0cc4d4f 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/ChatMessage.cs @@ -1,6 +1,4 @@ -using System.Diagnostics; -using Azure.AI.OpenAI; -using Azure.AI.OpenAI.Chat; +using Azure.AI.OpenAI.Chat; using Microsoft.Bot.Schema; using Microsoft.Teams.AI.Exceptions; using Microsoft.Teams.AI.Utilities; @@ -113,7 +111,7 @@ internal ChatMessage(ChatCompletion chatCompletion) if 
(chatCompletion.FunctionCall != null && chatCompletion.FunctionCall.FunctionName != string.Empty) { this.Name = chatCompletion.FunctionCall.FunctionName; - this.FunctionCall = new FunctionCall(chatCompletion.FunctionCall.FunctionName, chatCompletion.FunctionCall.FunctionArguments); + this.FunctionCall = new FunctionCall(chatCompletion.FunctionCall.FunctionName, chatCompletion.FunctionCall.FunctionArguments.ToString()); } if (chatCompletion.ToolCalls != null && chatCompletion.ToolCalls.Count > 0) @@ -127,7 +125,7 @@ internal ChatMessage(ChatCompletion chatCompletion) } #pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - AzureChatMessageContext? azureContext = chatCompletion.GetAzureMessageContext(); + ChatMessageContext? azureContext = chatCompletion.GetMessageContext(); #pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. if (azureContext != null) { @@ -155,7 +153,7 @@ internal ChatMessage(StreamingChatCompletionUpdate streamingChatCompletionUpdate if (streamingChatCompletionUpdate.FunctionCallUpdate != null && streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName != string.Empty) { this.Name = streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName; - this.FunctionCall = new FunctionCall(streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName, streamingChatCompletionUpdate.FunctionCallUpdate.FunctionArgumentsUpdate); + this.FunctionCall = new FunctionCall(streamingChatCompletionUpdate.FunctionCallUpdate.FunctionName, streamingChatCompletionUpdate.FunctionCallUpdate.FunctionArgumentsUpdate.ToString()); } if (streamingChatCompletionUpdate.ToolCallUpdates != null && streamingChatCompletionUpdate.ToolCallUpdates.Count > 0) @@ -168,7 +166,7 @@ internal ChatMessage(StreamingChatCompletionUpdate streamingChatCompletionUpdate } #pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. - AzureChatMessageContext? azureContext = streamingChatCompletionUpdate.GetAzureMessageContext(); + ChatMessageContext? azureContext = streamingChatCompletionUpdate.GetMessageContext(); #pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. if (azureContext != null) { @@ -204,12 +202,12 @@ internal OAI.Chat.ChatMessage ToOpenAIChatMessage() { if (contentPart is TextContentPart textPart) { - contentItems.Add(ChatMessageContentPart.CreateTextMessageContentPart(textPart.Text)); + contentItems.Add(ChatMessageContentPart.CreateTextPart(textPart.Text)); textContentBuilder.AppendLine(textPart.Text); } else if (contentPart is ImageContentPart imagePart) { - contentItems.Add(ChatMessageContentPart.CreateImageMessageContentPart(new Uri(imagePart.ImageUrl))); + contentItems.Add(ChatMessageContentPart.CreateImagePart(new Uri(imagePart.ImageUrl))); } } } @@ -245,8 +243,8 @@ internal OAI.Chat.ChatMessage ToOpenAIChatMessage() if (this.FunctionCall != null) { - ChatFunctionCall functionCall = new(this.FunctionCall.Name ?? "", this.FunctionCall.Arguments ?? ""); - assistantMessage = new AssistantChatMessage(functionCall, textContent); + ChatFunctionCall functionCall = new(this.FunctionCall.Name ?? "", BinaryData.FromString(this.FunctionCall.Arguments ?? 
"")); + assistantMessage = new AssistantChatMessage(functionCall); } else if (this.ActionCalls != null) { @@ -255,7 +253,7 @@ internal OAI.Chat.ChatMessage ToOpenAIChatMessage() { toolCalls.Add(actionCall.ToChatToolCall()); } - assistantMessage = new AssistantChatMessage(toolCalls, textContent); + assistantMessage = new AssistantChatMessage(toolCalls); } else { @@ -394,7 +392,7 @@ public ActionCall(ChatToolCall toolCall) } Id = toolCall.Id; - Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArguments); + Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArguments.ToString()); } /// @@ -409,15 +407,15 @@ public ActionCall(StreamingChatToolCallUpdate toolCall) throw new TeamsAIException($"Invalid ActionCall type: {toolCall.GetType().Name}"); } - Id = toolCall.Id; - Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArgumentsUpdate); + Id = toolCall.ToolCallId; + Function = new ActionFunction(toolCall.FunctionName, toolCall.FunctionArgumentsUpdate.ToString()); } internal ChatToolCall ToChatToolCall() { if (this.Type == ActionCallType.Function) { - return ChatToolCall.CreateFunctionToolCall(Id, Function!.Name, Function.Arguments); + return ChatToolCall.CreateFunctionToolCall(Id, Function!.Name, BinaryData.FromString(Function.Arguments)); } throw new TeamsAIException($"Invalid tool type: {this.Type}"); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs index 10f808f4b..2b7c89534 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/MessageContext.cs @@ -27,14 +27,18 @@ public MessageContext() { } /// Creates a MessageContext using OpenAI.Chat.AzureChatMessageContext. /// /// - internal MessageContext(AzureChatMessageContext azureContext) +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + internal MessageContext(ChatMessageContext azureContext) +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. { if (azureContext.Citations != null) { - foreach (AzureChatCitation citation in azureContext.Citations) +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + foreach (ChatCitation citation in azureContext.Citations) { - this.Citations.Add(new Citation(citation.Content, citation.Title, citation.Url)); + this.Citations.Add(new Citation(citation.Content, citation.Title, citation.Uri.ToString())); } +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
} this.Intent = azureContext.Intent; diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs index b3fbad3c0..8fa533e15 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Models/OpenAIModel.cs @@ -18,14 +18,36 @@ using ServiceVersion = Azure.AI.OpenAI.AzureOpenAIClientOptions.ServiceVersion; using Azure.AI.OpenAI.Chat; using OpenAI.Chat; -using Microsoft.Recognizers.Text.NumberWithUnit.Dutch; using Microsoft.Teams.AI.Application; +using System.Reflection; +using System.Runtime.CompilerServices; +[assembly: InternalsVisibleTo("Microsoft.Teams.AI.Tests")] +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. namespace Microsoft.Teams.AI.AI.Models { /// - /// A `PromptCompletionModel` for calling OpenAI and Azure OpenAI hosted models. + /// A `IPromptCompletionModel` for calling OpenAI and Azure OpenAI hosted models. /// + /// + /// The model has been updated to support calling OpenAI's new o1 family of models. That currently + /// comes with a few constraints. These constraints are mostly handled for you but are worth noting: + /// + /// * The o1 models introduce a new `max_completion_tokens` parameter and they've deprecated the + /// `max_tokens` parameter. The model will automatically convert the incoming `max_tokens` parameter + /// to `max_completion_tokens` for you. But you should be aware that o1 has hidden token usage and costs + /// that aren't constrained by the `max_completion_tokens` parameter. This means that you may see an + /// increase in token usage and costs when using the o1 models. + /// + /// * The o1 models do not currently support the sending of system messages which just means that the + /// `useSystemMessages` parameter is ignored when calling the o1 models. + /// + /// * The o1 models do not currently support setting the `temperature`, `top_p`, and `presence_penalty` + /// parameters so they will be ignored. + /// + /// * The o1 models do not currently support the use of tools so you will need to use the "monologue" + /// augmentation to call actions. + /// public class OpenAIModel : IPromptCompletionStreamingModel { private readonly BaseOpenAIModelOptions _options; @@ -161,19 +183,6 @@ public async Task CompletePromptAsync(ITurnContext turnContext, Events.OnBeforeCompletion(beforeCompletionEventArgs); } - // Setup tools if enabled - bool isToolsAugmentation = promptTemplate.Configuration.Augmentation.Type == Augmentations.AugmentationType.Tools; - List tools = new(); - - // If tools is enabled, reformat actions to schema - if (isToolsAugmentation && promptTemplate.Actions.Count > 0) - { - foreach (ChatCompletionAction action in promptTemplate.Actions) - { - tools.Add(action.ToChatTool()); - } - } - // Render prompt RenderedPromptSection> prompt = await promptTemplate.Prompt.RenderAsMessagesAsync(turnContext, memory, promptFunctions, tokenizer, maxInputTokens, cancellationToken); if (prompt.TooLong) @@ -185,7 +194,11 @@ public async Task CompletePromptAsync(ITurnContext turnContext, }; } - if (!_options.UseSystemMessages!.Value && prompt.Output.Count > 0 && prompt.Output[0].Role == ChatRole.System) + // Get the model to use. + string model = promptTemplate.Configuration.Completion.Model ?? 
_deploymentName; + bool isO1Model = model.StartsWith("o1-"); + bool useSystemMessages = !isO1Model && _options.UseSystemMessages.GetValueOrDefault(false); + if (!useSystemMessages && prompt.Output.Count > 0 && prompt.Output[0].Role == ChatRole.System) { prompt.Output[0].Role = ChatRole.User; } @@ -196,42 +209,58 @@ public async Task CompletePromptAsync(ITurnContext turnContext, _logger.LogTrace(JsonSerializer.Serialize(prompt.Output, _serializerOptions)); } - // Render prompt template + // Map to OpenAI ChatMessage IEnumerable chatMessages = prompt.Output.Select(chatMessage => chatMessage.ToOpenAIChatMessage()); ChatCompletionOptions chatCompletionOptions = new() { - MaxTokens = completion.MaxTokens, + MaxOutputTokenCount = completion.MaxTokens, Temperature = (float)completion.Temperature, TopP = (float)completion.TopP, PresencePenalty = (float)completion.PresencePenalty, FrequencyPenalty = (float)completion.FrequencyPenalty, }; - if (isToolsAugmentation) + if (isO1Model) { - chatCompletionOptions.ToolChoice = completion.GetOpenAIChatToolChoice(); - chatCompletionOptions.ParallelToolCallsEnabled = completion.ParallelToolCalls; - } - - foreach (ChatTool tool in tools) + chatCompletionOptions.Temperature = 1; + chatCompletionOptions.TopP = 1; + chatCompletionOptions.PresencePenalty = 0; + } else { - chatCompletionOptions.Tools.Add(tool); + // `MaxOutputTokenCount` is not supported for non-o1 Azure OpenAI models, hence it needs to be set for it to work. + SetMaxTokens(completion.MaxTokens, chatCompletionOptions); } - - if (chatCompletionOptions == null) + // Set tools configurations + bool isToolsAugmentation = promptTemplate.Configuration.Augmentation.Type == Augmentations.AugmentationType.Tools; + if (isToolsAugmentation) { - throw new TeamsAIException("Failed to create chat completions options"); + chatCompletionOptions.ToolChoice = completion.GetOpenAIChatToolChoice(); + chatCompletionOptions.AllowParallelToolCalls = completion.ParallelToolCalls; + + if (promptTemplate.Actions.Count > 0) + { + foreach (ChatCompletionAction action in promptTemplate.Actions) + { + chatCompletionOptions.Tools.Add(action.ToChatTool()); + } + } } + // Add Azure chat extension configurations IDictionary? additionalData = promptTemplate.Configuration.Completion.AdditionalData; if (_useAzure) { AddAzureChatExtensionConfigurations(chatCompletionOptions, additionalData); } - string model = promptTemplate.Configuration.Completion.Model ?? _deploymentName; + if (_options.LogRequests!.Value) + { + _logger.LogTrace("CHAT COMPLETION CONFIG:"); + _logger.LogTrace(JsonSerializer.Serialize(chatCompletionOptions, _serializerOptions)); + } + PipelineResponse? rawResponse = null; ClientResult? 
chatCompletionsResponse = null; @@ -319,7 +348,6 @@ public async Task CompletePromptAsync(ITurnContext turnContext, if (_options.LogRequests!.Value) { - // TODO: Colorize _logger.LogTrace("RESPONSE:"); _logger.LogTrace($"duration {(DateTime.UtcNow - startTime).TotalMilliseconds} ms"); if (promptResponse.Status == PromptResponseStatus.Success && chatCompletionsResponse != null) @@ -409,9 +437,9 @@ public async Task CompletePromptAsync(ITurnContext turnContext, { return apiVersion switch { - "2024-04-01-preview" => ServiceVersion.V2024_04_01_Preview, - "2024-05-01-preview" => ServiceVersion.V2024_05_01_Preview, "2024-06-01" => ServiceVersion.V2024_06_01, + "2024-08-01-preview" => ServiceVersion.V2024_08_01_Preview, + "2024-10-01-preview" => ServiceVersion.V2024_10_01_Preview, _ => null, }; } @@ -430,12 +458,10 @@ private void AddAzureChatExtensionConfigurations(ChatCompletionOptions options, { try { - AzureChatDataSource? dataSource = ModelReaderWriter.Read(BinaryData.FromObjectAsJson(item)); + ChatDataSource? dataSource = ModelReaderWriter.Read(BinaryData.FromObjectAsJson(item)); if (dataSource != null) { -#pragma warning disable AOAI001 options.AddDataSource(dataSource); -#pragma warning restore AOAI001 } } catch (Exception ex) @@ -445,5 +471,12 @@ private void AddAzureChatExtensionConfigurations(ChatCompletionOptions options, } } } + + internal void SetMaxTokens(int maxTokens, ChatCompletionOptions options) + { + MethodInfo setMaxTokens = options.GetType().GetMethod("set__deprecatedMaxTokens", BindingFlags.NonPublic | BindingFlags.Instance); + setMaxTokens.Invoke(options, new object[] { maxTokens }); + } } -} \ No newline at end of file +} +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs index c2a6efc44..896a49516 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModerator.cs @@ -96,11 +96,8 @@ public async Task ReviewOutputAsync(ITurnContext turnContext, TState turnS { Response response = await _client.AnalyzeTextAsync(analyzeTextOptions); - bool flagged = response.Value.BlocklistsMatchResults.Count > 0 - || _ShouldBeFlagged(response.Value.HateResult) - || _ShouldBeFlagged(response.Value.SelfHarmResult) - || _ShouldBeFlagged(response.Value.SexualResult) - || _ShouldBeFlagged(response.Value.ViolenceResult); + bool flagged = response.Value.BlocklistsMatch.Count > 0 + || response.Value.CategoriesAnalysis.Any((ca) => _ShouldBeFlagged(ca)); if (flagged) { string actionName = isModelInput ? 
AIConstants.FlaggedInputActionName : AIConstants.FlaggedOutputActionName; @@ -138,17 +135,54 @@ public async Task ReviewOutputAsync(ITurnContext turnContext, TState turnS return null; } - private bool _ShouldBeFlagged(TextAnalyzeSeverityResult result) + private bool _ShouldBeFlagged(TextCategoriesAnalysis result) { return result != null && result.Severity >= _options.SeverityLevel; } private ModerationResult BuildModerationResult(AnalyzeTextResult result) { - bool hate = _ShouldBeFlagged(result.HateResult); - bool selfHarm = _ShouldBeFlagged(result.SelfHarmResult); - bool sexual = _ShouldBeFlagged(result.SexualResult); - bool violence = _ShouldBeFlagged(result.ViolenceResult); + bool hate = false; + int hateSeverity = 0; + bool selfHarm = false; + int selfHarmSeverity = 0; + bool sexual = false; + int sexualSeverity = 0; + bool violence = false; + int violenceSeverity = 0; + + foreach (TextCategoriesAnalysis textAnalysis in result.CategoriesAnalysis) + { + if (textAnalysis.Severity < _options.SeverityLevel) + { + continue; + } + + int severity = textAnalysis.Severity ?? 0; + if (textAnalysis.Category == TextCategory.Hate) + { + hate = true; + hateSeverity = severity; + } + + if (textAnalysis.Category == TextCategory.Violence) + { + violence = true; + violenceSeverity = severity; + } + + if (textAnalysis.Category == TextCategory.SelfHarm) + { + selfHarm = true; + selfHarmSeverity = severity; + } + + if (textAnalysis.Category == TextCategory.Sexual) + { + sexual = true; + sexualSeverity = severity; + } + } return new() { @@ -166,13 +200,13 @@ private ModerationResult BuildModerationResult(AnalyzeTextResult result) CategoryScores = new() { // Normalize the scores to be between 0 and 1 (highest severity is 6) - Hate = (result.HateResult?.Severity ?? 0) / 6.0, - HateThreatening = (result.HateResult?.Severity ?? 0) / 6.0, - SelfHarm = (result.SelfHarmResult?.Severity ?? 0) / 6.0, - Sexual = (result.SexualResult?.Severity ?? 0) / 6.0, - SexualMinors = (result.SexualResult?.Severity ?? 0) / 6.0, - Violence = (result.ViolenceResult?.Severity ?? 0) / 6.0, - ViolenceGraphic = (result.ViolenceResult?.Severity ?? 0) / 6.0 + Hate = hateSeverity / 6.0, + HateThreatening = hateSeverity / 6.0, + SelfHarm = selfHarmSeverity / 6.0, + Sexual = sexualSeverity / 6.0, + SexualMinors = sexualSeverity / 6.0, + Violence = violenceSeverity / 6.0, + ViolenceGraphic = violenceSeverity / 6.0 } }; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModeratorOptions.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModeratorOptions.cs index 6eae81577..143689dfe 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModeratorOptions.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Moderator/AzureContentSafetyModeratorOptions.cs @@ -36,10 +36,18 @@ public class AzureContentSafetyModeratorOptions public IList? BlocklistNames { get; set; } /// - /// When set to true, further analyses of harmful content will not be performed in cases where blocklists are hit. When set to false, all analyses of harmful content will be performed, whether or not blocklists are hit. + /// When set to true, further analyses of harmful content will not be performed in cases where blocklists are hit. + /// When set to false, all analyses of harmful content will be performed, whether or not blocklists are hit. /// + [Obsolete("use HaltOnBlockListHit")] public bool? 
BreakByBlocklists { get; set; } + /// + /// When set to true, further analyses of harmful content will not be performed in cases where blocklists are hit. + /// When set to false, all analyses of harmful content will be performed, whether or not blocklists are hit. + /// + public bool? HaltOnBlockListHit { get; set; } + /// /// Create an instance of the AzureContentSafetyModeratorOptions class. /// diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs index f3751f6b4..e60c1f0bc 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/ActionPlanner.cs @@ -38,6 +38,8 @@ public class ActionPlanner : IPlanner where TState : TurnState private readonly ILoggerFactory? _logger; + private bool _enableFeedbackLoop; + /// /// Creates a new `ActionPlanner` instance. /// @@ -104,11 +106,14 @@ public async Task BeginTaskAsync(ITurnContext context, TState state, AI ContinueTaskAsync(ITurnContext context, TState state, AI ai, CancellationToken cancellationToken = default) { PromptTemplate template = await this.Options.DefaultPrompt(context, state, this); + + this._enableFeedbackLoop = ai.Options.EnableFeedbackLoop; + PromptResponse response = await this.CompletePromptAsync(context, state, template, template.Augmentation, cancellationToken); if (response.Status != PromptResponseStatus.Success) { - throw new Exception(response.Error?.Message ?? "[Action Planner]: an error has occurred"); + throw new Exception(response.Error?.Message ?? "[Action Planner]: an error has occurred", response.Error); } // Check to see if we have a response @@ -176,6 +181,7 @@ public async Task CompletePromptAsync( LogRepairs = this.Options.LogRepairs, StartStreamingMessage = this.Options.StartStreamingMessage, EndStreamHandler = this.Options.EndStreamHandler, + EnableFeedbackLoop = this._enableFeedbackLoop, }, this._logger); return await client.CompletePromptAsync(context, memory, this.Prompts, cancellationToken); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs index d352c3c26..b2f94fb24 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Planners/AssistantsPlanner.cs @@ -8,7 +8,7 @@ using Microsoft.Teams.AI.Exceptions; using Microsoft.Teams.AI.State; using Microsoft.Teams.AI.Utilities; -using OpenAI; +using OAI = OpenAI; using OpenAI.Assistants; using OpenAI.Files; using System.ClientModel; @@ -32,7 +32,7 @@ public class AssistantsPlanner : IPlanner private readonly AssistantsPlannerOptions _options; private readonly AssistantClient _client; - private readonly FileClient _fileClient; + private readonly OpenAIFileClient _fileClient; // TODO: Write trace logs #pragma warning disable IDE0052 // Remove unread private members @@ -189,14 +189,14 @@ private async Task _BlockOnInProgressRunsAsync(string threadId, CancellationToke // Loop until the last run is completed while (true) { - AsyncPageCollection? runs = _client.GetRunsAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken); + AsyncCollectionResult? 
runs = _client.GetRunsAsync(threadId, new() { Order = RunCollectionOrder.Descending }, cancellationToken); if (runs == null) { return; } - ThreadRun? run = runs.GetAllValuesAsync().GetAsyncEnumerator().Current; + ThreadRun? run = runs.GetAsyncEnumerator().Current; if (run == null || _IsRunCompleted(run)) { return; @@ -210,9 +210,9 @@ private async Task _BlockOnInProgressRunsAsync(string threadId, CancellationToke private async Task _GeneratePlanFromMessagesAsync(string threadId, string lastMessageId, CancellationToken cancellationToken) { // Find the new messages - AsyncPageCollection messages = _client.GetMessagesAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken); + AsyncCollectionResult messages = _client.GetMessagesAsync(threadId, new() { Order = MessageCollectionOrder.Descending }, cancellationToken); List newMessages = new(); - await foreach (ThreadMessage message in messages.GetAllValuesAsync()) + await foreach (ThreadMessage message in messages) { if (string.Equals(message.Id, lastMessageId)) { @@ -380,7 +380,7 @@ internal static AssistantClient _CreateClient(TokenCredential tokenCredential, s return azureOpenAI.GetAssistantClient(); } - internal FileClient _CreateFileClient(string apiKey, string? endpoint = null) + internal OpenAIFileClient _CreateFileClient(string apiKey, string? endpoint = null) { Verify.ParamNotNull(apiKey); @@ -388,22 +388,22 @@ internal FileClient _CreateFileClient(string apiKey, string? endpoint = null) { // Azure OpenAI AzureOpenAIClient azureOpenAI = new(new Uri(endpoint), new ApiKeyCredential(apiKey)); - return azureOpenAI.GetFileClient(); + return azureOpenAI.GetOpenAIFileClient(); } else { // OpenAI - return new FileClient(apiKey); + return new OpenAIFileClient(apiKey); } } - internal FileClient _CreateFileClient(TokenCredential tokenCredential, string endpoint) + internal OpenAIFileClient _CreateFileClient(TokenCredential tokenCredential, string endpoint) { Verify.ParamNotNull(tokenCredential); Verify.ParamNotNull(endpoint); AzureOpenAIClient azureOpenAI = new(new Uri(endpoint), tokenCredential); - return azureOpenAI.GetFileClient(); + return azureOpenAI.GetOpenAIFileClient(); } private async Task _CreateUserThreadMessageAsync(string threadId, TState state, CancellationToken cancellationToken) @@ -417,16 +417,16 @@ private async Task _CreateUserThreadMessageAsync(string threadId, IList? 
inputFiles = state.Temp?.InputFiles.Where((file) => file.Filename != null && file.Filename != string.Empty).ToList(); if (inputFiles != null && inputFiles.Count > 0) { - List>> fileUploadTasks = new(); + List>> fileUploadTasks = new(); foreach (InputFile file in inputFiles) { fileUploadTasks.Add(_fileClient.UploadFileAsync(file.Content, file.Filename!, FileUploadPurpose.Assistants)); } - ClientResult[] uploadedFiles = await Task.WhenAll(fileUploadTasks); + ClientResult[] uploadedFiles = await Task.WhenAll(fileUploadTasks); for (int i = 0; i < uploadedFiles.Count(); i++) { - OpenAIFileInfo file = uploadedFiles[i]; + OAI.Files.OpenAIFile file = uploadedFiles[i]; if (inputFiles[i].ContentType.StartsWith("image/")) { messages.Add(MessageContent.FromImageFileId(file.Id, MessageImageDetail.Auto)); diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs index d7b8a6a8c..e495e4745 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptManager.cs @@ -128,7 +128,7 @@ public PromptTemplate GetPrompt(string name) template.Prompt.Sections = new List() { // The "1" place holder is to make this a fixed section so it is rendered in the correct order. // TODO: When implementing the new layout engine class refactor this. - new GroupSection(ChatRole.System, template.Prompt.Sections, 1) + new GroupSection(ChatRole.System, template.Prompt.Sections, 1) }; if (template.Configuration.Completion.IncludeHistory) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs index 8796b651b..7d342db1b 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/PromptTemplate.cs @@ -314,9 +314,9 @@ internal OAI.Chat.ChatToolChoice GetOpenAIChatToolChoice() { return ToolChoice switch { - ChatToolChoice.Auto => OAI.Chat.ChatToolChoice.Auto, - ChatToolChoice.Required => OAI.Chat.ChatToolChoice.Required, - ChatToolChoice.None => OAI.Chat.ChatToolChoice.None, + ChatToolChoice.Auto => OAI.Chat.ChatToolChoice.CreateAutoChoice(), + ChatToolChoice.Required => OAI.Chat.ChatToolChoice.CreateRequiredChoice(), + ChatToolChoice.None => OAI.Chat.ChatToolChoice.CreateNoneChoice(), _ => throw new InvalidOperationException($"Unknown ChatToolChoice: {ToolChoice}"), }; } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs index edaeb4603..2e8f43fc4 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Prompts/Sections/LayoutSection.cs @@ -20,7 +20,7 @@ private List _fixedSections { get { - return this.Sections.Where(s => s.Tokens > -1).OrderBy(s => s.Required).ToList(); + return this.Sections.Where(s => s.Tokens > -1).OrderBy(s => !s.Required).ToList(); } } @@ -28,7 +28,7 @@ private List _autoSections { get { - return this.Sections.Where(s => s.Tokens == -1).OrderBy(s => s.Required).ToList(); + return this.Sections.Where(s => s.Tokens == -1).OrderBy(s => !s.Required).ToList(); } } diff --git 
a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs index f24a07993..72b6e52cd 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/AI/Tokenizers/GPTTokenizer.cs @@ -12,7 +12,7 @@ public class GPTTokenizer : ITokenizer /// /// Creates an instance of `GPTTokenizer` using "gpt-4" model name by default which is using the `cl100k_base` encoding /// - public GPTTokenizer() => _encoding = Tokenizer.CreateTiktokenForModel("gpt-4"); + public GPTTokenizer() => _encoding = TiktokenTokenizer.CreateForModel("gpt-4"); /// /// Creates an instance of `GPTTokenizer` /// @@ -24,7 +24,7 @@ public class GPTTokenizer : ITokenizer /// Creates an instance of `GPTTokenizer` /// /// model to encode/decode for - public GPTTokenizer(string model) => this._encoding = Tokenizer.CreateTiktokenForModel(model); + public GPTTokenizer(string model) => this._encoding = TiktokenTokenizer.CreateForModel(model); /// /// Encode diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingChannelData.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingChannelData.cs index 3c79fa397..fa675a280 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingChannelData.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingChannelData.cs @@ -34,5 +34,12 @@ public class StreamingChannelData /// [JsonProperty(PropertyName = "streamId")] public string? streamId { get; set; } + + /// + /// Sets the Feedback Loop in Teams that allows a user to + /// give thumbs up or down to a response. + /// + [JsonProperty(PropertyName = "feedbackLoopEnabled")] + public bool? feedbackLoopEnabled { get; set; } } } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingResponse.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingResponse.cs index cbaab401a..61821abbe 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingResponse.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/StreamingResponse.cs @@ -1,6 +1,11 @@ using Microsoft.Bot.Builder; using Microsoft.Bot.Schema; +using Microsoft.Teams.AI.AI.Action; +using Microsoft.Teams.AI.AI.Models; using Microsoft.Teams.AI.Exceptions; +using Microsoft.Teams.AI.Utilities; +using Newtonsoft.Json.Linq; +using System; namespace Microsoft.Teams.AI.Application { @@ -30,6 +35,28 @@ public class StreamingResponse /// public List? Attachments { get; set; } = new(); + /// + /// Sets the Feedback Loop in Teams that allows a user to give thumbs up or down to a response. + /// Defaults to false. + /// + public bool? EnableFeedbackLoop { get; set; } = false; + + /// + /// Sets the "Generated by AI" label in Teams. + /// Defaults to false. + /// + public bool? EnableGeneratedByAILabel { get; set; } = false; + + /// + /// The citations for the response. + /// + public List? Citations { get; set; } = new(); + + /// + /// The sensitivity label for the response. + /// + public SensitivityUsageInfo? SensitivityLabel { get; set; } + /// /// Gets the stream ID of the current response. /// Assigned after the initial update is sent. @@ -93,8 +120,9 @@ public void QueueInformativeUpdate(string text) /// Queues a chunk of partial message text to be sent to the client. /// /// Partial text of the message to send.
+ /// Citations to include in the message. /// Throws if the stream has already ended. - public void QueueTextChunk(string text) + public void QueueTextChunk(string text, IList? citations = null) { if (this._ended) { @@ -102,6 +130,40 @@ public void QueueTextChunk(string text) } Message += text; + + if (citations != null && citations.Count > 0) + { + if (this.Citations == null) + { + this.Citations = new List(); + } + + int currPos = this.Citations.Count; + + foreach (Citation citation in citations) + { + string abs = CitationUtils.Snippet(citation.Content, 480); + + this.Citations.Add(new ClientCitation() + { + Position = $"{currPos}", + Appearance = new ClientCitationAppearance() + { + Name = citation.Title, + Abstract = abs + } + }); + currPos++; + } + + // If there are citations, modify the content so that the sources are numbers instead of [doc1], [doc2], etc. + this.Message = this.Citations.Count == 0 ? this.Message : CitationUtils.FormatCitationsResponse(this.Message); + + // If there are citations, filter out the citations unused in content. + this.Citations = this.Citations.Count > 0 ? CitationUtils.GetUsedCitations(this.Message, this.Citations) : new List(); + + } + QueueNextChunk(); } @@ -121,7 +183,7 @@ public Task EndStream() QueueNextChunk(); // Wait for the queue to drain - return this._queueSync!; + return WaitForQueue()!; } /// @@ -133,9 +195,16 @@ private void QueueActivity(Func factory) this._queue.Add(factory); // If there's no sync in progress, start one - if (this._queueSync == null) + if (this._queueSync == null || this._queueSync.IsCompleted) { this._queueSync = DrainQueue(); + + if (this._queueSync.IsFaulted) + { + Exception ex = this._queueSync.Exception; + this._queueSync = null; + throw new TeamsAIException($"Error occurred when sending activity while streaming", ex); + } } } @@ -198,25 +267,20 @@ private void QueueNextChunk() /// private async Task DrainQueue() { - await Task.Run(async () => + try { - try - { - while (this._queue.Count > 0) - { - // Get next activity from queue - Activity activity = _queue[0](); - await SendActivity(activity).ConfigureAwait(false); - _queue.RemoveAt(0); - } - } - - finally + while (this._queue.Count > 0) { - // Queue is empty, mark as idle - this._queueSync = null; + // Get next activity from queue + Activity activity = _queue[0](); + await SendActivity(activity).ConfigureAwait(false); + _queue.RemoveAt(0); } - }).ConfigureAwait(false); + } + catch (Exception ex) + { + throw ex; + } } /// @@ -244,8 +308,44 @@ private async Task SendActivity(Activity activity) activity.ChannelData = updatedChannelData; } + activity.Entities = new List{ + new Entity("streaminfo") + { + Properties = JObject.FromObject(new { + streamId = ((StreamingChannelData) activity.ChannelData).streamId, + streamType = ((StreamingChannelData) activity.ChannelData).StreamType, + streamSequence = ((StreamingChannelData) activity.ChannelData).StreamSequence, + + }) + } + }; + + // Add in Powered by AI feature flags + if (this._ended) + { + // Add in feedback loop + StreamingChannelData currChannelData = activity.GetChannelData(); + currChannelData.feedbackLoopEnabled = EnableFeedbackLoop; + activity.ChannelData = currChannelData; + + // Add in Generated by AI + if (this.EnableGeneratedByAILabel == true) + { + AIEntity entity = new AIEntity(); + if (this.Citations != null && this.Citations.Count > 0) + { + entity.Citation = this.Citations; + } + + entity.UsageInfo = this.SensitivityLabel; + activity.Entities.Add(entity); + } + } + ResourceResponse response = await 
this._context.SendActivityAsync(activity).ConfigureAwait(false); + await Task.Delay(TimeSpan.FromSeconds(1.5)); + // Save assigned stream ID if (string.IsNullOrEmpty(StreamId)) { diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs index 887bce6af..9b46b0a07 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Application/TeamsAttachmentDownloader.cs @@ -39,8 +39,8 @@ public TeamsAttachmentDownloader(TeamsAttachmentDownloaderOptions options, HttpC public async Task> DownloadFilesAsync(ITurnContext turnContext, TState turnState, CancellationToken cancellationToken = default) { // Filter out HTML attachments - IEnumerable attachments = turnContext.Activity.Attachments.Where((a) => !a.ContentType.StartsWith("text/html")); - if (!attachments.Any()) + IEnumerable? attachments = turnContext.Activity.Attachments?.Where((a) => !a.ContentType.StartsWith("text/html")); + if (attachments == null || !attachments.Any()) { return new List(); } diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj index 251b6ba1c..c8a2437ef 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI/Microsoft.Teams.AI.csproj @@ -1,62 +1,57 @@ - - - - netstandard2.0 - latest - enable - enable - Microsoft.Teams.AI - Microsoft Teams AI SDK - 1.7.0 - Microsoft - Microsoft - © Microsoft Corporation. All rights reserved. - SDK focused on building AI based applications for Microsoft Teams. - README.md - https://github.com/microsoft/teams-ai - git - True - - - - - https://github.com/microsoft/teams-ai - https://github-production-user-asset-6210df.s3.amazonaws.com/14900841/240368384-972a9a1b-679a-4725-bfc0-a1e76151a78a.png - MIT - true - bots;ai;teams - - NU5125 - true - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + netstandard2.0 + latest + enable + enable + Microsoft.Teams.AI + Microsoft Teams AI SDK + 1.8.0 + Microsoft + Microsoft + © Microsoft Corporation. All rights reserved. + SDK focused on building AI based applications for Microsoft Teams. 
+ README.md + https://github.com/microsoft/teams-ai + git + True + + + + + https://github.com/microsoft/teams-ai + https://github-production-user-asset-6210df.s3.amazonaws.com/14900841/240368384-972a9a1b-679a-4725-bfc0-a1e76151a78a.png + MIT + true + bots;ai;teams + + NU5125 + true + true + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/samples/01.messaging.echoBot/EchoBot.csproj b/dotnet/samples/01.messaging.echoBot/EchoBot.csproj index d83fdff2f..e41f28dc3 100644 --- a/dotnet/samples/01.messaging.echoBot/EchoBot.csproj +++ b/dotnet/samples/01.messaging.echoBot/EchoBot.csproj @@ -17,7 +17,7 @@ - + diff --git a/dotnet/samples/02.messageExtensions.a.searchCommand/SearchCommand.csproj b/dotnet/samples/02.messageExtensions.a.searchCommand/SearchCommand.csproj index 33a856cd4..c6af635c8 100644 --- a/dotnet/samples/02.messageExtensions.a.searchCommand/SearchCommand.csproj +++ b/dotnet/samples/02.messageExtensions.a.searchCommand/SearchCommand.csproj @@ -14,7 +14,7 @@ - + diff --git a/dotnet/samples/03.adaptiveCards.a.typeAheadBot/TypeAheadBot.csproj b/dotnet/samples/03.adaptiveCards.a.typeAheadBot/TypeAheadBot.csproj index 1ab4bdc47..5292b9795 100644 --- a/dotnet/samples/03.adaptiveCards.a.typeAheadBot/TypeAheadBot.csproj +++ b/dotnet/samples/03.adaptiveCards.a.typeAheadBot/TypeAheadBot.csproj @@ -13,7 +13,7 @@ - + diff --git a/dotnet/samples/04.ai.a.teamsChefBot/TeamsChefBot.csproj b/dotnet/samples/04.ai.a.teamsChefBot/TeamsChefBot.csproj index 5f4d8076e..9ce3c4a91 100644 --- a/dotnet/samples/04.ai.a.teamsChefBot/TeamsChefBot.csproj +++ b/dotnet/samples/04.ai.a.teamsChefBot/TeamsChefBot.csproj @@ -16,7 +16,7 @@ - + diff --git a/dotnet/samples/04.ai.b.messageExtensions.gptME/GPT.csproj b/dotnet/samples/04.ai.b.messageExtensions.gptME/GPT.csproj index c1166c405..d2efcaea5 100644 --- a/dotnet/samples/04.ai.b.messageExtensions.gptME/GPT.csproj +++ b/dotnet/samples/04.ai.b.messageExtensions.gptME/GPT.csproj @@ -15,7 +15,7 @@ - + diff --git a/dotnet/samples/04.ai.c.actionMapping.lightBot/LightBot.csproj b/dotnet/samples/04.ai.c.actionMapping.lightBot/LightBot.csproj index ca4a0ca5a..1fcd96b55 100644 --- a/dotnet/samples/04.ai.c.actionMapping.lightBot/LightBot.csproj +++ b/dotnet/samples/04.ai.c.actionMapping.lightBot/LightBot.csproj @@ -18,7 +18,7 @@ - + diff --git a/dotnet/samples/04.ai.d.chainedActions.listBot/ListBot.csproj b/dotnet/samples/04.ai.d.chainedActions.listBot/ListBot.csproj index 32b7c2558..4503d835c 100644 --- a/dotnet/samples/04.ai.d.chainedActions.listBot/ListBot.csproj +++ b/dotnet/samples/04.ai.d.chainedActions.listBot/ListBot.csproj @@ -12,7 +12,7 @@ - + diff --git a/dotnet/samples/04.ai.e.chainedActions.devOpsBot/DevOpsBot.csproj b/dotnet/samples/04.ai.e.chainedActions.devOpsBot/DevOpsBot.csproj index 837189142..9a92bcaa4 100644 --- a/dotnet/samples/04.ai.e.chainedActions.devOpsBot/DevOpsBot.csproj +++ b/dotnet/samples/04.ai.e.chainedActions.devOpsBot/DevOpsBot.csproj @@ -14,7 +14,7 @@ - + diff --git a/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj index fdc5435aa..05a7a3cb3 100644 --- a/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj +++ b/dotnet/samples/04.ai.f.vision.cardMaster/CardGazer.csproj @@ -18,7 +18,7 @@ - + diff --git a/dotnet/samples/04.ai.g.teamsChefBot-streaming/Program.cs b/dotnet/samples/04.ai.g.teamsChefBot-streaming/Program.cs index 3df8d3f53..0a439e50d 100644 --- a/dotnet/samples/04.ai.g.teamsChefBot-streaming/Program.cs +++ 
b/dotnet/samples/04.ai.g.teamsChefBot-streaming/Program.cs @@ -164,12 +164,18 @@ ); Application app = new ApplicationBuilder() - .WithAIOptions(new(planner)) + .WithAIOptions(new(planner) { EnableFeedbackLoop = true }) .WithStorage(sp.GetService()!) .Build(); app.AI.ImportActions(new ActionHandlers()); + app.OnFeedbackLoop((turnContext, turnState, feedbackLoopData, _) => + { + Console.WriteLine("Feedback loop triggered"); + return Task.CompletedTask; + }); + return app; }); diff --git a/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj b/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj index 447ddbeac..00a762075 100644 --- a/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj +++ b/dotnet/samples/04.ai.g.teamsChefBot-streaming/TeamsChefBot.csproj @@ -14,20 +14,9 @@ - - - - - - - - - - - - - - + + + diff --git a/dotnet/samples/04.e.twentyQuestions/TwentyQuestions.csproj b/dotnet/samples/04.e.twentyQuestions/TwentyQuestions.csproj index ce0215307..33faa98f3 100644 --- a/dotnet/samples/04.e.twentyQuestions/TwentyQuestions.csproj +++ b/dotnet/samples/04.e.twentyQuestions/TwentyQuestions.csproj @@ -13,7 +13,7 @@ - + diff --git a/dotnet/samples/04.e.twentyQuestions/teamsapp.yml b/dotnet/samples/04.e.twentyQuestions/teamsapp.yml index 59234e804..a535fdee3 100644 --- a/dotnet/samples/04.e.twentyQuestions/teamsapp.yml +++ b/dotnet/samples/04.e.twentyQuestions/teamsapp.yml @@ -81,3 +81,4 @@ deploy: with: artifactFolder: bin/Release/net6.0/win-x86/publish resourceId: ${{BOT_AZURE_APP_SERVICE_RESOURCE_ID}} +projectId: 6d2b99e4-1480-4218-8d1c-46630228f713 diff --git a/dotnet/samples/05.chatModeration/.editorconfig b/dotnet/samples/05.chatModeration/.editorconfig new file mode 100644 index 000000000..755bfa6c1 --- /dev/null +++ b/dotnet/samples/05.chatModeration/.editorconfig @@ -0,0 +1,240 @@ +# Remove the line below if you want to inherit .editorconfig settings from higher directories +root = true + +# C# files +[*.cs] + +#### Core EditorConfig Options #### + +# Indentation and spacing +indent_size = 4 +indent_style = space +tab_width = 4 + +# New line preferences +end_of_line = crlf +insert_final_newline = false + +#### .NET Coding Conventions #### + +# Organize usings +dotnet_separate_import_directive_groups = false +dotnet_sort_system_directives_first = false +file_header_template = unset + +# this. and Me. 
preferences +dotnet_style_qualification_for_event = false +dotnet_style_qualification_for_field = false +dotnet_style_qualification_for_method = false +dotnet_style_qualification_for_property = false + +# Language keywords vs BCL types preferences +dotnet_style_predefined_type_for_locals_parameters_members = true +dotnet_style_predefined_type_for_member_access = true + +# Parentheses preferences +dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity +dotnet_style_parentheses_in_other_binary_operators = always_for_clarity +dotnet_style_parentheses_in_other_operators = never_if_unnecessary +dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity + +# Modifier preferences +dotnet_style_require_accessibility_modifiers = for_non_interface_members + +# Expression-level preferences +dotnet_style_coalesce_expression = true +dotnet_style_collection_initializer = true +dotnet_style_explicit_tuple_names = true +dotnet_style_namespace_match_folder = true +dotnet_style_null_propagation = true +dotnet_style_object_initializer = true +dotnet_style_operator_placement_when_wrapping = beginning_of_line +dotnet_style_prefer_auto_properties = true +dotnet_style_prefer_collection_expression = when_types_loosely_match +dotnet_style_prefer_compound_assignment = true +dotnet_style_prefer_conditional_expression_over_assignment = true +dotnet_style_prefer_conditional_expression_over_return = true +dotnet_style_prefer_foreach_explicit_cast_in_source = when_strongly_typed +dotnet_style_prefer_inferred_anonymous_type_member_names = true +dotnet_style_prefer_inferred_tuple_names = true +dotnet_style_prefer_is_null_check_over_reference_equality_method = true +dotnet_style_prefer_simplified_boolean_expressions = true +dotnet_style_prefer_simplified_interpolation = true + +# Field preferences +dotnet_style_readonly_field = true + +# Parameter preferences +dotnet_code_quality_unused_parameters = all + +# Suppression preferences +dotnet_remove_unnecessary_suppression_exclusions = none + +# New line preferences +dotnet_style_allow_multiple_blank_lines_experimental = true +dotnet_style_allow_statement_immediately_after_block_experimental = true + +#### C# Coding Conventions #### + +# var preferences +csharp_style_var_elsewhere = false +csharp_style_var_for_built_in_types = false +csharp_style_var_when_type_is_apparent = false + +# Expression-bodied members +csharp_style_expression_bodied_accessors = true:silent +csharp_style_expression_bodied_constructors = false:silent +csharp_style_expression_bodied_indexers = true:silent +csharp_style_expression_bodied_lambdas = true:silent +csharp_style_expression_bodied_local_functions = false:silent +csharp_style_expression_bodied_methods = false:silent +csharp_style_expression_bodied_operators = false:silent +csharp_style_expression_bodied_properties = true:silent + +# Pattern matching preferences +csharp_style_pattern_matching_over_as_with_null_check = true +csharp_style_pattern_matching_over_is_with_cast_check = true +csharp_style_prefer_extended_property_pattern = true +csharp_style_prefer_not_pattern = true +csharp_style_prefer_pattern_matching = true +csharp_style_prefer_switch_expression = true + +# Null-checking preferences +csharp_style_conditional_delegate_call = true + +# Modifier preferences +csharp_prefer_static_anonymous_function = true +csharp_prefer_static_local_function = true +csharp_preferred_modifier_order = 
public,private,protected,internal,file,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,required,volatile,async +csharp_style_prefer_readonly_struct = true +csharp_style_prefer_readonly_struct_member = true + +# Code-block preferences +csharp_prefer_braces = true:silent +csharp_prefer_simple_using_statement = true:suggestion +csharp_style_namespace_declarations = block_scoped:silent +csharp_style_prefer_method_group_conversion = true:silent +csharp_style_prefer_primary_constructors = true:suggestion +csharp_style_prefer_top_level_statements = true:silent + +# Expression-level preferences +csharp_prefer_simple_default_expression = true +csharp_style_deconstructed_variable_declaration = true +csharp_style_implicit_object_creation_when_type_is_apparent = true +csharp_style_inlined_variable_declaration = true +csharp_style_prefer_index_operator = true +csharp_style_prefer_local_over_anonymous_function = true +csharp_style_prefer_null_check_over_type_check = true +csharp_style_prefer_range_operator = true +csharp_style_prefer_tuple_swap = true +csharp_style_prefer_utf8_string_literals = true +csharp_style_throw_expression = true +csharp_style_unused_value_assignment_preference = discard_variable +csharp_style_unused_value_expression_statement_preference = discard_variable + +# 'using' directive preferences +csharp_using_directive_placement = outside_namespace:silent + +# New line preferences +csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = true +csharp_style_allow_blank_line_after_token_in_arrow_expression_clause_experimental = true +csharp_style_allow_blank_line_after_token_in_conditional_expression_experimental = true +csharp_style_allow_blank_lines_between_consecutive_braces_experimental = true +csharp_style_allow_embedded_statements_on_same_line_experimental = true + +#### C# Formatting Rules #### + +# New line preferences +csharp_new_line_before_catch = true +csharp_new_line_before_else = true +csharp_new_line_before_finally = true +csharp_new_line_before_members_in_anonymous_types = true +csharp_new_line_before_members_in_object_initializers = true +csharp_new_line_before_open_brace = all +csharp_new_line_between_query_expression_clauses = true + +# Indentation preferences +csharp_indent_block_contents = true +csharp_indent_braces = false +csharp_indent_case_contents = true +csharp_indent_case_contents_when_block = true +csharp_indent_labels = one_less_than_current +csharp_indent_switch_labels = true + +# Space preferences +csharp_space_after_cast = false +csharp_space_after_colon_in_inheritance_clause = true +csharp_space_after_comma = true +csharp_space_after_dot = false +csharp_space_after_keywords_in_control_flow_statements = true +csharp_space_after_semicolon_in_for_statement = true +csharp_space_around_binary_operators = before_and_after +csharp_space_around_declaration_statements = false +csharp_space_before_colon_in_inheritance_clause = true +csharp_space_before_comma = false +csharp_space_before_dot = false +csharp_space_before_open_square_brackets = false +csharp_space_before_semicolon_in_for_statement = false +csharp_space_between_empty_square_brackets = false +csharp_space_between_method_call_empty_parameter_list_parentheses = false +csharp_space_between_method_call_name_and_opening_parenthesis = false +csharp_space_between_method_call_parameter_list_parentheses = false +csharp_space_between_method_declaration_empty_parameter_list_parentheses = false +csharp_space_between_method_declaration_name_and_open_parenthesis = false 
+csharp_space_between_method_declaration_parameter_list_parentheses = false +csharp_space_between_parentheses = false +csharp_space_between_square_brackets = false + +# Wrapping preferences +csharp_preserve_single_line_blocks = true +csharp_preserve_single_line_statements = true + +#### Naming styles #### + +# Naming rules + +dotnet_naming_rule.interface_should_be_begins_with_i.severity = suggestion +dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface +dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i + +dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.types_should_be_pascal_case.symbols = types +dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case + +dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members +dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case + +# Symbol specifications + +dotnet_naming_symbols.interface.applicable_kinds = interface +dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.interface.required_modifiers = + +dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum +dotnet_naming_symbols.types.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.types.required_modifiers = + +dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method +dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.non_field_members.required_modifiers = + +# Naming styles + +dotnet_naming_style.pascal_case.required_prefix = +dotnet_naming_style.pascal_case.required_suffix = +dotnet_naming_style.pascal_case.word_separator = +dotnet_naming_style.pascal_case.capitalization = pascal_case + +dotnet_naming_style.begins_with_i.required_prefix = I +dotnet_naming_style.begins_with_i.required_suffix = +dotnet_naming_style.begins_with_i.word_separator = +dotnet_naming_style.begins_with_i.capitalization = pascal_case + +[*.{cs,vb}] +dotnet_style_operator_placement_when_wrapping = beginning_of_line +tab_width = 4 +indent_size = 4 +end_of_line = crlf +dotnet_style_coalesce_expression = true:suggestion +dotnet_style_null_propagation = true:suggestion \ No newline at end of file diff --git a/dotnet/samples/05.chatModeration/.gitignore b/dotnet/samples/05.chatModeration/.gitignore new file mode 100644 index 000000000..d9db69b0e --- /dev/null +++ b/dotnet/samples/05.chatModeration/.gitignore @@ -0,0 +1,25 @@ +# TeamsFx files +build +appPackage/build +env/.env.*.user +env/.env.local +appsettings.Development.json +.deployment + +# User-specific files +*.user + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# VS files +.vs/ diff --git a/dotnet/samples/05.chatModeration/ActionHandlers.cs b/dotnet/samples/05.chatModeration/ActionHandlers.cs new file mode 100644 index 000000000..dd4a13067 --- /dev/null +++ b/dotnet/samples/05.chatModeration/ActionHandlers.cs @@ -0,0 +1,31 @@ +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.AI.Action; +using Microsoft.Teams.AI.AI; +using System.Text.Json; + +namespace ChatModeration +{ + public class ActionHandlers + { + private static 
JsonSerializerOptions _jsonSerializerOptions = new() + { + WriteIndented = true, + }; + + [Action(AIConstants.FlaggedInputActionName)] + public async Task OnFlaggedInput([ActionTurnContext] ITurnContext turnContext, [ActionParameters] Dictionary entities) + { + string entitiesJsonString = JsonSerializer.Serialize(entities, _jsonSerializerOptions); + await turnContext.SendActivityAsync($"I'm sorry your message was flagged:"); + await turnContext.SendActivityAsync($"```{entitiesJsonString}"); + return ""; + } + + [Action(AIConstants.FlaggedOutputActionName)] + public async Task OnFlaggedOutput([ActionTurnContext] ITurnContext turnContext) + { + await turnContext.SendActivityAsync("I'm not allowed to talk about such things."); + return ""; + } + } +} diff --git a/dotnet/samples/05.chatModeration/AdapterWithErrorHandler.cs b/dotnet/samples/05.chatModeration/AdapterWithErrorHandler.cs new file mode 100644 index 000000000..369476aba --- /dev/null +++ b/dotnet/samples/05.chatModeration/AdapterWithErrorHandler.cs @@ -0,0 +1,26 @@ +using Microsoft.Bot.Builder.TraceExtensions; +using Microsoft.Teams.AI; + +namespace ChatModeration +{ + public class AdapterWithErrorHandler : TeamsAdapter + { + public AdapterWithErrorHandler(IConfiguration configuration, ILogger logger) + : base(configuration, null, logger) + { + OnTurnError = async (turnContext, exception) => + { + // Log any leaked exception from the application. + // NOTE: In production environment, you should consider logging this to + // Azure Application Insights. Visit https://aka.ms/bottelemetry to see how + // to add telemetry capture to your bot. + logger.LogError(exception, $"[OnTurnError] unhandled error : {exception.Message}"); + // Send a message to the user + await turnContext.SendActivityAsync($"The bot encountered an unhandled error: {exception.Message}"); + await turnContext.SendActivityAsync("To continue to run this bot, please fix the bot source code."); + // Send a trace activity + await turnContext.TraceActivityAsync("OnTurnError Trace", exception.Message, "https://www.botframework.com/schemas/error", "TurnError"); + }; + } + } +} diff --git a/dotnet/samples/05.chatModeration/ChatModeration.csproj b/dotnet/samples/05.chatModeration/ChatModeration.csproj new file mode 100644 index 000000000..fe4bf3898 --- /dev/null +++ b/dotnet/samples/05.chatModeration/ChatModeration.csproj @@ -0,0 +1,47 @@ + + + + net8.0 + enable + enable + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PreserveNewest + None + + + + + ..\..\packages\Microsoft.TeamsAI\Microsoft.TeamsAI\obj\Debug\netstandard2.0\Microsoft.Teams.AI.dll + + + + + + diff --git a/dotnet/samples/05.chatModeration/ChatModeration.sln b/dotnet/samples/05.chatModeration/ChatModeration.sln new file mode 100644 index 000000000..c807cc153 --- /dev/null +++ b/dotnet/samples/05.chatModeration/ChatModeration.sln @@ -0,0 +1,25 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.6.33815.320 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ChatModeration", "ChatModeration.csproj", "{C2964D35-6742-4DBF-9685-5DD5A01D8D82}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C2964D35-6742-4DBF-9685-5DD5A01D8D82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C2964D35-6742-4DBF-9685-5DD5A01D8D82}.Debug|Any 
CPU.Build.0 = Debug|Any CPU + {C2964D35-6742-4DBF-9685-5DD5A01D8D82}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C2964D35-6742-4DBF-9685-5DD5A01D8D82}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {30CCD595-AEBE-4CC2-B016-33E2EA023EAE} + EndGlobalSection +EndGlobal diff --git a/dotnet/samples/05.chatModeration/Config.cs b/dotnet/samples/05.chatModeration/Config.cs new file mode 100644 index 000000000..4080ba712 --- /dev/null +++ b/dotnet/samples/05.chatModeration/Config.cs @@ -0,0 +1,29 @@ +namespace ChatModeration +{ + public class ConfigOptions + { + public string? BOT_ID { get; set; } + public string? BOT_PASSWORD { get; set; } + public OpenAIConfigOptions? OpenAI { get; set; } + public AzureConfigOptions? Azure { get; set; } + } + + /// + /// Options for Open AI + /// + public class OpenAIConfigOptions + { + public string? ApiKey { get; set; } + } + + /// + /// Options for Azure OpenAI and Azure Content Safety + /// + public class AzureConfigOptions + { + public string? OpenAIApiKey { get; set; } + public string? OpenAIEndpoint { get; set; } + public string? ContentSafetyApiKey { get; set; } + public string? ContentSafetyEndpoint { get; set; } + } +} diff --git a/dotnet/samples/05.chatModeration/Controllers/BotController.cs b/dotnet/samples/05.chatModeration/Controllers/BotController.cs new file mode 100644 index 000000000..f0fbca52d --- /dev/null +++ b/dotnet/samples/05.chatModeration/Controllers/BotController.cs @@ -0,0 +1,32 @@ +using Microsoft.AspNetCore.Mvc; +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI; + +namespace ChatModeration.Controllers +{ + [Route("api/messages")] + [ApiController] + public class BotController : ControllerBase + { + private readonly TeamsAdapter _adapter; + private readonly IBot _bot; + + public BotController(TeamsAdapter adapter, IBot bot) + { + _adapter = adapter; + _bot = bot; + } + + [HttpPost] + public async Task PostAsync(CancellationToken cancellationToken = default) + { + await _adapter.ProcessAsync + ( + Request, + Response, + _bot, + cancellationToken + ); + } + } +} diff --git a/dotnet/samples/05.chatModeration/Program.cs b/dotnet/samples/05.chatModeration/Program.cs new file mode 100644 index 000000000..cc0816c24 --- /dev/null +++ b/dotnet/samples/05.chatModeration/Program.cs @@ -0,0 +1,128 @@ +using Microsoft.Bot.Builder; +using Microsoft.Bot.Builder.Integration.AspNet.Core; +using Microsoft.Bot.Connector.Authentication; +using Microsoft.Teams.AI.AI.Models; +using Microsoft.Teams.AI.AI.Planners; +using Microsoft.Teams.AI.AI.Prompts; +using Microsoft.Teams.AI.State; +using Microsoft.Teams.AI; +using ChatModeration; +using Microsoft.Teams.AI.AI.Moderator; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddControllers(); +builder.Services.AddHttpClient("WebClient", client => client.Timeout = TimeSpan.FromSeconds(600)); +builder.Services.AddHttpContextAccessor(); + +// Prepare Configuration for ConfigurationBotFrameworkAuthentication +var config = builder.Configuration.Get()!; +builder.Configuration["MicrosoftAppType"] = "MultiTenant"; +builder.Configuration["MicrosoftAppId"] = config.BOT_ID; +builder.Configuration["MicrosoftAppPassword"] = config.BOT_PASSWORD; + +// Create the Bot Framework Authentication to be used with the Bot Adapter. +builder.Services.AddSingleton(); + +// Create the Cloud Adapter with error handling enabled. 
+// Note: some classes expect a BotAdapter and some expect a BotFrameworkHttpAdapter, so +// register the same adapter instance for all types. +builder.Services.AddSingleton(); +builder.Services.AddSingleton(sp => sp.GetService()!); +builder.Services.AddSingleton(sp => sp.GetService()!); + +builder.Services.AddSingleton(); + +// Create AI Model +if (!string.IsNullOrEmpty(config.OpenAI?.ApiKey)) +{ + // Create OpenAI Model + builder.Services.AddSingleton(sp => new( + new OpenAIModelOptions(config.OpenAI.ApiKey, "gpt-4o") + { + LogRequests = true + }, + sp.GetService() + )); + + builder.Services.AddSingleton>(sp => new OpenAIModerator(new(apiKey: config.OpenAI.ApiKey, ModerationType.Both))); +} +else if (!string.IsNullOrEmpty(config.Azure?.OpenAIApiKey) && !string.IsNullOrEmpty(config.Azure.OpenAIEndpoint)) +{ + // Create Azure OpenAI Model + builder.Services.AddSingleton(sp => new( + new AzureOpenAIModelOptions( + config.Azure.OpenAIApiKey, + "gpt-4o", + config.Azure.OpenAIEndpoint + ) + { + LogRequests = true + }, + sp.GetService() + )); + + builder.Services.AddSingleton>(sp => + new AzureContentSafetyModerator(new(config.Azure.ContentSafetyApiKey!, config.Azure.ContentSafetyEndpoint!, ModerationType.Both)) + ); +} +else +{ + throw new Exception("Please configure settings for either OpenAI or Azure."); +} + +// Create the bot as transient. In this case the ASP Controller is expecting an IBot. +builder.Services.AddTransient(sp => +{ + // Create loggers + ILoggerFactory loggerFactory = sp.GetService()!; + + // Create Prompt Manager + PromptManager prompts = new(new() + { + PromptFolder = "./Prompts" + }); + + // Create ActionPlanner + ActionPlanner planner = new( + options: new( + model: sp.GetService()!, + prompts: prompts, + defaultPrompt: async (context, state, planner) => + { + PromptTemplate template = prompts.GetPrompt("Chat"); + return await Task.FromResult(template); + } + ) + { LogRepairs = true }, + loggerFactory: loggerFactory + ); + + Application app = new ApplicationBuilder() + .WithAIOptions(new(planner) { Moderator = sp.GetService>() }) + .WithStorage(sp.GetService()!) + .Build(); + + app.AI.ImportActions(new ActionHandlers()); + + app.OnConversationUpdate("membersAdded", async (context, state, token) => + { + await context.SendActivityAsync("Hello and welcome!
With this sample you can see the functionality of the Azure AI Content Safety Moderator " + + "or OpenAI's Moderator based on the setup configurations."); + }); + + return app; +}); + +var app = builder.Build(); + +if (app.Environment.IsDevelopment()) +{ + app.UseDeveloperExceptionPage(); +} + +app.UseStaticFiles(); +app.UseRouting(); +app.MapControllers(); + +app.Run(); diff --git a/dotnet/samples/05.chatModeration/Prompts/Chat/config.json b/dotnet/samples/05.chatModeration/Prompts/Chat/config.json new file mode 100644 index 000000000..fd6e274a4 --- /dev/null +++ b/dotnet/samples/05.chatModeration/Prompts/Chat/config.json @@ -0,0 +1,18 @@ +{ + "schema": 1.1, + "description": "A bot that is configured to use chat moderation", + "type": "completion", + "completion": { + "model": "gpt-4o", + "completion_type": "chat", + "include_history": true, + "include_input": true, + "max_input_tokens": 2000, + "max_tokens": 1000, + "temperature": 0.2, + "top_p": 0.0, + "presence_penalty": 0.6, + "frequency_penalty": 0.0, + "stop_sequences": [] + } +} \ No newline at end of file diff --git a/dotnet/samples/05.chatModeration/Prompts/Chat/skprompt.txt b/dotnet/samples/05.chatModeration/Prompts/Chat/skprompt.txt new file mode 100644 index 000000000..bc17ef457 --- /dev/null +++ b/dotnet/samples/05.chatModeration/Prompts/Chat/skprompt.txt @@ -0,0 +1,3 @@ +You are the AI assistant demonstrating the Azure OpenAI's content safety moderation capabilities. +The following is a conversation with an AI assistant. +You evaluate the moderation severity of human's input in the following categories of moderation: hate, sexual content, self harm, violence. \ No newline at end of file diff --git a/dotnet/samples/05.chatModeration/Properties/launchSettings.json b/dotnet/samples/05.chatModeration/Properties/launchSettings.json new file mode 100644 index 000000000..9efe20cd7 --- /dev/null +++ b/dotnet/samples/05.chatModeration/Properties/launchSettings.json @@ -0,0 +1,27 @@ +{ + "profiles": { + // Debug project within Teams + "Microsoft Teams (browser)": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": true, + "launchUrl": "https://teams.microsoft.com/l/app/576b3387-9ef7-4aff-9da7-acc2ad2f6d0f?installAppPackage=true&webjoin=true&appTenantId=d247b24d-59a3-4042-8253-90aa371a6eb4&login_hint=kavinsingh_microsoft.com", + "applicationUrl": "http://localhost:5130", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + }, + "hotReloadProfile": "aspnetcore" + } + //// Uncomment following profile to debug project only (without launching Teams) + //, + //"Start Project (not in Teams)": { + // "commandName": "Project", + // "dotnetRunMessages": true, + // "applicationUrl": "https://localhost:7130;http://localhost:5130", + // "environmentVariables": { + // "ASPNETCORE_ENVIRONMENT": "Development" + // }, + // "hotReloadProfile": "aspnetcore" + //} + } +} \ No newline at end of file diff --git a/dotnet/samples/05.chatModeration/README.md b/dotnet/samples/05.chatModeration/README.md new file mode 100644 index 000000000..67a9001b5 --- /dev/null +++ b/dotnet/samples/05.chatModeration/README.md @@ -0,0 +1,77 @@ +# Chat Bot with Moderation Control + +## Summary + +This sample shows how to incorporate Content Safety control into a Microsoft Teams application. + +## Set up instructions + +All the samples in the C# .NET SDK can be set up in the same way. You can find the step by step instructions here: [Setup Instructions](../README.md). 
+ +Note that this sample requires an AI service, so you need one more pre-step before Local Debug (F5). + +1. Set your Azure OpenAI related settings to *appsettings.Development.json*. + + ```json + "Azure": { + "OpenAIApiKey": "", + "OpenAIEndpoint": "", + "ContentSafetyApiKey": "", + "ContentSafetyEndpoint": "" + } + ``` + +## Interacting with the bot + +You can interact with this bot by sending it a message. If you send it a message that contains inappropriate content, the bot will respond with a moderation report that describes the inappropriate content: + +![Moderation Report](./assets/moderation.png) + + +## Deploy to Azure + +You can use Teams Toolkit for Visual Studio or CLI to host the bot in Azure. The sample includes Bicep templates in the `/infra` directory, which are used by the tools to create resources in Azure. + +You can find deployment instructions [here](../README.md#deploy-to-azure). + +Note that this sample requires an AI service, so you need one more pre-step before deploying to Azure. To configure the Azure resources to have an environment variable for the Azure OpenAI Key and other settings: + +1. In `./env/.env.dev.user` file, paste your Azure OpenAI related variables. + + ```bash + SECRET_AZURE_OPENAI_API_KEY= + SECRET_AZURE_OPENAI_ENDPOINT= + SECRET_AZURE_CONTENT_SAFETY_API_KEY= + SECRET_AZURE_CONTENT_SAFETY_ENDPOINT= + ``` + +The `SECRET_` prefix is a convention used by Teams Toolkit to mask the value in any logging output and is optional. + +## Use OpenAI + +The steps above use Azure OpenAI as the AI service; optionally, you can use OpenAI instead. + +**As prerequisites** + +1. Get an OpenAI API key. + +**For debugging (F5)** + +1. Set your [OpenAI API Key](https://platform.openai.com/settings/profile?tab=api-keys) to *appsettings.Development.json*. + + ```json + "OpenAI": { + "ApiKey": "" + }, + ``` + +**For deployment to Azure** + +To configure the Azure resources to have OpenAI environment variables: + +1. In `./env/.env.dev.user` file, paste your [OpenAI API Key](https://platform.openai.com/settings/profile?tab=api-keys) to the environment variable `SECRET_OPENAI_API_KEY=`. + +## Further reading + +- [Teams Toolkit overview](https://aka.ms/vs-teams-toolkit-getting-started) +- [How Microsoft Teams bots work](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-basics-teams?view=azure-bot-service-4.0&tabs=csharp) diff --git a/dotnet/samples/05.chatModeration/appPackage/manifest.json b/dotnet/samples/05.chatModeration/appPackage/manifest.json new file mode 100644 index 000000000..742c92871 --- /dev/null +++ b/dotnet/samples/05.chatModeration/appPackage/manifest.json @@ -0,0 +1,48 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/teams/v1.15/MicrosoftTeams.schema.json", + "version": "1.1.0", + "manifestVersion": "1.15", + "id": "${{TEAMS_APP_ID}}", + "packageName": "com.package.name", + "name": { + "short": "Moderation${{APP_NAME_SUFFIX}}", + "full": "Moderation Bot" + }, + "developer": { + "name": "Moderation", + "mpnId": "", + "websiteUrl": "https://microsoft.com", + "privacyUrl": "https://privacy.microsoft.com/privacystatement", + "termsOfUseUrl": "https://www.microsoft.com/legal/terms-of-use" + }, + "description": { + "short": "Sample bot shows how to incorporate Content Safety control.", + "full": "Sample bot shows how to incorporate Content Safety control."
+ }, + "icons": { + "outline": "outline.png", + "color": "color.png" + }, + "accentColor": "#FFFFFF", + "staticTabs": [ + { + "entityId": "conversations", + "scopes": ["personal"] + }, + { + "entityId": "about", + "scopes": ["personal"] + } + ], + "bots": [ + { + "botId": "${{BOT_ID}}", + "scopes": ["personal", "team", "groupChat"], + "isNotificationOnly": false, + "supportsCalling": false, + "supportsVideo": false, + "supportsFiles": false + } + ], + "validDomains": [] +} diff --git a/dotnet/samples/05.chatModeration/appsettings.Development.json b/dotnet/samples/05.chatModeration/appsettings.Development.json new file mode 100644 index 000000000..1928e121f --- /dev/null +++ b/dotnet/samples/05.chatModeration/appsettings.Development.json @@ -0,0 +1,21 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning", + "Microsoft.Teams.AI": "Trace" + } + }, + "AllowedHosts": "*", + "BOT_ID": "${botId}", + "BOT_PASSWORD": "${botPassword}", + "Azure": { + "OpenAIApiKey": "", + "OpenAIEndpoint": "", + "ContentSafetyApiKey": "", + "ContentSafetyEndpoint": "" + }, + "OpenAI": { + "ApiKey": "" + } +} \ No newline at end of file diff --git a/dotnet/samples/05.chatModeration/appsettings.json b/dotnet/samples/05.chatModeration/appsettings.json new file mode 100644 index 000000000..9ac767903 --- /dev/null +++ b/dotnet/samples/05.chatModeration/appsettings.json @@ -0,0 +1,20 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*", + "BOT_ID": "${botId}", + "BOT_PASSWORD": "${botPassword}", + "Azure": { + "OpenAIApiKey": "", + "OpenAIEndpoint": "", + "ContentSafetyApiKey": "", + "ContentSafetyEndpoint": "" + }, + "OpenAI": { + "ApiKey": "" + } +} diff --git a/dotnet/samples/05.chatModeration/assets/moderation.png b/dotnet/samples/05.chatModeration/assets/moderation.png new file mode 100644 index 000000000..92f262e67 --- /dev/null +++ b/dotnet/samples/05.chatModeration/assets/moderation.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8c34c851b963ccdc677e158ea8e96eaecec80a0cfdb0800fcc7f6e929fcb2f5 +size 44202 diff --git a/dotnet/samples/05.chatModeration/env/.env.dev b/dotnet/samples/05.chatModeration/env/.env.dev new file mode 100644 index 000000000..efcbe1f06 --- /dev/null +++ b/dotnet/samples/05.chatModeration/env/.env.dev @@ -0,0 +1,18 @@ +# This file includes environment variables that will be committed to git by default. + +# Built-in environment variables +TEAMSFX_ENV=dev + +# Updating AZURE_SUBSCRIPTION_ID or AZURE_RESOURCE_GROUP_NAME after provision may also require an update to RESOURCE_SUFFIX, because some services require a globally unique name across subscriptions/resource groups. +AZURE_SUBSCRIPTION_ID= +AZURE_RESOURCE_GROUP_NAME= +RESOURCE_SUFFIX= + +# Generated during provision, you can also add your own variables. +BOT_ID= +TEAMS_APP_ID= +BOT_AZURE_APP_SERVICE_RESOURCE_ID= +BOT_DOMAIN= + + +APP_NAME_SUFFIX=dev diff --git a/dotnet/samples/05.chatModeration/env/.env.local b/dotnet/samples/05.chatModeration/env/.env.local new file mode 100644 index 000000000..07b69ee56 --- /dev/null +++ b/dotnet/samples/05.chatModeration/env/.env.local @@ -0,0 +1,12 @@ +# This file includes environment variables that can be committed to git. It's gitignored by default because it represents your local development environment. + +# Built-in environment variables +TEAMSFX_ENV=local + +# Generated during provision, you can also add your own variables. 
+BOT_ID= +TEAMS_APP_ID= +BOT_DOMAIN= + + +APP_NAME_SUFFIX=local \ No newline at end of file diff --git a/dotnet/samples/05.chatModeration/infra/azure.bicep b/dotnet/samples/05.chatModeration/infra/azure.bicep new file mode 100644 index 000000000..e3877021b --- /dev/null +++ b/dotnet/samples/05.chatModeration/infra/azure.bicep @@ -0,0 +1,113 @@ +@maxLength(20) +@minLength(4) +@description('Used to generate names for all resources in this file') +param resourceBaseName string + +@description('Required when create Azure Bot service') +param botAadAppClientId string + +@secure() +@description('Required by Bot Framework package in your bot project') +param botAadAppClientSecret string + +@secure() +param openAIApiKey string + +@secure() +param azureOpenAIApiKey string + +@secure() +param azureOpenAIEndpoint string + +@secure() +param azureContentSafetyApiKey string + +@secure() +param azureContentSafetyEndpoint string + +param webAppSKU string + +@maxLength(42) +param botDisplayName string + +param serverfarmsName string = resourceBaseName +param webAppName string = resourceBaseName +param location string = resourceGroup().location + +// Compute resources for your Web App +resource serverfarm 'Microsoft.Web/serverfarms@2021-02-01' = { + kind: 'app' + location: location + name: serverfarmsName + sku: { + name: webAppSKU + } +} + +// Web App that hosts your bot +resource webApp 'Microsoft.Web/sites@2021-02-01' = { + kind: 'app' + location: location + name: webAppName + properties: { + serverFarmId: serverfarm.id + httpsOnly: true + siteConfig: { + alwaysOn: true + appSettings: [ + { + name: 'WEBSITE_RUN_FROM_PACKAGE' + value: '1' // Run Azure APP Service from a package file + } + { + name: 'RUNNING_ON_AZURE' + value: '1' + } + { + name: 'BOT_ID' + value: botAadAppClientId + } + { + name: 'BOT_PASSWORD' + value: botAadAppClientSecret + } + { + name: 'OpenAI__ApiKey' + value: openAIApiKey + } + { + name: 'Azure__OpenAIApiKey' + value: azureOpenAIApiKey + } + { + name: 'Azure__OpenAIEndpoint' + value: azureOpenAIEndpoint + } + { + name: 'Azure__ContentSafetyApiKey' + value: azureContentSafetyApiKey + } + { + name: 'Azure__ContentSafetyEndpoint' + value: azureContentSafetyEndpoint + } + ] + ftpsState: 'FtpsOnly' + } + } +} + +// Register your web service as a bot with the Bot Framework +module azureBotRegistration './botRegistration/azurebot.bicep' = { + name: 'Azure-Bot-registration' + params: { + resourceBaseName: resourceBaseName + botAadAppClientId: botAadAppClientId + botAppDomain: webApp.properties.defaultHostName + botDisplayName: botDisplayName + } +} + +// The output will be persisted in .env.{envName}. Visit https://aka.ms/teamsfx-actions/arm-deploy for more details. 
+output BOT_AZURE_APP_SERVICE_RESOURCE_ID string = webApp.id
+output BOT_DOMAIN string = webApp.properties.defaultHostName
diff --git a/dotnet/samples/05.chatModeration/infra/azure.parameters.json b/dotnet/samples/05.chatModeration/infra/azure.parameters.json
new file mode 100644
index 000000000..00e948732
--- /dev/null
+++ b/dotnet/samples/05.chatModeration/infra/azure.parameters.json
@@ -0,0 +1,36 @@
+{
+  "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
+  "contentVersion": "1.0.0.0",
+  "parameters": {
+    "resourceBaseName": {
+      "value": "moderationbot${{RESOURCE_SUFFIX}}"
+    },
+    "botAadAppClientId": {
+      "value": "${{BOT_ID}}"
+    },
+    "botAadAppClientSecret": {
+      "value": "${{SECRET_BOT_PASSWORD}}"
+    },
+    "webAppSKU": {
+      "value": "B1"
+    },
+    "botDisplayName": {
+      "value": "ModerationBot"
+    },
+    "openAIApiKey": {
+      "value": "${{SECRET_OPENAI_API_KEY}}"
+    },
+    "azureOpenAIApiKey": {
+      "value": "${{SECRET_AZURE_OPENAI_API_KEY}}"
+    },
+    "azureOpenAIEndpoint": {
+      "value": "${{SECRET_AZURE_OPENAI_ENDPOINT}}"
+    },
+    "azureContentSafetyApiKey": {
+      "value": "${{SECRET_AZURE_CONTENT_SAFETY_API_KEY}}"
+    },
+    "azureContentSafetyEndpoint": {
+      "value": "${{SECRET_AZURE_CONTENT_SAFETY_ENDPOINT}}"
+    }
+  }
+}
\ No newline at end of file
diff --git a/dotnet/samples/05.chatModeration/infra/botRegistration/azurebot.bicep b/dotnet/samples/05.chatModeration/infra/botRegistration/azurebot.bicep
new file mode 100644
index 000000000..ab67c7a56
--- /dev/null
+++ b/dotnet/samples/05.chatModeration/infra/botRegistration/azurebot.bicep
@@ -0,0 +1,37 @@
+@maxLength(20)
+@minLength(4)
+@description('Used to generate names for all resources in this file')
+param resourceBaseName string
+
+@maxLength(42)
+param botDisplayName string
+
+param botServiceName string = resourceBaseName
+param botServiceSku string = 'F0'
+param botAadAppClientId string
+param botAppDomain string
+
+// Register your web service as a bot with the Bot Framework
+resource botService 'Microsoft.BotService/botServices@2021-03-01' = {
+  kind: 'azurebot'
+  location: 'global'
+  name: botServiceName
+  properties: {
+    displayName: botDisplayName
+    endpoint: 'https://${botAppDomain}/api/messages'
+    msaAppId: botAadAppClientId
+  }
+  sku: {
+    name: botServiceSku
+  }
+}
+
+// Connect the bot service to Microsoft Teams
+resource botServiceMsTeamsChannel 'Microsoft.BotService/botServices/channels@2021-03-01' = {
+  parent: botService
+  location: 'global'
+  name: 'MsTeamsChannel'
+  properties: {
+    channelName: 'MsTeamsChannel'
+  }
+}
diff --git a/dotnet/samples/05.chatModeration/infra/botRegistration/readme.md b/dotnet/samples/05.chatModeration/infra/botRegistration/readme.md
new file mode 100644
index 000000000..d5416243c
--- /dev/null
+++ b/dotnet/samples/05.chatModeration/infra/botRegistration/readme.md
@@ -0,0 +1 @@
+The `azurebot.bicep` module is provided to help you create the Azure Bot service when you don't use Azure to host your app. If you use Azure as the infrastructure for your app, `azure.bicep` under the infra folder already leverages this module to create the Azure Bot service for you, so you don't need to deploy `azurebot.bicep` again.
\ No newline at end of file
diff --git a/dotnet/samples/05.chatModeration/teamsapp.local.yml b/dotnet/samples/05.chatModeration/teamsapp.local.yml
new file mode 100644
index 000000000..c1950a386
--- /dev/null
+++ b/dotnet/samples/05.chatModeration/teamsapp.local.yml
@@ -0,0 +1,87 @@
+# yaml-language-server: $schema=https://aka.ms/teams-toolkit/1.1.0/yaml.schema.json
+#
+# The teamsapp.local.yml composes automation tasks for Teams Toolkit when running locally.
+# This file is used when selecting the 'Prepare Teams App Dependencies' menu item in the Teams Toolkit for Visual Studio window.
+#
+# You can customize this file. Visit https://aka.ms/teamsfx-v5.0-guide for more info about Teams Toolkit project files.
+version: 1.1.0
+
+# Defines what the `provision` lifecycle step does with Teams Toolkit.
+provision:
+  # Automates the creation of a Teams app registration and saves the App ID to an environment file.
+  - uses: teamsApp/create
+    with:
+      # Teams app name
+      name: Moderation${{APP_NAME_SUFFIX}}
+    # Write the information of created resources into environment file for
+    # the specified environment variable(s).
+    writeToEnvironmentFile:
+      teamsAppId: TEAMS_APP_ID
+
+  # Automates the creation of an Azure AD app registration, which is required for a bot.
+  # The Bot ID (AAD app client ID) and Bot Password (AAD app client secret) are saved to an environment file.
+  - uses: botAadApp/create
+    with:
+      # The Azure Active Directory application's display name
+      name: Moderation
+    writeToEnvironmentFile:
+      # The Azure Active Directory application's client id created for bot.
+      botId: BOT_ID
+      # The Azure Active Directory application's client secret created for bot.
+      botPassword: SECRET_BOT_PASSWORD
+
+  # Automates the creation and configuration of a Bot Framework registration, which is required for a bot.
+  # This configures the bot to use the Azure AD app registration created in the previous step.
+  - uses: botFramework/create
+    with:
+      botId: ${{BOT_ID}}
+      name: Moderation
+      messagingEndpoint: ${{BOT_ENDPOINT}}/api/messages
+      description: ""
+      channels:
+        - name: msteams
+
+  # Generates the runtime appsettings JSON file.
+  - uses: file/createOrUpdateJsonFile
+    with:
+      target: ./appsettings.Development.json
+      content:
+        BOT_ID: ${{BOT_ID}}
+        BOT_PASSWORD: ${{SECRET_BOT_PASSWORD}}
+
+  # Optional: Automates schema and error checking of the Teams app manifest and outputs the results in the console.
+  - uses: teamsApp/validateManifest
+    with:
+      # Path to manifest template
+      manifestPath: ./appPackage/manifest.json
+
+  # Automates the creation of a Teams app package (.zip).
+  - uses: teamsApp/zipAppPackage
+    with:
+      # Path to manifest template
+      manifestPath: ./appPackage/manifest.json
+      outputZipPath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip
+      outputJsonPath: ./appPackage/build/manifest.${{TEAMSFX_ENV}}.json
+
+  # Automates updating the Teams app manifest in Teams Developer Portal using the App ID from the manifest file.
+  # This action ensures that any manifest changes are reflected when launching the app again in Teams.
+  - uses: teamsApp/update
+    with:
+      # Relative path to the built app package (.zip) file.
+      appPackagePath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip
+
+  # Creates or updates the debug profile in the launchSettings file.
+  - uses: file/createOrUpdateJsonFile
+    with:
+      target: ./Properties/launchSettings.json
+      content:
+        profiles:
+          Microsoft Teams (browser):
+            commandName: "Project"
+            dotnetRunMessages: true
+            launchBrowser: true
+            launchUrl: "https://teams.microsoft.com/l/app/${{TEAMS_APP_ID}}?installAppPackage=true&webjoin=true&appTenantId=${{TEAMS_APP_TENANT_ID}}&login_hint=${{TEAMSFX_M365_USER_NAME}}"
+            applicationUrl: "http://localhost:5130"
+            environmentVariables:
+              ASPNETCORE_ENVIRONMENT: "Development"
+            hotReloadProfile: "aspnetcore"
diff --git a/dotnet/samples/05.chatModeration/teamsapp.yml b/dotnet/samples/05.chatModeration/teamsapp.yml
new file mode 100644
index 000000000..9af915ea1
--- /dev/null
+++ b/dotnet/samples/05.chatModeration/teamsapp.yml
@@ -0,0 +1,97 @@
+# yaml-language-server: $schema=https://aka.ms/teams-toolkit/1.1.0/yaml.schema.json
+#
+# The teamsapp.yml composes automation tasks for Teams Toolkit.
+# This file is used when selecting the 'Provision' or 'Deploy' menu items in the Teams Toolkit for Visual Studio window.
+#
+# You can customize this file. Visit https://aka.ms/teamsfx-v5.0-guide for more info about Teams Toolkit project files.
+version: 1.1.0
+
+environmentFolderPath: ./env
+
+# Defines what the `provision` lifecycle step does with Teams Toolkit.
+provision:
+  # Automates the creation of a Teams app registration and saves the App ID to an environment file.
+  - uses: teamsApp/create
+    with:
+      # Teams app name
+      name: Moderation${{APP_NAME_SUFFIX}}
+    # Write the information of created resources into environment file for
+    # the specified environment variable(s).
+    writeToEnvironmentFile:
+      teamsAppId: TEAMS_APP_ID
+
+  # Automates the creation of an Azure AD app registration, which is required for a bot.
+  # The Bot ID (AAD app client ID) and Bot Password (AAD app client secret) are saved to an environment file.
+  - uses: botAadApp/create
+    with:
+      # The Azure Active Directory application's display name
+      name: Moderation
+    writeToEnvironmentFile:
+      # The Azure Active Directory application's client id created for bot.
+      botId: BOT_ID
+      # The Azure Active Directory application's client secret created for bot.
+      botPassword: SECRET_BOT_PASSWORD
+
+  # Automates the creation of infrastructure defined in ARM templates to host the bot.
+  # The created resource IDs are saved to an environment file.
+  - uses: arm/deploy # Deploys the given ARM templates in parallel.
+    with:
+      # AZURE_SUBSCRIPTION_ID is a built-in environment variable,
+      # if its value is empty, TeamsFx will prompt you to select a subscription.
+      # Referencing other environment variables with empty values
+      # will skip the subscription selection prompt.
+      subscriptionId: ${{AZURE_SUBSCRIPTION_ID}}
+      # AZURE_RESOURCE_GROUP_NAME is a built-in environment variable,
+      # if its value is empty, TeamsFx will prompt you to select or create one
+      # resource group.
+      # Referencing other environment variables with empty values
+      # will skip the resource group selection prompt.
+      resourceGroupName: ${{AZURE_RESOURCE_GROUP_NAME}}
+      templates:
+        - path: ./infra/azure.bicep # Relative path to this file
+          # Relative path to this yaml file.
+          # Placeholders will be replaced with corresponding environment
+          # variable before ARM deployment.
+ parameters: ./infra/azure.parameters.json + # Required when deploying ARM template + deploymentName: Create-resources-for-tab + # Teams Toolkit will download this bicep CLI version from github for you, + # will use bicep CLI in PATH if you remove this config. + bicepCliVersion: v0.9.1 + + # Optional: Automates schema and error checking of the Teams app manifest and outputs the results in the console. + - uses: teamsApp/validateManifest + with: + # Path to manifest template + manifestPath: ./appPackage/manifest.json + + # Automates creating a final app package (.zip) by replacing any variables in the manifest.json file for the current environment. + - uses: teamsApp/zipAppPackage + with: + # Path to manifest template + manifestPath: ./appPackage/manifest.json + outputZipPath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + outputJsonPath: ./appPackage/build/manifest.${{TEAMSFX_ENV}}.json + + # Automates updating the Teams app manifest in Teams Developer Portal using the App ID from the mainfest file. + # This action ensures that any manifest changes are reflected when launching the app again in Teams. + - uses: teamsApp/update + with: + # Relative path to this file. This is the path for built zip file. + appPackagePath: ./appPackage/build/appPackage.${{TEAMSFX_ENV}}.zip + +# Triggered when 'teamsfx deploy' is executed +deploy: + - uses: cli/runDotnetCommand + with: + args: publish --configuration Release --runtime win-x86 --self-contained + + # Deploy to an Azure App Service using the zip file created in the provision step. + - uses: azureAppService/zipDeploy + with: + # deploy base folder + artifactFolder: bin/Release/net6.0/win-x86/publish + # This example uses the env var thats generated by the arm/deploy action. + # You can replace it with an existing Azure Resource ID or other + # custom environment variable. + resourceId: ${{BOT_AZURE_APP_SERVICE_RESOURCE_ID}} diff --git a/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj b/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj index 38a3405b4..3bad03104 100644 --- a/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj +++ b/dotnet/samples/06.assistants.a.mathBot/MathBot.csproj @@ -14,9 +14,9 @@ - + - + diff --git a/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj b/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj index 5c27d115c..b33d917d4 100644 --- a/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj +++ b/dotnet/samples/06.assistants.b.orderBot/OrderBot.csproj @@ -15,9 +15,9 @@ - - - + + + diff --git a/dotnet/samples/06.assistants.b.orderBot/Program.cs b/dotnet/samples/06.assistants.b.orderBot/Program.cs index 7f6c501bc..f13db0669 100644 --- a/dotnet/samples/06.assistants.b.orderBot/Program.cs +++ b/dotnet/samples/06.assistants.b.orderBot/Program.cs @@ -10,7 +10,6 @@ using OpenAI.Assistants; using Azure.Core; using Azure.Identity; -using System.Runtime.CompilerServices; using Microsoft.Teams.AI.Application; using OpenAI.Files; using OpenAI.VectorStores; @@ -86,14 +85,14 @@ // Create Vector Store var storeClient = client.GetVectorStoreClient(); - store = storeClient.CreateVectorStore(new VectorStoreCreationOptions()); + var storeCreationOperation = storeClient.CreateVectorStore(true); // Upload file. 
- var fileClient = client.GetFileClient(); + var fileClient = client.GetOpenAIFileClient(); var uploadedFile = fileClient.UploadFile("./assets/menu.pdf", FileUploadPurpose.Assistants); // Attach file to vector store - var fileAssociation = storeClient.AddFileToVectorStore(store, uploadedFile); + var fileAssociation = storeClient.AddFileToVectorStore(store.Id, uploadedFile.Value.Id, true); // Poll vector store until file is uploaded var maxPollCount = 5; @@ -113,10 +112,11 @@ } catch (Exception e) { - throw new Exception("Failed to upload file to vector store.", e); + throw new Exception("Failed to upload file to vector store.", e.InnerException); } - + var fileSearchTool = new FileSearchToolResources(); + fileSearchTool.VectorStoreIds.Add(store.Id); AssistantCreationOptions assistantCreationOptions = new() { Name = "Order Bot", @@ -129,7 +129,7 @@ }), ToolResources = new ToolResources() { - FileSearch = new FileSearchToolResources() { VectorStoreIds = new List() { store.Id } } + FileSearch = fileSearchTool } }; diff --git a/dotnet/samples/06.auth.oauth.bot/BotAuth.csproj b/dotnet/samples/06.auth.oauth.bot/BotAuth.csproj index 345795d3f..6b080750f 100644 --- a/dotnet/samples/06.auth.oauth.bot/BotAuth.csproj +++ b/dotnet/samples/06.auth.oauth.bot/BotAuth.csproj @@ -18,7 +18,7 @@ - + diff --git a/dotnet/samples/06.auth.oauth.messageExtension/MessageExtensionAuth.csproj b/dotnet/samples/06.auth.oauth.messageExtension/MessageExtensionAuth.csproj index 70aea86e1..e06ba9bf8 100644 --- a/dotnet/samples/06.auth.oauth.messageExtension/MessageExtensionAuth.csproj +++ b/dotnet/samples/06.auth.oauth.messageExtension/MessageExtensionAuth.csproj @@ -16,7 +16,7 @@ - + diff --git a/dotnet/samples/06.auth.teamsSSO.bot/BotAuth.csproj b/dotnet/samples/06.auth.teamsSSO.bot/BotAuth.csproj index ce59cbd46..86e26b01c 100644 --- a/dotnet/samples/06.auth.teamsSSO.bot/BotAuth.csproj +++ b/dotnet/samples/06.auth.teamsSSO.bot/BotAuth.csproj @@ -18,7 +18,7 @@ - + diff --git a/dotnet/samples/06.auth.teamsSSO.messageExtension/MessageExtensionAuth.csproj b/dotnet/samples/06.auth.teamsSSO.messageExtension/MessageExtensionAuth.csproj index c08cdef1e..26cb4a83e 100644 --- a/dotnet/samples/06.auth.teamsSSO.messageExtension/MessageExtensionAuth.csproj +++ b/dotnet/samples/06.auth.teamsSSO.messageExtension/MessageExtensionAuth.csproj @@ -16,7 +16,7 @@ - + diff --git a/dotnet/samples/08.datasource.azureaisearch/AzureAISearchBot/AzureAISearchBot.csproj b/dotnet/samples/08.datasource.azureaisearch/AzureAISearchBot/AzureAISearchBot.csproj index ad339e29f..1ea7d35da 100644 --- a/dotnet/samples/08.datasource.azureaisearch/AzureAISearchBot/AzureAISearchBot.csproj +++ b/dotnet/samples/08.datasource.azureaisearch/AzureAISearchBot/AzureAISearchBot.csproj @@ -15,7 +15,7 @@ - + diff --git a/dotnet/samples/08.datasource.azureopenai/AzureOpenAIBot.csproj b/dotnet/samples/08.datasource.azureopenai/AzureOpenAIBot.csproj index c847f2400..fbc8b66b1 100644 --- a/dotnet/samples/08.datasource.azureopenai/AzureOpenAIBot.csproj +++ b/dotnet/samples/08.datasource.azureopenai/AzureOpenAIBot.csproj @@ -15,7 +15,7 @@ - + diff --git a/dotnet/samples/08.datasource.azureopenai/teamsapp.yml b/dotnet/samples/08.datasource.azureopenai/teamsapp.yml index 09e8c1e3c..7d7b99ae6 100644 --- a/dotnet/samples/08.datasource.azureopenai/teamsapp.yml +++ b/dotnet/samples/08.datasource.azureopenai/teamsapp.yml @@ -95,3 +95,4 @@ deploy: # You can replace it with an existing Azure Resource ID or other # custom environment variable. 
resourceId: ${{BOT_AZURE_APP_SERVICE_RESOURCE_ID}} +projectId: 8c3187b5-ec34-4e3b-afd8-4526ab52d06e diff --git a/getting-started/CONCEPTS/STREAMING.md b/getting-started/CONCEPTS/STREAMING.md index 023016fb2..f740d7a38 100644 --- a/getting-started/CONCEPTS/STREAMING.md +++ b/getting-started/CONCEPTS/STREAMING.md @@ -65,6 +65,7 @@ Once `endStream()` is called, the stream is considered ended and no further upda - The informative message is rendered only at the beginning of each message returned from the LLM. - Attachments can only be sent in the final streamed chunk. - Streaming is not available in conjunction with AI SDK's function calls yet. +- Streaming does not work with OpenAI's `o1` models. ### Setup Instructions: