From c5f7dc0410b9ccf3798d99129cfa4fe436c7ddef Mon Sep 17 00:00:00 2001 From: Kuojian Lu Date: Sat, 6 Jan 2024 03:46:58 +0800 Subject: [PATCH 1/3] [JS] fix: fix type of image message (#1124) ## Linked issues closes: #1113 ## Details Provide a list of your changes here. If you are fixing a bug, please provide steps to reproduce the bug. #### Change details > Describe your changes, with screenshots and code snippets as appropriate **code snippets**: **screenshots**: ## Attestation Checklist - [x] My code follows the style guidelines of this project - I have checked for/fixed spelling, linting, and other errors - I have commented my code for clarity - I have made corresponding changes to the documentation (we use [TypeDoc](https://typedoc.org/) to document our code) - My changes generate no new warnings - I have added tests that validates my changes, and provides sufficient test coverage. I have tested with: - Local testing - E2E testing in Teams - New and existing unit tests pass locally with my changes ### Additional information > Feel free to add other relevant information below --- js/packages/teams-ai/src/prompts/Message.ts | 4 ++-- js/packages/teams-ai/src/prompts/UserInputMessage.ts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/js/packages/teams-ai/src/prompts/Message.ts b/js/packages/teams-ai/src/prompts/Message.ts index ec9b59514..bcc2ebd19 100644 --- a/js/packages/teams-ai/src/prompts/Message.ts +++ b/js/packages/teams-ai/src/prompts/Message.ts @@ -63,9 +63,9 @@ export interface TextContentPart { export interface ImageContentPart { /** - * Type of the message content. Should always be 'image'. + * Type of the message content. Should always be 'image_url'. */ - type: 'image'; + type: 'image_url'; /** * The URL of the image. 
diff --git a/js/packages/teams-ai/src/prompts/UserInputMessage.ts b/js/packages/teams-ai/src/prompts/UserInputMessage.ts index 9e313396c..a736435c4 100644 --- a/js/packages/teams-ai/src/prompts/UserInputMessage.ts +++ b/js/packages/teams-ai/src/prompts/UserInputMessage.ts @@ -87,7 +87,7 @@ export class UserInputMessage extends PromptSectionBase { // Add image const url = `data:${image.contentType};base64,${image.content.toString('base64')}`; - message.content!.push({ type: 'image', image_url: { url } }); + message.content!.push({ type: 'image_url', image_url: { url } }); length += 85; budget -= 85; } From 8dac0f7c34f4746139310c274438ab7a093b1846 Mon Sep 17 00:00:00 2001 From: Qianhao Dong Date: Sat, 6 Jan 2024 03:47:37 +0800 Subject: [PATCH 2/3] [C#] feat: LLMClient unit tests (#1128) ## Linked issues closes: #1114 ## Details Add unit tests for `LLMClient.cs` ## Attestation Checklist - [X] My code follows the style guidelines of this project - I have checked for/fixed spelling, linting, and other errors - I have commented my code for clarity - I have made corresponding changes to the documentation (we use [TypeDoc](https://typedoc.org/) to document our code) - My changes generate no new warnings - I have added tests that validates my changes, and provides sufficient test coverage. 
I have tested with: - Local testing - E2E testing in Teams - New and existing unit tests pass locally with my changes ### Additional information > Feel free to add other relevant information below --- .../AITests/LLMClientTests.cs | 488 ++++++++++++++++++ 1 file changed, 488 insertions(+) create mode 100644 dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs new file mode 100644 index 000000000..013cfd90f --- /dev/null +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/AITests/LLMClientTests.cs @@ -0,0 +1,488 @@ +using Microsoft.Bot.Builder; +using Microsoft.Teams.AI.AI.Clients; +using Microsoft.Teams.AI.AI.Models; +using Microsoft.Teams.AI.AI.Prompts; +using Microsoft.Teams.AI.AI.Tokenizers; +using Microsoft.Teams.AI.AI.Validators; +using Microsoft.Teams.AI.Exceptions; +using Microsoft.Teams.AI.State; +using Microsoft.Teams.AI.Tests.TestUtils; +using Moq; + +namespace Microsoft.Teams.AI.Tests.AITests +{ + public class LLMClientTests + { + [Fact] + public void Test_Constructor_LogRepairs_Requires_LoggerFactory() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) { LogRepairs = true }; + + // Act + Exception ex = Assert.Throws(() => new LLMClient(options, null)); + + // Assert + Assert.Equal("`loggerFactory` parameter cannot be null if `LogRepairs` option is set to true", ex.Message); + } + + [Fact] + public void Test_AddFunctionResultToHistory_MemoryUpdated() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = 
new(promptCompletionModel, promptTemplate); + LLMClient client = new(options, null); + TestMemory memory = new(); + + // Act + client.AddFunctionResultToHistory(memory, "function", "results"); + + // Assert + var history = (List?)memory.Values.GetValueOrDefault(options.HistoryVariable); + Assert.NotNull(history); + Assert.Single(history); + Assert.Equal(history.First().Role, ChatRole.Function); + Assert.Equal(history.First().Name, "function"); + Assert.Equal(history.First().Content, "results"); + } + + [Fact] + public void Test_AddFunctionResultToHistory_ExceedMaxHistoryMessages() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) { MaxHistoryMessages = 1 }; + LLMClient client = new(options, null); + TestMemory memory = new(); + + // Act + client.AddFunctionResultToHistory(memory, "function-0", "results-0"); + client.AddFunctionResultToHistory(memory, "function-1", "results-1"); + + // Assert + var history = (List?)memory.Values.GetValueOrDefault(options.HistoryVariable); + Assert.NotNull(history); + Assert.Single(history); + Assert.Equal(history.First().Role, ChatRole.Function); + Assert.Equal(history.First().Name, "function-1"); + Assert.Equal(history.First().Content, "results-1"); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_NotSuccess() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) { MaxHistoryMessages = 1 }; + LLMClient client = new(options, null); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Error, + Error = new TeamsAIException("test") + }); + + // Act + var response = await client.CompletePromptAsync(new 
Mock().Object, memory, new PromptManager()); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Error, response.Status); + Assert.NotNull(response.Error); + Assert.Equal("test", response.Error.Message); + Assert.Equal(0, memory.Values.Count); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_Success() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = new(promptCompletionModel, promptTemplate); + LLMClient client = new(options, null); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome" + } + }); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Success, response.Status); + Assert.Null(response.Error); + Assert.NotNull(response.Message); + Assert.Equal(ChatRole.Assistant, response.Message.Role); + Assert.Equal("welcome", response.Message.Content); + Assert.Equal(2, memory.Values.Count); + Assert.Equal("hello", memory.Values[options.InputVariable]); + Assert.Equal(2, ((List)memory.Values[options.HistoryVariable]).Count); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_Exception() + { + // Arrange + var promptCompletionModelMock = new Mock(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = new(promptCompletionModelMock.Object, promptTemplate); + LLMClient client = new(options, null); + TestMemory memory = new(); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Error, response.Status); 
+ Assert.NotNull(response.Error); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_Repair() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + var validator = new TestValidator(); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) + { + LogRepairs = true, + Validator = validator + }; + LLMClient client = new(options, new TestLoggerFactory()); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome" + } + }); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome-repair" + } + }); + validator.Results.Enqueue(new() + { + Valid = false + }); + validator.Results.Enqueue(new() + { + Valid = true + }); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Success, response.Status); + Assert.Null(response.Error); + Assert.NotNull(response.Message); + Assert.Equal(ChatRole.Assistant, response.Message.Role); + Assert.Equal("welcome-repair", response.Message.Content); + Assert.Equal(2, memory.Values.Count); + Assert.Equal("hello", memory.Values[options.InputVariable]); + Assert.Equal(2, ((List)memory.Values[options.HistoryVariable]).Count); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_RepairNotSuccess() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + var validator = new TestValidator(); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) + { + LogRepairs = true, + Validator = validator + }; + LLMClient 
client = new(options, new TestLoggerFactory()); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome" + } + }); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome-repair" + } + }); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Error, + Error = new("test") + }); + validator.Results.Enqueue(new() + { + Valid = false + }); + validator.Results.Enqueue(new() + { + Valid = false + }); + validator.Results.Enqueue(new() + { + Valid = true + }); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Error, response.Status); + Assert.NotNull(response.Error); + Assert.Equal("test", response.Error.Message); + Assert.Equal(1, memory.Values.Count); + Assert.Equal("hello", memory.Values[options.InputVariable]); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_Repair_ExceedMaxRepairAttempts() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + var validator = new TestValidator(); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) + { + LogRepairs = true, + Validator = validator, + MaxRepairAttempts = 1 + }; + LLMClient client = new(options, new TestLoggerFactory()); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome" + } + }); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome-repair" + } + }); + 
promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome-repair-again" + } + }); + validator.Results.Enqueue(new() + { + Valid = false + }); + validator.Results.Enqueue(new() + { + Valid = false + }); + validator.Results.Enqueue(new() + { + Valid = true + }); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.InvalidResponse, response.Status); + Assert.NotNull(response.Error); + Assert.Equal("The response was invalid. Try another strategy.", response.Error.Message); + Assert.Equal(1, memory.Values.Count); + Assert.Equal("hello", memory.Values[options.InputVariable]); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_DisableHistory() + { + // Arrange + var promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) + { + HistoryVariable = string.Empty, + InputVariable = string.Empty + }; + LLMClient client = new(options, null); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome" + } + }); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager()); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Success, response.Status); + Assert.Null(response.Error); + Assert.NotNull(response.Message); + Assert.Equal(ChatRole.Assistant, response.Message.Role); + Assert.Equal("welcome", response.Message.Content); + Assert.Equal(0, memory.Values.Count); + } + + [Fact] + public async Task Test_CompletePromptAsync_PromptResponse_DisableRepair() + { + // Arrange + var 
promptCompletionModel = new TestPromptCompletionModel(); + var promptTemplate = new PromptTemplate( + "prompt", + new(new() { }) + ); + var validator = new TestValidator(); + LLMClientOptions options = new(promptCompletionModel, promptTemplate) + { + LogRepairs = true, + MaxRepairAttempts = 0, + Validator = validator + }; + LLMClient client = new(options, new TestLoggerFactory()); + TestMemory memory = new(); + promptCompletionModel.Results.Enqueue(new() + { + Status = PromptResponseStatus.Success, + Message = new(ChatRole.Assistant) + { + Content = "welcome" + } + }); + validator.Results.Enqueue(new() + { + Valid = false + }); + + // Act + var response = await client.CompletePromptAsync(new Mock().Object, memory, new PromptManager(), "hello"); + + // Assert + Assert.NotNull(response); + Assert.Equal(PromptResponseStatus.Success, response.Status); + Assert.Null(response.Error); + Assert.NotNull(response.Message); + Assert.Equal(ChatRole.Assistant, response.Message.Role); + Assert.Equal("welcome", response.Message.Content); + Assert.Equal(1, memory.Values.Count); + Assert.Equal("hello", memory.Values[options.InputVariable]); + } + + private sealed class TestMemory : IMemory + { + public Dictionary Values { get; set; } = new Dictionary(); + + public void DeleteValue(string path) + { + Values.Remove(path); + } + + public object? 
GetValue(string path) + { + return Values.GetValueOrDefault(path); + } + + public bool HasValue(string path) + { + return Values.ContainsKey(path); + } + + public void SetValue(string path, object value) + { + Values[path] = value; + } + } + + private sealed class TestPromptCompletionModel : IPromptCompletionModel + { + public Queue Results { get; set; } = new Queue(); + + public Task CompletePromptAsync(ITurnContext turnContext, IMemory memory, IPromptFunctions> promptFunctions, ITokenizer tokenizer, PromptTemplate promptTemplate, CancellationToken cancellationToken) + { + return Task.FromResult(Results.Dequeue()); + } + } + + private sealed class TestValidator : IPromptResponseValidator + { + + public Queue Results { get; set; } = new Queue(); + + public Task ValidateResponseAsync(ITurnContext context, IMemory memory, ITokenizer tokenizer, PromptResponse response, int remainingAttempts, CancellationToken cancellationToken = default) + { + return Task.FromResult(Results.Dequeue()); + } + } + } +} From c30ce61745c68fc2d2eeefd3268056d7b9b44ea5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 11:48:09 -0800 Subject: [PATCH 3/3] [C#] bump: (deps): Bump the production group in /dotnet/packages/Microsoft.TeamsAI with 11 updates (#1107) #minor Bumps the production group in /dotnet/packages/Microsoft.TeamsAI with 11 updates: | Package | From | To | | --- | --- | --- | | [Azure.AI.ContentSafety](https://github.com/Azure/azure-sdk-for-net) | `1.0.0-beta.1` | `1.0.0` | | [Azure.AI.OpenAI](https://github.com/Azure/azure-sdk-for-net) | `1.0.0-beta.9` | `1.0.0-beta.12` | | [JsonSchema.Net](https://github.com/gregsdennis/json-everything) | `5.3.1` | `5.4.3` | | [Microsoft.Bot.Builder](https://github.com/Microsoft/botbuilder-dotnet) | `4.21.1` | `4.21.2` | | [Microsoft.Bot.Builder.Dialogs](https://github.com/Microsoft/botbuilder-dotnet) | `4.21.1` | `4.21.2` | | 
[Microsoft.Bot.Builder.Integration.AspNet.Core](https://github.com/Microsoft/botbuilder-dotnet) | `4.21.1` | `4.21.2` | | [Microsoft.Bot.Builder](https://github.com/Microsoft/botbuilder-dotnet) | `4.21.1` | `4.21.2` | | [Microsoft.Identity.Client](https://github.com/AzureAD/microsoft-authentication-library-for-dotnet) | `4.57.0` | `4.58.1` | | [SharpToken](https://github.com/dmitry-brazhenko/SharpToken) | `1.2.12` | `1.2.14` | | [System.Text.Json](https://github.com/dotnet/runtime) | `7.0.3` | `7.0.4` | | [xunit](https://github.com/xunit/xunit) | `2.6.3` | `2.6.4` | | [xunit.runner.visualstudio](https://github.com/xunit/visualstudio.xunit) | `2.5.5` | `2.5.6` | Updates `Azure.AI.ContentSafety` from 1.0.0-beta.1 to 1.0.0
Release notes

Sourced from Azure.AI.ContentSafety's releases.

Azure.AI.ContentSafety_1.0.0

1.0.0 (2023-12-15)

Features Added

  • Support Microsoft Entra ID Authentication
  • Support 8 severity levels for AnalyzeText

Breaking Changes

Contract change for AnalyzeText, AnalyzeImage, Blocklist management related methods

AnalyzeText

  • AnalyzeTextOptions
    • Renamed BreakByBlocklists to HaltOnBlocklistHit
    • Added AnalyzeTextOutputType
  • AnalyzeTextResult
    • Renamed BlocklistsMatchResults to BlocklistsMatch
    • Replaced TextAnalyzeSeverityResult by TextCategoriesAnalysis

AnalyzeImage

  • AnalyzeImageOptions
    • Replaced ImageData by ContentSafetyImageData
    • Added AnalyzeImageOutputType
  • AnalyzeImageResult
    • Replaced ImageAnalyzeSeverityResult by ImageCategoriesAnalysis

Blocklist management

  • Added BlocklistAsyncClient
  • Renamed AddBlockItemsOptions to AddOrUpdateTextBlocklistItemsOptions
  • Renamed AddBlockItemsResult to AddOrUpdateTextBlocklistItemsResult
  • Renamed RemoveBlockItemsOptions to RemoveTextBlocklistItemsOptions
  • Renamed TextBlockItemInfo to TextBlocklistItem
Commits
  • 62633d4 fix discriminator in arc-scvmm (#40683)
  • 80eec4e Add ability to use non Json interface for json format (#40646)
  • 5dd1605 Update Generator Version 3.0.0-beta.20231211.1 (#40702)
  • 13e4829 [Core] Remove netcoreapp2.1 and net5.0 targets (#40635)
  • c47ab4d Add an opt-in AOT regression check to the CI pipeline (#40629)
  • d11bb58 Support "functions" SDKType (#40695)
  • 5aa3659 Update ConfidentialLedgerClient to pass VerifyConnection flag (#40602)
  • 5235895 [AzureMonitorLiveMetrics] poc refactor metrics (#40669)
  • d810209 [Azure Content Safety] Update readme and changelog. (#40607)
  • 609f3e4 Increment package version after release of Azure.ResourceManager.Maintenance ...
  • Additional commits viewable in compare view

Updates `Azure.AI.OpenAI` from 1.0.0-beta.9 to 1.0.0-beta.12
Release notes

Sourced from Azure.AI.OpenAI's releases.

Azure.AI.OpenAI_1.0.0-beta.12

1.0.0-beta.12 (2023-12-15)

Like beta.11, beta.12 is another release that brings further refinements and fixes. It remains based on the 2023-12-01-preview service API version for Azure OpenAI and does not add any new service capabilities.

Features Added

Updates for using streaming tool calls:

  • A new .NET-specific StreamingToolCallUpdate type has been added to better represent streaming tool call updates when using chat tools.
    • This new type includes an explicit ToolCallIndex property, reflecting index in the REST schema, to allow resilient deserialization of parallel function tool calling.
  • A convenience constructor has been added for ChatRequestAssistantMessage that can automatically populate from a prior ChatResponseMessage when using non-streaming chat completions.
  • A public constructor has been added for ChatCompletionsFunctionToolCall to allow more intuitive reconstruction of ChatCompletionsToolCall instances for use in ChatRequestAssistantMessage instances made from streaming responses.

Other additions:

  • To facilitate reuse of user message contents, ChatRequestUserMessage now provides a public Content property (string) as well as a public MultimodalContentItems property (IList<ChatMessageContentItem>).
    • Content is the conventional plain-text content and will be populated as non-null when a ChatRequestUserMessage() constructor accepting a string is used to instantiate the message.
    • MultimodalContentItems is the new compound content type, currently only usable with gpt-4-vision-preview, that allows hybrid use of text and image references. It will be populated when an appropriate ChatRequestUserMessage() constructor accepting a collection of ChatMessageContentItem instances is used.
    • Role is also restored to common visibility to ChatRequestUserMessage.

Breaking Changes

  • The type of ToolCallUpdate on StreamingChatCompletionsUpdate has been changed from the non-streaming ChatCompletionsToolCall to the new StreamingToolCallUpdate type. The conversion is straightforward:
    • ToolCallUpdate.Id remains unchanged.
    • Instead of casting ToolCallUpdate to ChatCompletionsFunctionToolCall, cast it to StreamingToolCallUpdate.
    • Update cast instance use of functionToolCallUpdate.Arguments to accumulate functionToolCallUpdate.ArgumentsUpdate.
  • Removed the parameterized constructor of the ChatCompletionsOptions class that only received the messages as a parameter in favor of the parameterized constructor that receives the deployment name as well. This makes it consistent with the implementation of other Options classes.
  • Removed the setter of the Input property of the EmbeddingsOptions class as per the guidelines for collection properties.

Bugs fixed

Azure.AI.OpenAI_1.0.0-beta.11

1.0.0-beta.11 (2023-12-07)

This is a fast-following bug fix update to address some of the biggest issues reported by the community. Thank you sharing your experiences!

Breaking Changes

  • The type of ChatCompletionsOptions.ToolChoice has been updated from BinaryData to a new ChatCompletionsToolChoice type. Please use ChatCompletionsToolChoice.None, ChatCompletionsToolChoice.Auto, or provide a reference to a function or function tool definition to migrate.

... (truncated)

Commits
  • ab0ae0d Azure OpenAI: changelog.md update for beta.12 release (#40815)
  • e973301 [Azure OpenAI] Revamp customizations (#40697)
  • 81f5d2b [Azure OpenAI] Revamp customizations in the audio API (#40674)
  • 899b608 Add AOT compatibility checks documentation (#40762)
  • a396f21 Increment package version after release of Azure.ResourceManager.Redis (#40803)
  • 3cc17fc Increment package version after release of Azure.ResourceManager.EventGrid (#...
  • 33a6417 Generate Eventgrid SDK for API version 2023-12-15-preview (#40714)
  • da78e04 Azure Redis .Net SDK update for new api v2023-08-01 (#39230)
  • 0519b41 Update AutoRest C# version to 3.0.0-beta.20231214.1 (#40794)
  • f6a5a36 Increment package version after release of Microsoft.Azure.Core.NewtonsoftJso...
  • Additional commits viewable in compare view

Updates `JsonSchema.Net` from 5.3.1 to 5.4.3
Commits

Updates `Microsoft.Bot.Builder` from 4.21.1 to 4.21.2
Commits

Updates `Microsoft.Bot.Builder.Dialogs` from 4.21.1 to 4.21.2
Commits

Updates `Microsoft.Bot.Builder.Integration.AspNet.Core` from 4.21.1 to 4.21.2
Commits

Updates `Microsoft.Bot.Builder` from 4.21.1 to 4.21.2
Commits

Updates `Microsoft.Identity.Client` from 4.57.0 to 4.58.1
Release notes

Sourced from Microsoft.Identity.Client's releases.

4.58.1

New Features

  • Added WithForceRefresh support for silent flows using the Windows broker. See 4457.

Bug Fixes

  • Fixed a bug when a x-ms-pkeyauth HTTP header was incorrectly sent on Mac and Linux platforms. See 4445.
  • Fixed an issue with client capabilities and claims JSON not being merged correctly. See 4447.
  • MSAL can now be used in .NET 8 applications which use native AOT configuration binder source generator. See 4453.
  • Fixed an issue with sending an incorrect operating system descriptor in silent flows on Mac. See 4444.

4.58.0

New Features

  • Removed support for deprecated .NET 4.6.1 framework and added .NET 4.6.2 support. MSAL.NET packages will no longer include net461 binary. Existing .NET 4.6.1 apps will now reference .NET Standard 2.0 MSAL binary. See 4315.
  • MSAL.NET repository now supports Central Package Management. See 3434.
  • Added instrumentation to collect metrics with Open Telemetry. Aggregated metrics consist of successful and failed token acquisition calls, total request duration, duration in cache, and duration in a network call. See 4229.

Bug Fixes

  • Resolved the issue with dual-headed accounts that share the same UPN for both, Microsoft (MSA) and Microsoft Entra ID (Azure AD) accounts. See 4425.
  • MSAL now correctly falls back to use local cache if broker fails to return a result for AcquireTokenSilent calls. See 4395.
  • Fixed a bug when the cache level in the telemetry was not correctly set to L1 Cache when in-memory cache was used. See 4414.
  • Deprecated WithAuthority on the request builders. Set the authority on the application builders. Use WithTenantId or WithTenantIdFromAuthority on the request builder to update the tenant ID. See 4406.
  • Fixed an issue with the Windows broker dependencies when the app was targeting NativeAOT on Windows. See 4424.
  • Updated Microsoft.Identity.Client.NativeInterop reference to version 0.13.14, which includes bug fixes and stability improvements. See 4439.
Changelog

Sourced from Microsoft.Identity.Client's changelog.

4.58.1

New Features

  • Added WithForceRefresh support for silent flows using the Windows broker. See 4457.

Bug Fixes

  • Fixed a bug when a x-ms-pkeyauth HTTP header was incorrectly sent on Mac and Linux platforms. See 4445.
  • Fixed an issue with client capabilities and claims JSON not being merged correctly. See 4447.
  • MSAL can now be used in .NET 8 applications which use native AOT configuration binder source generator. See 4453.
  • Fixed an issue with sending an incorrect operating system descriptor in silent flows on Mac. See 4444.

4.58.0

New Features

  • Removed support for deprecated .NET 4.6.1 framework and added .NET 4.6.2 support. MSAL.NET packages will no longer include net461 binary. Existing .NET 4.6.1 apps will now reference .NET Standard 2.0 MSAL binary. See 4315.
  • MSAL.NET repository now supports Central Package Management. See 3434.
  • Added instrumentation to collect metrics with Open Telemetry. Aggregated metrics consist of successful and failed token acquisition calls, total request duration, duration in cache, and duration in a network call. See 4229

Bug Fixes

  • Resolved the issue with dual-headed accounts that share the same UPN for both, Microsoft (MSA) and Microsoft Entra ID (Azure AD) accounts. See 4425.
  • MSAL now correctly falls back to use local cache if broker fails to return a result for AcquireTokenSilent calls. See 4395.
  • Fixed a bug when the cache level in the telemetry was not correctly set to L1 Cache when in-memory cache was used. See 4414.
  • Deprecated WithAuthority on the request builders. Set the authority on the application builders. Use WithTenantId or WithTenantIdFromAuthority on the request builder to update the tenant ID. See 4406.
  • Fixed an issue with the Windows broker dependencies when the app was targeting NativeAOT on Windows. See 4424.
  • Updated Microsoft.Identity.Client.NativeInterop reference to version 0.13.14, which includes bug fixes and stability improvements. See 4439.
Commits

Updates `SharpToken` from 1.2.12 to 1.2.14
Release notes

Sourced from SharpToken's releases.

Release 1.2.14

Release of version 1.2.14

Release 1.2.13

Release of version 1.2.13

Commits

Updates `System.Text.Json` from 7.0.3 to 7.0.4
Release notes

Sourced from System.Text.Json's releases.

.NET 7.0.4

Release

Commits

Updates `xunit` from 2.6.3 to 2.6.4
Commits

Updates `xunit.runner.visualstudio` from 2.5.5 to 2.5.6
Commits

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself) - `@dependabot ignore minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself) - `@dependabot ignore ` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself) - `@dependabot unignore ` will remove all of the ignore conditions of the specified dependency - `@dependabot unignore ` will remove the ignore condition of the specified dependency and ignore conditions
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj index 69995ba15..c0cd76716 100644 --- a/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj +++ b/dotnet/packages/Microsoft.TeamsAI/Microsoft.TeamsAI.Tests/Microsoft.Teams.AI.Tests.csproj @@ -16,7 +16,7 @@ - + runtime; build; native; contentfiles; analyzers; buildtransitive all