[C#] feat: Streaming + Sample #2078

Merged (14 commits) on Oct 9, 2024
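Before the diff: the streaming path these changes test is configured through LLMClientOptions, whose StartStreamingMessage and EndStreamHandler members appear in Test_CompletePromptAsync_Streaming_Success below. A minimal consumer sketch, limited to types and members the tests themselves use (myModel and myTemplate are placeholders, not part of this PR):

// Opt an LLMClient into streaming; the end-of-stream handler receives the
// buffered message once the final chunk has arrived.
ResponseReceivedHandler endHandler = new((object sender, ResponseReceivedEventArgs args) =>
{
    Console.WriteLine(args.Streamer.Message); // full accumulated text
});

LLMClientOptions<object> options = new(myModel, myTemplate)
{
    StartStreamingMessage = "Begin streaming", // informative message sent before the first chunk
    EndStreamHandler = endHandler,
};
LLMClient<object> client = new(options, null);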
@@ -174,7 +174,8 @@ public async Task Test_ContinueTaskAsync_PlanNull()
var modelMock = new Mock<IPromptCompletionModel>();
var response = new PromptResponse()
{
Status = PromptResponseStatus.Success,
Message = new(ChatRole.System),
};
modelMock.Setup(model => model.CompletePromptAsync(
It.IsAny<ITurnContext>(),
@@ -230,7 +231,8 @@ public async Task Test_ContinueTaskAsync()
var modelMock = new Mock<IPromptCompletionModel>();
var response = new PromptResponse()
{
Status = PromptResponseStatus.Success,
Message = new(ChatRole.System),
};
modelMock.Setup(model => model.CompletePromptAsync(
It.IsAny<ITurnContext>(),
@@ -279,6 +281,64 @@ public async Task Test_ContinueTaskAsync()
Assert.Equal(planMock.Object, result);
}

[Fact]
public async Task Test_ContinueTaskAsync_Streaming()
{
// Arrange
var modelMock = new Mock<IPromptCompletionModel>();
var response = new PromptResponse()
{
Status = PromptResponseStatus.Success,
};
modelMock.Setup(model => model.CompletePromptAsync(
It.IsAny<ITurnContext>(),
It.IsAny<IMemory>(),
It.IsAny<IPromptFunctions<List<string>>>(),
It.IsAny<ITokenizer>(),
It.IsAny<PromptTemplate>(),
It.IsAny<CancellationToken>())).ReturnsAsync(response);
var promptTemplate = new PromptTemplate(
"prompt",
new(new() { })
);
var augmentationMock = new Mock<IAugmentation>();
var planMock = new Plan();
augmentationMock.Setup(augmentation => augmentation.CreatePlanFromResponseAsync(
It.IsAny<ITurnContext>(),
It.IsAny<IMemory>(),
It.IsAny<PromptResponse>(),
It.IsAny<CancellationToken>())).ReturnsAsync(planMock);
augmentationMock.Setup(augmentation => augmentation.ValidateResponseAsync(
It.IsAny<ITurnContext>(),
It.IsAny<IMemory>(),
It.IsAny<ITokenizer>(),
It.IsAny<PromptResponse>(),
It.IsAny<int>(),
It.IsAny<CancellationToken>())).ReturnsAsync(new Validation { Valid = true });
promptTemplate.Augmentation = augmentationMock.Object;
var prompts = new PromptManager();
prompts.AddPrompt("prompt", promptTemplate);
var options = new ActionPlannerOptions<TurnState>(
modelMock.Object,
prompts,
(context, state, planner) => Task.FromResult(promptTemplate)
);
var turnContext = TurnStateConfig.CreateConfiguredTurnContext();
var state = new TurnState();
await state.LoadStateAsync(null, turnContext);
state.Temp.Input = "test";
var planner = new ActionPlanner<TurnState>(options, new TestLoggerFactory());
var ai = new AI<TurnState>(new(planner));

// Act
var result = await planner.ContinueTaskAsync(turnContext, state, ai);

// Assert
Assert.Equal(planMock.Type, result.Type);
Assert.Equal(planMock.Commands, result.Commands);
}


[Fact]
public async Task Test_BeginTaskAsync_PromptResponseStatusError()
{
@@ -369,7 +429,8 @@ public async Task Test_BeginTaskAsync_PlanNull()
var modelMock = new Mock<IPromptCompletionModel>();
var response = new PromptResponse()
{
Status = PromptResponseStatus.Success,
Message = new(ChatRole.System),
};
modelMock.Setup(model => model.CompletePromptAsync(
It.IsAny<ITurnContext>(),
@@ -425,7 +486,8 @@ public async Task Test_BeginTaskAsync()
var modelMock = new Mock<IPromptCompletionModel>();
var response = new PromptResponse()
{
Status = PromptResponseStatus.Success,
Message = new(ChatRole.System),
};
modelMock.Setup(model => model.CompletePromptAsync(
It.IsAny<ITurnContext>(),
@@ -571,4 +633,4 @@ public void SetValue(string path, object value)
}
}
}
}
@@ -73,6 +73,33 @@ public void Test_Initialization_From_OpenAISdk_ChatMessage()
Assert.Equal("test-content", context.Citations[0].Content);
}

[Fact]
public void Test_Initialization_From_OpenAISdk_StreamingChatCompletionUpdate()
{
// Arrange
var chatCompletion = ModelReaderWriter.Read<StreamingChatCompletionUpdate>(BinaryData.FromString(@$"{{
""choices"": [
{{
""finish_reason"": null,
""delta"": {{
""role"": ""assistant"",
""content"": ""hello""
}}
}}
]
}}"));

// Act
var message = new ChatMessage(chatCompletion!);

// Assert
Assert.Equal("hello", message.Content);
Assert.Equal(ChatRole.Assistant, message.Role);

var context = message.Context;
Assert.Null(context);
}
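
ModelReaderWriter.Read, used above, is System.ClientModel's mechanism for rehydrating an SDK model from raw JSON; the test presumably relies on it because StreamingChatCompletionUpdate offers no public constructor. The minimal form of the call, with only the fields the assertions read:

var update = ModelReaderWriter.Read<StreamingChatCompletionUpdate>(
    BinaryData.FromString(@"{""choices"":[{""delta"":{""role"":""assistant"",""content"":""hello""}}]}"));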

[Fact]
public void Test_InvalidRole_ToOpenAISdkChatMessage()
{
@@ -1,13 +1,18 @@
using System.ClientModel.Primitives;
using Microsoft.Bot.Builder;
using Microsoft.Teams.AI.AI.Clients;
using Microsoft.Teams.AI.AI.Models;
using Microsoft.Teams.AI.AI.Prompts;
using Microsoft.Teams.AI.AI.Tokenizers;
using Microsoft.Teams.AI.AI.Validators;
using Microsoft.Teams.AI.Application;
using Microsoft.Teams.AI.Exceptions;
using Microsoft.Teams.AI.State;
using Microsoft.Teams.AI.Tests.TestUtils;
using Moq;
using OpenAI.Chat;
using static Microsoft.Teams.AI.AI.Models.IPromptCompletionModelEvents;
using ChatMessage = Microsoft.Teams.AI.AI.Models.ChatMessage;

namespace Microsoft.Teams.AI.Tests.AITests
{
@@ -149,6 +154,44 @@ public async Task Test_CompletePromptAsync_PromptResponse_Success()
Assert.Equal(2, ((List<ChatMessage>)memory.Values[options.HistoryVariable]).Count);
}

[Fact]
public async Task Test_CompletePromptAsync_Streaming_Success()
{
// Arrange
List<string> chunks = new();
chunks.Add("h");
chunks.Add("i");
var promptCompletionModel = TestPromptCompletionStreamingModel.StreamTextChunks(chunks);
var promptTemplate = new PromptTemplate(
"prompt",
new(new() { })
);

ResponseReceivedHandler handler = new((object sender, ResponseReceivedEventArgs args) =>
{
Assert.Equal("hi", args.Streamer.Message);
});

LLMClientOptions<object> options = new(promptCompletionModel, promptTemplate)
{
StartStreamingMessage = "Begin streaming",
EndStreamHandler = handler,
};
LLMClient<object> client = new(options, null);
TestMemory memory = new();

// Act
var response = await client.CompletePromptAsync(new Mock<ITurnContext>().Object, memory, new PromptManager());

// Assert
Assert.NotNull(response);
Assert.Equal(PromptResponseStatus.Success, response.Status);
Assert.Null(response.Error);
Assert.NotNull(response.Message);
Assert.Equal(ChatRole.Assistant, response.Message.Role);
Assert.Equal("hi", response.Message.Content);
}

[Fact]
public async Task Test_CompletePromptAsync_PromptResponse_Exception()
{
@@ -483,6 +526,105 @@ public Task<PromptResponse> CompletePromptAsync(ITurnContext turnContext, IMemor
}
}

private sealed class TestPromptCompletionStreamingModel : IPromptCompletionStreamingModel
{
public delegate Task<PromptResponse> Handler(TestPromptCompletionStreamingModel model, ITurnContext turnContext, IMemory memory, IPromptFunctions<List<string>> promptFunctions, ITokenizer tokenizer, PromptTemplate promptTemplate);

public event Handler handler;

public PromptCompletionModelEmitter? Events { get; set; } = new();

public TestPromptCompletionStreamingModel(Handler handler)
{
this.handler = handler;
}

public static TestPromptCompletionStreamingModel StreamTextChunks(IList<string> chunks, int delay = 0)
{
Handler handler = new(async (TestPromptCompletionStreamingModel model, ITurnContext turnContext, IMemory memory, IPromptFunctions<List<string>> promptFunctions, ITokenizer tokenizer, PromptTemplate promptTemplate) =>
{
BeforeCompletionEventArgs args = new(turnContext, memory, promptFunctions, tokenizer, promptTemplate, true);

model.Events = new();

model.Events.OnBeforeCompletion(args);

string content = "";

for (int i = 0; i < chunks.Count; i++)
{
await Task.Delay(TimeSpan.FromSeconds(0));
string text = chunks[i];
content += text;
if (i == 0)
{
var update = ModelReaderWriter.Read<StreamingChatCompletionUpdate>(BinaryData.FromString(@$"{{
""choices"": [
{{
""finish_reason"": null,
""delta"": {{
""role"": ""assistant"",
""content"": ""${content}""
}}
}}
]
}}"));

ChatMessage currDeltaMessage = new(update!);
PromptChunk chunk = new() { delta = currDeltaMessage };

ChunkReceivedEventArgs firstChunkArgs = new(turnContext, memory, chunk);

model.Events.OnChunkReceived(firstChunkArgs);
}
else
{
var update = ModelReaderWriter.Read<StreamingChatCompletionUpdate>(BinaryData.FromString(@$"{{
""choices"": [
{{
""finish_reason"": null,
""delta"": {{
""content"": ""${content}""
}}
}}
]
}}"));

ChatMessage currDeltaMessage = new(update!);
PromptChunk chunk = new() { delta = currDeltaMessage };

ChunkReceivedEventArgs secondChunkArgs = new(turnContext, memory, chunk);

model.Events.OnChunkReceived(secondChunkArgs);
}

}

await Task.Delay(TimeSpan.FromSeconds(delay));
PromptResponse response = new()
{
Status = PromptResponseStatus.Success,
Message = new(ChatRole.Assistant)
{
Content = content,
}
};
StreamingResponse streamer = new(turnContext);
ResponseReceivedEventArgs responseReceivedEventArgs = new(turnContext, memory, response, streamer);

model.Events.OnResponseReceived(responseReceivedEventArgs);
return response;
});

return new TestPromptCompletionStreamingModel(handler);
}

public Task<PromptResponse> CompletePromptAsync(ITurnContext turnContext, IMemory memory, IPromptFunctions<List<string>> promptFunctions, ITokenizer tokenizer, PromptTemplate promptTemplate, CancellationToken cancellationToken)
{
return this.handler(this, turnContext, memory, promptFunctions, tokenizer, promptTemplate);
}
}
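
The fake model above raises the emitter callbacks in the order a real streaming model would: OnBeforeCompletion once, then OnChunkReceived per chunk (only the first delta carries the role), and finally OnResponseReceived with the completed PromptResponse. Tests only touch the factory method:

// Fake model that streams "h" then "i" and completes with "hi".
var model = TestPromptCompletionStreamingModel.StreamTextChunks(new List<string> { "h", "i" });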

private sealed class TestValidator : IPromptResponseValidator
{
