From 270b7c10177ba33239f1640c52a6d613848fa25f Mon Sep 17 00:00:00 2001
From: edgett
Date: Tue, 18 Jun 2024 11:11:39 -0400
Subject: [PATCH] Logging improvements (use ILogger not Console). (#13)

Remove unnecessary project.

Co-authored-by: Edgett Hilimire
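The pattern applied throughout is constructor injection of the generic
ILogger<T>, which the ASP.NET Core and Blazor WebAssembly hosts register
by default, so no extra DI setup is needed. A minimal sketch of the
pattern (the class below is illustrative, not a class in this repo):

    using System;
    using Microsoft.Extensions.Logging;

    public class ExampleService
    {
        private readonly ILogger<ExampleService> _logger;

        // The host resolves ILogger<ExampleService> automatically;
        // the type argument becomes the log category.
        public ExampleService(ILogger<ExampleService> logger)
        {
            _logger = logger;
        }

        public void DoWork(Guid conversationId)
        {
            // Replaces the old pattern:
            // Console.WriteLine($"Inference for {conversationId} was canceled.");
            _logger.LogWarning("Inference for {ConversationId} was canceled.", conversationId);
        }
    }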
---
 PalmHill.BlazorChat.sln                        |  6 -----
 .../Client/Services/ChatService.cs             |  8 ++++--
 .../Client/Services/LocalStorageService.cs     |  2 +-
 .../Server/PalmHill.BlazorChat.Server.csproj   |  1 -
 .../Server/SignalR/WebSocketChat.cs            | 25 +++++++++++++------
 .../Server/WebApi/ApiChatController.cs         | 20 ++++++++++-----
 .../Server/WebApi/AttachmentController.cs      |  7 +++++-
 PalmHill.Llama/ServerlessLlmMemory.cs          |  9 ++++---
 .../PalmHill.Llama.Kernel.csproj               | 20 ---------------
 9 files changed, 51 insertions(+), 47 deletions(-)
 delete mode 100644 PalmHill.LlamaKernel/PalmHill.Llama.Kernel.csproj

diff --git a/PalmHill.BlazorChat.sln b/PalmHill.BlazorChat.sln
index e553831..71c3a2f 100644
--- a/PalmHill.BlazorChat.sln
+++ b/PalmHill.BlazorChat.sln
@@ -18,8 +18,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.Shared"
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.BlazorChat.ApiClient", "PalmHill.BlazorChat.ApiClient\PalmHill.BlazorChat.ApiClient.csproj", "{434FEDF0-2AD0-4276-AC06-E26126EEF237}"
 EndProject
-Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "PalmHill.Llama.Kernel", "PalmHill.LlamaKernel\PalmHill.Llama.Kernel.csproj", "{D92943B2-5585-432B-BFEC-ECC13B5A82D1}"
-EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -46,10 +44,6 @@ Global
 		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
 		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{434FEDF0-2AD0-4276-AC06-E26126EEF237}.Release|Any CPU.Build.0 = Release|Any CPU
-		{D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{D92943B2-5585-432B-BFEC-ECC13B5A82D1}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
diff --git a/PalmHill.BlazorChat/Client/Services/ChatService.cs b/PalmHill.BlazorChat/Client/Services/ChatService.cs
index 8463da6..f8bd750 100644
--- a/PalmHill.BlazorChat/Client/Services/ChatService.cs
+++ b/PalmHill.BlazorChat/Client/Services/ChatService.cs
@@ -4,6 +4,7 @@
 using PalmHill.BlazorChat.Client.Components.Settings;
 using PalmHill.BlazorChat.Client.Models;
 using PalmHill.BlazorChat.Shared.Models;
+using System.Reflection.Emit;
 
 namespace PalmHill.BlazorChat.Client.Services
 {
@@ -25,7 +26,8 @@ public ChatService(
             NavigationManager navigationManager,
             LocalStorageService localStorage,
             IDialogService dialogService,
-            BlazorChatApi blazorChatApi
+            BlazorChatApi blazorChatApi,
+            ILogger<ChatService> logger
             )
         {
 
@@ -33,6 +35,7 @@ BlazorChatApi blazorChatApi
             _dialogService = dialogService;
             _blazorChatApi = blazorChatApi;
             _navigationManager = navigationManager;
+            _logger = logger;
             setupAttachmentService();
             setupWebSocketChatConnection();
         }
@@ -84,6 +87,7 @@ BlazorChatApi blazorChatApi
         private readonly IDialogService _dialogService;
         private readonly BlazorChatApi _blazorChatApi;
         private readonly NavigationManager _navigationManager;
+        private readonly ILogger<ChatService> _logger;
 
 
         /// <summary>
@@ -242,7 +246,7 @@ public async Task CancelTextGeneration()
                 SetReady();
             }
 
-            Console.WriteLine($"CancelTextGeneration failed ({canceled.StatusCode}): {canceled.ReasonPhrase}");
+            _logger.LogWarning($"Text generation for ConversationId {ConversationId} canceled via API: ({canceled.StatusCode}): {canceled.ReasonPhrase}");
         }
 
         /// <summary>
diff --git a/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs b/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs
index ebf2ec1..208b2c3 100644
--- a/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs
+++ b/PalmHill.BlazorChat/Client/Services/LocalStorageService.cs
@@ -63,7 +63,7 @@ private async Task<LocalStorageSettings> _getMigratedSettings()
         {
             var localStorageSettings = await _localStorage.GetItemAsync<LocalStorageSettings>("LocalStorageSettings");
 
-            if (localStorageSettings.SettingsVersion == LocalStorageSettings.CurrentSettingsVersion)
+            if (localStorageSettings?.SettingsVersion == LocalStorageSettings.CurrentSettingsVersion)
             {
                 return localStorageSettings;
             }
diff --git a/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj b/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
index 7f30b22..6781839 100644
--- a/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
+++ b/PalmHill.BlazorChat/Server/PalmHill.BlazorChat.Server.csproj
@@ -12,7 +12,6 @@
 
-    <ProjectReference Include="..\..\PalmHill.LlamaKernel\PalmHill.Llama.Kernel.csproj" />
 
diff --git a/PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs b/PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs
index 3e7b911..17e3611 100644
--- a/PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs
+++ b/PalmHill.BlazorChat/Server/SignalR/WebSocketChat.cs
@@ -2,6 +2,7 @@
 using LLama;
 using Microsoft.AspNetCore.SignalR;
 using Microsoft.SemanticKernel.ChatCompletion;
+using PalmHill.BlazorChat.Server.WebApi;
 using PalmHill.BlazorChat.Shared.Models;
 using PalmHill.BlazorChat.Shared.Models.WebSocket;
 using PalmHill.Llama;
@@ -16,15 +17,16 @@ namespace PalmHill.BlazorChat.Server.SignalR
     /// </summary>
     public class WebSocketChat : Hub
     {
-        public WebSocketChat(LlamaKernel llamaKernel)
+        public WebSocketChat(LlamaKernel llamaKernel, ILogger<WebSocketChat> logger)
         {
             LlamaKernel = llamaKernel;
             ChatCompletion = llamaKernel.Kernel.Services.GetService<IChatCompletionService>();
+            _logger = logger;
         }
 
         public LlamaKernel LlamaKernel { get; }
         public IChatCompletionService? ChatCompletion { get; }
-
+        private ILogger<WebSocketChat> _logger { get; }
 
         /// <summary>
         /// Sends a chat prompt to the client and waits for a response. The method performs inference on the chat conversation and sends the result back to the client.
@@ -59,11 +61,11 @@ public async Task InferenceRequest(InferenceRequest chatConversation)
                 inferenceStatusUpdate.Success = false;
                 await Clients.Caller.SendAsync("InferenceStatusUpdate", inferenceStatusUpdate);
                 // Handle the cancellation operation
-                Console.WriteLine($"Inference for {conversationId} was canceled.");
+                _logger.LogWarning($"Text generation for {conversationId} was canceled via WebSockets.");
             }
             catch (Exception ex)
             {
-                Console.WriteLine(ex);
+                _logger.LogError(ex, $"WebSocket text generation failed for ConversationId: {conversationId}");
             }
             finally
             {
@@ -96,7 +98,16 @@ private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToCli
 
             inferenceStopwatch.Start();
 
-            var asyncResponse = ChatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, inferenceParams, cancellationToken: cancellationToken);
+            var asyncResponse = ChatCompletion?.GetStreamingChatMessageContentsAsync(chatHistory, inferenceParams, cancellationToken: cancellationToken);
+
+            if (asyncResponse == null)
+            {
+                _logger.LogError($"{nameof(IChatCompletionService)} not implemented.");
+                await respondToClient.SendAsync("ReceiveInferenceString", $"Error: {nameof(IChatCompletionService)} not implemented.");
+                return;
+            }
+
+            // Perform inference and send the response to the client
 
             await foreach (var text in asyncResponse)
             {
@@ -131,8 +142,8 @@ private async Task DoInferenceAndRespondToClient(ISingleClientProxy respondToCli
                 await respondToClient.SendAsync("ReceiveInferenceString", chatConversation.Id, textBuffer);
             }
 
-            Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
-            Console.WriteLine(fullResponse);
+            _logger.LogInformation($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
+            _logger.LogInformation(fullResponse);
         }
 
         /// <summary>
diff --git a/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs b/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs
index d1ead0c..e4903de 100644
--- a/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs
+++ b/PalmHill.BlazorChat/Server/WebApi/ApiChatController.cs
@@ -29,19 +29,22 @@ public class ApiChatController : ControllerBase
         /// The model parameters.
         public ApiChatController(
             IHubContext<WebSocketChat> webSocketChat,
-            LlamaKernel llamaKernel
+            LlamaKernel llamaKernel,
+            ILogger<ApiChatController> logger
             )
         {
             WebSocketChat = webSocketChat;
             LlamaKernel = llamaKernel;
             LlmMemory = llamaKernel.Kernel.Services.GetService<ServerlessLlmMemory>();
             ChatCompletion = llamaKernel.Kernel.Services.GetService<IChatCompletionService>();
+            _logger = logger;
         }
 
         private IHubContext<WebSocketChat> WebSocketChat { get; }
         public LlamaKernel LlamaKernel { get; }
         public ServerlessLlmMemory? LlmMemory { get; }
         public IChatCompletionService? ChatCompletion { get; }
+        private ILogger<ApiChatController> _logger { get; }
 
         /// <summary>
         /// Handles a chat API request.
@@ -67,7 +70,7 @@ public async Task<ActionResult<string>> Chat([FromBody] InferenceRequest convers
             catch (OperationCanceledException)
             {
                 errorText = $"Inference for {conversationId} was canceled.";
-                Console.WriteLine(errorText);
+                _logger.LogWarning(errorText);
                 return StatusCode(444, errorText);
             }
             catch (Exception ex)
@@ -80,7 +83,7 @@ public async Task<ActionResult<string>> Chat([FromBody] InferenceRequest convers
                 ChatCancelation.CancelationTokens.TryRemove(conversationId, out _);
             }
 
-            Console.WriteLine(errorText);
+            _logger.LogError(errorText);
             return StatusCode(500, errorText);
         }
 
@@ -169,10 +172,15 @@ private async Task<string> DoInference(InferenceRequest conversation, Cancellati
             var chatExecutionSettings = conversation.GetPromptExecutionSettings();
 
             inferenceStopwatch.Start();
-            var asyncResponse = ChatCompletion.GetStreamingChatMessageContentsAsync(chatSession,
+            var asyncResponse = ChatCompletion?.GetStreamingChatMessageContentsAsync(chatSession,
                 chatExecutionSettings,
                 cancellationToken: cancellationToken);
 
+            if (asyncResponse == null)
+            {
+                _logger.LogError($"{nameof(IChatCompletionService)} not implemented.");
+                throw new InvalidOperationException($"{nameof(IChatCompletionService)} not implemented.");
+            }
 
             await foreach (var text in asyncResponse)
             {
@@ -181,8 +189,8 @@ private async Task<string> DoInference(InferenceRequest conversation, Cancellati
             }
             inferenceStopwatch.Stop();
             var fullResponseString = fullResponse.ToString();
-            Console.WriteLine($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
-            Console.WriteLine(fullResponseString);
+            _logger.LogInformation($"Inference took {inferenceStopwatch.ElapsedMilliseconds}ms and generated {totalTokens} tokens. {(totalTokens / (inferenceStopwatch.ElapsedMilliseconds / (float)1000)).ToString("F2")} tokens/second.");
+            _logger.LogInformation(fullResponseString);
 
             return fullResponseString;
         }
diff --git a/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs b/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs
index 15a28e2..09956e3 100644
--- a/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs
+++ b/PalmHill.BlazorChat/Server/WebApi/AttachmentController.cs
@@ -15,16 +15,20 @@ public class AttachmentController : ControllerBase
     {
         private ServerlessLlmMemory LlmMemory { get; }
         private IHubContext<WebSocketChat> WebSocketChat { get; }
+        private ILogger<AttachmentController> _logger { get; }
+
         public AttachmentController(
             LlamaKernel llamaKernel,
-            IHubContext<WebSocketChat> webSocketChat
+            IHubContext<WebSocketChat> webSocketChat,
+            ILogger<AttachmentController> logger
             )
         {
             LlmMemory = llamaKernel.Kernel.Services
                 .GetService<ServerlessLlmMemory>()
                 ?? throw new InvalidOperationException($"{nameof(ServerlessLlmMemory)} not loaded.");
             WebSocketChat = webSocketChat;
+            _logger = logger;
         }
 
 
@@ -104,6 +108,7 @@ private async Task DoImportAsync(string? userId, AttachmentInfo attachmentInfo)
             }
             catch (Exception ex)
             {
+                _logger.LogError(ex, "Error importing attachment.");
                 attachmentInfo.Status = AttachmentStatus.Failed;
                 await WebSocketChat.Clients.User(userId!).SendCoreAsync("AttachmentStatusUpdate", [attachmentInfo]);
             }
diff --git a/PalmHill.Llama/ServerlessLlmMemory.cs b/PalmHill.Llama/ServerlessLlmMemory.cs
index e7cafaf..125b1ba 100644
--- a/PalmHill.Llama/ServerlessLlmMemory.cs
+++ b/PalmHill.Llama/ServerlessLlmMemory.cs
@@ -1,4 +1,5 @@
-using Microsoft.KernelMemory;
+using Microsoft.Extensions.Logging;
+using Microsoft.KernelMemory;
 using PalmHill.BlazorChat.Shared.Models;
 using System;
 using System.Collections.Concurrent;
@@ -11,13 +12,15 @@ namespace PalmHill.Llama
 {
     public class ServerlessLlmMemory
     {
-        public ServerlessLlmMemory(IKernelMemory kernelMemory)
+        public ServerlessLlmMemory(IKernelMemory kernelMemory, ILogger<ServerlessLlmMemory> logger)
         {
             KernelMemory = kernelMemory;
+            _logger = logger;
         }
 
         public IKernelMemory KernelMemory { get; }
+        private readonly ILogger<ServerlessLlmMemory> _logger;
 
         public ConcurrentDictionary<Guid, AttachmentInfo> AttachmentInfos { get; } = new ConcurrentDictionary<Guid, AttachmentInfo>();
@@ -53,7 +56,7 @@ public async Task<AttachmentInfo> ImportDocumentAsync(
             catch (Exception ex)
             {
                 attachmentInfo.Status = AttachmentStatus.Failed;
-                Console.WriteLine(ex);
+                _logger.LogError(ex, "Error importing attachment.");
             }
             finally
             {
diff --git a/PalmHill.LlamaKernel/PalmHill.Llama.Kernel.csproj b/PalmHill.LlamaKernel/PalmHill.Llama.Kernel.csproj
deleted file mode 100644
index 8c9a4a0..0000000
--- a/PalmHill.LlamaKernel/PalmHill.Llama.Kernel.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-
-  <PropertyGroup>
-    <TargetFramework>net8.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-
-
-
-
-
-
-
-
-
-
-
-
-</Project>
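
Note on the logging calls above: they pass interpolated strings ($"..."),
which format the message before it reaches the log provider. ILogger also
accepts message templates, which keep the values as structured properties
that providers can index. A sketch of the template form of the timing line,
using the same variables as the patch (this variant is not part of the
commit):

    // Same data as the interpolated version, but ElapsedMs, TotalTokens,
    // and TokensPerSecond are captured as structured log properties.
    _logger.LogInformation(
        "Inference took {ElapsedMs}ms and generated {TotalTokens} tokens. {TokensPerSecond:F2} tokens/second.",
        inferenceStopwatch.ElapsedMilliseconds,
        totalTokens,
        totalTokens / (inferenceStopwatch.ElapsedMilliseconds / 1000f));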