-
-
Notifications
You must be signed in to change notification settings - Fork 93
Commit
- Loading branch information
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <!-- Example project: references the LangChain library directly from the repository source tree. -->
  <ItemGroup>
    <ProjectReference Include="..\..\src\libs\LangChain\LangChain.csproj" />
  </ItemGroup>

</Project>
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
using LangChain.Memory;
using LangChain.Providers.OpenAI;
using static LangChain.Chains.Chain;

// Fail fast with a clear message when the API key is not configured.
var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
             throw new InvalidOperationException("OPENAI_API_KEY environment variable is not found.");

var model = new OpenAiModel(apiKey, "gpt-3.5-turbo");

// Simple prompt template for the conversation so the AI knows what piece of text it is looking at.
var template = @"
The following is a friendly conversation between a human and an AI.
{history}
Human: {input}
AI:";

// To have a conversation that remembers previous messages we need to use memory.
// For memory to work properly we need to specify AI and Human prefixes.
// Since our template uses "AI:" and "Human:" we must specify them here. Pay attention to spaces after prefixes.
var conversationBufferMemory = new ConversationBufferMemory(new FileChatMessageHistory("messages.json"))
{
    AiPrefix = "AI: ",
    HumanPrefix = "Human: "
};

// Build the chain. Notice that the input key is not set here; it is supplied inside the loop.
var chain =
    // load history. at first it will be empty, but UpdateMemory will update it every iteration
    LoadMemory(conversationBufferMemory, outputKey: "history")
    | Template(template)
    | LLM(model)
    // update memory with the new request from Human and the response from AI
    | UpdateMemory(conversationBufferMemory, requestKey: "input", responseKey: "text");

// Run the conversation loop until the user types "exit" or input ends.
while (true)
{
    Console.Write("Human: ");
    var input = Console.ReadLine();

    // Console.ReadLine returns null at end-of-stream (e.g. Ctrl+Z / Ctrl+D, or a
    // redirected stdin running dry). The original check only matched "exit", so a
    // null would make this loop spin forever printing "Human: ".
    if (input is null || input == "exit")
        break;

    // Build a new chain that reuses the shared chain but injects the fresh input.
    var chatChain = Set(input, "input")
        | chain;

    // Execute the chain and read the "text" output produced by the LLM step.
    var res = await chatChain.Run("text");

    Console.Write("AI: ");
    Console.WriteLine(res);
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,71 @@ | ||
using LangChain.Providers;
using System.Text.Json;

namespace LangChain.Memory;

/// <summary>
/// Chat message history that persists messages in a local JSON file.
/// The file is read lazily on first access and rewritten after every change.
/// </summary>
public class FileChatMessageHistory : BaseChatMessageHistory
{
    /// <summary>Path of the JSON file that stores the serialized messages.</summary>
    private string MessagesFilePath { get; }

    // Null until the file has been read; never null afterwards.
    private List<Message>? _messages;

    /// <inheritdoc/>
    public override IReadOnlyList<Message> Messages
    {
        get
        {
            // A property getter cannot be async, so load synchronously.
            // The original used LoadMessages().Wait(), which is sync-over-async
            // and risks deadlocks on synchronization-context frameworks;
            // a plain synchronous file read avoids that entirely.
            return EnsureMessagesLoaded();
        }
    }

    /// <summary>
    /// Creates a history backed by the given file.
    /// The file is not touched until messages are first read or written.
    /// </summary>
    /// <param name="messagesFilePath">Path to the local history file.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="messagesFilePath"/> is null.</exception>
    public FileChatMessageHistory(string messagesFilePath)
    {
        MessagesFilePath = messagesFilePath ?? throw new ArgumentNullException(nameof(messagesFilePath));
    }

    /// <inheritdoc/>
    public override async Task AddMessage(Message message)
    {
        // The original dereferenced _messages directly, throwing
        // NullReferenceException if AddMessage ran before Messages was ever read.
        EnsureMessagesLoaded().Add(message);
        await SaveMessages().ConfigureAwait(false);
    }

    /// <inheritdoc/>
    public override async Task Clear()
    {
        // Replace (rather than mutate) the list so Clear is safe even when
        // the history has not been loaded yet.
        _messages = new List<Message>();
        await SaveMessages().ConfigureAwait(false);
    }

    /// <summary>
    /// Loads the message list from disk on first use; returns the cached list afterwards.
    /// </summary>
    private List<Message> EnsureMessagesLoaded()
    {
        if (_messages is null)
        {
            // Deserialize may legally return null for the JSON literal "null";
            // fall back to an empty list in that case too.
            _messages = File.Exists(MessagesFilePath)
                ? JsonSerializer.Deserialize<List<Message>>(File.ReadAllText(MessagesFilePath)) ?? new List<Message>()
                : new List<Message>();
        }

        return _messages;
    }

    /// <summary>
    /// Serializes the current messages and writes them to <see cref="MessagesFilePath"/>.
    /// </summary>
    private Task SaveMessages()
    {
        // File.WriteAllTextAsync is truly asynchronous I/O; the original wrapped
        // the synchronous File.WriteAllText in Task.Run, which just burns a
        // thread-pool thread to fake asynchrony.
        string json = JsonSerializer.Serialize(EnsureMessagesLoaded());
        return File.WriteAllTextAsync(MessagesFilePath, json);
    }
}