feat: Refactor providers (#131)
* refactor: Moved provider abstractions to separate package.

* feat: Converted OpenAI provider.

* feat: Converted Ollama.

* feat: Implemented LLamaSharp.

* feat: Implemented LeonardoAi.

* feat: Implemented HuggingFace.

* feat: Implemented Google.

* feat: Implemented Settings.Calculate for OpenAI.

* feat: Implemented Automatic1111.

* feat: Implemented Anyscale.

* feat: Implemented Azure.

* feat: Implemented Anthropic.

* fix: Fixed WithDebugExtensions.

* fix: Small fixes.

* test: Fixed some tests.

* feat: Adapted other code to new changes.

* feat: Implemented Bedrock.

* fix: Small fixes.

* fix: Fixed conflicts.

* feat: Added new Amazon Bedrock implementations.

* feat: Adapted Amazon.SageMaker.

* fix: Models constructors now require provider.

* fix: Removed unused FileMemory example.
HavenDV authored Feb 19, 2024
1 parent 40a6831 commit 6e0a556
Showing 281 changed files with 4,914 additions and 4,658 deletions.
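Taken together, the commit message above describes one central API change: model classes are no longer constructed directly from an API key plus model name, but from a provider object (or via a predefined convenience model). The following is a minimal sketch of the new pattern as it appears in the updated samples in this commit; OpenAiChatModel, ChatModels.Gpt35Turbo, Gpt35TurboModel, and LastMessageContent are taken from the diffs below, while the apiKey-based OpenAiProvider constructor and the placeholder key are assumptions for illustration.

using LangChain.Providers.OpenAI;
using LangChain.Providers.OpenAI.Predefined;
using OpenAI.Constants;

// Explicit provider + chat model, mirroring the updated AspNet sample.
// The apiKey-based provider constructor is assumed here for illustration.
var provider = new OpenAiProvider(apiKey: "OPENAI_API_KEY");
var llm = new OpenAiChatModel(provider, id: ChatModels.Gpt35Turbo);

var response = await llm.GenerateAsync(
    "What is a good name for a company that sells colourful socks?");
Console.WriteLine(response.LastMessageContent);

// Predefined convenience models remain available, as in the updated
// LangChain.Samples.OpenAI sample, and wire up the provider internally.
var model = new Gpt35TurboModel("OPENAI_API_KEY");
var result = await model.GenerateAsync(
    "What is a good name for a company that sells colourful socks?");
Console.WriteLine(result);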
36 changes: 19 additions & 17 deletions LangChain.sln
@@ -172,13 +172,13 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Automat
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Automatic1111.IntegrationTests", "src\tests\LangChain.Providers.Automatic1111.IntegrationTests\LangChain.Providers.Automatic1111.IntegrationTests.csproj", "{A6CF79BC-8365-46E8-9230-1A4AD615D40B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Bedrock", "src\libs\Providers\LangChain.Providers.Bedrock\LangChain.Providers.Bedrock.csproj", "{67985CCB-F606-41F8-9D36-513459F58882}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LangChain.Providers.Abstractions", "src\libs\Providers\LangChain.Providers.Abstractions\LangChain.Providers.Abstractions.csproj", "{628DDC9D-28A1-4C2F-BA13-171BD8CF711C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Amazon.Bedrock", "src\libs\Providers\LangChain.Providers.Amazon.Bedrock\LangChain.Providers.Amazon.Bedrock.csproj", "{67985CCB-F606-41F8-9D36-513459F58882}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Amazon.Bedrock.IntegrationTests", "src\tests\LangChain.Providers.Amazon.Bedrock.IntegrationTests\LangChain.Providers.Amazon.Bedrock.IntegrationTests.csproj", "{73C76E80-95C5-4C96-A319-4F32043C903E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Amazon.SageMaker", "src\libs\Providers\LangChain.Providers.Amazon.Sagemaker\LangChain.Providers.Amazon.SageMaker.csproj", "{F1AD6925-219C-4B17-B8D8-0ACCA6F401C4}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LangChain.Providers.Amazon.Bedrock", "src\libs\Providers\LangChain.Providers.Amazon.Bedrock\LangChain.Providers.Amazon.Bedrock.csproj", "{67985CCB-F606-41F8-9D36-513459F58882}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LangChain.Providers.Bedrock.IntegrationTests", "src\tests\LangChain.Providers.Bedrock.IntegrationTests\LangChain.Providers.Bedrock.IntegrationTests.csproj", "{4F37D6C5-38C9-485B-AD87-2691EED225E1}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LangChain.Providers.Amazon.SageMaker", "src\libs\Providers\LangChain.Providers.Amazon.Sagemaker\LangChain.Providers.Amazon.SageMaker.csproj", "{534CEFB8-AA11-43A2-9A38-4F6D3A6633AB}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -410,22 +410,22 @@ Global
{A6CF79BC-8365-46E8-9230-1A4AD615D40B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A6CF79BC-8365-46E8-9230-1A4AD615D40B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A6CF79BC-8365-46E8-9230-1A4AD615D40B}.Release|Any CPU.Build.0 = Release|Any CPU
{BA701280-0BEB-4DA4-92B3-9C777082C2AF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{BA701280-0BEB-4DA4-92B3-9C777082C2AF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{BA701280-0BEB-4DA4-92B3-9C777082C2AF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{BA701280-0BEB-4DA4-92B3-9C777082C2AF}.Release|Any CPU.Build.0 = Release|Any CPU
{628DDC9D-28A1-4C2F-BA13-171BD8CF711C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{628DDC9D-28A1-4C2F-BA13-171BD8CF711C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{628DDC9D-28A1-4C2F-BA13-171BD8CF711C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{628DDC9D-28A1-4C2F-BA13-171BD8CF711C}.Release|Any CPU.Build.0 = Release|Any CPU
{73C76E80-95C5-4C96-A319-4F32043C903E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{73C76E80-95C5-4C96-A319-4F32043C903E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{73C76E80-95C5-4C96-A319-4F32043C903E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{73C76E80-95C5-4C96-A319-4F32043C903E}.Release|Any CPU.Build.0 = Release|Any CPU
{67985CCB-F606-41F8-9D36-513459F58882}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{67985CCB-F606-41F8-9D36-513459F58882}.Debug|Any CPU.Build.0 = Debug|Any CPU
{67985CCB-F606-41F8-9D36-513459F58882}.Release|Any CPU.ActiveCfg = Release|Any CPU
{67985CCB-F606-41F8-9D36-513459F58882}.Release|Any CPU.Build.0 = Release|Any CPU
{F1AD6925-219C-4B17-B8D8-0ACCA6F401C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F1AD6925-219C-4B17-B8D8-0ACCA6F401C4}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F1AD6925-219C-4B17-B8D8-0ACCA6F401C4}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F1AD6925-219C-4B17-B8D8-0ACCA6F401C4}.Release|Any CPU.Build.0 = Release|Any CPU
{4F37D6C5-38C9-485B-AD87-2691EED225E1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{4F37D6C5-38C9-485B-AD87-2691EED225E1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{4F37D6C5-38C9-485B-AD87-2691EED225E1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{4F37D6C5-38C9-485B-AD87-2691EED225E1}.Release|Any CPU.Build.0 = Release|Any CPU
{534CEFB8-AA11-43A2-9A38-4F6D3A6633AB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{534CEFB8-AA11-43A2-9A38-4F6D3A6633AB}.Debug|Any CPU.Build.0 = Debug|Any CPU
{534CEFB8-AA11-43A2-9A38-4F6D3A6633AB}.Release|Any CPU.ActiveCfg = Release|Any CPU
{534CEFB8-AA11-43A2-9A38-4F6D3A6633AB}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -494,8 +494,10 @@ Global
{4913844F-74EC-4E74-AE8A-EA825569E6BA} = {E55391DE-F8F3-4CC2-A0E3-2406C76E9C68}
{BF4C7B87-0997-4208-84EF-D368DF7B9861} = {E55391DE-F8F3-4CC2-A0E3-2406C76E9C68}
{A6CF79BC-8365-46E8-9230-1A4AD615D40B} = {FDEE2E22-C239-4921-83B2-9797F765FD6A}
{628DDC9D-28A1-4C2F-BA13-171BD8CF711C} = {E55391DE-F8F3-4CC2-A0E3-2406C76E9C68}
{73C76E80-95C5-4C96-A319-4F32043C903E} = {FDEE2E22-C239-4921-83B2-9797F765FD6A}
{67985CCB-F606-41F8-9D36-513459F58882} = {E55391DE-F8F3-4CC2-A0E3-2406C76E9C68}
{4F37D6C5-38C9-485B-AD87-2691EED225E1} = {FDEE2E22-C239-4921-83B2-9797F765FD6A}
{534CEFB8-AA11-43A2-9A38-4F6D3A6633AB} = {E55391DE-F8F3-4CC2-A0E3-2406C76E9C68}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {5C00D0F1-6138-4ED9-846B-97E43D6DFF1C}
@@ -1,17 +1,18 @@
using LangChain.Providers.OpenAI;
using Microsoft.AspNetCore.Mvc;
using OpenAI.Constants;

namespace LangChain.Samples.AspNet.Controllers;

[ApiController]
[Route("[controller]")]
public class OpenAiSampleController : ControllerBase
{
private readonly OpenAiModel _openAi;
private readonly OpenAiProvider _openAi;
private readonly ILogger<OpenAiSampleController> _logger;

public OpenAiSampleController(
OpenAiModel openAi,
OpenAiProvider openAi,
ILogger<OpenAiSampleController> logger)
{
_openAi = openAi;
@@ -21,7 +22,8 @@ public OpenAiSampleController(
[HttpGet(Name = "GetOpenAiResponse")]
public async Task<string> Get()
{
var response = await _openAi.GenerateAsync("What is a good name for a company that sells colourful socks?");
var llm = new OpenAiChatModel(_openAi, id: ChatModels.Gpt35Turbo);
var response = await llm.GenerateAsync("What is a good name for a company that sells colourful socks?");

return response.LastMessageContent;
}
8 changes: 4 additions & 4 deletions examples/LangChain.Samples.Azure/Program.cs
@@ -1,9 +1,9 @@
using LangChain.Providers;
using LangChain.Providers.Azure;
using LangChain.Providers.Azure;

var model = new AzureOpenAIModel("AZURE_OPEN_AI_KEY", "ENDPOINT", "DEPLOYMENT_NAME");
var provider = new AzureOpenAiProvider(apiKey: "AZURE_OPEN_AI_KEY", endpoint: "ENDPOINT");
var llm = new AzureOpenAiChatModel(provider, id: "DEPLOYMENT_NAME");

var result = await model.GenerateAsync("What is a good name for a company that sells colourful socks?");
var result = await llm.GenerateAsync("What is a good name for a company that sells colourful socks?");

Console.WriteLine(result);

7 changes: 4 additions & 3 deletions examples/LangChain.Samples.HuggingFace/Program.cs
@@ -1,8 +1,9 @@
using LangChain.Providers;
using LangChain.Providers.HuggingFace;
using LangChain.Providers.HuggingFace;
using LangChain.Providers.HuggingFace.Predefined;

using var client = new HttpClient();
var gpt2Model = new Gpt2Model(apiKey: string.Empty, client);
var provider = new HuggingFaceProvider(apiKey: string.Empty, client);
var gpt2Model = new Gpt2Model(provider);

var response = await gpt2Model.GenerateAsync("What would be a good company name be for name a company that makes colorful socks?");

3 changes: 2 additions & 1 deletion examples/LangChain.Samples.Memory/Program.cs
@@ -2,6 +2,7 @@
using LangChain.Providers;
using LangChain.Providers.OpenAI;
using System.Runtime.Serialization;
using LangChain.Providers.OpenAI.Predefined;
using static LangChain.Chains.Chain;

internal class Program
@@ -13,7 +14,7 @@ private static async Task Main(string[] args)
throw new InvalidOperationException("OPENAI_API_KEY environment variable is not found.");

// Use a common, general-purpose LLM
var model = new OpenAiModel(apiKey, "gpt-3.5-turbo");
var model = new Gpt35TurboModel(apiKey);

// Create a simple prompt template for the conversation to help the AI
var template = @"
5 changes: 2 additions & 3 deletions examples/LangChain.Samples.OpenAI/Program.cs
@@ -1,10 +1,9 @@
using LangChain.Providers;
using LangChain.Providers.OpenAI;
using LangChain.Providers.OpenAI.Predefined;

var apiKey =
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InvalidOperationException("OPENAI_API_KEY environment variable is not found.");
var model = new Gpt35TurboModel(apiKey: apiKey);
var model = new Gpt35TurboModel(apiKey);

var result = await model.GenerateAsync("What is a good name for a company that sells colourful socks?");

2 changes: 1 addition & 1 deletion examples/LangChain.Samples.Prompts/Program.cs
@@ -1,6 +1,6 @@
using LangChain.Chains.LLM;
using LangChain.Prompts;
using LangChain.Providers.OpenAI;
using LangChain.Providers.OpenAI.Predefined;
using LangChain.Schema;

const string apiKey = "API-KEY";
7 changes: 4 additions & 3 deletions examples/LangChain.Samples.SequentialChain/Program.cs
@@ -1,7 +1,8 @@
using LangChain.Chains.LLM;
using LangChain.Abstractions.Chains.Base;
using LangChain.Chains.LLM;
using LangChain.Chains.Sequentials;
using LangChain.Prompts;
using LangChain.Providers.OpenAI;
using LangChain.Providers.OpenAI.Predefined;
using LangChain.Schema;

using var httpClient = new HttpClient();
@@ -22,7 +23,7 @@
var chainTwo = new LlmChain(new LlmChainInput(llm, secondPrompt));

var overallChain = new SequentialChain(new SequentialChainInput(
new[]
new IChain[]
{
chainOne,
chainTwo
1 change: 1 addition & 0 deletions src/Directory.Packages.props
@@ -48,6 +48,7 @@
<PackageVersion Include="StackExchange.Redis" Version="2.7.4" />
<PackageVersion Include="System.Net.Http" Version="4.3.4" />
<PackageVersion Include="System.Text.Json" Version="8.0.0" />
<PackageVersion Include="System.ValueTuple" Version="4.5.0" />
<PackageVersion Include="Tiktoken" Version="1.1.3" />
<PackageVersion Include="tryAGI.OpenAI" Version="2.0.0-alpha.9" />
<PackageVersion Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.0" />
@@ -1,7 +1,7 @@
using System.Text.Json;
using LangChain.Abstractions.Embeddings.Base;
using LangChain.Common.Converters;
using LangChain.Docstore;
using LangChain.Providers;
using LangChain.VectorStores;
using Microsoft.SemanticKernel.AI.Embeddings;
using Microsoft.SemanticKernel.Connectors.Memory.Chroma;
@@ -31,7 +31,7 @@ public class ChromaVectorStore : VectorStore
public ChromaVectorStore(
HttpClient httpClient,
string endpoint,
IEmbeddings embeddings,
IEmbeddingModel embeddings,
string collectionName = LangchainDefaultCollectionName)
: base(embeddings)
{
@@ -166,8 +166,8 @@ public override async Task<IEnumerable<Document>> SimilaritySearchByVectorAsync(
int k = 4,
CancellationToken cancellationToken = default)
{
var embeddings = await Embeddings
.EmbedQueryAsync(query, cancellationToken)
var embeddings = await EmbeddingModel
.CreateEmbeddingsAsync(query, null, cancellationToken)
.ConfigureAwait(false);

var documentsWithScores = await SimilaritySearchByVectorWithAsync(embeddings, k, cancellationToken).ConfigureAwait(false);
@@ -192,8 +192,8 @@ public override async Task<IEnumerable<Document>> MaxMarginalRelevanceSearch(
float lambdaMult = 0.5f,
CancellationToken cancellationToken = default)
{
var embeddings = await Embeddings
.EmbedQueryAsync(query, cancellationToken)
float[] embeddings = await EmbeddingModel
.CreateEmbeddingsAsync(query, null, cancellationToken)
.ConfigureAwait(false);

var documents = await MaxMarginalRelevanceSearchByVector(
@@ -251,8 +251,8 @@ private async Task<IEnumerable<string>> AddCoreAsync(
string[] ids,
CancellationToken cancellationToken)
{
var embeddings = await Embeddings
.EmbedDocumentsAsync(texts, cancellationToken)
float[][] embeddings = await EmbeddingModel
.CreateEmbeddingsAsync(texts, null, cancellationToken)
.ConfigureAwait(false);

var records = new MemoryRecord[texts.Length];
@@ -1,6 +1,6 @@
using LangChain.Abstractions.Embeddings.Base;
using LangChain.Docstore;
using LangChain.Docstore;
using LangChain.Indexes;
using LangChain.Providers;
using LangChain.TextSplitters;
using LangChain.VectorStores;

@@ -15,7 +15,7 @@ namespace LangChain.Databases.InMemory;
/// <param name="embeddings"></param>
/// <param name="distanceMetrics"></param>
public class InMemoryVectorStore(
IEmbeddings embeddings,
IEmbeddingModel embeddings,
EDistanceMetrics distanceMetrics = EDistanceMetrics.Euclidean)
: VectorStore(embeddings)
{
@@ -33,7 +33,7 @@ public class InMemoryVectorStore(
/// <param name="documents"></param>
/// <returns></returns>
public static async Task<VectorStoreIndexWrapper> CreateIndexFromDocuments(
IEmbeddings embeddings,
IEmbeddingModel embeddings,
IReadOnlyCollection<Document> documents)
{
var vectorStore = new InMemoryVectorStore(embeddings);
@@ -51,9 +51,9 @@ public override async Task<IEnumerable<string>> AddDocumentsAsync(
{
var docs = documents.ToArray();

var embeddings = await Embeddings.EmbedDocumentsAsync(docs
float[][] embeddings = await EmbeddingModel.CreateEmbeddingsAsync(docs
.Select(x => x.PageContent)
.ToArray(), cancellationToken).ConfigureAwait(false);
.ToArray(), cancellationToken: cancellationToken).ConfigureAwait(false);
var ids = new List<string>();
for (var i = 0; i < docs.Length; i++)
{
@@ -98,9 +98,9 @@ public override async Task<IEnumerable<Document>> SimilaritySearchAsync(
int k = 4,
CancellationToken cancellationToken = default)
{
var embedding = await Embeddings.EmbedQueryAsync(
float[] embedding = await EmbeddingModel.CreateEmbeddingsAsync(
query,
cancellationToken).ConfigureAwait(false);
cancellationToken: cancellationToken).ConfigureAwait(false);

return await SimilaritySearchByVectorAsync(
embedding,
@@ -129,15 +129,14 @@ public override Task<IEnumerable<Document>> SimilaritySearchByVectorAsync(
int k = 4,
CancellationToken cancellationToken = default)
{
var embedding = await Embeddings.EmbedQueryAsync(
float[] embedding = await EmbeddingModel.CreateEmbeddingsAsync(
query,
cancellationToken).ConfigureAwait(false);
var arr = embedding.ToArray();
cancellationToken: cancellationToken).ConfigureAwait(false);
var distances = _storage.Select(s =>
new
{
doc = s.doc,
distance = _distanceFunction(s.vec, arr)
distance = _distanceFunction(s.vec, embedding)
}).Take(k);

return distances.Select(d => new ValueTuple<Document, float>(d.doc, d.distance));
@@ -1,6 +1,6 @@
using LangChain.Abstractions.Embeddings.Base;
using LangChain.Databases.Postgres;
using LangChain.Docstore;
using LangChain.Providers;
using LangChain.VectorStores;

namespace LangChain.Databases;
@@ -25,12 +25,12 @@ public class PostgresVectorStore : VectorStore
public PostgresVectorStore(
string connectionString,
int vectorSize,
IEmbeddings embeddings,
IEmbeddingModel embeddingModel,
string schema = DefaultSchema,
string collectionName = DefaultCollectionName,
DistanceStrategy distanceStrategy = DistanceStrategy.Cosine,
Func<float, float>? overrideRelevanceScoreFn = null)
: base(embeddings, overrideRelevanceScoreFn)
: base(embeddingModel, overrideRelevanceScoreFn)
{
_distanceStrategy = distanceStrategy;
_collectionName = collectionName;
@@ -44,8 +44,10 @@ public override async Task<IEnumerable<string>> AddDocumentsAsync(
CancellationToken cancellationToken = default)
{
var documentsArray = documents.ToArray();
var embeddings = await Embeddings
.EmbedDocumentsAsync(documentsArray.Select(d => d.PageContent).ToArray(), cancellationToken)
float[][] embeddings = await EmbeddingModel
.CreateEmbeddingsAsync(documentsArray
.Select(d => d.PageContent)
.ToArray(), null, cancellationToken)
.ConfigureAwait(false);

var ids = new string[documentsArray.Length];
@@ -75,8 +77,8 @@ public override async Task<IEnumerable<string>> AddTextsAsync(
var textsArray = texts.ToArray();
var metadatasArray = metadatas?.ToArray() ?? new Dictionary<string, object>?[textsArray.Length];

var embeddings = await Embeddings
.EmbedDocumentsAsync(textsArray, cancellationToken)
float[][] embeddings = await EmbeddingModel
.CreateEmbeddingsAsync(textsArray, null, cancellationToken)
.ConfigureAwait(false);

var ids = new string[textsArray.Length];
@@ -137,9 +139,11 @@ public override async Task<IEnumerable<Document>> SimilaritySearchByVectorAsync(
string query, int k = 4,
CancellationToken cancellationToken = default)
{
var embedding = await Embeddings.EmbedQueryAsync(query, cancellationToken).ConfigureAwait(false);
float[] embedding = await EmbeddingModel.CreateEmbeddingsAsync(
query, null, cancellationToken).ConfigureAwait(false);

return await SimilaritySearchByVectorWithScoreAsync(embedding, k, cancellationToken)
return await SimilaritySearchByVectorWithScoreAsync(
embedding, k, cancellationToken)
.ConfigureAwait(false);
}

@@ -183,7 +187,7 @@ public override async Task<IEnumerable<Document>> MaxMarginalRelevanceSearch(
int fetchK = 20, float lambdaMult = 0.5f,
CancellationToken cancellationToken = default)
{
var embedding = await Embeddings.EmbedQueryAsync(query, cancellationToken).ConfigureAwait(false);
float[] embedding = await EmbeddingModel.CreateEmbeddingsAsync(query, null, cancellationToken).ConfigureAwait(false);

return await MaxMarginalRelevanceSearchByVector(embedding, k, fetchK, lambdaMult, cancellationToken)
.ConfigureAwait(false);
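The vector-store diffs above all apply the same substitution: the stores now depend on the IEmbeddingModel abstraction from LangChain.Providers and call CreateEmbeddingsAsync, replacing the old IEmbeddings.EmbedQueryAsync / EmbedDocumentsAsync pair. A rough usage sketch under that assumption follows; the embedding-model class name and the apiKey-based provider constructor are placeholders for illustration, not names confirmed by this commit, while the InMemoryVectorStore constructor and SimilaritySearchAsync signature come from the diff above.

// Sketch of constructing a store against the new abstraction.
// "TextEmbeddingAda002Model" is a hypothetical predefined embedding model;
// substitute whichever IEmbeddingModel implementation a provider ships.
using LangChain.Databases.InMemory;
using LangChain.Providers.OpenAI;
using LangChain.Providers.OpenAI.Predefined;

var provider = new OpenAiProvider(apiKey: "OPENAI_API_KEY"); // constructor assumed
var embeddingModel = new TextEmbeddingAda002Model(provider); // hypothetical name

// InMemoryVectorStore now takes an IEmbeddingModel (see its primary
// constructor in the diff above) and calls CreateEmbeddingsAsync internally.
var vectorStore = new InMemoryVectorStore(embeddingModel);

// Documents would be added first via AddDocumentsAsync; the search call
// matches the SimilaritySearchAsync signature shown above.
var matches = await vectorStore.SimilaritySearchAsync("colourful socks", k: 4);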