Skip to content

Commit

Permalink
Merge pull request #233 from n4ze3m/langchain-latest
Browse files — browse the repository at this point in the history
Update import paths for @langchain/core modules
  • Loading branch information
n4ze3m authored Mar 9, 2024
2 parents 5f09d52 + e68393e commit 9e17102
Show file tree
Hide file tree
Showing 15 changed files with 325 additions and 246 deletions.
14 changes: 12 additions & 2 deletions server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@
"keywords": [],
"author": "",
"license": "MIT",
"engines": {
"node": ">=18"
},
"dependencies": {
"@fastify/autoload": "^5.0.0",
"@fastify/cookie": "^9.1.0",
Expand All @@ -38,7 +41,11 @@
"@google-ai/generativelanguage": "^2.0.0",
"@grammyjs/files": "^1.0.4",
"@huggingface/inference": "1",
"@langchain/google-genai": "^0.0.2",
"@langchain/anthropic": "^0.1.4",
"@langchain/cohere": "^0.0.5",
"@langchain/community": "^0.0.35",
"@langchain/google-genai": "^0.0.10",
"@langchain/openai": "^0.0.18",
"@prisma/client": "^5.9.1",
"@slack/bolt": "^3.13.2",
"@supabase/supabase-js": "^2.24.0",
Expand All @@ -64,7 +71,7 @@
"html-to-text": "^9.0.5",
"ignore": "^5.2.4",
"ioredis": "^5.3.2",
"langchain": "^0.0.206",
"langchain": "^0.1.25",
"mammoth": "^1.6.0",
"pdf-parse": "^1.1.1",
"pdfjs-dist": "^3.7.107",
Expand All @@ -87,6 +94,9 @@
"tap": "^16.1.0",
"typescript": "^4.5.4"
},
"resolutions": {
"@langchain/core": "0.1.43"
},
"prisma": {
"seed": "ts-node prisma/seed.ts"
}
Expand Down
12 changes: 6 additions & 6 deletions server/src/chain/index.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
import { BaseLanguageModel } from "langchain/base_language";
import { Document } from "langchain/document";
import { BaseLanguageModel } from "@langchain/core/language_models/base";
import { Document } from "@langchain/core/documents";
import {
ChatPromptTemplate,
MessagesPlaceholder,
PromptTemplate,
} from "langchain/prompts";
import { AIMessage, BaseMessage, HumanMessage } from "langchain/schema";
import { StringOutputParser } from "langchain/schema/output_parser";
} from "@langchain/core/prompts";
import { AIMessage, BaseMessage, HumanMessage } from "@langchain/core/messages";
import { StringOutputParser } from "@langchain/core/output_parsers";
import {
Runnable,
RunnableBranch,
RunnableLambda,
RunnableMap,
RunnableSequence,
} from "langchain/schema/runnable";
} from "@langchain/core/runnables";
type RetrievalChainInput = {
chat_history: string;
question: string;
Expand Down
2 changes: 1 addition & 1 deletion server/src/embeddings/transformer-embedding.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Embeddings, EmbeddingsParams } from "langchain/embeddings/base";
import { Embeddings, EmbeddingsParams } from "@langchain/core/embeddings";
import {
piplelineTransformer,
//@ts-ignore
Expand Down
2 changes: 1 addition & 1 deletion server/src/handlers/api/v1/bot/playground/chat.handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { DialoqbaseVectorStore } from "../../../../../utils/store";
import { embeddings } from "../../../../../utils/embeddings";
import { chatModelProvider } from "../../../../../utils/models";
import { DialoqbaseHybridRetrival } from "../../../../../utils/hybrid";
import { BaseRetriever } from "langchain/schema/retriever";
import { BaseRetriever } from "@langchain/core/retrievers";
import { Document } from "langchain/document";
import {
createChain,
Expand Down
2 changes: 1 addition & 1 deletion server/src/handlers/bot/api.handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import { embeddings } from "../../utils/embeddings";
import { chatModelProvider } from "../../utils/models";
import { nextTick } from "./post.handler";
import { Document } from "langchain/document";
import { BaseRetriever } from "langchain/schema/retriever";
import { BaseRetriever } from "@langchain/core/retrievers";
import { DialoqbaseHybridRetrival } from "../../utils/hybrid";
import { createChain, groupMessagesByConversation } from "../../chain";

Expand Down
2 changes: 1 addition & 1 deletion server/src/handlers/bot/post.handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { ChatRequestBody } from "./types";
import { DialoqbaseVectorStore } from "../../utils/store";
import { embeddings } from "../../utils/embeddings";
import { chatModelProvider } from "../../utils/models";
import { BaseRetriever } from "langchain/schema/retriever";
import { BaseRetriever } from "@langchain/core/retrievers";
import { DialoqbaseHybridRetrival } from "../../utils/hybrid";
import { Document } from "langchain/document";
import { createChain, groupMessagesByConversation } from "../../chain";
Expand Down
2 changes: 1 addition & 1 deletion server/src/integration/handlers/discord.handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { DialoqbaseVectorStore } from "../../utils/store";
import { chatModelProvider } from "../../utils/models";
import { DialoqbaseHybridRetrival } from "../../utils/hybrid";
import { Document } from "langchain/document";
import { BaseRetriever } from "langchain/schema/retriever";
import { BaseRetriever } from "@langchain/core/retrievers";
import { createChain } from "../../chain";
const prisma = new PrismaClient();

Expand Down
2 changes: 1 addition & 1 deletion server/src/integration/handlers/telegram.handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { embeddings } from "../../utils/embeddings";
import { DialoqbaseVectorStore } from "../../utils/store";
import { chatModelProvider } from "../../utils/models";
import { DialoqbaseHybridRetrival } from "../../utils/hybrid";
import { BaseRetriever } from "langchain/schema/retriever";
import { BaseRetriever } from "@langchain/core/retrievers";
import { createChain } from "../../chain";
const prisma = new PrismaClient();

Expand Down
2 changes: 1 addition & 1 deletion server/src/integration/handlers/whatsapp.handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { PrismaClient } from "@prisma/client";
import { embeddings } from "../../utils/embeddings";
import { DialoqbaseVectorStore } from "../../utils/store";
import { chatModelProvider } from "../../utils/models";
import { BaseRetriever } from "langchain/schema/retriever";
import { BaseRetriever } from "@langchain/core/retrievers";
import { DialoqbaseHybridRetrival } from "../../utils/hybrid";
import { createChain } from "../../chain";
const prisma = new PrismaClient();
Expand Down
208 changes: 103 additions & 105 deletions server/src/models/fireworks.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
import { BaseChatModel, BaseChatModelParams } from "langchain/chat_models/base";
import { CallbackManagerForLLMRun } from "langchain/callbacks";
import { BaseChatModel, BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import {
AIMessage,
BaseMessage,
ChatGeneration,
// ChatMessage,
ChatResult,
} from "langchain/schema";
} from "@langchain/core/messages";

import { ChatGeneration, ChatResult } from "@langchain/core/outputs";

declare interface DialoqbaseFireworksModelInput {
temperature?: number;
Expand Down Expand Up @@ -61,8 +60,7 @@ function messageToFireworkRole(message: BaseMessage): string {
}
export class DialoqbaseFireworksModel
extends BaseChatModel
implements DialoqbaseFireworksModelInput
{
implements DialoqbaseFireworksModelInput {
temperature: number | undefined;

top_p?: number | undefined;
Expand Down Expand Up @@ -118,117 +116,117 @@ export class DialoqbaseFireworksModel

let data = this.streaming
? await new Promise<any>((resolve, reject) => {
let response: any;
let rejected = false;
let resolved = false;

this.completionWithRetry(
{
...params,
messages: this.is_chat
? messagesMapped.map(({ role, content }) => ({
role,
content: content.toString(),
}))
: undefined,
prompt: !this.is_chat ? prompt : undefined,
},
options?.signal,
(event) => {
// console.log(event.data);
if (event.data === "[DONE]") {
if (resolved || rejected) {
return;
}
resolved = true;
resolve(response);
let response: any;
let rejected = false;
let resolved = false;

this.completionWithRetry(
{
...params,
messages: this.is_chat
? messagesMapped.map(({ role, content }) => ({
role,
content: content.toString(),
}))
: undefined,
prompt: !this.is_chat ? prompt : undefined,
},
options?.signal,
(event) => {
// console.log(event.data);
if (event.data === "[DONE]") {
if (resolved || rejected) {
return;
}
try {
const data = JSON.parse(event.data);
if (data?.error_code) {
if (rejected) {
return;
}
rejected = true;
reject(data);
resolved = true;
resolve(response);
return;
}
try {
const data = JSON.parse(event.data);
if (data?.error_code) {
if (rejected) {
return;
}
const message = data as {
id: string;
object: string;
created: number;
model: string;
choices: {
index: number;
delta?: {
content?: string;
role?: string;
};
text?: string;
finish_reason: string;
}[];
};

if (!response) {
if (message.choices.length > 0) {
response = {
id: message.id,
object: message.object,
created: message.created,
result:
message.choices[0]?.delta?.content ||
message?.choices[0]?.text ||
"",
};
}
} else {
if (message.choices.length > 0) {
response.created = message.created;
response.result +=
rejected = true;
reject(data);
return;
}
const message = data as {
id: string;
object: string;
created: number;
model: string;
choices: {
index: number;
delta?: {
content?: string;
role?: string;
};
text?: string;
finish_reason: string;
}[];
};

if (!response) {
if (message.choices.length > 0) {
response = {
id: message.id,
object: message.object,
created: message.created,
result:
message.choices[0]?.delta?.content ||
message?.choices[0]?.text ||
"";
}
"",
};
}
void runManager?.handleLLMNewToken(
message.choices[0]?.delta?.content ||
} else {
if (message.choices.length > 0) {
response.created = message.created;
response.result +=
message.choices[0]?.delta?.content ||
message?.choices[0]?.text ||
""
);
} catch (e) {
console.error(e);

if (rejected) {
return;
"";
}
rejected = true;
reject(e);

}
void runManager?.handleLLMNewToken(
message.choices[0]?.delta?.content ||
message?.choices[0]?.text ||
""
);
} catch (e) {
console.error(e);

if (rejected) {
return;
}
}
).catch((e) => {
if (rejected) {
rejected = true;
reject(e);

return;
}
rejected = true;
reject(e);
});
})
}
).catch((e) => {
if (rejected) {
return;
}
rejected = true;
reject(e);
});
})
: await this.completionWithRetry(
{
...params,
messages: this.is_chat
? messagesMapped.map(({ role, content }) => ({
role,
content: content.toString(),
}))
: undefined,
prompt: !this.is_chat ? prompt : undefined,
},
options?.signal
);
{
...params,
messages: this.is_chat
? messagesMapped.map(({ role, content }) => ({
role,
content: content.toString(),
}))
: undefined,
prompt: !this.is_chat ? prompt : undefined,
},
options?.signal
);
// console.log(data);
const text =
data?.result ??
Expand Down
12 changes: 6 additions & 6 deletions server/src/utils/embeddings.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { CohereEmbeddings } from "langchain/embeddings/cohere";
import { HuggingFaceInferenceEmbeddings } from "langchain/embeddings/hf";
import { OpenAIEmbeddings } from "@langchain/openai";
import { CohereEmbeddings } from "@langchain/cohere";
import { HuggingFaceInferenceEmbeddings } from "@langchain/community/embeddings/hf";
import { TransformersEmbeddings } from "../embeddings/transformer-embedding";
import { GooglePaLMEmbeddings } from "langchain/embeddings/googlepalm";
import { GooglePaLMEmbeddings } from "@langchain/community/embeddings/googlepalm";
import { GoogleGenerativeAIEmbeddings } from "@langchain/google-genai";
import { OllamaEmbeddings } from "langchain/embeddings/ollama";
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";

export const embeddings = (
provider: string,
Expand All @@ -19,7 +19,7 @@ export const embeddings = (
});
case "cohere":
return new CohereEmbeddings({
modelName,
model: modelName,
});
case "huggingface-api":
return new HuggingFaceInferenceEmbeddings();
Expand Down
Loading

0 comments on commit 9e17102

Please sign in to comment.