Skip to content

Commit

Permalink
Merge pull request #142 from n4ze3m/next
Browse files Browse the repository at this point in the history
1.4.0
  • Loading branch information
n4ze3m authored Nov 9, 2023
2 parents ce03873 + 92149b2 commit 48e82e3
Show file tree
Hide file tree
Showing 28 changed files with 965 additions and 912 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,5 @@ jobs:
n4z3m/dialoqbase:latest
n4z3m/dialoqbase:${{ github.ref_name}}
push: true
cache-from: type=registry,ref=n4z3m/dialoqbase:latest
cache-to: type=inline
cache-from: type=gha
cache-to: type=gha,mode=max
4 changes: 2 additions & 2 deletions .github/workflows/next.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,5 +25,5 @@ jobs:
tags: |
n4z3m/dialoqbase-next:latest
push: true
cache-from: type=registry,ref=n4z3m/dialoqbase-next:latest
cache-to: type=inline
cache-from: type=gha
cache-to: type=gha,mode=max
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ RUN apt update

COPY ./server/ .

RUN yarn install
RUN yarn install --network-timeout 10000000

RUN yarn build

Expand Down Expand Up @@ -39,7 +39,7 @@ COPY --from=build /app/app/widget/dist/index.html ./public/bot.html
# Copy script
COPY --from=build /app/app/script/dist/chat.min.js ./public/chat.min.js

RUN yarn install --production
RUN yarn install --production --network-timeout 10000000

ENV NODE_ENV=production

Expand Down
2 changes: 1 addition & 1 deletion app/ui/package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "app",
"private": true,
"version": "1.3.1",
"version": "1.4.0",
"type": "module",
"scripts": {
"dev": "vite",
Expand Down
4 changes: 2 additions & 2 deletions app/ui/src/components/Bot/Playground/HistoryList.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ export const PlaygroundHistoryList = () => {
setDefualtTextSpeechSettings,
setElevenLabsApiKeyPresent,
setElevenLabsApiKeyValid,
setVoices
setVoices,
} = useStoreMessage();

const { data, status } = useQuery(
Expand Down Expand Up @@ -51,7 +51,7 @@ export const PlaygroundHistoryList = () => {
setHistory(
data.messages.map((item) => {
return {
message: item.message,
text: item.message,
type: item.type,
};
})
Expand Down
8 changes: 0 additions & 8 deletions app/ui/src/components/Bot/Settings/SettingsCard.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -311,14 +311,6 @@ export const SettingsCard: React.FC<BotSettings> = ({
>
<Switch />
</Form.Item>
<Form.Item
name="use_rag"
label="Use Retrieval Augmented Generation (RAG)"
valuePropName="checked"
>
<Switch />
</Form.Item>

<Form.Item
name="bot_protect"
label="Activate Public Bot Protection"
Expand Down
2 changes: 1 addition & 1 deletion app/ui/src/components/Common/BotForm.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,7 @@ export const BotForm = ({
},
{
pattern: new RegExp(
"^(https?://)?(www.)?github.com/([a-zA-Z0-9-]+)/([a-zA-Z0-9-]+)(.git)?$"
"^(https?://)?(www\.)?github\.com/([a-zA-Z0-9-]+)/([a-zA-Z0-9_-]+)(\.git)?$"
),
message: "Please enter a valid public github repo URL",
},
Expand Down
2 changes: 1 addition & 1 deletion app/ui/src/store/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ export type Message = {

export type History = {
type: string;
message: string;
text: string;
}[];

type State = {
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "dialoqbase",
"version": "1.3.1",
"version": "1.4.0",
"description": "Create chatbots with ease",
"scripts": {
"ui:dev": "pnpm run --filter ui dev",
Expand Down
4 changes: 2 additions & 2 deletions server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
"@tensorflow/tfjs-converter": "^4.7.0",
"@tensorflow/tfjs-core": "^4.7.0",
"@waylaidwanderer/fastify-sse-v2": "^3.1.0",
"@xenova/transformers": "^2.6.2",
"@xenova/transformers": "^2.7.0",
"axios": "^1.4.0",
"bcryptjs": "^2.4.3",
"bull": "^4.10.4",
Expand All @@ -61,7 +61,7 @@
"grammy": "^1.16.2",
"ignore": "^5.2.4",
"ioredis": "^5.3.2",
"langchain": "^0.0.160",
"langchain": "^0.0.183",
"mammoth": "^1.6.0",
"pdf-parse": "^1.1.1",
"pdfjs-dist": "^3.7.107",
Expand Down
4 changes: 4 additions & 0 deletions server/prisma/migrations/q_18/migration.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
-- Seed the three OpenAI chat models announced Nov 2023 (gpt-3.5-turbo-1106,
-- gpt-4-1106-preview, gpt-4-vision-preview), all streaming-capable and
-- non-local, with an empty per-model config.
-- NOTE(review): the "-dbase" suffix on model_id looks like an internal naming
-- convention stripped before calling the provider — confirm against the model
-- lookup code.
INSERT INTO "DialoqbaseModels" ("name", "model_id", "model_type", "model_provider", "stream_available", "local_model", "config") VALUES
('GPT-3.5 Turbo 1106 (OpenAI)', 'gpt-3.5-turbo-1106-dbase', 'chat', 'OpenAI', true, false, '{}'),
('GPT-4 Turbo (OpenAI)', 'gpt-4-1106-preview-dbase', 'chat', 'OpenAI', true, false, '{}'),
('GPT-4 Turbo with vision (OpenAI)', 'gpt-4-vision-preview-dbase', 'chat', 'OpenAI', true, false, '{}');
172 changes: 172 additions & 0 deletions server/src/chain/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
import { BaseLanguageModel } from "langchain/base_language";
import { Document } from "langchain/document";
import {
ChatPromptTemplate,
MessagesPlaceholder,
PromptTemplate,
} from "langchain/prompts";
import { AIMessage, BaseMessage, HumanMessage } from "langchain/schema";
import { StringOutputParser } from "langchain/schema/output_parser";
import {
Runnable,
RunnableBranch,
RunnableLambda,
RunnableMap,
RunnableSequence,
} from "langchain/schema/runnable";
// Input shape shared by the retrieval sub-chains: the user's current question
// plus the conversation so far. `chat_history` is declared as string here,
// but some lambdas below only use `.length` — presumably it may also carry an
// array at runtime; TODO confirm against callers.
type RetrievalChainInput = {
  chat_history: string;
  question: string;
};

/**
 * Groups a flat, alternating [human, ai, human, ai, ...] message list into
 * conversation pairs: [{ human, ai }, ...].
 *
 * If the list has an odd length, the trailing unpaired message is ignored.
 * Unlike the previous implementation (which used `messages.pop()`), the
 * caller's array is NOT mutated.
 *
 * @param messages - flat message list; each item must expose a `content` field.
 * @returns one `{ human, ai }` entry per consecutive message pair.
 */
export function groupMessagesByConversation(messages: any[]) {
  // Number of messages that form complete pairs (drops a trailing odd one).
  const pairedLength = messages.length - (messages.length % 2);

  const groupedMessages: { human: any; ai: any }[] = [];
  for (let i = 0; i < pairedLength; i += 2) {
    groupedMessages.push({
      human: messages[i].content,
      ai: messages[i + 1].content,
    });
  }

  return groupedMessages;
}

// Renders chat history as newline-separated "<role>: <content>" lines for
// interpolation into a text prompt.
const formatChatHistoryAsString = (history: BaseMessage[]) => {
  const lines: string[] = [];
  for (const message of history) {
    lines.push(`${message._getType()}: ${message.content}`);
  }
  return lines.join("\n");
};

// Wraps each retrieved document in an indexed <doc id='i'> tag so the model
// can refer to chunks by id, joined with newlines.
const formatDocs = (docs: Document[]) =>
  docs.map((doc, i) => `<doc id='${i}'>${doc.pageContent}</doc>`).join("\n");

const serializeHistory = (input: any) => {
const chatHistory = input.chat_history || [];
const convertedChatHistory = [];
for (const message of chatHistory) {
if (message.human !== undefined) {
convertedChatHistory.push(new HumanMessage({ content: message.human }));
}
if (message["ai"] !== undefined) {
convertedChatHistory.push(new AIMessage({ content: message.ai }));
}
}
return convertedChatHistory;
};

/**
 * Builds the document-retrieval branch of the chain.
 *
 * When chat history is present, the question is first condensed into a
 * standalone query (via `question_template` and `llm`) before retrieval;
 * otherwise the raw question is fed straight to the retriever.
 */
const createRetrieverChain = (
  llm: BaseLanguageModel,
  retriever: Runnable,
  question_template: string
) => {
  // Branch predicate: is there any prior conversation?
  const historyPresent = RunnableLambda.from(
    (input: RetrievalChainInput) => input.chat_history.length > 0
  ).withConfig({ runName: "HasChatHistoryCheck" });

  // With history: condense the question, then retrieve.
  const condensePrompt = PromptTemplate.fromTemplate(question_template);
  const condenseThenRetrieve = RunnableSequence.from([
    condensePrompt,
    llm,
    new StringOutputParser(),
  ])
    .withConfig({ runName: "CondenseQuestion" })
    .pipe(retriever)
    .withConfig({ runName: "RetrievalChainWithHistory" });

  // Without history: retrieve on the raw question.
  const directRetrieve = RunnableLambda.from(
    (input: RetrievalChainInput) => input.question
  )
    .withConfig({ runName: "Itemgetter:question" })
    .pipe(retriever)
    .withConfig({ runName: "RetrievalChainWithNoHistory" });

  return RunnableBranch.from([
    [historyPresent, condenseThenRetrieve],
    directRetrieve,
  ]).withConfig({ runName: "FindDocs" });
};

/**
 * Assembles the full conversational-retrieval chain.
 *
 * Pipeline: (1) extract `question` and serialize raw history into LangChain
 * messages; (2) in parallel, retrieve and format supporting documents as
 * `context` while passing `question` / `chat_history` through; (3) synthesize
 * the final answer from a chat prompt built on `response_template`.
 *
 * @param llm               - model used to generate the final answer.
 * @param question_llm      - model used only to condense the follow-up question.
 * @param retriever         - document retriever (runnable) to query.
 * @param question_template - prompt template for question condensation.
 * @param response_template - system prompt for answer synthesis; may reference
 *                            the formatted docs — presumably via a `{context}`
 *                            placeholder, TODO confirm against callers.
 * @returns a runnable sequence producing the answer string.
 */
export const createChain = ({
  llm,
  question_template,
  question_llm,
  retriever,
  response_template,
}: {
  llm: BaseLanguageModel;
  question_llm: BaseLanguageModel;
  retriever: Runnable;
  question_template: string;
  response_template: string;
}) => {
  const retrieverChain = createRetrieverChain(
    question_llm,
    retriever,
    question_template
  );
  // Parallel map: computes `context` (retrieved docs, formatted as tagged
  // strings) while forwarding `question` and `chat_history` unchanged.
  const context = RunnableMap.from({
    context: RunnableSequence.from([
      // History is stringified here because the condense prompt expects text.
      ({ question, chat_history }) => {
        return {
          question: question,
          chat_history: formatChatHistoryAsString(chat_history),
        };
      },
      retrieverChain,
      RunnableLambda.from(formatDocs).withConfig({
        runName: "FormatDocumentChunks",
      }),
    ]),
    question: RunnableLambda.from(
      (input: RetrievalChainInput) => input.question
    ).withConfig({
      runName: "Itemgetter:question",
    }),
    chat_history: RunnableLambda.from(
      (input: RetrievalChainInput) => input.chat_history
    ).withConfig({
      runName: "Itemgetter:chat_history",
    }),
  }).withConfig({ tags: ["RetrieveDocs"] });
  // Final answer prompt: system template, then prior messages, then question.
  const prompt = ChatPromptTemplate.fromMessages([
    ["system", response_template],
    new MessagesPlaceholder("chat_history"),
    ["human", "{question}"],
  ]);

  // prompt -> answer LLM -> plain string.
  const responseSynthesizerChain = RunnableSequence.from([
    prompt,
    llm,
    new StringOutputParser(),
  ]).withConfig({
    tags: ["GenerateResponse"],
  });
  return RunnableSequence.from([
    {
      question: RunnableLambda.from(
        (input: RetrievalChainInput) => input.question
      ).withConfig({
        runName: "Itemgetter:question",
      }),
      // Raw { human, ai } pairs -> HumanMessage/AIMessage objects.
      chat_history: RunnableLambda.from(serializeHistory).withConfig({
        runName: "SerializeHistory",
      }),
    },
    context,
    responseSynthesizerChain,
  ]);
};
6 changes: 3 additions & 3 deletions server/src/integration/discord.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ function url_to_label(
if (smart_label) {
let label = url;

if (!label.includes("?")) {
if (!label?.includes("?")) {
// "https://en.wikipedia.org/wiki/Linux_kernel" => "Linux kernel"
label = decodeURI(label.split("/").slice(-1)[0].replaceAll("_", " "));
} else {
Expand Down Expand Up @@ -122,8 +122,8 @@ export default class DiscordBot {
// i.e. sources made from the same source will be "merged"
new Set(
bot_response?.sourceDocuments?.map(
(d: { metadata: { source: string } }): string =>
d.metadata.source
(d: { metadata: { source?: string; path?: string } }): string =>
d.metadata?.source || d.metadata?.path || ""
)
)
);
Expand Down
Loading

0 comments on commit 48e82e3

Please sign in to comment.