diff --git a/.changeset/new-cups-dress.md b/.changeset/new-cups-dress.md new file mode 100644 index 0000000000..ad95ba94dc --- /dev/null +++ b/.changeset/new-cups-dress.md @@ -0,0 +1,5 @@ +--- +"@llamaindex/core": patch +--- + +The compact and refine response synthesizer (retrieved by using `getResponseSynthesizer('compact')`) has been fixed to return the original source nodes that were provided to it in its response. Prior to this, it was returning the compacted text chunk documents. diff --git a/.changeset/weak-cats-smash.md b/.changeset/weak-cats-smash.md new file mode 100644 index 0000000000..62246011d7 --- /dev/null +++ b/.changeset/weak-cats-smash.md @@ -0,0 +1,5 @@ +--- +"llamaindex": patch +--- + +withLlamaIndex now passes through webpack options to the passed-in customized NextJS webpack config. Before, it was only passing through the config. diff --git a/apps/next/src/content/docs/llamaindex/setup/typescript.mdx b/apps/next/src/content/docs/llamaindex/setup/typescript.mdx index 434ab28fd6..b25e5ee542 100644 --- a/apps/next/src/content/docs/llamaindex/setup/typescript.mdx +++ b/apps/next/src/content/docs/llamaindex/setup/typescript.mdx @@ -84,7 +84,7 @@ Imaging you put output file into `/dist/openai.js` but you are importing `llamai } ``` -In old module resolution, TypeScript will not be able to find the module because it is not follow the file structure, even you run `node index.js` successfully. (on Node.js >=16) +In old module resolution, TypeScript will not be able to find the module because it is not following the file structure, even if you run `node index.js` successfully. (on Node.js >=16) See more about [moduleResolution](https://www.typescriptlang.org/docs/handbook/modules/theory.html#module-resolution) or [TypeScript 5.0 blog](https://devblogs.microsoft.com/typescript/announcing-typescript-5-0/#--moduleresolution-bundler7). 
diff --git a/packages/core/src/response-synthesizers/factory.ts b/packages/core/src/response-synthesizers/factory.ts index 9197150007..b513c23480 100644 --- a/packages/core/src/response-synthesizers/factory.ts +++ b/packages/core/src/response-synthesizers/factory.ts @@ -77,6 +77,16 @@ class Refine extends BaseSynthesizer { } } + async getResponse( + query: MessageContent, + nodes: NodeWithScore[], + stream: true, + ): Promise>; + async getResponse( + query: MessageContent, + nodes: NodeWithScore[], + stream: false, + ): Promise; async getResponse( query: MessageContent, nodes: NodeWithScore[], @@ -197,6 +207,16 @@ class Refine extends BaseSynthesizer { * CompactAndRefine is a slight variation of Refine that first compacts the text chunks into the smallest possible number of chunks. */ class CompactAndRefine extends Refine { + async getResponse( + query: MessageContent, + nodes: NodeWithScore[], + stream: true, + ): Promise>; + async getResponse( + query: MessageContent, + nodes: NodeWithScore[], + stream: false, + ): Promise; async getResponse( query: MessageContent, nodes: NodeWithScore[], @@ -216,17 +236,24 @@ class CompactAndRefine extends Refine { const newTexts = this.promptHelper.repack(maxPrompt, textChunks); const newNodes = newTexts.map((text) => new TextNode({ text })); if (stream) { - return super.getResponse( + const streamResponse = await super.getResponse( query, newNodes.map((node) => ({ node })), true, ); + return streamConverter(streamResponse, (chunk) => { + chunk.sourceNodes = nodes; + return chunk; + }); } - return super.getResponse( + + const originalResponse = await super.getResponse( query, newNodes.map((node) => ({ node })), false, ); + originalResponse.sourceNodes = nodes; + return originalResponse; } } diff --git a/packages/core/tests/response-synthesizers/compact-and-refine.test.ts b/packages/core/tests/response-synthesizers/compact-and-refine.test.ts new file mode 100644 index 0000000000..fa3cad252e --- /dev/null +++ 
b/packages/core/tests/response-synthesizers/compact-and-refine.test.ts @@ -0,0 +1,66 @@ +import { describe, expect, test, vi } from "vitest"; +import type { LLMMetadata } from "../../llms/dist/index.js"; +import { getResponseSynthesizer } from "../../response-synthesizers/dist/index.js"; +import { Document } from "../../schema/dist/index.js"; + +const mockLllm = () => ({ + complete: vi.fn().mockImplementation(({ stream }) => { + const response = { text: "unimportant" }; + if (!stream) { + return response; + } + + function* gen() { + // yield a few times to make sure each chunk has the sourceNodes + yield response; + yield response; + yield response; + } + + return gen(); + }), + chat: vi.fn(), + metadata: {} as unknown as LLMMetadata, +}); + +describe("compact and refine response synthesizer", () => { + describe("synthesize", () => { + test("should return original sourceNodes with response when stream = false", async () => { + const synthesizer = getResponseSynthesizer("compact", { + llm: mockLllm(), + }); + + const sourceNode = { node: new Document({}), score: 1 }; + + const response = await synthesizer.synthesize( + { + query: "test", + nodes: [sourceNode], + }, + false, + ); + + expect(response.sourceNodes).toEqual([sourceNode]); + }); + + test("should return original sourceNodes with response when stream = true", async () => { + const synthesizer = getResponseSynthesizer("compact", { + llm: mockLllm(), + }); + + const sourceNode = { node: new Document({}), score: 1 }; + + const response = await synthesizer.synthesize( + { + query: "test", + nodes: [sourceNode], + }, + true, + ); + + for await (const chunk of response) { + expect(chunk.sourceNodes).toEqual([sourceNode]); + } + }); + }); +}); diff --git a/packages/llamaindex/src/next.ts b/packages/llamaindex/src/next.ts index e27b524580..fb8f44527a 100644 --- a/packages/llamaindex/src/next.ts +++ b/packages/llamaindex/src/next.ts @@ -41,7 +41,7 @@ export default function withLlamaIndex(config: any) { // 
eslint-disable-next-line @typescript-eslint/no-explicit-any config.webpack = function (webpackConfig: any, options: any) { if (userWebpack) { - webpackConfig = userWebpack(webpackConfig); + webpackConfig = userWebpack(webpackConfig, options); } webpackConfig.resolve.alias = { ...webpackConfig.resolve.alias,